# NOT RUN {
# encoding detection on an ASCII-only string (character and raw forms)
ascii_sample <- "I can eat glass and it doesn't hurt me."
ced_enc_detect(ascii_sample)
ced_enc_detect(charToRaw(ascii_sample))
# encoding detection on a UTF-8 string (character and raw forms)
utf8_sample <- "\u4e0b\u5348\u597d"
print(utf8_sample)
ced_enc_detect(utf8_sample)
ced_enc_detect(charToRaw(utf8_sample))
# path to the sample text shipped with the ced package
ex_path <- system.file("test.txt", package = "ced")
ex_txt <- read.dcf(ex_path, all = TRUE)
# russian text
# fix: the original printed ex_txt[["France"]], but every detection call in
# this section uses the "Russian" field — "France" was a typo and would
# error (undefined column) or print the wrong sample.
print(ex_txt[["Russian"]])
ced_enc_detect(ex_txt[["Russian"]])
# re-encode the UTF-8 sample into common Cyrillic encodings and re-detect
ced_enc_detect(iconv(ex_txt[["Russian"]], "utf8", "ibm866"))
ced_enc_detect(iconv(ex_txt[["Russian"]], "utf8", "windows-1251"))
ced_enc_detect(iconv(ex_txt[["Russian"]], "utf8", "koi8-r"))
# chinese text: detect as UTF-8, then after re-encoding to GB18030
zh_txt <- ex_txt[["Chinese"]]
print(zh_txt)
ced_enc_detect(zh_txt)
ced_enc_detect(iconv(zh_txt, "utf8", "gb18030"))
# korean text: detect as UTF-8, then as UHC and ISO-2022-KR
ko_txt <- ex_txt[["Korean"]]
print(ko_txt)
ced_enc_detect(ko_txt)
ced_enc_detect(iconv(ko_txt, "utf8", "uhc"))
ced_enc_detect(iconv(ko_txt, "utf8", "iso-2022-kr"))
# japanese text: detect as UTF-8, then as Shift_JIS and ISO-2022-JP
ja_txt <- ex_txt[["Japanese"]]
print(ja_txt)
ced_enc_detect(ja_txt)
ced_enc_detect(iconv(ja_txt, "utf8", "shift_jis"))
ced_enc_detect(iconv(ja_txt, "utf8", "iso-2022-jp"))
# }
# NOT RUN {
# detect encoding of the web pages content
# fix: use requireNamespace() + curl:: for the optional dependency instead
# of require(), which attaches the whole package and returns FALSE silently;
# requireNamespace() is the idiomatic guard for suggested packages.
if (requireNamespace("curl", quietly = TRUE)) {
  # fetch the raw page bytes and run encoding detection on them
  detect_enc_url <- function(u) {
    ced_enc_detect(curl::curl_fetch_memory(u)$content)
  }
  detect_enc_url("https://www.corriere.it")
  detect_enc_url("https://www.vk.com")
  detect_enc_url("https://www.qq.com")
  detect_enc_url("https://kakaku.com")
  detect_enc_url("https://etoland.co.kr")
}
# }
# Run the code above in your browser using DataLab