if (FALSE) {
  # Load a GGUF model from disk
  llm <- model_load("path/to/model.gguf")

  # Round-trip check: tokenize a string, then detokenize the token IDs.
  # The recovered text should match the input exactly.
  input_text <- "Hello, how are you today?"
  token_ids <- tokenize(llm, input_text)
  roundtrip_text <- detokenize(llm, token_ids)
  print(roundtrip_text) # Should match input_text

  # Create an inference context and generate a short continuation
  llm_ctx <- context_create(llm)
  completion <- generate(llm_ctx, "The weather is", max_tokens = 10)

  # Inspect the text a single token ID maps to
  token_id <- c(123) # Some token ID
  piece <- detokenize(llm, token_id)
  print(paste("Token", token_id, "represents:", piece))
}
# Run the code above in your browser using DataLab