# NOT RUN {
# Example: Latent Semantic Analysis with text2vec.
# Demonstrates that fit() followed by transform() yields the same embedding
# as a single fit_transform() call, both via R6 methods and S3 wrappers.
# Requires: library(text2vec) (also attaches the magrittr pipe %>%).
data("movie_review")

# Use only the first N reviews to keep the example fast.
N <- 100
tokens <- movie_review$review[1:N] %>% tolower %>% word_tokenizer

# Build a document-term matrix with a hashing vectorizer
# (no vocabulary pass needed).
dtm <- create_dtm(itoken(tokens), hash_vectorizer())

n_topics <- 10

# Path 1: fit the model, then transform the same data.
lsa_1 <- LatentSemanticAnalysis$new(n_topics)
fit(dtm, lsa_1) # or lsa_1$fit(dtm)
d1 <- lsa_1$transform(dtm)

# Path 2: fit and transform in one step.
lsa_2 <- LatentSemanticAnalysis$new(n_topics)
d2 <- lsa_2$fit_transform(dtm)

# Both paths should produce the same document embeddings.
all.equal(d1, d2)

# The same comparison, but using the S3 method wrappers.
all.equal(fit_transform(dtm, lsa_2), fit_transform(dtm, lsa_1))
# }
# Run the code above in your browser using DataLab