# NOT RUN {
# Retrieve metadata for an index; `features` limits which pieces
# (settings, mappings, aliases, warmers) come back
index_get(index = "shakespeare")
index_get(index = "shakespeare", features = c("settings", "mappings"))
index_get(index = "shakespeare", features = "aliases")
index_get(index = "shakespeare", features = "warmers")
# Test whether an index exists (returns TRUE/FALSE)
index_exists(index = "shakespeare")
index_exists(index = "plos")
# Create indices, dropping any pre-existing index of the same name first
# (same delete/create sequence for each of the three names)
for (idx in c("twitter", "things", "plos")) {
  if (index_exists(idx)) index_delete(idx)
  index_create(index = idx)
}
# Drop and re-create an index in a single call; `verbose = FALSE`
# suppresses the progress message
index_recreate("deer")
index_recreate("deer", verbose = FALSE)
# Delete an index (guarded so a missing index does not error)
if (index_exists("plos")) index_delete(index = "plos")
## Create an index with an explicit JSON settings body
## (3 primary shards, 2 replicas each)
body <- '{
"settings" : {
"index" : {
"number_of_shards" : 3,
"number_of_replicas" : 2
}
}
}'
if (index_exists("alsothat")) index_delete("alsothat")
index_create(index = "alsothat", body = body)
## Create an index with an explicit mapping (a geo_point field on the
## "record" type), then bulk-load sample documents shipped with the package
body <- '{
"mappings": {
"record": {
"properties": {
"location" : {"type" : "geo_point"}
}
}
}
}'
if (!index_exists("gbifnewgeo")) index_create(index = "gbifnewgeo", body = body)
gbifgeo <- system.file("examples", "gbif_geosmall.json", package = "elastic")
docs_bulk(gbifgeo)
# Close an index (blocks read/write until reopened), then open it again
index_create("plos")
index_close("plos")
index_open("plos")
# Index statistics: whole index, several indices, or narrowed by
# `metric` (refresh, completion, fielddata, ...) and `level`
index_stats("plos")
index_stats(c("plos", "gbif"))
index_stats(c("plos", "gbif"), metric = "refresh")
index_stats(metric = "indices")
index_stats("shakespeare", metric = "completion")
index_stats("shakespeare", metric = "completion", completion_fields = "completion")
index_stats("shakespeare", metric = "fielddata")
index_stats("shakespeare", metric = "fielddata", fielddata_fields = "evictions")
index_stats("plos", level = "indices")
index_stats("plos", level = "cluster")
index_stats("plos", level = "shards")
# Low-level Lucene segment information (shard level), for all indices
# or for specific ones
index_segments()
index_segments("plos")
index_segments(c("plos", "gbif"))
# Insight into on-going index shard recoveries; `detailed` adds extra
# fields and `active_only` restricts to recoveries still in flight
index_recovery()
index_recovery("plos")
index_recovery(c("plos", "gbif"))
index_recovery("plos", detailed = TRUE)
index_recovery("plos", active_only = TRUE)
# Optimize (merge Lucene segments of) one or many indices
index_optimize("plos")
index_optimize(c("plos", "gbif"))
# Upgrade one or more indices to the latest format, converting any
# segments written with previous formats
index_upgrade("plos")
index_upgrade(c("plos", "gbif"))
# Run the analysis process on a piece of text and return its token
# breakdown, either with a named analyzer, a tokenizer + filters combo,
# or the default analyzer of a given index
index_analyze(text = "this is a test", analyzer = "standard")
index_analyze(text = "this is a test", analyzer = "whitespace")
index_analyze(text = "this is a test", analyzer = "stop")
index_analyze(text = "this is a test", tokenizer = "keyword", filters = "lowercase")
index_analyze(text = "this is a test", tokenizer = "keyword", filters = "lowercase",
  char_filters = "html_strip")
index_analyze(text = "this is a test", index = "plos")
index_analyze(text = "this is a test", index = "shakespeare")
# NOTE(review): `verbose()` comes from httr, which is only attached
# further down — confirm httr is loaded before running this line
index_analyze(text = "this is a test", index = "shakespeare", config = verbose())
## Define a custom NGram tokenizer (2-3 character grams over letters and
## digits) and wire it into an analyzer, then analyze text with it
body <- '{
"settings" : {
"analysis" : {
"analyzer" : {
"my_ngram_analyzer" : {
"tokenizer" : "my_ngram_tokenizer"
}
},
"tokenizer" : {
"my_ngram_tokenizer" : {
"type" : "nGram",
"min_gram" : "2",
"max_gram" : "3",
"token_chars": [ "letter", "digit" ]
}
}
}
}
}'
if (index_exists("shakespeare2")) {
  index_delete("shakespeare2")
}
tokenizer_set(index = "shakespeare2", body = body)
index_analyze(text = "art thouh", index = "shakespeare2",
  analyzer = "my_ngram_analyzer")
# Explicitly flush one or more indices; `wait_if_ongoing` blocks until
# any running flush finishes
index_flush(index = "plos")
index_flush(index = "shakespeare")
index_flush(index = c("plos", "shakespeare"))
index_flush(index = "plos", wait_if_ongoing = TRUE)
# httr's verbose() prints the underlying HTTP exchange
library("httr")
index_flush(index = "plos", config = verbose())
# Clear all caches, or caches associated with one or more indices;
# `filter = TRUE` targets the filter cache specifically
index_clear_cache()
index_clear_cache(index = "plos")
index_clear_cache(index = "shakespeare")
index_clear_cache(index = c("plos", "shakespeare"))
index_clear_cache(filter = TRUE)
# httr's verbose() prints the underlying HTTP exchange
library("httr")
index_clear_cache(config = verbose())
# Index settings
## Fetch settings: everything, "_all", named indices, or a wildcard
index_settings()
index_settings("_all")
index_settings("gbif")
index_settings(c("gbif", "plos"))
index_settings("*s")
## Update a dynamic setting on a fresh index, then read it back
if (index_exists("foobar")) index_delete("foobar")
index_create("foobar")
new_settings <- list(index = list(number_of_replicas = 4))
index_settings_update("foobar", body = new_settings)
index_get("foobar")$foobar$settings
# }
# Run the code above in your browser using DataLab