Learn R Programming

LLMR (version 0.2.3)

llm_config: Create LLM Configuration

Description

Creates a configuration object for interacting with a specified LLM API provider.

Usage

llm_config(provider, model, api_key, trouble_shooting = FALSE, ...)

Value

An object of class `llm_config` containing API and model parameters.

Arguments

provider

A string specifying the API provider. Supported providers include: "openai" for OpenAI, "anthropic" for Anthropic, "groq" for Groq, "together" for Together AI, "deepseek" for DeepSeek, "voyage" for Voyage AI, "gemini" for Google Gemini.

model

The model name to use. This depends on the provider.

api_key

Your API key for the provider.

trouble_shooting

Logical (default `FALSE`). If `TRUE`, prints every API call for debugging. USE WITH EXTREME CAUTION: the printed output includes your API key.

...

Additional model-specific parameters (e.g., `temperature`, `max_tokens`, etc.).

Examples

Run this code
if (FALSE) {
  # OpenAI Example (chat)
  openai_config <- llm_config(
    provider = "openai",
    model = "gpt-4o-mini",
    api_key = Sys.getenv("OPENAI_KEY"),
    temperature = 0.7,
    max_tokens = 500
  )

  # OpenAI Embedding Example (overwriting api_url):
  openai_embed_config <- llm_config(
    provider = "openai",
    model = "text-embedding-3-small",
    api_key = Sys.getenv("OPENAI_KEY"),
    temperature = 0.3,
    api_url = "https://api.openai.com/v1/embeddings"
  )

  text_input <- c("Political science is a useful subject",
                  "We love sociology",
                  "German elections are different",
                  "A student was always curious.")

  embed_response <- call_llm(openai_embed_config, text_input)
  # parse_embeddings() can then be used to convert the embedding results.

  # Voyage AI Example:
  voyage_config <- llm_config(
    provider = "voyage",
    model = "voyage-large-2",
    api_key = Sys.getenv("VOYAGE_API_KEY")
  )

  embedding_response <- call_llm(voyage_config, text_input)
  embeddings <- parse_embeddings(embedding_response)
  # Additional processing:
  embeddings |> cor() |> print()
}

Run the code above in your browser using DataLab