if (FALSE) {
# Simple: use the same model for both the RAG and ReAct stages
llm <- chat_openai(model = "gpt-4o")
result <- AutoFlow(llm, "Load mtcars and plot mpg vs hp")
# Optimized: lightweight model for RAG, powerful model for ReAct
rag <- chat_openai(model = "gpt-3.5-turbo")  # Fast & cheap
react <- chat_openai(model = "gpt-4o")       # Powerful
result <- AutoFlow(
  react_llm = react,
  task_prompt = "Perform PCA on iris dataset",
  rag_llm = rag
)
# Cross-provider: DeepSeek RAG + Claude ReAct
rag <- chat_deepseek(model = "deepseek-chat")
react <- chat_anthropic(model = "claude-sonnet-4-20250514")
result <- AutoFlow(react, "Complex analysis", rag_llm = rag)
# Batch evaluation with shared RAG
rag <- chat_deepseek(model = "deepseek-chat")
react <- chat_openai(model = "gpt-4o")
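# 'tasks' is assumed to be a character vector of task prompts; for example:
tasks <- c(
  "Summarise the airquality dataset",
  "Fit a linear model of mpg on wt using mtcars"
)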
for (task in tasks) {
  result <- AutoFlow(react, task, rag_llm = rag, verbose = FALSE)
}
}