Simple chat with LLMR

# Chunks are evaluated only when the reader has opted in by setting the
# LLMR_RUN_VIGNETTES environment variable to "true" (case-insensitive).
run_vignettes <- identical(
  tolower(Sys.getenv("LLMR_RUN_VIGNETTES", "false")),
  "true"
)

knitr::opts_chunk$set(
  collapse = TRUE,
  comment  = "#>",
  eval     = run_vignettes
)

This vignette shows basic chat usage with four providers and model names:

- OpenAI: gpt-5-nano
- Anthropic: claude-sonnet-4-20250514
- Gemini: gemini-2.5-flash
- Groq: openai/gpt-oss-20b

You will need API keys in these environment variables: OPENAI_API_KEY, ANTHROPIC_API_KEY, GEMINI_API_KEY, GROQ_API_KEY.

To run these examples locally, set a local flag:

- `Sys.setenv(LLMR_RUN_VIGNETTES = "true")`
- or add `LLMR_RUN_VIGNETTES=true` to `~/.Renviron`

OpenAI: gpt-5-nano

library(LLMR)

library(LLMR)

# OpenAI configuration; the API key is read from OPENAI_API_KEY.
# NOTE: the original call had a trailing comma, which leaves an empty
# argument and is an error in R -- removed.
cfg_openai <- llm_config(
  provider = "openai",
  model    = "gpt-5-nano"
)

# Stateful chat session with a short system prompt.
chat_oai <- chat_session(cfg_openai, system = "Be concise.")
chat_oai$send("Say a warm hello in one short sentence.")
#> Hello there, it's wonderful to see you!
#> [model=gpt-5-nano | finish=stop | sent=22 rec=274 tot=296 | t=2.414s]
chat_oai$send("Now say it in Esperanto.")
#> Saluton, estas mirinde vidi vin!
#> [model=gpt-5-nano | finish=stop | sent=47 rec=274 tot=321 | t=2.792s]

Anthropic: claude-sonnet-4-20250514

# Anthropic configuration; the API key is read from ANTHROPIC_API_KEY.
anthropic_model <- "claude-sonnet-4-20250514"

cfg_anthropic <- llm_config(
  provider   = "anthropic",
  model      = anthropic_model,
  # Anthropic requires max_tokens; setting it explicitly avoids warnings.
  max_tokens = 512
)

# Open a session and ask a single question.
chat_claude <- chat_session(cfg_anthropic, system = "Be concise.")
chat_claude$send("Name one interesting fact about honey bees.")
#> Honey bees communicate the location of food sources through a "waggle dance" - when a forager bee returns to the hive, it performs specific movements that tell other bees the direction and distance to flowers, with the angle of the dance indicating direction relative to the sun and the duration indicating how far to fly.
#> [model=claude-sonnet-4-20250514 | finish=stop | sent=16 rec=69 tot=85 | t=4.081s]

Gemini: gemini-2.5-flash

# Gemini configuration; the API key is read from GEMINI_API_KEY.
# NOTE: the original call had a trailing comma, which leaves an empty
# argument and is an error in R -- removed.
cfg_gemini <- llm_config(
  provider = "gemini",
  model    = "gemini-2.5-flash"
)

# Open a session and ask a single question.
chat_gem <- chat_session(cfg_gemini, system = "Be concise.")
chat_gem$send("Give me a single-sentence fun fact about volcanoes.")
#> Some volcanoes can actually create diamonds deep within the Earth.
#> [model=gemini-2.5-flash | finish=stop | sent=15 rec=11 tot=26 | t=4.538s]

Groq: openai/gpt-oss-20b

# Groq configuration; the API key is read from GROQ_API_KEY.
# NOTE: the original call had a trailing comma, which leaves an empty
# argument and is an error in R -- removed.
cfg_groq <- llm_config(
  provider = "groq",
  model    = "openai/gpt-oss-20b"
)

# Open a session and ask a single question.
chat_groq <- chat_session(cfg_groq, system = "Be concise.")
chat_groq$send("Share a short fun fact about octopuses.")
#> Octopuses can taste with their arms—each of the 10 arms has hundreds of tiny “taste” receptors along the suckers, letting them sample food before they even bring it to their mouth.
#> [model=openai/gpt-oss-20b | finish=stop | sent=87 rec=68 tot=155 | t=0.394s]

Structured chat in one call (OpenAI example)

# JSON Schema describing the structured reply we want back:
# an object with a string "answer" and a numeric "confidence".
string_field <- list(type = "string")
number_field <- list(type = "number")

schema <- list(
  type                 = "object",
  properties           = list(
    answer     = string_field,
    confidence = number_field
  ),
  required             = list("answer", "confidence"),
  additionalProperties = FALSE
)

# Ask the existing OpenAI session for a schema-conforming answer.
prompt <- "Return an answer and a confidence score (0-1) about: Why is the sky blue?"
chat_oai$send_structured(prompt, schema)
#> {"answer":"The sky looks blue because sunlight scatters off air molecules, and shorter wavelengths (blue) scatter more than longer wavelengths; since violet is absorbed and the Sun isn’t mostly violet, the scattered blue light comes from every direction.", "confidence":0.9}
#> [model=gpt-5-nano | finish=stop | sent=129 rec=709 tot=838 | t=5.204s]