OpenAI-compatible LLM backend using the chat completions API via Req.
Works with OpenAI, Ollama, Together, and any OpenAI-compatible provider.
Configuration
# config/config.exs
config :adk, :openai_api_key, "sk-..."
config :adk, :openai_base_url, "https://api.openai.com/v1" # optional
# Or via environment variables
OPENAI_API_KEY=sk-...
OPENAI_BASE_URL=http://localhost:11434/v1 # e.g. Ollama

Usage
config :adk, :llm_backend, ADK.LLM.OpenAI
ADK.LLM.OpenAI.generate("gpt-4o", %{
instruction: "You are helpful.",
messages: [%{role: :user, parts: [%{text: "Hello"}]}]
})