Skip to content

Instantly share code, notes, and snippets.

@clutchski
Created March 26, 2026 20:26
Show Gist options
  • Select an option

  • Save clutchski/e71399c49770c7fa292f0dae09a978ad to your computer and use it in GitHub Desktop.

Select an option

Save clutchski/e71399c49770c7fa292f0dae09a978ad to your computer and use it in GitHub Desktop.
ADK Python + Braintrust Gateway: simple multi-provider calls (no tools)
"""Test multiple models through the Braintrust gateway using ADK's AnthropicLlm.

The gateway handles protocol translation, so we use AnthropicLlm for both
Anthropic and OpenAI models.
"""
import os

# Point the Anthropic SDK at the Braintrust gateway. These variables must be
# set BEFORE any Anthropic client is constructed, so this runs ahead of the
# ADK imports below. Fail fast with an actionable message instead of a bare
# KeyError when the Braintrust credential is missing.
_braintrust_key = os.environ.get("BRAINTRUST_API_KEY")
if not _braintrust_key:
    raise SystemExit("BRAINTRUST_API_KEY environment variable must be set")
os.environ["ANTHROPIC_BASE_URL"] = "https://gateway.braintrust.dev"
os.environ["ANTHROPIC_API_KEY"] = _braintrust_key

import asyncio

from google.adk.models.anthropic_llm import AnthropicLlm
from google.adk.models.llm_request import LlmRequest
from google.genai import types

# Model IDs to exercise through the gateway. Both are sent over the Anthropic
# wire protocol; the gateway translates for the OpenAI model.
MODELS = [
    "claude-haiku-4-5",
    "gpt-4o-mini",
]
async def call_model(model: str, prompt: str) -> str:
    """Send a single user prompt to *model* and return its first text reply.

    Builds a minimal LlmRequest with a "Be concise." system instruction,
    streams responses from the model, and returns the joined text parts of
    the first response that carries any. Returns "" if none do.
    """
    llm = AnthropicLlm(model=model)
    user_message = types.Content(
        role="user",
        parts=[types.Part.from_text(text=prompt)],
    )
    request = LlmRequest(
        model=model,
        contents=[user_message],
        config=types.GenerateContentConfig(system_instruction="Be concise."),
    )
    async for response in llm.generate_content_async(request):
        parts = response.content.parts if response.content else None
        if parts:
            return " ".join(part.text for part in parts if part.text)
    return ""
async def main():
    """Ask every configured model the same question and print each answer."""
    prompt = "What is the capital of France? One sentence."
    for model in MODELS:
        answer = await call_model(model, prompt)
        print(f"\n{model}:")
        print(f" {answer}")


if __name__ == "__main__":
    asyncio.run(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment