Skip to content

Instantly share code, notes, and snippets.

@clutchski
Created March 26, 2026 20:36
Show Gist options
  • Select an option

  • Save clutchski/0c668c263befec201065ffa8f75a19bc to your computer and use it in GitHub Desktop.

Select an option

Save clutchski/0c668c263befec201065ffa8f75a19bc to your computer and use it in GitHub Desktop.
ADK Python + Braintrust Gateway: Gemini class calling 3 providers (Gemini, Claude, GPT)
"""Test multiple models through the Braintrust gateway using ADK's Gemini class.
The gateway handles protocol translation, so we route Gemini through
the gateway and it can call any provider's models.
"""
import asyncio
import os
from functools import cached_property
from google.adk.models.google_llm import Gemini
from google.adk.models.llm_request import LlmRequest
from google.genai import Client, types
# Base URL of the Braintrust AI gateway; every request is sent here and the
# gateway translates it to the target provider's native protocol.
GATEWAY_URL = "https://gateway.braintrust.dev"
# One model per provider to demonstrate cross-provider routing:
# Google (Gemini), Anthropic (Claude), and OpenAI (GPT).
MODELS = [
    "gemini-2.5-flash",
    "claude-haiku-4-5",
    "gpt-4o-mini",
]
class BraintrustGemini(Gemini):
    """A ``Gemini`` model whose traffic is routed through the Braintrust gateway."""

    @cached_property
    def api_client(self) -> Client:
        """Return a genai ``Client`` configured to talk to the gateway.

        ``api_key`` must be supplied to satisfy the ``Client`` constructor's
        validation; the same key also authenticates with the gateway, because
        the ``Client`` sends it automatically in the ``x-goog-api-key`` header.
        """
        gateway_options = types.HttpOptions(
            base_url=self.base_url,
            headers=self._tracking_headers(),
        )
        return Client(
            api_key=os.environ["BRAINTRUST_API_KEY"],
            http_options=gateway_options,
        )
async def call_model(model: str, prompt: str) -> str:
    """Send *prompt* to *model* through the gateway and return the reply text.

    Returns an empty string when no response carries any text parts.
    """
    llm = BraintrustGemini(model=model, base_url=GATEWAY_URL)
    user_message = types.Content(
        role="user",
        parts=[types.Part.from_text(text=prompt)],
    )
    request = LlmRequest(
        model=model,
        contents=[user_message],
        config=types.GenerateContentConfig(system_instruction="Be concise."),
    )
    async for response in llm.generate_content_async(request):
        parts = response.content.parts if response.content else None
        if parts:
            # Concatenate every text part of the first usable response.
            return " ".join(part.text for part in parts if part.text)
    return ""
async def main():
    """Ask each configured model the same question and print its answer."""
    question = "What is the capital of France? One sentence."
    for model_name in MODELS:
        # Print the header first so it appears before the (slow) model call.
        print(f"\n{model_name}:")
        answer = await call_model(model_name, question)
        print(f" {answer}")
if __name__ == "__main__":
    # Script entry point: run the async demo on a fresh event loop.
    asyncio.run(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment