Skip to content

Instantly share code, notes, and snippets.

@lestan
Created June 29, 2025 03:07
Show Gist options
  • Select an option

  • Save lestan/537bccb5567f729fa1c374f040a68294 to your computer and use it in GitHub Desktop.

Select an option

Save lestan/537bccb5567f729fa1c374f040a68294 to your computer and use it in GitHub Desktop.
CopilotKit with Ollama and OpenAI API compatibility
import {
CopilotRuntime,
copilotRuntimeNextJSAppRouterEndpoint,
LangChainAdapter,
OpenAIAdapter,
langGraphPlatformEndpoint
} from "@copilotkit/runtime";
import { NextRequest } from "next/server";
import { ChatOpenAI } from "@langchain/openai";
import config from "../../config";
// Chat model backed by a local Ollama server via its OpenAI API compatibility
// layer. The model name and base URL can be overridden through environment
// variables; the fallbacks preserve the original hard-coded behavior.
const lc_ollamaModel = new ChatOpenAI({
  // The local model to use; override with OLLAMA_MODEL if set.
  model: process.env.OLLAMA_MODEL ?? "qwen3:14b",
  configuration: {
    // The Ollama OpenAI-compatible endpoint; override with OLLAMA_BASE_URL.
    baseURL: process.env.OLLAMA_BASE_URL ?? "http://localhost:11434/v1",
  },
});
// Alias so the service adapter below can be repointed at another model easily.
const copilotKitModel = lc_ollamaModel;
// Service adapter that streams responses from the LangChain chat model,
// binding any CopilotKit-provided tools so the model can invoke them.
const langchainServiceAdapter = new LangChainAdapter({
  chainFn: async ({ messages, tools }) => {
    // NOTE(review): removed leftover console.log debugging that dumped the
    // full tool list and conversation messages to the server log on every
    // request — noisy and a potential privacy leak in production.
    return copilotKitModel.bindTools(tools).stream(messages);
  },
});
// Remote endpoint for the LangGraph agent; deployment URL and agent metadata
// come from the app config, credentials from the environment.
const agentEndpoint = langGraphPlatformEndpoint({
  deploymentUrl: config.agent.url,
  // Only used in LangGraph Platform (cloud) deployments.
  langsmithApiKey: process.env.LANGSMITH_API_KEY || "",
  agents: [
    {
      name: config.agent.name,
      description: config.agent.description,
    },
  ],
});

// CopilotKit runtime wired to the LangGraph agent endpoint above.
const runtime = new CopilotRuntime({
  remoteEndpoints: [agentEndpoint],
});
/**
 * Next.js App Router POST handler for the /api/copilotkit route.
 * Delegates each incoming request to the CopilotKit runtime, using the
 * LangChain service adapter defined above.
 */
export const POST = async (req: NextRequest) => {
  const options = {
    runtime,
    serviceAdapter: langchainServiceAdapter,
    endpoint: "/api/copilotkit",
  };
  const { handleRequest } = copilotRuntimeNextJSAppRouterEndpoint(options);
  return handleRequest(req);
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment