-
-
Save cryptopepy/7d04d74dd16e83969e0733bd2df7b7b7 to your computer and use it in GitHub Desktop.
OpenAI API to Ollama API translator
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
from typing import Optional

import httpx
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
# FastAPI application exposing OpenAI-compatible routes backed by Ollama.
app = FastAPI()
class GenerateRequest(BaseModel):
    """Request body mirroring the OpenAI completion shape.

    Optional fields default to None and are forwarded verbatim to Ollama.
    """

    # BUG FIX: the original used `options: dict = None` etc., which relies on
    # pydantic v1's implicit-Optional behavior; under pydantic v2 an explicit
    # None would fail validation. Optional[...] makes None legal on both
    # major versions without changing defaults or field names.
    model: str                       # Ollama model name to run
    prompt: str                      # user prompt text
    options: Optional[dict] = None   # generation options — presumably temperature etc.; confirm against Ollama docs
    system: Optional[str] = None     # system prompt override
    template: Optional[str] = None   # prompt template override
    context: Optional[list] = None   # prior conversation/context, passed through
# Forward a JSON payload to the local Ollama server and return the decoded reply.
async def make_api_request(endpoint, request_data):
    # Base URL of the Ollama server.
    # change as needed
    base_url = "http://localhost:11434/api"
    async with httpx.AsyncClient() as http:
        result = await http.post(f"{base_url}/{endpoint}", json=request_data)
    # Surface any non-200 status to the caller as a FastAPI error.
    if result.status_code != 200:
        raise HTTPException(status_code=result.status_code, detail="API request failed")
    return result.json()
# Route matching the OpenAI completions API shape.
@app.post("/v1/completions")
async def generate_completion(request_data: GenerateRequest):
    """Proxy an OpenAI-style completion request to Ollama's generate endpoint."""
    payload = request_data.dict()
    return await make_api_request("generate", payload)
@app.post("/v1/messages")
async def generate_message_completion(request_data: GenerateRequest):
    """Proxy an OpenAI-style message request to Ollama as a chat conversation."""
    # Convert the flat prompt into the message-based conversation format.
    conversation = [{"role": "user", "content": request_data.prompt}]
    if request_data.context:
        conversation.extend(request_data.context)
    # BUG FIX: the original built GenerateRequest(messages=conversation, ...),
    # but the model has no `messages` field and the required `prompt` was
    # omitted, so pydantic raised ValidationError on every call. Build a plain
    # dict payload instead, and send it to Ollama's "chat" endpoint, which is
    # the endpoint that accepts the messages format (the "generate" endpoint
    # only takes a prompt string).
    payload = {
        "model": request_data.model,
        "messages": conversation,
        "options": request_data.options,
        "system": request_data.system,
        "template": request_data.template,
    }
    response_data = await make_api_request("chat", payload)
    return response_data
# Run the translator as a standalone server when executed directly.
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="localhost", port=8000)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment