|
import json |
|
import os |
|
import fastapi |
|
from fastapi.responses import StreamingResponse, HTMLResponse |
|
from fastapi.middleware.cors import CORSMiddleware |
|
from openai import AsyncOpenAI |
|
import uvicorn |
|
import logging |
|
from dotenv import load_dotenv |
|
from pydantic import BaseModel |
|
from typing import List, Optional, Dict, Any |
|
|
|
|
|
# Load environment variables from a local .env file (no-op when absent).
load_dotenv()

# Fail fast at import time when the OpenRouter credential is missing,
# rather than failing on the first proxied request.
OPENROUTER_API_KEY = os.getenv('OPENROUTER_API_KEY')

if not OPENROUTER_API_KEY:

    raise ValueError("OPENROUTER_API_KEY not found in environment variables")
|
|
|
|
|
# FastAPI application fronting the OpenRouter API.
app = fastapi.FastAPI()

# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# a spec-problematic CORS pairing (a wildcard origin cannot be credentialed);
# confirm that fully open, credentialed cross-origin access is intended for
# this proxy.
app.add_middleware(

    CORSMiddleware,

    allow_origins=["*"],

    allow_credentials=True,

    allow_methods=["*"],

    allow_headers=["*"],

)

# Async OpenAI SDK client pointed at OpenRouter's OpenAI-compatible endpoint.
oai_client = AsyncOpenAI(

    base_url="https://openrouter.ai/api/v1",

    api_key=OPENROUTER_API_KEY

)
|
|
|
class Message(BaseModel):
    """A single chat message in OpenAI chat-completion format."""

    # Speaker role, e.g. "system", "user", or "assistant".
    role: str

    # Message text.
    content: str
|
|
|
class ChatCompletionRequest(BaseModel):
    """Request body for POST /v1/chat/completions (OpenAI-compatible, plus extras)."""

    # Conversation history; the handler prepends its own system message.
    messages: List[Message]

    # Model identifier; OpenRouter slugs contain a provider prefix, e.g.
    # "anthropic/claude-3-opus". Prefix-less names are remapped by the handler.
    model: str

    temperature: Optional[float] = 0.7

    max_tokens: Optional[int] = None

    # The endpoint responds with SSE in all cases (see create_chat_completion).
    stream: Optional[bool] = True

    # Renamed to the OpenAI SDK's "user" field by the handler.
    user_id: Optional[str] = None

    # Extra HTTP headers for the upstream OpenRouter call.
    extra_headers: Optional[Dict[str, str]] = None

    # Arbitrary additional JSON body fields passed to the OpenAI client.
    extra_body: Optional[Dict[str, Any]] = None
|
|
|
|
|
@app.get("/", response_class=HTMLResponse)
async def root():
    """Serve a minimal HTML landing page describing the proxy's endpoints."""
    landing_page = """
    <html>
        <head>
            <title>Proxy Server</title>
        </head>
        <body>
            <h1>OpenRouter Proxy Server</h1>
            <p>V0.1</p>
            <ul>
                <li>Chat completions endpoint</li>
                <li>List available models</li>
                <li>Health check</li>
            </ul>
        </body>
    </html>
    """
    return landing_page
|
|
|
|
|
@app.get("/robots.txt")
async def robots():
    """Serve a robots.txt that disallows all crawlers.

    Returned as an explicit plain-text Response: FastAPI's default
    JSONResponse would JSON-encode the bare string (quoted body, escaped
    newline), which is not a valid robots.txt.
    """
    return fastapi.Response(
        content="User-agent: *\nDisallow: /",
        media_type="text/plain",
    )
|
|
|
@app.post("/v1/chat/completions")
async def create_chat_completion(request: ChatCompletionRequest) -> StreamingResponse:
    """Proxy an OpenAI-style chat completion to OpenRouter as an SSE stream.

    Prepends a fixed system prompt, remaps ``user_id`` to the SDK's ``user``
    field, attaches OpenRouter attribution headers, and relays the upstream
    stream chunk-by-chunk as ``data:`` events terminated by ``[DONE]``.

    Raises:
        fastapi.HTTPException: 500 when the upstream request cannot be started.
    """
    try:
        # Pydantic v2 renamed .dict() to .model_dump(); support both versions.
        if hasattr(request, "model_dump"):
            oai_request = request.model_dump(exclude_none=True)
        else:
            oai_request = request.dict(exclude_none=True)

        # Prepend the proxy's fixed persona system prompt.
        system_message = {
            "role": "system",
            "content": "You are a very strict girlfriend. Almost like a mistress."
        }
        oai_request["messages"].insert(0, system_message)

        # The OpenAI SDK expects "user", not "user_id".
        if "user_id" in oai_request:
            oai_request["user"] = oai_request.pop("user_id")

        # OpenRouter attribution headers. Client-supplied extra_headers take
        # precedence over the defaults (previously they were clobbered).
        extra_headers = {
            "HTTP-Referer": os.getenv("SITE_URL", "https://huggingface.co/spaces"),
            "X-Title": os.getenv("SITE_NAME", "ElevenLabs-OpenRouter Bridge"),
        }
        extra_headers.update(oai_request.get("extra_headers") or {})
        oai_request["extra_headers"] = extra_headers

        # Fall back to a default model when no provider prefix is present.
        # ("/" being absent already implies the name is not "openai/...",
        # so the old extra startswith() check was redundant.)
        if "/" not in oai_request["model"]:
            oai_request["model"] = "nousresearch/hermes-3-llama-3.1-405b"

        # This endpoint always responds with SSE, so force streaming even if
        # the client sent stream=false; otherwise the async-for below would
        # fail on a non-streaming completion object.
        oai_request["stream"] = True

        logging.info(f"Sending request to OpenRouter with model: {oai_request['model']}")

        upstream_stream = await oai_client.chat.completions.create(**oai_request)

        async def event_stream():
            # Relay each upstream chunk as an OpenAI-compatible SSE event.
            try:
                async for chunk in upstream_stream:
                    yield f"data: {json.dumps(chunk.model_dump())}\n\n"
                yield "data: [DONE]\n\n"
            except Exception as e:
                # Mid-stream failures cannot change the HTTP status anymore;
                # report them in-band as a final SSE event instead.
                logging.error(f"Streaming error: {str(e)}")
                yield f"data: {json.dumps({'error': str(e)})}\n\n"

        return StreamingResponse(event_stream(), media_type="text/event-stream")

    except Exception as e:
        logging.error(f"Request error: {str(e)}")
        raise fastapi.HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
@app.get("/health")
async def health_check():
    """Liveness probe: always reports the service as healthy."""
    return dict(status="healthy")
|
|
|
|
|
@app.get("/v1/models")
async def list_models():
    """Return a fixed, OpenAI-shaped catalogue of the advertised models."""
    advertised_ids = (
        "nousresearch/hermes-3-llama-3.1-405b",
        "anthropic/claude-3-opus",
        "mistralai/mixtral-8x7b",
    )
    # All entries share the same metadata apart from their id.
    return {
        "data": [
            {
                "id": model_id,
                "object": "model",
                "created": 1677610602,
                "owned_by": "openrouter",
            }
            for model_id in advertised_ids
        ]
    }
|
|
|
|
|
# Configure root logging at import time so the INFO-level request logs in
# the handlers above are actually emitted.
logging.basicConfig(level=logging.INFO)

if __name__ == "__main__":
    # Run the ASGI app directly; 7860 is presumably chosen to match the
    # Hugging Face Spaces convention (see the SITE_URL default above).
    uvicorn.run(app, host="0.0.0.0", port=7860)