# OpenRouter/OpenAI client helpers.
| from __future__ import annotations | |
| from typing import Optional | |
| from openai import OpenAI | |
| from .config import OpenRouterConfig | |
def build_openrouter_client(cfg: OpenRouterConfig) -> Optional[OpenAI]:
    """Construct an OpenAI SDK client pointed at the configured endpoint.

    Returns None when no API key is configured, or when client
    construction raises for any reason (best-effort: the failure is
    reported to stdout rather than propagated).
    """
    if not cfg.api_key:
        # No credentials — caller is expected to handle a missing client.
        return None
    try:
        return OpenAI(
            api_key=cfg.api_key,
            base_url=cfg.base_url,
            default_headers=cfg.headers or None,
        )
    except Exception as exc:
        print(f"⚠️ LLM initialization failed: {exc}")
        return None
def chat_complete(client: OpenAI, model: str, prompt: str, temperature: float = 0.0, max_tokens: int = 800, response_format: dict | None = None) -> str:
    """Run a single-turn chat completion and return its text content.

    The optional response_format is forwarded to the API only when
    provided (supported by OpenAI/OpenRouter models). Falls back to the
    legacy ``text`` field, and finally to an empty string, when the
    response carries no message content.
    """
    request = dict(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        temperature=temperature,
        max_tokens=max_tokens,
    )
    if response_format:
        # Only include when requested — some providers reject the key.
        request["response_format"] = response_format
    response = client.chat.completions.create(**request)
    choice = response.choices[0]
    message = getattr(choice, "message", None)
    content = getattr(message, "content", None)
    if content:
        return content
    # Legacy completion shape: some responses expose .text instead.
    return getattr(choice, "text", "") or ""