from __future__ import annotations
from typing import Optional
from openai import OpenAI
from .config import OpenRouterConfig

def build_openrouter_client(cfg: OpenRouterConfig) -> Optional[OpenAI]:
    """Create an OpenAI-compatible client pointed at OpenRouter, or None if unconfigured."""
    if not cfg.api_key:
        return None
    try:
        return OpenAI(
            api_key=cfg.api_key,
            base_url=cfg.base_url,
            default_headers=cfg.headers or None,
        )
    except Exception as e:
        print(f"⚠️ LLM initialization failed: {e}")
        return None

def chat_complete(
    client: OpenAI,
    model: str,
    prompt: str,
    temperature: float = 0.0,
    max_tokens: int = 800,
    response_format: dict | None = None,
) -> str:
    """Send a single-turn prompt and return the assistant's text (empty string if absent)."""
    kwargs = {
        "model": model,
        "messages": [{"role": "user", "content": prompt}],
        "temperature": temperature,
        "max_tokens": max_tokens,
    }
    # Only pass response_format when the model/provider supports it (OpenAI/OpenRouter).
    if response_format:
        kwargs["response_format"] = response_format
    resp = client.chat.completions.create(**kwargs)
    choice = resp.choices[0]
    # Fall back gracefully if the provider returns a non-standard choice shape.
    return getattr(getattr(choice, "message", None), "content", None) or getattr(choice, "text", "") or ""
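
# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not executed by this module). It assumes
# OpenRouterConfig can be constructed with defaults and that the model slug
# shown is available on the configured OpenRouter account:
#
#   cfg = OpenRouterConfig()                      # hypothetical constructor
#   client = build_openrouter_client(cfg)
#   if client is not None:
#       reply = chat_complete(client, model="openai/gpt-4o-mini",
#                             prompt="Say hello in one word.")
#       print(reply)
# ---------------------------------------------------------------------------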