"""OpenRouter Tool.

Calls the OpenRouter API for additional LLM capabilities.

This tool is hidden from the user - they just see "deep_reasoning".
"""
from typing import Dict, Any, Optional

import httpx
from loguru import logger

from config import load_config_from_db, settings
from tools.base import BaseTool, ToolResult

class OpenRouterTool(BaseTool):
    """Call the OpenRouter chat-completions API for LLM tasks.

    Exposed to the model as ``openrouter_reasoning``. The API key and model
    are reloaded from the DB-backed config on every call, so admin-panel
    changes take effect without a restart.
    """

    # Chat-completions endpoint; the model is selected per request in the payload.
    API_URL = "https://openrouter.ai/api/v1/chat/completions"
    # Fallback model when the admin panel has not set one.
    DEFAULT_MODEL = "meta-llama/llama-3-8b-instruct:free"

    @property
    def name(self) -> str:
        return "openrouter_reasoning"

    @property
    def description(self) -> str:
        return "Alternative reasoning endpoint for complex analysis. Use when deep_reasoning is unavailable."

    @property
    def parameters(self) -> Dict[str, Any]:
        """JSON-schema description of the tool's arguments."""
        return {
            "type": "object",
            "properties": {
                "prompt": {
                    "type": "string",
                    "description": "The problem or question to analyze"
                }
            },
            "required": ["prompt"]
        }

    def _validate_config(self) -> None:
        """Load API credentials from the DB-backed config.

        NOTE: despite the name, this does not raise on a missing key --
        ``execute`` checks ``self.api_key`` afterwards and reports the
        problem in its ToolResult. Name kept for interface compatibility.
        """
        config = load_config_from_db()
        self.api_key = config.get("openrouter_api_key")
        self.model = config.get("openrouter_model", self.DEFAULT_MODEL)

    @staticmethod
    def _error_from_response(response: httpx.Response) -> str:
        """Build a human-readable message from a non-200 API response.

        Prefers the API's own ``error.message`` field; falls back to the
        bare HTTP status code when the body is not parseable JSON (e.g. an
        HTML gateway error page).
        """
        error_msg = f"API error: {response.status_code}"
        try:
            error_data = response.json()
            if "error" in error_data:
                error_msg = error_data["error"].get("message", error_msg)
        except Exception:
            # Body was not JSON; keep the status-code message.
            pass
        return error_msg

    async def execute(self, prompt: str, **kwargs) -> ToolResult:
        """Send *prompt* to OpenRouter and return the model's reply.

        Args:
            prompt: The problem or question to analyze.
            **kwargs: Ignored; accepted for tool-call compatibility.

        Returns:
            ToolResult with ``data`` set to the response text on success,
            or ``error`` describing the failure (missing key, HTTP error,
            timeout, or malformed response). Never raises.
        """
        self._log_execution({"prompt": prompt[:100]})

        # Reload config in case it was updated via the admin panel.
        self._validate_config()

        if not self.api_key:
            return ToolResult(
                success=False,
                error="OpenRouter API key not configured. Please configure it in the admin panel."
            )

        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            # OpenRouter uses these headers for app attribution.
            "HTTP-Referer": "http://localhost:8000",
            "X-Title": "MOXIE"
        }
        payload = {
            "model": self.model,
            "messages": [
                {"role": "user", "content": prompt}
            ],
            "temperature": 0.7,
            "max_tokens": 2048,
        }

        try:
            async with httpx.AsyncClient(timeout=60.0) as client:
                response = await client.post(
                    self.API_URL,
                    json=payload,
                    headers=headers
                )

            if response.status_code != 200:
                error_msg = self._error_from_response(response)
                self._log_error(error_msg)
                return ToolResult(success=False, error=error_msg)

            data = response.json()

            # Extract the first choice's message content.
            if "choices" in data and len(data["choices"]) > 0:
                content = data["choices"][0].get("message", {}).get("content", "")
                self._log_success(content[:100])
                return ToolResult(success=True, data=content)

            return ToolResult(
                success=False,
                error="Unexpected response format from OpenRouter"
            )

        except httpx.TimeoutException:
            self._log_error("Request timed out")
            return ToolResult(success=False, error="Request timed out")

        except Exception as e:
            # Boundary catch: tool failures are reported to the caller, never raised.
            self._log_error(str(e))
            return ToolResult(success=False, error=str(e))