diff --git a/Frontend/.env.example b/Frontend/.env.example index 7d6ef52..885d467 100644 --- a/Frontend/.env.example +++ b/Frontend/.env.example @@ -1,7 +1,17 @@ DATABASE_URL="file:../../.data/clientsflow-dev.db" REDIS_URL="redis://localhost:6379" -# Agent (LangGraph + OpenAI) +# Agent (LangGraph + OpenRouter) +OPENROUTER_API_KEY="" +OPENROUTER_BASE_URL="https://openrouter.ai/api/v1" +OPENROUTER_MODEL="openai/gpt-4o-mini" +# Optional headers for OpenRouter ranking/analytics +OPENROUTER_HTTP_REFERER="" +OPENROUTER_X_TITLE="clientsflow" +# Enable reasoning payload for models that support it: 1 or 0 +OPENROUTER_REASONING_ENABLED="0" + +# Optional fallback (OpenAI-compatible) OPENAI_API_KEY="" OPENAI_MODEL="gpt-4o-mini" # "langgraph" (default) or "rule" diff --git a/Frontend/server/agent/crmAgent.ts b/Frontend/server/agent/crmAgent.ts index 8692e98..212c8ae 100644 --- a/Frontend/server/agent/crmAgent.ts +++ b/Frontend/server/agent/crmAgent.ts @@ -102,6 +102,7 @@ export async function runCrmAgentFor( ): Promise { const mode = (process.env.CF_AGENT_MODE ?? "langgraph").toLowerCase(); const llmApiKey = + process.env.OPENROUTER_API_KEY || process.env.LLM_API_KEY || process.env.OPENAI_API_KEY || process.env.DASHSCOPE_API_KEY || diff --git a/Frontend/server/agent/langgraphCrmAgent.ts b/Frontend/server/agent/langgraphCrmAgent.ts index 882a0f5..fb3235a 100644 --- a/Frontend/server/agent/langgraphCrmAgent.ts +++ b/Frontend/server/agent/langgraphCrmAgent.ts @@ -324,6 +324,13 @@ export async function runLangGraphCrmAgentFor(input: { userText: string; onTrace?: (event: AgentTraceEvent) => Promise<void> | void; }): Promise { + const openrouterApiKey = (process.env.OPENROUTER_API_KEY ?? "").trim(); + const openrouterBaseURL = (process.env.OPENROUTER_BASE_URL ?? "https://openrouter.ai/api/v1").trim(); + const openrouterModel = (process.env.OPENROUTER_MODEL ?? "openai/gpt-4o-mini").trim(); + const openrouterReferer = (process.env.OPENROUTER_HTTP_REFERER ?? 
"").trim(); + const openrouterTitle = (process.env.OPENROUTER_X_TITLE ?? "").trim(); + const openrouterReasoningEnabled = (process.env.OPENROUTER_REASONING_ENABLED ?? "").trim() === "1"; + const genericApiKey = process.env.LLM_API_KEY || process.env.OPENAI_API_KEY || @@ -350,6 +357,19 @@ export async function runLangGraphCrmAgentFor(input: { let llmApiKey = genericApiKey; let llmBaseURL = genericBaseURL; let llmModel = genericModel; + let llmHeaders: Record<string, string> | undefined; + let llmReasoningEnabled = false; + + if (openrouterApiKey) { + llmApiKey = openrouterApiKey; + llmBaseURL = openrouterBaseURL; + llmModel = openrouterModel; + llmReasoningEnabled = openrouterReasoningEnabled; + llmHeaders = { + ...(openrouterReferer ? { "HTTP-Referer": openrouterReferer } : {}), + ...(openrouterTitle ? { "X-Title": openrouterTitle } : {}), + }; + } if (useGigachat) { try { @@ -374,7 +394,11 @@ export async function runLangGraphCrmAgentFor(input: { if (!llmApiKey) { return { text: "LLM API key не задан. Сейчас включен fallback-агент без LLM.", - plan: ["Проверить .env", "Добавить LLM_API_KEY (или OPENAI_API_KEY / DASHSCOPE_API_KEY / QWEN_API_KEY / GIGACHAT_AUTH_KEY+GIGACHAT_SCOPE)", "Перезапустить dev-сервер"], + plan: [ + "Проверить .env", + "Добавить OPENROUTER_API_KEY (или LLM_API_KEY / OPENAI_API_KEY / DASHSCOPE_API_KEY / QWEN_API_KEY / GIGACHAT_AUTH_KEY+GIGACHAT_SCOPE)", + "Перезапустить dev-сервер", + ], tools: [], thinking: ["LLM недоступна, возвращен fallback-ответ."], toolRuns: [], @@ -808,10 +832,18 @@ export async function runLangGraphCrmAgentFor(input: { apiKey: llmApiKey, model: llmModel, temperature: 0.2, - ...(llmBaseURL + ...(llmReasoningEnabled + ? { + modelKwargs: { + reasoning: { enabled: true }, + }, + } + : {}), + ...(llmBaseURL || llmHeaders ? { configuration: { - baseURL: llmBaseURL, + ...(llmBaseURL ? { baseURL: llmBaseURL } : {}), + ...(llmHeaders ? 
{ defaultHeaders: llmHeaders } : {}), }, } : {}), diff --git a/compose.yaml b/compose.yaml index f09b92e..a385ae1 100644 --- a/compose.yaml +++ b/compose.yaml @@ -16,9 +16,14 @@ services: DATABASE_URL: "file:../../.data/clientsflow-dev.db" REDIS_URL: "redis://redis:6379" CF_AGENT_MODE: "langgraph" - OPENAI_MODEL: "gpt-4o-mini" + OPENROUTER_API_KEY: "${OPENROUTER_API_KEY:-}" + OPENROUTER_BASE_URL: "https://openrouter.ai/api/v1" + OPENROUTER_MODEL: "openai/gpt-4o-mini" + OPENROUTER_HTTP_REFERER: "${OPENROUTER_HTTP_REFERER:-}" + OPENROUTER_X_TITLE: "clientsflow" + OPENROUTER_REASONING_ENABLED: "${OPENROUTER_REASONING_ENABLED:-0}" # Set this in your shell or a compose override: - # OPENAI_API_KEY: "..." + # OPENROUTER_API_KEY: "..." # GIGACHAT_AUTH_KEY: "..." (if you use GigaChat integration) command: > bash -lc "