Update chat events/transcription flow and container startup fixes

This commit is contained in:
Ruslan Bakiev
2026-02-19 12:54:16 +07:00
parent 7cc86579b2
commit 3ac487c25b
27 changed files with 3888 additions and 780 deletions

View File

@@ -6,11 +6,23 @@ import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { ChatOpenAI } from "@langchain/openai";
import { tool } from "@langchain/core/tools";
import { z } from "zod";
import { getLangfuseClient } from "../utils/langfuse";
// Render a Date as its ISO-8601 UTC representation (e.g. "2026-02-19T05:54:16.000Z").
function iso(d: Date): string {
  const timestamp = d.toISOString();
  return timestamp;
}
// Build the user-facing prompt for one agent cycle.
// Cycle 1 passes the raw request through untouched; later cycles wrap it with
// accumulated progress notes and the count of staged-but-unapplied changes so
// the model can pick the next useful step (or finish with a concise answer).
function cyclePrompt(userText: string, cycle: number, cycleNotes: string[], pendingCount: number): string {
  if (cycle === 1) {
    return userText;
  }
  // Truthiness check matches the original: an empty notes list yields the placeholder line.
  const notesLine = cycleNotes.length
    ? `Progress notes:\n- ${cycleNotes.join("\n- ")}`
    : "No progress notes yet.";
  const sections = [
    "Continue solving the same user request.",
    `User request: ${userText}`,
    notesLine,
    `Pending staged changes: ${pendingCount}.`,
    "Do the next useful step. If done, produce final concise answer.",
  ];
  return sections.join("\n");
}
type GigachatTokenCache = {
token: string;
expiresAtSec: number;
@@ -322,6 +334,8 @@ export async function runLangGraphCrmAgentFor(input: {
teamId: string;
userId: string;
userText: string;
requestId?: string;
conversationId?: string;
onTrace?: (event: AgentTraceEvent) => Promise<void> | void;
}): Promise<AgentReply> {
const openrouterApiKey = (process.env.OPENROUTER_API_KEY ?? "").trim();
@@ -414,6 +428,16 @@ export async function runLangGraphCrmAgentFor(input: {
const pendingChanges: PendingChange[] = [];
async function emitTrace(event: AgentTraceEvent) {
lfTrace?.event({
name: "agent.trace",
input: {
text: event.text,
toolRun: event.toolRun ?? null,
},
metadata: {
requestId: input.requestId ?? null,
},
});
if (!input.onTrace) return;
try {
await input.onTrace(event);
@@ -544,7 +568,7 @@ export async function runLangGraphCrmAgentFor(input: {
const toolName = `crm:${raw.action}`;
const startedAt = new Date().toISOString();
toolsUsed.push(toolName);
await emitTrace({ text: `Tool started: ${toolName}` });
await emitTrace({ text: `Использую инструмент: ${toolName}` });
const executeAction = async () => {
if (raw.action === "get_snapshot") {
@@ -856,6 +880,23 @@ export async function runLangGraphCrmAgentFor(input: {
const maxCycles = Math.max(1, Math.min(Number(process.env.CF_AGENT_MAX_CYCLES ?? "3"), 8));
const cycleTimeoutMs = Math.max(5000, Math.min(Number(process.env.CF_AGENT_CYCLE_TIMEOUT_MS ?? "1200000"), 1800000));
const tracingFlag = (process.env.LANGSMITH_TRACING ?? process.env.LANGCHAIN_TRACING_V2 ?? "").trim().toLowerCase();
const tracingEnabled = tracingFlag === "1" || tracingFlag === "true" || tracingFlag === "yes";
const langfuse = getLangfuseClient();
const lfTrace = langfuse?.trace({
id: input.requestId ?? makeId("trace"),
name: "clientsflow.crm_agent_request",
userId: input.userId,
sessionId: input.conversationId ?? undefined,
input: input.userText,
metadata: {
teamId: input.teamId,
userId: input.userId,
requestId: input.requestId ?? null,
conversationId: input.conversationId ?? null,
},
tags: ["clientsflow", "crm-agent", "langgraph"],
});
let consecutiveNoProgress = 0;
let finalText = "";
const cycleNotes: string[] = [];
@@ -931,24 +972,34 @@ export async function runLangGraphCrmAgentFor(input: {
};
for (let cycle = 1; cycle <= maxCycles; cycle += 1) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: start` });
const userPrompt = cyclePrompt(input.userText, cycle, cycleNotes, pendingChanges.length);
const cycleSpan = lfTrace?.span({
name: "agent.cycle",
input: userPrompt,
metadata: {
cycle,
requestId: input.requestId ?? null,
},
});
await emitTrace({ text: "Анализирую задачу и текущий контекст CRM." });
const beforeRuns = toolRuns.length;
const beforeWrites = dbWrites.length;
const beforePending = pendingChanges.length;
const userPrompt =
cycle === 1
? input.userText
: [
"Continue solving the same user request.",
`User request: ${input.userText}`,
cycleNotes.length ? `Progress notes:\n- ${cycleNotes.join("\n- ")}` : "No progress notes yet.",
`Pending staged changes: ${pendingChanges.length}.`,
"Do the next useful step. If done, produce final concise answer.",
].join("\n");
let res: any;
try {
const invokeConfig: Record<string, any> = { recursionLimit: 30 };
if (tracingEnabled) {
invokeConfig.runName = "clientsflow.crm_agent_cycle";
invokeConfig.tags = ["clientsflow", "crm-agent", "langgraph"];
invokeConfig.metadata = {
teamId: input.teamId,
userId: input.userId,
requestId: input.requestId ?? null,
conversationId: input.conversationId ?? null,
cycle,
};
}
res = await Promise.race([
agent.invoke(
{
@@ -957,14 +1008,19 @@ export async function runLangGraphCrmAgentFor(input: {
{ role: "user", content: userPrompt },
],
},
{ recursionLimit: 30 },
invokeConfig,
),
new Promise((_resolve, reject) =>
setTimeout(() => reject(new Error(`Cycle timeout after ${cycleTimeoutMs}ms`)), cycleTimeoutMs),
),
]);
} catch (e: any) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: failed (${String(e?.message || e)})` });
await emitTrace({ text: "Один из шагов завершился ошибкой, пробую безопасный обход." });
cycleSpan?.end({
output: "error",
level: "ERROR",
statusMessage: String(e?.message ?? e ?? "unknown_error"),
});
if (!finalText) {
finalText = "Не удалось завершить задачу за отведенное время. Уточни запрос или сократи объем.";
}
@@ -978,12 +1034,23 @@ export async function runLangGraphCrmAgentFor(input: {
const progressed =
toolRuns.length > beforeRuns || dbWrites.length > beforeWrites || pendingChanges.length !== beforePending;
cycleSpan?.end({
output: parsed.text || "",
metadata: {
progressed,
toolRunsDelta: toolRuns.length - beforeRuns,
dbWritesDelta: dbWrites.length - beforeWrites,
pendingDelta: pendingChanges.length - beforePending,
},
});
if (progressed) {
cycleNotes.push(`Cycle ${cycle}: updated tools/data state.`);
}
await emitTrace({
text: `Cycle ${cycle}/${maxCycles}: ${progressed ? "progress" : "no progress"} · pending=${pendingChanges.length}`,
text: progressed
? "Продвигаюсь по задаче и обновляю рабочий набор изменений."
: "Промежуточный шаг не дал прогресса, проверяю следующий вариант.",
});
if (!progressed) {
@@ -994,16 +1061,28 @@ export async function runLangGraphCrmAgentFor(input: {
const done = (!progressed && cycle > 1) || cycle === maxCycles;
if (done) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: done` });
await emitTrace({ text: "Формирую итоговый ответ." });
break;
}
if (consecutiveNoProgress >= 2) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: stopped (no progress)` });
await emitTrace({ text: "Останавливаюсь, чтобы не крутиться в пустом цикле." });
break;
}
}
lfTrace?.update({
output: finalText || null,
metadata: {
toolsUsedCount: toolsUsed.length,
toolRunsCount: toolRuns.length,
dbWritesCount: dbWrites.length,
pendingChangesCount: pendingChanges.length,
maxCycles,
},
});
void langfuse?.flushAsync().catch(() => {});
if (!finalText) {
throw new Error("Model returned empty response");
}