Update chat events/transcription flow and container startup fixes

This commit is contained in:
Ruslan Bakiev
2026-02-19 12:54:16 +07:00
parent 7cc86579b2
commit 3ac487c25b
27 changed files with 3888 additions and 780 deletions

View File

@@ -1,4 +1,4 @@
DATABASE_URL="file:../../.data/clientsflow-dev.db" DATABASE_URL="postgresql://postgres:postgres@localhost:5432/clientsflow?schema=public"
REDIS_URL="redis://localhost:6379" REDIS_URL="redis://localhost:6379"
# Agent (LangGraph + OpenRouter) # Agent (LangGraph + OpenRouter)
@@ -11,11 +11,19 @@ OPENROUTER_X_TITLE="clientsflow"
# Enable reasoning payload for models that support it: 1 or 0 # Enable reasoning payload for models that support it: 1 or 0
OPENROUTER_REASONING_ENABLED="0" OPENROUTER_REASONING_ENABLED="0"
# Langfuse local tracing (optional)
LANGFUSE_ENABLED="true"
LANGFUSE_BASE_URL="http://localhost:3001"
LANGFUSE_PUBLIC_KEY="pk-lf-local"
LANGFUSE_SECRET_KEY="sk-lf-local"
# Optional fallback (OpenAI-compatible) # Optional fallback (OpenAI-compatible)
OPENAI_API_KEY="" OPENAI_API_KEY=""
OPENAI_MODEL="gpt-4o-mini" OPENAI_MODEL="gpt-4o-mini"
# "langgraph" (default) or "rule" # "langgraph" (default) or "rule"
CF_AGENT_MODE="langgraph" CF_AGENT_MODE="langgraph"
CF_WHISPER_MODEL="Xenova/whisper-small"
CF_WHISPER_LANGUAGE="ru"
TELEGRAM_BOT_TOKEN="" TELEGRAM_BOT_TOKEN=""
TELEGRAM_WEBHOOK_SECRET="" TELEGRAM_WEBHOOK_SECRET=""

File diff suppressed because it is too large Load Diff

View File

@@ -12,6 +12,7 @@ const props = defineProps<{
modelValue: string; modelValue: string;
room: string; room: string;
placeholder?: string; placeholder?: string;
plain?: boolean;
}>(); }>();
const emit = defineEmits<{ const emit = defineEmits<{
@@ -130,8 +131,8 @@ onBeforeUnmount(() => {
</script> </script>
<template> <template>
<div class="space-y-3"> <div :class="props.plain ? 'space-y-2' : 'space-y-3'">
<div class="flex flex-wrap items-center justify-between gap-2 rounded-xl border border-base-300 bg-base-100 p-2"> <div :class="props.plain ? 'flex flex-wrap items-center justify-between gap-2 bg-transparent p-0' : 'flex flex-wrap items-center justify-between gap-2 rounded-xl border border-base-300 bg-base-100 p-2'">
<div class="flex flex-wrap items-center gap-1"> <div class="flex flex-wrap items-center gap-1">
<button <button
class="btn btn-xs" class="btn btn-xs"
@@ -173,7 +174,7 @@ onBeforeUnmount(() => {
<p class="px-1 text-xs text-base-content/60">Live: {{ peerCount }}</p> <p class="px-1 text-xs text-base-content/60">Live: {{ peerCount }}</p>
</div> </div>
<div class="rounded-xl border border-base-300 bg-base-100 p-2"> <div :class="props.plain ? 'bg-transparent p-0' : 'rounded-xl border border-base-300 bg-base-100 p-2'">
<EditorContent :editor="editor" class="contact-editor min-h-[420px]" /> <EditorContent :editor="editor" class="contact-editor min-h-[420px]" />
</div> </div>
</div> </div>

View File

@@ -0,0 +1,5 @@
mutation ArchiveChatConversationMutation($id: ID!) {
archiveChatConversation(id: $id) {
ok
}
}

View File

@@ -3,6 +3,10 @@ query ChatMessagesQuery {
id id
role role
text text
requestId
eventType
phase
transient
thinking thinking
tools tools
toolRuns { toolRuns {

View File

@@ -40,6 +40,16 @@ query DashboardQuery {
amount amount
nextStep nextStep
summary summary
currentStepId
steps {
id
title
description
status
dueAt
order
completedAt
}
} }
feed { feed {
id id

File diff suppressed because it is too large Load Diff

View File

@@ -13,9 +13,11 @@
"generate": "nuxt generate", "generate": "nuxt generate",
"postinstall": "nuxt prepare && prisma generate", "postinstall": "nuxt prepare && prisma generate",
"preview": "nuxt preview", "preview": "nuxt preview",
"typecheck": "nuxt typecheck" "typecheck": "nuxt typecheck",
"worker:delivery": "tsx server/queues/worker.ts"
}, },
"dependencies": { "dependencies": {
"@ai-sdk/vue": "^3.0.91",
"@langchain/core": "^0.3.77", "@langchain/core": "^0.3.77",
"@langchain/langgraph": "^0.2.74", "@langchain/langgraph": "^0.2.74",
"@langchain/openai": "^0.6.9", "@langchain/openai": "^0.6.9",
@@ -26,19 +28,25 @@
"@tiptap/extension-placeholder": "^2.27.2", "@tiptap/extension-placeholder": "^2.27.2",
"@tiptap/starter-kit": "^2.27.2", "@tiptap/starter-kit": "^2.27.2",
"@tiptap/vue-3": "^2.27.2", "@tiptap/vue-3": "^2.27.2",
"@xenova/transformers": "^2.17.2",
"ai": "^6.0.91",
"bullmq": "^5.58.2", "bullmq": "^5.58.2",
"daisyui": "^5.5.18", "daisyui": "^5.5.18",
"graphql": "^16.12.0", "graphql": "^16.12.0",
"ioredis": "^5.7.0", "ioredis": "^5.7.0",
"langfuse": "^3.38.6",
"langsmith": "^0.5.4",
"nuxt": "^4.3.1", "nuxt": "^4.3.1",
"tailwindcss": "^4.1.18", "tailwindcss": "^4.1.18",
"vue": "^3.5.27", "vue": "^3.5.27",
"wavesurfer.js": "^7.12.1",
"y-webrtc": "^10.3.0", "y-webrtc": "^10.3.0",
"yjs": "^13.6.29", "yjs": "^13.6.29",
"zod": "^4.1.5" "zod": "^4.1.5"
}, },
"devDependencies": { "devDependencies": {
"prisma": "^6.16.1" "prisma": "^6.16.1",
"tsx": "^4.20.5"
}, },
"prisma": { "prisma": {
"seed": "node prisma/seed.mjs" "seed": "node prisma/seed.mjs"

View File

@@ -3,7 +3,7 @@ generator client {
} }
datasource db { datasource db {
provider = "sqlite" provider = "postgresql"
url = env("DATABASE_URL") url = env("DATABASE_URL")
} }
@@ -263,22 +263,43 @@ model CalendarEvent {
} }
model Deal { model Deal {
id String @id @default(cuid()) id String @id @default(cuid())
teamId String teamId String
contactId String contactId String
title String title String
stage String stage String
amount Int? amount Int?
nextStep String? nextStep String?
summary String? summary String?
createdAt DateTime @default(now()) currentStepId String?
updatedAt DateTime @updatedAt createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
team Team @relation(fields: [teamId], references: [id], onDelete: Cascade) team Team @relation(fields: [teamId], references: [id], onDelete: Cascade)
contact Contact @relation(fields: [contactId], references: [id], onDelete: Cascade) contact Contact @relation(fields: [contactId], references: [id], onDelete: Cascade)
steps DealStep[]
@@index([teamId, updatedAt]) @@index([teamId, updatedAt])
@@index([contactId, updatedAt]) @@index([contactId, updatedAt])
@@index([currentStepId])
}
model DealStep {
id String @id @default(cuid())
dealId String
title String
description String?
status String @default("todo")
dueAt DateTime?
order Int @default(0)
completedAt DateTime?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
deal Deal @relation(fields: [dealId], references: [id], onDelete: Cascade)
@@index([dealId, order])
@@index([status, dueAt])
} }
model ChatConversation { model ChatConversation {

View File

@@ -32,7 +32,7 @@ const prisma = new PrismaClient();
const LOGIN_PHONE = "+15550000001"; const LOGIN_PHONE = "+15550000001";
const LOGIN_PASSWORD = "ConnectFlow#2026"; const LOGIN_PASSWORD = "ConnectFlow#2026";
const LOGIN_NAME = "Connect Owner"; const LOGIN_NAME = "Владелец Connect";
const REF_DATE_ISO = "2026-02-20T12:00:00.000Z"; const REF_DATE_ISO = "2026-02-20T12:00:00.000Z";
const SCRYPT_KEY_LENGTH = 64; const SCRYPT_KEY_LENGTH = 64;
@@ -58,26 +58,26 @@ function plusMinutes(date, minutes) {
function buildOdooAiContacts(teamId) { function buildOdooAiContacts(teamId) {
const prospects = [ const prospects = [
{ name: "Olivia Reed", company: "RetailNova", country: "USA", location: "New York", email: "olivia.reed@retailnova.com", phone: "+1 555 120 0101" }, { name: "Оливия Рид", company: "РитейлНова", country: "США", location: "Нью-Йорк", email: "olivia.reed@retailnova.com", phone: "+1 555 120 0101" },
{ name: "Daniel Kim", company: "ForgePeak Manufacturing", country: "USA", location: "Chicago", email: "daniel.kim@forgepeak.com", phone: "+1 555 120 0102" }, { name: "Даниэль Ким", company: "ФорджПик Производство", country: "США", location: "Чикаго", email: "daniel.kim@forgepeak.com", phone: "+1 555 120 0102" },
{ name: "Marta Alonso", company: "Iberia Foods Group", country: "Spain", location: "Barcelona", email: "marta.alonso@iberiafoods.es", phone: "+34 91 555 0103" }, { name: "Марта Алонсо", company: "Иберия Фудс Групп", country: "Испания", location: "Барселона", email: "marta.alonso@iberiafoods.es", phone: "+34 91 555 0103" },
{ name: "Youssef Haddad", company: "GulfTrade Distribution", country: "UAE", location: "Dubai", email: "youssef.haddad@gulftrade.ae", phone: "+971 4 555 0104" }, { name: "Юсеф Хаддад", company: "ГалфТрейд Дистрибуция", country: "ОАЭ", location: "Дубай", email: "youssef.haddad@gulftrade.ae", phone: "+971 4 555 0104" },
{ name: "Emma Collins", company: "NorthBridge Logistics", country: "UK", location: "London", email: "emma.collins@northbridge.co.uk", phone: "+44 20 5550 0105" }, { name: "Эмма Коллинз", company: "НортБридж Логистика", country: "Великобритания", location: "Лондон", email: "emma.collins@northbridge.co.uk", phone: "+44 20 5550 0105" },
{ name: "Noah Fischer", company: "Bergmann Auto Parts", country: "Germany", location: "Munich", email: "noah.fischer@bergmann-auto.de", phone: "+49 89 5550 0106" }, { name: "Ноа Фишер", company: "Бергман Автозапчасти", country: "Германия", location: "Мюнхен", email: "noah.fischer@bergmann-auto.de", phone: "+49 89 5550 0106" },
{ name: "Ava Choi", company: "Pacific MedTech Supply", country: "Singapore", location: "Singapore", email: "ava.choi@pacificmedtech.sg", phone: "+65 6555 0107" }, { name: "Ава Чой", company: "Пасифик МедТех Сапплай", country: "Сингапур", location: "Сингапур", email: "ava.choi@pacificmedtech.sg", phone: "+65 6555 0107" },
{ name: "Liam Dubois", company: "HexaCommerce", country: "France", location: "Paris", email: "liam.dubois@hexacommerce.fr", phone: "+33 1 55 50 0108" }, { name: "Лиам Дюбуа", company: "ГексаКоммерс", country: "Франция", location: "Париж", email: "liam.dubois@hexacommerce.fr", phone: "+33 1 55 50 0108" },
{ name: "Maya Shah", company: "Zenith Consumer Brands", country: "Canada", location: "Toronto", email: "maya.shah@zenithbrands.ca", phone: "+1 416 555 0109" }, { name: "Майя Шах", company: "Зенит Консьюмер Брендс", country: "Канада", location: "Торонто", email: "maya.shah@zenithbrands.ca", phone: "+1 416 555 0109" },
{ name: "Arman Petrosyan", company: "Ararat Electronics", country: "Armenia", location: "Yerevan", email: "arman.petrosyan@ararat-electronics.am", phone: "+374 10 555110" }, { name: "Арман Петросян", company: "Арарат Электроникс", country: "Армения", location: "Ереван", email: "arman.petrosyan@ararat-electronics.am", phone: "+374 10 555110" },
{ name: "Sophia Martinez", company: "Sunline Home Goods", country: "USA", location: "Austin", email: "sophia.martinez@sunlinehg.com", phone: "+1 555 120 0111" }, { name: "София Мартинес", company: "Санлайн Товары для дома", country: "США", location: "Остин", email: "sophia.martinez@sunlinehg.com", phone: "+1 555 120 0111" },
{ name: "Leo Novak", company: "CentralBuild Materials", country: "Germany", location: "Berlin", email: "leo.novak@centralbuild.de", phone: "+49 30 5550 0112" }, { name: "Лео Новак", company: "ЦентралБилд Материалы", country: "Германия", location: "Берлин", email: "leo.novak@centralbuild.de", phone: "+49 30 5550 0112" },
{ name: "Isla Grant", company: "BlueHarbor Pharma", country: "UK", location: "Manchester", email: "isla.grant@blueharbor.co.uk", phone: "+44 161 555 0113" }, { name: "Айла Грант", company: "БлюХарбор Фарма", country: "Великобритания", location: "Манчестер", email: "isla.grant@blueharbor.co.uk", phone: "+44 161 555 0113" },
{ name: "Mateo Rossi", company: "Milano Fashion House", country: "Italy", location: "Milan", email: "mateo.rossi@milanofh.it", phone: "+39 02 5550 0114" }, { name: "Матео Росси", company: "Милано Фэшн Хаус", country: "Италия", location: "Милан", email: "mateo.rossi@milanofh.it", phone: "+39 02 5550 0114" },
{ name: "Nina Volkova", company: "Polar AgriTech", country: "Kazakhstan", location: "Almaty", email: "nina.volkova@polaragri.kz", phone: "+7 727 555 0115" }, { name: "Нина Волкова", company: "Полар АгриТех", country: "Казахстан", location: "Алматы", email: "nina.volkova@polaragri.kz", phone: "+7 727 555 0115" },
{ name: "Ethan Park", company: "Vertex Components", country: "South Korea", location: "Seoul", email: "ethan.park@vertexcomponents.kr", phone: "+82 2 555 0116" }, { name: "Итан Пак", company: "Вертекс Компонентс", country: "Южная Корея", location: "Сеул", email: "ethan.park@vertexcomponents.kr", phone: "+82 2 555 0116" },
{ name: "Zara Khan", company: "Crescent Retail Chain", country: "UAE", location: "Abu Dhabi", email: "zara.khan@crescentretail.ae", phone: "+971 2 555 0117" }, { name: "Зара Хан", company: "Кресент Ритейл Чейн", country: "ОАЭ", location: "Абу-Даби", email: "zara.khan@crescentretail.ae", phone: "+971 2 555 0117" },
{ name: "Hugo Silva", company: "Luso Industrial Systems", country: "Portugal", location: "Lisbon", email: "hugo.silva@lusois.pt", phone: "+351 21 555 0118" }, { name: "Уго Силва", company: "Лузо Индастриал Системс", country: "Португалия", location: "Лиссабон", email: "hugo.silva@lusois.pt", phone: "+351 21 555 0118" },
{ name: "Chloe Bernard", company: "Santex Clinics Network", country: "France", location: "Lyon", email: "chloe.bernard@santex.fr", phone: "+33 4 55 50 0119" }, { name: "Хлоя Бернар", company: "Сантекс Сеть Клиник", country: "Франция", location: "Лион", email: "chloe.bernard@santex.fr", phone: "+33 4 55 50 0119" },
{ name: "James Walker", company: "Metro Wholesale Group", country: "USA", location: "Los Angeles", email: "james.walker@metrowholesale.com", phone: "+1 555 120 0120" }, { name: "Джеймс Уокер", company: "Метро Оптовая Группа", country: "США", location: "Лос-Анджелес", email: "james.walker@metrowholesale.com", phone: "+1 555 120 0120" },
]; ];
return prospects.map((p, idx) => { return prospects.map((p, idx) => {
@@ -113,8 +113,8 @@ async function main() {
const team = await prisma.team.upsert({ const team = await prisma.team.upsert({
where: { id: "demo-team" }, where: { id: "demo-team" },
update: { name: "Connect Workspace" }, update: { name: "Connect Рабочее пространство" },
create: { id: "demo-team", name: "Connect Workspace" }, create: { id: "demo-team", name: "Connect Рабочее пространство" },
}); });
await prisma.teamMember.upsert({ await prisma.teamMember.upsert({
@@ -125,8 +125,8 @@ async function main() {
const conversation = await prisma.chatConversation.upsert({ const conversation = await prisma.chatConversation.upsert({
where: { id: `pilot-${team.id}` }, where: { id: `pilot-${team.id}` },
update: { title: "Pilot" }, update: { title: "Пилот" },
create: { id: `pilot-${team.id}`, teamId: team.id, createdByUserId: user.id, title: "Pilot" }, create: { id: `pilot-${team.id}`, teamId: team.id, createdByUserId: user.id, title: "Пилот" },
}); });
await prisma.$transaction([ await prisma.$transaction([
@@ -150,22 +150,22 @@ async function main() {
}); });
const integrationModules = [ const integrationModules = [
"Sales + CRM + forecasting copilot", "Продажи + CRM + копилот прогнозирования",
"Inventory + demand prediction", "Склад + прогноз спроса",
"Purchase + supplier risk scoring", "Закупки + оценка рисков поставщиков",
"Accounting + AI anomaly detection", "Бухгалтерия + AI-детекция аномалий",
"Helpdesk + ticket triage assistant", "Поддержка + ассистент триажа заявок",
"Manufacturing + production planning AI", "Производство + AI-планирование мощностей",
]; ];
await prisma.contactNote.createMany({ await prisma.contactNote.createMany({
data: contacts.map((c, idx) => ({ data: contacts.map((c, idx) => ({
contactId: c.id, contactId: c.id,
content: content:
`${c.company ?? c.name} is evaluating Odoo implementation with AI extensions. ` + `${c.company ?? c.name} рассматривает внедрение Odoo с AI-расширениями. ` +
`Primary integration scope: ${integrationModules[idx % integrationModules.length]}. ` + `Основной контур интеграции: ${integrationModules[idx % integrationModules.length]}. ` +
`Main buying trigger: reduce manual operations and shorten decision cycles. ` + `Ключевой драйвер покупки: сократить ручные операции и ускорить цикл принятия решений. ` +
`Next milestone: run discovery workshop, confirm data owners, and approve pilot KPI pack.`, `Следующая веха: провести сессию уточнения, согласовать владельцев данных и утвердить KPI пилота.`,
})), })),
}); });
@@ -180,7 +180,7 @@ async function main() {
kind: "MESSAGE", kind: "MESSAGE",
direction: "IN", direction: "IN",
channel: channels[i % channels.length], channel: channels[i % channels.length],
content: `Hi, we are reviewing Odoo + AI rollout for ${contact.company}. Can we align on integration timeline this week?`, content: `Здравствуйте! Мы рассматриваем запуск Odoo + AI для ${contact.company}. Можем согласовать план интеграции на этой неделе?`,
occurredAt: base, occurredAt: base,
}); });
@@ -189,7 +189,7 @@ async function main() {
kind: "MESSAGE", kind: "MESSAGE",
direction: "OUT", direction: "OUT",
channel: channels[(i + 1) % channels.length], channel: channels[(i + 1) % channels.length],
content: "Sure. I suggest a 45-min discovery focused on workflows, API constraints, and pilot KPIs.", content: "Да, предлагаю 45-минутный разбор: процессы, ограничения API и KPI пилота.",
occurredAt: plusMinutes(base, 22), occurredAt: plusMinutes(base, 22),
}); });
@@ -198,7 +198,7 @@ async function main() {
kind: "MESSAGE", kind: "MESSAGE",
direction: i % 3 === 0 ? "OUT" : "IN", direction: i % 3 === 0 ? "OUT" : "IN",
channel: channels[(i + 2) % channels.length], channel: channels[(i + 2) % channels.length],
content: "Status update: technical scope is clear; blocker is budget owner approval and security questionnaire.", content: "Обновление статуса: технический объём ясен; блокер — согласование бюджета и анкета по безопасности.",
occurredAt: plusMinutes(base, 65), occurredAt: plusMinutes(base, 65),
}); });
@@ -208,11 +208,11 @@ async function main() {
kind: "CALL", kind: "CALL",
direction: "OUT", direction: "OUT",
channel: "PHONE", channel: "PHONE",
content: "Discovery call: Odoo modules, data flows, AI use-cases", content: "Созвон по уточнению: модули Odoo, потоки данных и AI-сценарии",
durationSec: 180 + ((i * 23) % 420), durationSec: 180 + ((i * 23) % 420),
transcriptJson: [ transcriptJson: [
`${contact.name}: We need phased rollout, starting from Sales and Inventory.`, `${contact.name}: Нам нужен поэтапный запуск, начнём с продаж и склада.`,
"You: Agreed. We can run a 6-week pilot with KPI baseline and weekly checkpoints.", "Вы: Согласен. Делаем пилот на 6 недель с базовыми KPI и еженедельными контрольными точками.",
], ],
occurredAt: plusMinutes(base, 110), occurredAt: plusMinutes(base, 110),
}); });
@@ -222,47 +222,47 @@ async function main() {
await prisma.calendarEvent.createMany({ await prisma.calendarEvent.createMany({
data: contacts.flatMap((c, idx) => { data: contacts.flatMap((c, idx) => {
// Historical week ending on 20 Feb 2026: all seeded meetings are completed. // Историческая неделя до 20 Feb 2026: все сидовые встречи завершены.
const firstStart = atOffset(-6 + (idx % 5), 10 + (idx % 6), (idx * 5) % 60); const firstStart = atOffset(-6 + (idx % 5), 10 + (idx % 6), (idx * 5) % 60);
const secondStart = atOffset(-5 + (idx % 5), 14 + (idx % 4), (idx * 3) % 60); const secondStart = atOffset(-5 + (idx % 5), 14 + (idx % 4), (idx * 3) % 60);
return [ return [
{ {
teamId: team.id, teamId: team.id,
contactId: c.id, contactId: c.id,
title: `Discovery: Odoo + AI with ${c.company ?? c.name}`, title: `Сессия уточнения: Odoo + AI с ${c.company ?? c.name}`,
startsAt: firstStart, startsAt: firstStart,
endsAt: plusMinutes(firstStart, 30), endsAt: plusMinutes(firstStart, 30),
note: "Confirm integration scope, current stack, and pilot success metrics.", note: "Подтвердить рамки интеграции, текущий стек и метрики успеха пилота.",
status: "done", status: "done",
}, },
{ {
teamId: team.id, teamId: team.id,
contactId: c.id, contactId: c.id,
title: `Architecture workshop: ${c.company ?? c.name}`, title: `Архитектурный воркшоп: ${c.company ?? c.name}`,
startsAt: secondStart, startsAt: secondStart,
endsAt: plusMinutes(secondStart, 45), endsAt: plusMinutes(secondStart, 45),
note: "Review API mapping, ETL boundaries, and AI assistant guardrails.", note: "Проверить маппинг API, границы ETL и ограничения для AI-ассистента.",
status: "done", status: "done",
}, },
]; ];
}), }),
}); });
const stages = ["Lead", "Discovery", "Solution Fit", "Proposal", "Negotiation", "Pilot", "Contract Review"]; const stages = ["Лид", "Уточнение", "Подбор решения", "Коммерческое предложение", "Переговоры", "Пилот", "Проверка договора"];
await prisma.deal.createMany({ await prisma.deal.createMany({
data: contacts.map((c, idx) => ({ data: contacts.map((c, idx) => ({
teamId: team.id, teamId: team.id,
contactId: c.id, contactId: c.id,
title: `${c.company ?? "Account"} Odoo + AI integration`, title: `${c.company ?? "Клиент"}: интеграция Odoo + AI`,
stage: stages[idx % stages.length], stage: stages[idx % stages.length],
amount: 18000 + (idx % 8) * 7000, amount: 18000 + (idx % 8) * 7000,
nextStep: nextStep:
idx % 4 === 0 idx % 4 === 0
? "Send pilot proposal and finalize integration backlog." ? "Отправить предложение по пилоту и зафиксировать список задач интеграции."
: "Run solution workshop and align commercial owner on timeline.", : "Провести воркшоп по решению и согласовать сроки с коммерческим владельцем.",
summary: summary:
"Potential deal for phased Odoo implementation with AI copilots for ops, sales, and planning. " + "Потенциальная сделка на поэтапное внедрение Odoo с AI-копилотами для операций, продаж и планирования. " +
"Commercial model: discovery + pilot + rollout.", "Коммерческая модель: уточнение + пилот + тиражирование.",
})), })),
}); });
@@ -272,8 +272,8 @@ async function main() {
contactId: c.id, contactId: c.id,
text: text:
idx % 3 === 0 idx % 3 === 0
? "Pinned: ask for ERP owner, data owner, and target go-live quarter." ? "Закреплено: уточнить владельца ERP, владельца данных и целевой квартал запуска."
: "Pinned: keep communication around one KPI and one next action.", : "Закреплено: держать коммуникацию вокруг одного KPI и следующего шага.",
})), })),
}); });
@@ -287,14 +287,14 @@ async function main() {
contactId: c.id, contactId: c.id,
happenedAt: atOffset(-(idx % 6), 9 + (idx % 8), (idx * 9) % 60), happenedAt: atOffset(-(idx % 6), 9 + (idx % 8), (idx * 9) % 60),
text: text:
`I reviewed ${c.company ?? c.name} account activity for the Odoo + AI opportunity. ` + `Я проверил активность по аккаунту ${c.company ?? c.name} в рамках сделки Odoo + AI. ` +
"There is enough momentum to move the deal one stage with a concrete next action.", "Есть достаточный импульс, чтобы перевести сделку на следующий этап при чётком следующем шаге.",
proposalJson: { proposalJson: {
title: idx % 2 === 0 ? "Schedule pilot scoping call" : "Send unblock note for budget owner", title: idx % 2 === 0 ? "Назначить созвон по рамкам пилота" : "Отправить сообщение для разблокировки у владельца бюджета",
details: [ details: [
`Contact: ${c.name}`, `Контакт: ${c.name}`,
idx % 2 === 0 ? "Timing: this week, 45 minutes" : "Timing: today in primary channel", idx % 2 === 0 ? "Когда: на этой неделе, 45 минут" : "Когда: сегодня в основном канале",
"Goal: confirm scope, owner, and next commercial checkpoint", "Цель: подтвердить объём, владельца и следующую коммерческую контрольную точку",
], ],
key: proposalKeys[idx % proposalKeys.length], key: proposalKeys[idx % proposalKeys.length],
}, },
@@ -305,62 +305,62 @@ async function main() {
data: [ data: [
{ {
teamId: team.id, teamId: team.id,
title: "Odoo integration discovery checklist", title: "Чеклист уточнения для интеграции Odoo",
type: "Regulation", type: "Regulation",
owner: "Solution Team", owner: "Команда решений",
scope: "Pre-sale discovery", scope: "Предпродажное уточнение",
summary: "Mandatory questions before estimation of Odoo + AI rollout.", summary: "Обязательные вопросы перед оценкой запуска Odoo + AI.",
body: "## Must capture\n- Current ERP modules\n- Integration endpoints\n- Data owner per domain\n- Security constraints\n- Pilot KPI baseline", body: "## Нужно зафиксировать\n- Текущие модули ERP\n- Точки интеграции\n- Владельца данных по каждому домену\n- Ограничения безопасности\n- Базовые KPI пилота",
updatedAt: atOffset(-1, 11, 10), updatedAt: atOffset(-1, 11, 10),
}, },
{ {
teamId: team.id, teamId: team.id,
title: "AI copilot playbook for Odoo", title: "Плейбук AI-копилота для Odoo",
type: "Playbook", type: "Playbook",
owner: "AI Practice Lead", owner: "Лид AI-практики",
scope: "Use-case qualification", scope: "Квалификация сценариев",
summary: "How to position forecasting, assistant, and anomaly detection features.", summary: "Как позиционировать прогнозирование, ассистента и детекцию аномалий.",
body: "## Flow\n1. Process pain\n2. Data quality\n3. Model target\n4. Success KPI\n5. Pilot scope", body: "## Поток\n1. Боль процесса\n2. Качество данных\n3. Целевая модель\n4. KPI успеха\n5. Объём пилота",
updatedAt: atOffset(-2, 15, 0), updatedAt: atOffset(-2, 15, 0),
}, },
{ {
teamId: team.id, teamId: team.id,
title: "Pilot pricing matrix", title: "Матрица цен для пилота",
type: "Policy", type: "Policy",
owner: "Commercial Ops", owner: "Коммерческие операции",
scope: "Discovery and pilot contracts", scope: "Контракты уточнения и пилота",
summary: "Price ranges for discovery, pilot, and production rollout phases.", summary: "Диапазоны цен для уточнения, пилота и продуктивной фазы.",
body: "## Typical ranges\n- Discovery: 5k-12k\n- Pilot: 15k-45k\n- Rollout: 50k+\n\nAlways tie cost to scope and timeline.", body: "## Типовые диапазоны\n- Уточнение: 5k-12k\n- Пилот: 15k-45k\n- Тиражирование: 50k+\n\nВсегда привязывай стоимость к объёму и срокам.",
updatedAt: atOffset(-3, 9, 30), updatedAt: atOffset(-3, 9, 30),
}, },
{ {
teamId: team.id, teamId: team.id,
title: "Security and compliance template", title: "Шаблон по безопасности и комплаенсу",
type: "Template", type: "Template",
owner: "Delivery Office", owner: "Офис внедрения",
scope: "Enterprise prospects", scope: "Крупные клиенты",
summary: "Template answers for data residency, RBAC, audit trail, and PII handling.", summary: "Шаблон ответов по data residency, RBAC, аудиту и обработке PII.",
body: "## Sections\n- Hosting model\n- Access control\n- Logging and audit\n- Data retention\n- Incident response", body: "## Разделы\n- Модель хостинга\n- Контроль доступа\n- Логирование и аудит\n- Срок хранения данных\n- Реакция на инциденты",
updatedAt: atOffset(-4, 13, 45), updatedAt: atOffset(-4, 13, 45),
}, },
{ {
teamId: team.id, teamId: team.id,
title: "Integration architecture blueprint", title: "Референс интеграционной архитектуры",
type: "Playbook", type: "Playbook",
owner: "Architecture Team", owner: "Архитектурная команда",
scope: "Technical workshops", scope: "Технические воркшопы",
summary: "Reference architecture for Odoo connectors, ETL, and AI service layer.", summary: "Референс-архитектура для коннекторов Odoo, ETL и AI-сервисного слоя.",
body: "## Layers\n- Odoo core modules\n- Integration bus\n- Data warehouse\n- AI service endpoints\n- Monitoring", body: "## Слои\n- Базовые модули Odoo\n- Интеграционная шина\n- Хранилище данных\n- Эндпоинты AI-сервиса\n- Мониторинг",
updatedAt: atOffset(-5, 10, 0), updatedAt: atOffset(-5, 10, 0),
}, },
{ {
teamId: team.id, teamId: team.id,
title: "Go-live readiness checklist", title: "Чеклист готовности к запуску",
type: "Regulation", type: "Regulation",
owner: "PMO", owner: "PMO",
scope: "Pilot to production transition", scope: "Переход от пилота к продакшену",
summary: "Checklist to move from pilot acceptance to production launch.", summary: "Чеклист перехода от приёмки пилота к запуску в прод.",
body: "## Required\n- Pilot KPIs approved\n- Rollout backlog prioritized\n- Owners assigned\n- Support model defined", body: "## Обязательно\n- KPI пилота утверждены\n- Backlog тиражирования приоритизирован\n- Владельцы назначены\n- Модель поддержки определена",
updatedAt: atOffset(-6, 16, 15), updatedAt: atOffset(-6, 16, 15),
}, },
], ],

Binary file not shown.

View File

@@ -11,22 +11,32 @@ find .output -mindepth 1 -maxdepth 1 -exec rm -rf {} + || true
rm -rf node_modules/.cache node_modules/.vite rm -rf node_modules/.cache node_modules/.vite
# Install deps (container starts from a clean image). # Install deps (container starts from a clean image).
# Fallback to npm install when lockfile was produced by a newer npm major. # npm ci is unstable in this workspace due lock drift in transitive deps.
if ! npm ci; then npm install
npm install
fi
# DB path used by DATABASE_URL="file:../../.data/clientsflow-dev.db" from /app/Frontend # sharp is a native module and can break when cached node_modules were installed
DB_FILE="/app/.data/clientsflow-dev.db" # for a different CPU variant (for example arm64v8). Force a local rebuild.
ARCH="$(uname -m)"
# First boot: create schema + seed. if [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then
# Next boots: keep data, only sync schema and re-run idempotent seed. npm rebuild sharp --platform=linux --arch=arm64v8 \
if [[ ! -f "$DB_FILE" ]]; then || npm rebuild sharp --platform=linux --arch=arm64 \
npx prisma db push --force-reset || npm install sharp --platform=linux --arch=arm64v8 --save-exact=false \
|| npm install sharp --platform=linux --arch=arm64 --save-exact=false
elif [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then
npm rebuild sharp --platform=linux --arch=x64 \
|| npm install sharp --platform=linux --arch=x64 --save-exact=false
else else
npx prisma db push npm rebuild sharp || true
fi fi
# Wait until PostgreSQL is reachable before applying schema.
until node -e "const u=new URL(process.env.DATABASE_URL||''); const net=require('net'); const s=net.createConnection({host:u.hostname,port:Number(u.port||5432)}); s.on('connect',()=>{s.end(); process.exit(0);}); s.on('error',()=>process.exit(1)); setTimeout(()=>process.exit(1), 1000);" ; do
echo "Waiting for PostgreSQL..."
sleep 1
done
npx prisma db push
node prisma/seed.mjs node prisma/seed.mjs
exec npm run dev -- --host 0.0.0.0 --port 3000 exec npm run dev -- --host 0.0.0.0 --port 3000

View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
set -euo pipefail
cd "$(dirname "$0")/.."
# Worker container starts from clean image.
# Install deps without frontend postinstall hooks (nuxt prepare) to keep worker lean/stable.
npm install --ignore-scripts
ARCH="$(uname -m)"
if [ "$ARCH" = "aarch64" ] || [ "$ARCH" = "arm64" ]; then
npm rebuild sharp --platform=linux --arch=arm64v8 \
|| npm rebuild sharp --platform=linux --arch=arm64 \
|| npm install sharp --platform=linux --arch=arm64v8 --save-exact=false \
|| npm install sharp --platform=linux --arch=arm64 --save-exact=false
elif [ "$ARCH" = "x86_64" ] || [ "$ARCH" = "amd64" ]; then
npm rebuild sharp --platform=linux --arch=x64 \
|| npm install sharp --platform=linux --arch=x64 --save-exact=false
else
npm rebuild sharp || true
fi
npx prisma generate
# Ensure DB is reachable before the worker starts consuming jobs.
until node -e "const u=new URL(process.env.DATABASE_URL||''); const net=require('net'); const s=net.createConnection({host:u.hostname,port:Number(u.port||5432)}); s.on('connect',()=>{s.end(); process.exit(0);}); s.on('error',()=>process.exit(1)); setTimeout(()=>process.exit(1), 1000);" ; do
echo "Waiting for PostgreSQL..."
sleep 1
done
exec npm run worker:delivery

View File

@@ -97,6 +97,8 @@ export async function runCrmAgentFor(
teamId: string; teamId: string;
userId: string; userId: string;
userText: string; userText: string;
requestId?: string;
conversationId?: string;
onTrace?: (event: AgentTraceEvent) => Promise<void> | void; onTrace?: (event: AgentTraceEvent) => Promise<void> | void;
}, },
): Promise<AgentReply> { ): Promise<AgentReply> {
@@ -246,29 +248,23 @@ export async function persistChatMessage(input: {
at: string; at: string;
}>; }>;
changeSet?: ChangeSet | null; changeSet?: ChangeSet | null;
requestId?: string;
eventType?: "user" | "trace" | "assistant" | "note";
phase?: "pending" | "running" | "final" | "error";
transient?: boolean;
teamId: string; teamId: string;
conversationId: string; conversationId: string;
authorUserId?: string | null; authorUserId?: string | null;
}) { }) {
const hasDebugPayload = Boolean( const hasStoredPayload = Boolean(input.changeSet);
(input.plan && input.plan.length) ||
(input.tools && input.tools.length) ||
(input.thinking && input.thinking.length) ||
(input.toolRuns && input.toolRuns.length) ||
input.changeSet,
);
const data: Prisma.ChatMessageCreateInput = { const data: Prisma.ChatMessageCreateInput = {
team: { connect: { id: input.teamId } }, team: { connect: { id: input.teamId } },
conversation: { connect: { id: input.conversationId } }, conversation: { connect: { id: input.conversationId } },
authorUser: input.authorUserId ? { connect: { id: input.authorUserId } } : undefined, authorUser: input.authorUserId ? { connect: { id: input.authorUserId } } : undefined,
role: input.role, role: input.role,
text: input.text, text: input.text,
planJson: hasDebugPayload planJson: hasStoredPayload
? ({ ? ({
steps: input.plan ?? [],
tools: input.tools ?? [],
thinking: input.thinking ?? input.plan ?? [],
toolRuns: input.toolRuns ?? [],
changeSet: input.changeSet ?? null, changeSet: input.changeSet ?? null,
} as any) } as any)
: undefined, : undefined,

View File

@@ -6,11 +6,23 @@ import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { ChatOpenAI } from "@langchain/openai"; import { ChatOpenAI } from "@langchain/openai";
import { tool } from "@langchain/core/tools"; import { tool } from "@langchain/core/tools";
import { z } from "zod"; import { z } from "zod";
import { getLangfuseClient } from "../utils/langfuse";
function iso(d: Date) { function iso(d: Date) {
return d.toISOString(); return d.toISOString();
} }
function cyclePrompt(userText: string, cycle: number, cycleNotes: string[], pendingCount: number) {
if (cycle === 1) return userText;
return [
"Continue solving the same user request.",
`User request: ${userText}`,
cycleNotes.length ? `Progress notes:\n- ${cycleNotes.join("\n- ")}` : "No progress notes yet.",
`Pending staged changes: ${pendingCount}.`,
"Do the next useful step. If done, produce final concise answer.",
].join("\n");
}
type GigachatTokenCache = { type GigachatTokenCache = {
token: string; token: string;
expiresAtSec: number; expiresAtSec: number;
@@ -322,6 +334,8 @@ export async function runLangGraphCrmAgentFor(input: {
teamId: string; teamId: string;
userId: string; userId: string;
userText: string; userText: string;
requestId?: string;
conversationId?: string;
onTrace?: (event: AgentTraceEvent) => Promise<void> | void; onTrace?: (event: AgentTraceEvent) => Promise<void> | void;
}): Promise<AgentReply> { }): Promise<AgentReply> {
const openrouterApiKey = (process.env.OPENROUTER_API_KEY ?? "").trim(); const openrouterApiKey = (process.env.OPENROUTER_API_KEY ?? "").trim();
@@ -414,6 +428,16 @@ export async function runLangGraphCrmAgentFor(input: {
const pendingChanges: PendingChange[] = []; const pendingChanges: PendingChange[] = [];
async function emitTrace(event: AgentTraceEvent) { async function emitTrace(event: AgentTraceEvent) {
lfTrace?.event({
name: "agent.trace",
input: {
text: event.text,
toolRun: event.toolRun ?? null,
},
metadata: {
requestId: input.requestId ?? null,
},
});
if (!input.onTrace) return; if (!input.onTrace) return;
try { try {
await input.onTrace(event); await input.onTrace(event);
@@ -544,7 +568,7 @@ export async function runLangGraphCrmAgentFor(input: {
const toolName = `crm:${raw.action}`; const toolName = `crm:${raw.action}`;
const startedAt = new Date().toISOString(); const startedAt = new Date().toISOString();
toolsUsed.push(toolName); toolsUsed.push(toolName);
await emitTrace({ text: `Tool started: ${toolName}` }); await emitTrace({ text: `Использую инструмент: ${toolName}` });
const executeAction = async () => { const executeAction = async () => {
if (raw.action === "get_snapshot") { if (raw.action === "get_snapshot") {
@@ -856,6 +880,23 @@ export async function runLangGraphCrmAgentFor(input: {
const maxCycles = Math.max(1, Math.min(Number(process.env.CF_AGENT_MAX_CYCLES ?? "3"), 8)); const maxCycles = Math.max(1, Math.min(Number(process.env.CF_AGENT_MAX_CYCLES ?? "3"), 8));
const cycleTimeoutMs = Math.max(5000, Math.min(Number(process.env.CF_AGENT_CYCLE_TIMEOUT_MS ?? "1200000"), 1800000)); const cycleTimeoutMs = Math.max(5000, Math.min(Number(process.env.CF_AGENT_CYCLE_TIMEOUT_MS ?? "1200000"), 1800000));
const tracingFlag = (process.env.LANGSMITH_TRACING ?? process.env.LANGCHAIN_TRACING_V2 ?? "").trim().toLowerCase();
const tracingEnabled = tracingFlag === "1" || tracingFlag === "true" || tracingFlag === "yes";
const langfuse = getLangfuseClient();
const lfTrace = langfuse?.trace({
id: input.requestId ?? makeId("trace"),
name: "clientsflow.crm_agent_request",
userId: input.userId,
sessionId: input.conversationId ?? undefined,
input: input.userText,
metadata: {
teamId: input.teamId,
userId: input.userId,
requestId: input.requestId ?? null,
conversationId: input.conversationId ?? null,
},
tags: ["clientsflow", "crm-agent", "langgraph"],
});
let consecutiveNoProgress = 0; let consecutiveNoProgress = 0;
let finalText = ""; let finalText = "";
const cycleNotes: string[] = []; const cycleNotes: string[] = [];
@@ -931,24 +972,34 @@ export async function runLangGraphCrmAgentFor(input: {
}; };
for (let cycle = 1; cycle <= maxCycles; cycle += 1) { for (let cycle = 1; cycle <= maxCycles; cycle += 1) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: start` }); const userPrompt = cyclePrompt(input.userText, cycle, cycleNotes, pendingChanges.length);
const cycleSpan = lfTrace?.span({
name: "agent.cycle",
input: userPrompt,
metadata: {
cycle,
requestId: input.requestId ?? null,
},
});
await emitTrace({ text: "Анализирую задачу и текущий контекст CRM." });
const beforeRuns = toolRuns.length; const beforeRuns = toolRuns.length;
const beforeWrites = dbWrites.length; const beforeWrites = dbWrites.length;
const beforePending = pendingChanges.length; const beforePending = pendingChanges.length;
const userPrompt =
cycle === 1
? input.userText
: [
"Continue solving the same user request.",
`User request: ${input.userText}`,
cycleNotes.length ? `Progress notes:\n- ${cycleNotes.join("\n- ")}` : "No progress notes yet.",
`Pending staged changes: ${pendingChanges.length}.`,
"Do the next useful step. If done, produce final concise answer.",
].join("\n");
let res: any; let res: any;
try { try {
const invokeConfig: Record<string, any> = { recursionLimit: 30 };
if (tracingEnabled) {
invokeConfig.runName = "clientsflow.crm_agent_cycle";
invokeConfig.tags = ["clientsflow", "crm-agent", "langgraph"];
invokeConfig.metadata = {
teamId: input.teamId,
userId: input.userId,
requestId: input.requestId ?? null,
conversationId: input.conversationId ?? null,
cycle,
};
}
res = await Promise.race([ res = await Promise.race([
agent.invoke( agent.invoke(
{ {
@@ -957,14 +1008,19 @@ export async function runLangGraphCrmAgentFor(input: {
{ role: "user", content: userPrompt }, { role: "user", content: userPrompt },
], ],
}, },
{ recursionLimit: 30 }, invokeConfig,
), ),
new Promise((_resolve, reject) => new Promise((_resolve, reject) =>
setTimeout(() => reject(new Error(`Cycle timeout after ${cycleTimeoutMs}ms`)), cycleTimeoutMs), setTimeout(() => reject(new Error(`Cycle timeout after ${cycleTimeoutMs}ms`)), cycleTimeoutMs),
), ),
]); ]);
} catch (e: any) { } catch (e: any) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: failed (${String(e?.message || e)})` }); await emitTrace({ text: "Один из шагов завершился ошибкой, пробую безопасный обход." });
cycleSpan?.end({
output: "error",
level: "ERROR",
statusMessage: String(e?.message ?? e ?? "unknown_error"),
});
if (!finalText) { if (!finalText) {
finalText = "Не удалось завершить задачу за отведенное время. Уточни запрос или сократи объем."; finalText = "Не удалось завершить задачу за отведенное время. Уточни запрос или сократи объем.";
} }
@@ -978,12 +1034,23 @@ export async function runLangGraphCrmAgentFor(input: {
const progressed = const progressed =
toolRuns.length > beforeRuns || dbWrites.length > beforeWrites || pendingChanges.length !== beforePending; toolRuns.length > beforeRuns || dbWrites.length > beforeWrites || pendingChanges.length !== beforePending;
cycleSpan?.end({
output: parsed.text || "",
metadata: {
progressed,
toolRunsDelta: toolRuns.length - beforeRuns,
dbWritesDelta: dbWrites.length - beforeWrites,
pendingDelta: pendingChanges.length - beforePending,
},
});
if (progressed) { if (progressed) {
cycleNotes.push(`Cycle ${cycle}: updated tools/data state.`); cycleNotes.push(`Cycle ${cycle}: updated tools/data state.`);
} }
await emitTrace({ await emitTrace({
text: `Cycle ${cycle}/${maxCycles}: ${progressed ? "progress" : "no progress"} · pending=${pendingChanges.length}`, text: progressed
? "Продвигаюсь по задаче и обновляю рабочий набор изменений."
: "Промежуточный шаг не дал прогресса, проверяю следующий вариант.",
}); });
if (!progressed) { if (!progressed) {
@@ -994,16 +1061,28 @@ export async function runLangGraphCrmAgentFor(input: {
const done = (!progressed && cycle > 1) || cycle === maxCycles; const done = (!progressed && cycle > 1) || cycle === maxCycles;
if (done) { if (done) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: done` }); await emitTrace({ text: "Формирую итоговый ответ." });
break; break;
} }
if (consecutiveNoProgress >= 2) { if (consecutiveNoProgress >= 2) {
await emitTrace({ text: `Cycle ${cycle}/${maxCycles}: stopped (no progress)` }); await emitTrace({ text: "Останавливаюсь, чтобы не крутиться в пустом цикле." });
break; break;
} }
} }
lfTrace?.update({
output: finalText || null,
metadata: {
toolsUsedCount: toolsUsed.length,
toolRunsCount: toolRuns.length,
dbWritesCount: dbWrites.length,
pendingChangesCount: pendingChanges.length,
maxCycles,
},
});
void langfuse?.flushAsync().catch(() => {});
if (!finalText) { if (!finalText) {
throw new Error("Model returned empty response"); throw new Error("Model returned empty response");
} }

View File

@@ -0,0 +1,62 @@
import { readBody } from "h3";
import { getAuthContext } from "../../../utils/auth";
import { prisma } from "../../../utils/prisma";
import { enqueueOutboundDelivery } from "../../../queues/outboundDelivery";
// Request body for manually (re)enqueueing an outbound delivery job.
// All fields are optional at the type level; the handler validates the
// required ones (omniMessageId, endpoint) at runtime.
type EnqueueBody = {
  omniMessageId?: string; // Id of the OmniMessage row to deliver; must belong to caller's team.
  endpoint?: string; // Target HTTP(S) URL for the delivery.
  method?: "POST" | "PUT" | "PATCH"; // HTTP method; handler defaults to POST.
  headers?: Record<string, string>; // Extra request headers.
  payload?: unknown; // JSON-serializable request body.
  timeoutMs?: number; // Per-request timeout hint.
  provider?: string; // Optional provider label for audit.
  channel?: string; // Optional channel label for audit.
  attempts?: number; // Retry attempts; handler clamps to [1, 50], default 12.
};
/**
 * POST handler: manually (re)enqueue an outbound delivery job for an existing
 * omni message.
 *
 * Validates that the message belongs to the authenticated team before
 * enqueueing, so one team cannot trigger deliveries for another team's
 * messages. Returns the BullMQ job id for tracking.
 *
 * Errors: 400 when omniMessageId/endpoint is missing, 404 when the message
 * is not found in the caller's team.
 */
export default defineEventHandler(async (event) => {
  const auth = await getAuthContext(event);
  const body = await readBody<EnqueueBody>(event);

  const omniMessageId = String(body?.omniMessageId ?? "").trim();
  const endpoint = String(body?.endpoint ?? "").trim();
  if (!omniMessageId) {
    throw createError({ statusCode: 400, statusMessage: "omniMessageId is required" });
  }
  if (!endpoint) {
    throw createError({ statusCode: 400, statusMessage: "endpoint is required" });
  }

  // Ownership check: only messages of the authenticated team are deliverable.
  const msg = await prisma.omniMessage.findFirst({
    where: { id: omniMessageId, teamId: auth.teamId },
    select: { id: true },
  });
  if (!msg) {
    throw createError({ statusCode: 404, statusMessage: "omni message not found" });
  }

  // Clamp retry attempts to [1, 50]; fall back to 12 when the value is not a
  // finite number. (A bare Math.max(1, Math.min(Number(x), 50)) propagates NaN
  // into BullMQ when `attempts` is non-numeric.)
  const attemptsRaw = Number(body?.attempts ?? 12);
  const attempts = Number.isFinite(attemptsRaw) ? Math.max(1, Math.min(attemptsRaw, 50)) : 12;

  const job = await enqueueOutboundDelivery(
    {
      omniMessageId,
      endpoint,
      method: body?.method ?? "POST",
      headers: body?.headers ?? {},
      payload: body?.payload ?? {},
      timeoutMs: body?.timeoutMs,
      provider: body?.provider ?? undefined,
      channel: body?.channel ?? undefined,
    },
    {
      attempts,
    },
  );

  return {
    ok: true,
    queue: "omni-outbound",
    jobId: job.id,
    omniMessageId,
  };
});

View File

@@ -0,0 +1,32 @@
import { readBody } from "h3";
import { getAuthContext } from "../../../utils/auth";
import { prisma } from "../../../utils/prisma";
import { enqueueTelegramSend } from "../../../queues/telegramSend";
/**
 * POST handler: (re)enqueue sending of an outbound Telegram message.
 *
 * Only messages with channel TELEGRAM and direction OUT that belong to the
 * authenticated team are accepted. Returns the BullMQ job id for tracking.
 *
 * Errors: 400 when omniMessageId is missing, 404 when no matching message
 * exists in the caller's team.
 */
export default defineEventHandler(async (event) => {
  const auth = await getAuthContext(event);
  const body = await readBody<{ omniMessageId?: string; attempts?: number }>(event);

  const omniMessageId = String(body?.omniMessageId ?? "").trim();
  if (!omniMessageId) {
    throw createError({ statusCode: 400, statusMessage: "omniMessageId is required" });
  }

  // Ownership + shape check: must be an outbound Telegram message of this team.
  const msg = await prisma.omniMessage.findFirst({
    where: { id: omniMessageId, teamId: auth.teamId, channel: "TELEGRAM", direction: "OUT" },
    select: { id: true },
  });
  if (!msg) {
    throw createError({ statusCode: 404, statusMessage: "telegram outbound message not found" });
  }

  // Clamp retry attempts to [1, 50]; fall back to 12 when the value is not a
  // finite number. (A bare Math.max(1, Math.min(Number(x), 50)) propagates NaN
  // into BullMQ when `attempts` is non-numeric.)
  const attemptsRaw = Number(body?.attempts ?? 12);
  const attempts = Number.isFinite(attemptsRaw) ? Math.max(1, Math.min(attemptsRaw, 50)) : 12;

  const job = await enqueueTelegramSend({ omniMessageId }, { attempts });

  return {
    ok: true,
    queue: "omni-outbound",
    jobId: job.id,
    omniMessageId,
  };
});

View File

@@ -0,0 +1,130 @@
import { readBody } from "h3";
import { createUIMessageStream, createUIMessageStreamResponse } from "ai";
import { getAuthContext } from "../utils/auth";
import { prisma } from "../utils/prisma";
import { buildChangeSet, captureSnapshot } from "../utils/changeSet";
import { persistChatMessage, runCrmAgentFor, type AgentTraceEvent } from "../agent/crmAgent";
// Concatenates all text parts of a UI message and trims the result.
// Non-text parts and non-string `text` values are ignored; any input without
// an array `parts` yields "".
function extractMessageText(message: any): string {
  const parts = Array.isArray(message?.parts) ? message.parts : [];
  const chunks: string[] = [];
  for (const part of parts) {
    if (part?.type === "text" && typeof part.text === "string") {
      chunks.push(part.text);
    }
  }
  return chunks.join("").trim();
}
// Returns the text of the most recent user message that has non-empty text,
// or "" when no such message exists. Scans from the end of the list.
function getLastUserText(messages: any[]): string {
  const newestFirst = [...messages].reverse();
  for (const message of newestFirst) {
    if (message?.role !== "user") continue;
    const text = extractMessageText(message);
    if (text) {
      return text;
    }
  }
  return "";
}
// Maps a raw agent trace event to a short user-facing (Russian) status line.
// Tool runs, errors, and "final answer" markers get canned phrases; any other
// non-empty text passes through unchanged.
function humanizeTraceText(trace: AgentTraceEvent): string {
  const toolName = trace.toolRun?.name;
  if (toolName) {
    return `Использую инструмент: ${toolName}`;
  }
  const text = (trace.text ?? "").trim();
  if (!text) {
    return "Агент работает с данными CRM.";
  }
  const lowered = text.toLowerCase();
  if (lowered.includes("ошиб")) {
    return "Возникла ошибка шага, пробую другой путь.";
  }
  if (lowered.includes("итог")) {
    return "Готовлю финальный ответ.";
  }
  return text;
}
/**
 * POST handler: runs the CRM agent for the latest user message and streams the
 * result back as a UI message stream.
 *
 * Flow: persist the user message, run the agent (forwarding trace events to
 * the stream as `data-agent-log` items), diff a before/after snapshot of the
 * team's data into a ChangeSet, persist the assistant reply with that
 * ChangeSet, then emit the reply text. On any failure inside the stream an
 * error log item plus a fallback text message is written instead, so the
 * client always receives a well-formed stream ending in `finish`.
 *
 * Errors: 400 when the request contains no user message with non-empty text.
 */
export default defineEventHandler(async (event) => {
  const auth = await getAuthContext(event);
  const body = await readBody<{ messages?: any[] }>(event);
  const messages = Array.isArray(body?.messages) ? body.messages : [];
  const userText = getLastUserText(messages);
  if (!userText) {
    throw createError({ statusCode: 400, statusMessage: "Last user message is required" });
  }
  // Correlation id linking persisted messages, trace log items, and the reply.
  const requestId = `req_${Date.now()}_${Math.floor(Math.random() * 1_000_000)}`;
  const stream = createUIMessageStream({
    execute: async ({ writer }) => {
      const textId = `text-${Date.now()}`;
      writer.write({ type: "start" });
      try {
        // Snapshot before the agent runs; diffed against the "after" snapshot
        // below to build the reviewable ChangeSet.
        const snapshotBefore = await captureSnapshot(prisma, auth.teamId);
        await persistChatMessage({
          teamId: auth.teamId,
          conversationId: auth.conversationId,
          authorUserId: auth.userId,
          role: "USER",
          text: userText,
          requestId,
          eventType: "user",
          phase: "final",
          transient: false,
        });
        const reply = await runCrmAgentFor({
          teamId: auth.teamId,
          userId: auth.userId,
          userText,
          requestId,
          conversationId: auth.conversationId,
          // Trace events are not persisted here; they are streamed live to the
          // client as transient progress log entries.
          onTrace: async (trace: AgentTraceEvent) => {
            writer.write({
              type: "data-agent-log",
              data: {
                requestId,
                at: new Date().toISOString(),
                text: humanizeTraceText(trace),
              },
            });
          },
        });
        const snapshotAfter = await captureSnapshot(prisma, auth.teamId);
        const changeSet = buildChangeSet(snapshotBefore, snapshotAfter);
        // Assistant reply is stored together with the ChangeSet it produced.
        await persistChatMessage({
          teamId: auth.teamId,
          conversationId: auth.conversationId,
          authorUserId: null,
          role: "ASSISTANT",
          text: reply.text,
          requestId,
          eventType: "assistant",
          phase: "final",
          transient: false,
          changeSet,
        });
        // Emit the reply as a single text block, then close the stream.
        writer.write({ type: "text-start", id: textId });
        writer.write({ type: "text-delta", id: textId, delta: reply.text });
        writer.write({ type: "text-end", id: textId });
        writer.write({ type: "finish", finishReason: "stop" });
      } catch (error: any) {
        // Failure path: surface a generic log entry plus a fallback message so
        // the client stream still terminates cleanly with `finish`.
        writer.write({
          type: "data-agent-log",
          data: {
            requestId,
            at: new Date().toISOString(),
            text: "Ошибка выполнения агентского цикла.",
          },
        });
        writer.write({ type: "text-start", id: textId });
        writer.write({
          type: "text-delta",
          id: textId,
          delta: `Не удалось завершить задачу: ${String(error?.message ?? "unknown error")}`,
        });
        writer.write({ type: "text-end", id: textId });
        writer.write({ type: "finish", finishReason: "stop" });
      }
    },
  });
  return createUIMessageStreamResponse({ stream });
});

View File

@@ -0,0 +1,62 @@
import { readBody } from "h3";
import { getAuthContext } from "../utils/auth";
import { transcribeWithWhisper } from "../utils/whisper";
// Request body for the transcription endpoint.
type TranscribeBody = {
  audioBase64?: string; // Raw PCM16 little-endian samples, base64-encoded. Required at runtime.
  sampleRate?: number; // Sample rate in Hz; handler requires a finite value in [8000, 48000].
  language?: string; // Optional language hint passed to Whisper (e.g. "ru").
};
// Decodes base64-encoded little-endian PCM16 audio into normalized float
// samples in [-1, 1). A trailing odd byte is dropped; inputs shorter than one
// sample yield an empty array.
function decodeBase64Pcm16(audioBase64: string) {
  const raw = Buffer.from(audioBase64, "base64");
  const sampleCount = Math.floor(raw.length / 2);
  if (sampleCount === 0) return new Float32Array();
  const samples = new Float32Array(sampleCount);
  for (let index = 0; index < sampleCount; index += 1) {
    // readInt16LE performs the same (hi << 8) | lo assembly plus sign
    // extension as the manual byte math it replaces.
    samples[index] = raw.readInt16LE(index * 2) / 32768;
  }
  return samples;
}
/**
 * POST handler: transcribes base64-encoded PCM16 audio with local Whisper.
 *
 * Requires an authenticated session (the identity itself is not used).
 * Errors: 400 for missing audio, invalid sample rate, or empty decoded audio;
 * 413 when the clip exceeds 120 seconds at the given sample rate.
 */
export default defineEventHandler(async (event) => {
  await getAuthContext(event);
  const body = await readBody<TranscribeBody>(event);

  const audioBase64 = String(body?.audioBase64 ?? "").trim();
  const sampleRateRaw = Number(body?.sampleRate ?? 0);
  const language = String(body?.language ?? "").trim() || undefined;

  if (!audioBase64) {
    throw createError({ statusCode: 400, statusMessage: "audioBase64 is required" });
  }
  const sampleRateValid =
    Number.isFinite(sampleRateRaw) && sampleRateRaw >= 8000 && sampleRateRaw <= 48000;
  if (!sampleRateValid) {
    throw createError({ statusCode: 400, statusMessage: "sampleRate must be between 8000 and 48000" });
  }

  const samples = decodeBase64Pcm16(audioBase64);
  if (samples.length === 0) {
    throw createError({ statusCode: 400, statusMessage: "Audio is empty" });
  }

  // Reject clips longer than 120 seconds to bound transcription cost.
  const maxSamples = Math.floor(sampleRateRaw * 120);
  if (samples.length > maxSamples) {
    throw createError({ statusCode: 413, statusMessage: "Audio is too long (max 120s)" });
  }

  const text = await transcribeWithWhisper({
    samples,
    sampleRate: sampleRateRaw,
    language,
  });
  return { text };
});

View File

@@ -5,7 +5,6 @@ import { clearAuthSession, setSession } from "../utils/auth";
import { prisma } from "../utils/prisma"; import { prisma } from "../utils/prisma";
import { normalizePhone, verifyPassword } from "../utils/password"; import { normalizePhone, verifyPassword } from "../utils/password";
import { persistChatMessage, runCrmAgentFor } from "../agent/crmAgent"; import { persistChatMessage, runCrmAgentFor } from "../agent/crmAgent";
import type { AgentTraceEvent } from "../agent/crmAgent";
import { buildChangeSet, captureSnapshot, rollbackChangeSet } from "../utils/changeSet"; import { buildChangeSet, captureSnapshot, rollbackChangeSet } from "../utils/changeSet";
import type { ChangeSet } from "../utils/changeSet"; import type { ChangeSet } from "../utils/changeSet";
@@ -210,6 +209,55 @@ async function selectChatConversation(auth: AuthContext | null, event: H3Event,
return { ok: true }; return { ok: true };
} }
async function archiveChatConversation(auth: AuthContext | null, event: H3Event, id: string) {
const ctx = requireAuth(auth);
const convId = (id ?? "").trim();
if (!convId) throw new Error("id is required");
const conversation = await prisma.chatConversation.findFirst({
where: {
id: convId,
teamId: ctx.teamId,
createdByUserId: ctx.userId,
},
select: { id: true },
});
if (!conversation) throw new Error("conversation not found");
const nextConversationId = await prisma.$transaction(async (tx) => {
await tx.chatConversation.delete({ where: { id: conversation.id } });
if (ctx.conversationId !== conversation.id) {
return ctx.conversationId;
}
const fallback = await tx.chatConversation.findFirst({
where: { teamId: ctx.teamId, createdByUserId: ctx.userId },
orderBy: { updatedAt: "desc" },
select: { id: true },
});
if (fallback) {
return fallback.id;
}
const created = await tx.chatConversation.create({
data: { teamId: ctx.teamId, createdByUserId: ctx.userId, title: "Pilot" },
select: { id: true },
});
return created.id;
});
setSession(event, {
teamId: ctx.teamId,
userId: ctx.userId,
conversationId: nextConversationId,
});
return { ok: true };
}
async function getChatMessages(auth: AuthContext | null) { async function getChatMessages(auth: AuthContext | null) {
const ctx = requireAuth(auth); const ctx = requireAuth(auth);
const items = await prisma.chatMessage.findMany({ const items = await prisma.chatMessage.findMany({
@@ -219,25 +267,18 @@ async function getChatMessages(auth: AuthContext | null) {
}); });
return items.map((m) => { return items.map((m) => {
const debug = (m.planJson as any) ?? {};
const cs = getChangeSetFromPlanJson(m.planJson); const cs = getChangeSetFromPlanJson(m.planJson);
return { return {
id: m.id, id: m.id,
role: m.role === "USER" ? "user" : m.role === "ASSISTANT" ? "assistant" : "system", role: m.role === "USER" ? "user" : m.role === "ASSISTANT" ? "assistant" : "system",
text: m.text, text: m.text,
thinking: Array.isArray(debug.thinking) ? (debug.thinking as string[]) : [], requestId: null,
tools: Array.isArray(debug.tools) ? (debug.tools as string[]) : [], eventType: null,
toolRuns: Array.isArray(debug.toolRuns) phase: null,
? (debug.toolRuns as any[]) transient: null,
.filter((t) => t && typeof t === "object") thinking: [],
.map((t: any) => ({ tools: [],
name: String(t.name ?? "crm:unknown"), toolRuns: [],
status: t.status === "error" ? "error" : "ok",
input: String(t.input ?? ""),
output: String(t.output ?? ""),
at: t.at ? String(t.at) : m.createdAt.toISOString(),
}))
: [],
changeSetId: cs?.id ?? null, changeSetId: cs?.id ?? null,
changeStatus: cs?.status ?? null, changeStatus: cs?.status ?? null,
changeSummary: cs?.summary ?? null, changeSummary: cs?.summary ?? null,
@@ -292,7 +333,10 @@ async function getDashboard(auth: AuthContext | null) {
}), }),
prisma.deal.findMany({ prisma.deal.findMany({
where: { teamId: ctx.teamId }, where: { teamId: ctx.teamId },
include: { contact: { select: { name: true, company: true } } }, include: {
contact: { select: { name: true, company: true } },
steps: { orderBy: [{ order: "asc" }, { createdAt: "asc" }] },
},
orderBy: { updatedAt: "desc" }, orderBy: { updatedAt: "desc" },
take: 500, take: 500,
}), }),
@@ -366,6 +410,16 @@ async function getDashboard(auth: AuthContext | null) {
amount: d.amount ? String(d.amount) : "", amount: d.amount ? String(d.amount) : "",
nextStep: d.nextStep ?? "", nextStep: d.nextStep ?? "",
summary: d.summary ?? "", summary: d.summary ?? "",
currentStepId: d.currentStepId ?? "",
steps: d.steps.map((step) => ({
id: step.id,
title: step.title,
description: step.description ?? "",
status: step.status,
dueAt: step.dueAt?.toISOString() ?? "",
order: step.order,
completedAt: step.completedAt?.toISOString() ?? "",
})),
})); }));
const feed = feedRaw.map((c) => ({ const feed = feedRaw.map((c) => ({
@@ -596,6 +650,7 @@ async function sendPilotMessage(auth: AuthContext | null, textInput: string) {
const ctx = requireAuth(auth); const ctx = requireAuth(auth);
const text = (textInput ?? "").trim(); const text = (textInput ?? "").trim();
if (!text) throw new Error("text is required"); if (!text) throw new Error("text is required");
const requestId = `req_${Date.now()}_${Math.floor(Math.random() * 1_000_000)}`;
const snapshotBefore = await captureSnapshot(prisma, ctx.teamId); const snapshotBefore = await captureSnapshot(prisma, ctx.teamId);
@@ -605,24 +660,19 @@ async function sendPilotMessage(auth: AuthContext | null, textInput: string) {
authorUserId: ctx.userId, authorUserId: ctx.userId,
role: "USER", role: "USER",
text, text,
requestId,
eventType: "user",
phase: "final",
transient: false,
}); });
const reply = await runCrmAgentFor({ const reply = await runCrmAgentFor({
teamId: ctx.teamId, teamId: ctx.teamId,
userId: ctx.userId, userId: ctx.userId,
userText: text, userText: text,
onTrace: async (event: AgentTraceEvent) => { requestId,
await persistChatMessage({ conversationId: ctx.conversationId,
teamId: ctx.teamId, onTrace: async () => {},
conversationId: ctx.conversationId,
authorUserId: null,
role: "SYSTEM",
text: event.text,
thinking: [],
tools: event.toolRun ? [event.toolRun.name] : [],
toolRuns: event.toolRun ? [event.toolRun] : [],
});
},
}); });
const snapshotAfter = await captureSnapshot(prisma, ctx.teamId); const snapshotAfter = await captureSnapshot(prisma, ctx.teamId);
@@ -634,9 +684,10 @@ async function sendPilotMessage(auth: AuthContext | null, textInput: string) {
authorUserId: null, authorUserId: null,
role: "ASSISTANT", role: "ASSISTANT",
text: reply.text, text: reply.text,
thinking: reply.thinking ?? [], requestId,
tools: reply.tools, eventType: "assistant",
toolRuns: reply.toolRuns ?? [], phase: "final",
transient: false,
changeSet, changeSet,
}); });
@@ -654,9 +705,6 @@ async function logPilotNote(auth: AuthContext | null, textInput: string) {
authorUserId: null, authorUserId: null,
role: "ASSISTANT", role: "ASSISTANT",
text, text,
thinking: [],
tools: [],
toolRuns: [],
}); });
return { ok: true }; return { ok: true };
@@ -675,6 +723,7 @@ export const crmGraphqlSchema = buildSchema(`
logout: MutationResult! logout: MutationResult!
createChatConversation(title: String): Conversation! createChatConversation(title: String): Conversation!
selectChatConversation(id: ID!): MutationResult! selectChatConversation(id: ID!): MutationResult!
archiveChatConversation(id: ID!): MutationResult!
sendPilotMessage(text: String!): MutationResult! sendPilotMessage(text: String!): MutationResult!
confirmLatestChangeSet: MutationResult! confirmLatestChangeSet: MutationResult!
rollbackLatestChangeSet: MutationResult! rollbackLatestChangeSet: MutationResult!
@@ -743,6 +792,10 @@ export const crmGraphqlSchema = buildSchema(`
id: ID! id: ID!
role: String! role: String!
text: String! text: String!
requestId: String
eventType: String
phase: String
transient: Boolean
thinking: [String!]! thinking: [String!]!
tools: [String!]! tools: [String!]!
toolRuns: [PilotToolRun!]! toolRuns: [PilotToolRun!]!
@@ -822,6 +875,18 @@ export const crmGraphqlSchema = buildSchema(`
amount: String! amount: String!
nextStep: String! nextStep: String!
summary: String! summary: String!
currentStepId: String!
steps: [DealStep!]!
}
type DealStep {
id: ID!
title: String!
description: String!
status: String!
dueAt: String!
order: Int!
completedAt: String!
} }
type FeedCard { type FeedCard {
@@ -878,6 +943,9 @@ export const crmGraphqlRoot = {
selectChatConversation: async (args: { id: string }, context: GraphQLContext) => selectChatConversation: async (args: { id: string }, context: GraphQLContext) =>
selectChatConversation(context.auth, context.event, args.id), selectChatConversation(context.auth, context.event, args.id),
archiveChatConversation: async (args: { id: string }, context: GraphQLContext) =>
archiveChatConversation(context.auth, context.event, args.id),
sendPilotMessage: async (args: { text: string }, context: GraphQLContext) => sendPilotMessage: async (args: { text: string }, context: GraphQLContext) =>
sendPilotMessage(context.auth, args.text), sendPilotMessage(context.auth, args.text),

View File

@@ -0,0 +1,200 @@
import { Queue, Worker, type JobsOptions } from "bullmq";
import { prisma } from "../utils/prisma";
import { getRedis } from "../utils/redis";
// BullMQ queue name shared by the producer (enqueueOutboundDelivery) and the
// worker (startOutboundDeliveryWorker).
export const OUTBOUND_DELIVERY_QUEUE_NAME = "omni-outbound";

// Payload of one outbound HTTP delivery job.
export type OutboundDeliveryJob = {
  omniMessageId: string; // OmniMessage row whose status/audit trail is updated.
  endpoint: string; // Target URL; must be http(s), validated by ensureHttpUrl.
  method?: "POST" | "PUT" | "PATCH"; // Worker defaults to POST.
  headers?: Record<string, string>; // Merged over the default application/json content-type.
  payload: unknown; // JSON-serialized as the request body.
  timeoutMs?: number; // Worker clamps to [1000, 120000] ms, default 20000.
  channel?: string; // Optional channel label stored in rawJson audit data.
  provider?: string; // Optional provider label stored in rawJson audit data.
};
// Normalizes an endpoint URL and rejects anything that is not plain HTTP(S).
// Throws when the value is empty, not a valid URL, or uses another protocol.
function ensureHttpUrl(value: string) {
  const trimmed = (value ?? "").trim();
  if (!trimmed) throw new Error("endpoint is required");
  const url = new URL(trimmed);
  const isHttp = url.protocol === "http:" || url.protocol === "https:";
  if (!isHttp) {
    throw new Error(`Unsupported endpoint protocol: ${url.protocol}`);
  }
  return url.toString();
}
// Collapses any thrown value into a short string: falsy values become
// "unknown_error", strings pass through, objects yield their message (or
// their string form when no message exists).
function compactError(error: unknown) {
  if (!error) return "unknown_error";
  if (typeof error === "string") return error;
  const message = (error as any)?.message;
  return String(message ?? error);
}
// Pulls a provider-assigned message id out of a delivery response body.
// Checks the common field spellings at the top level first, then inside a
// `result` wrapper; returns null when nothing usable is found.
function extractProviderMessageId(body: unknown): string | null {
  if (!body || typeof body !== "object") return null;
  const record = body as any;
  const nested = record?.result;
  const candidate =
    record?.message_id ??
    record?.messageId ??
    record?.id ??
    nested?.message_id ??
    nested?.id ??
    null;
  return candidate == null ? null : String(candidate);
}
// Process-wide singleton: BullMQ Queue instances hold Redis state and event
// listeners, so creating a fresh one per enqueue call (as every call to
// enqueueOutboundDelivery previously did) accumulates instances over time.
let cachedOutboundQueue: Queue<OutboundDeliveryJob> | null = null;

/**
 * Returns the shared BullMQ queue used for outbound omni-message delivery.
 * Completed jobs are trimmed to the last 1000, failed jobs to the last 5000.
 */
export function outboundDeliveryQueue() {
  if (!cachedOutboundQueue) {
    cachedOutboundQueue = new Queue<OutboundDeliveryJob>(OUTBOUND_DELIVERY_QUEUE_NAME, {
      connection: getRedis(),
      defaultJobOptions: {
        removeOnComplete: { count: 1000 },
        removeOnFail: { count: 5000 },
      },
    });
  }
  return cachedOutboundQueue;
}
/**
 * Validates and enqueues one outbound HTTP delivery job.
 *
 * Marks the source OmniMessage as PENDING and records the delivery request in
 * its rawJson before the job is added, then schedules the job with a
 * message-scoped jobId (idempotent enqueue), 12 attempts, and exponential
 * backoff — all overridable via `opts`.
 */
export async function enqueueOutboundDelivery(input: OutboundDeliveryJob, opts?: JobsOptions) {
  const endpoint = ensureHttpUrl(input.endpoint);
  const queue = outboundDeliveryQueue();

  // Keep source message in pending before actual send starts.
  const pendingData = {
    status: "PENDING",
    rawJson: {
      queue: {
        queueName: OUTBOUND_DELIVERY_QUEUE_NAME,
        enqueuedAt: new Date().toISOString(),
      },
      deliveryRequest: {
        endpoint,
        method: input.method ?? "POST",
        channel: input.channel ?? null,
        provider: input.provider ?? null,
        payload: input.payload,
      },
    },
  };
  await prisma.omniMessage.update({
    where: { id: input.omniMessageId },
    data: pendingData,
  });

  const jobOptions: JobsOptions = {
    jobId: `omni:${input.omniMessageId}`,
    attempts: 12,
    backoff: { type: "exponential", delay: 1000 },
    ...opts,
  };
  return queue.add("deliver", { ...input, endpoint }, jobOptions);
}
/**
 * Starts the BullMQ worker that performs outbound HTTP deliveries.
 *
 * Each job POSTs (or PUTs/PATCHes) the stored payload to its endpoint and
 * records the outcome on the related OmniMessage row: SENT with the provider
 * message id and a request/response audit trail on success, FAILED with the
 * error once the final retry attempt is exhausted. Throwing from the processor
 * lets BullMQ apply the job's retry/backoff policy.
 */
export function startOutboundDeliveryWorker() {
  return new Worker<OutboundDeliveryJob>(
    OUTBOUND_DELIVERY_QUEUE_NAME,
    async (job) => {
      const msg = await prisma.omniMessage.findUnique({
        where: { id: job.data.omniMessageId },
        include: { thread: true },
      });
      // Message deleted since enqueue: nothing to deliver.
      if (!msg) return;
      // Idempotency: if already sent/delivered, do not resend.
      if ((msg.status === "SENT" || msg.status === "DELIVERED" || msg.status === "READ") && msg.providerMessageId) {
        return;
      }
      // Re-validate and normalize the endpoint at processing time.
      const endpoint = ensureHttpUrl(job.data.endpoint);
      // Clamp the per-request timeout to [1s, 120s]; default 20s.
      const timeoutMs = Math.max(1000, Math.min(job.data.timeoutMs ?? 20000, 120000));
      const method = job.data.method ?? "POST";
      // Caller-supplied headers are spread last, so they may override the
      // default content-type.
      const headers: Record<string, string> = {
        "content-type": "application/json",
        ...(job.data.headers ?? {}),
      };
      const requestStartedAt = new Date().toISOString();
      try {
        const response = await fetch(endpoint, {
          method,
          headers,
          body: JSON.stringify(job.data.payload ?? {}),
          signal: AbortSignal.timeout(timeoutMs),
        });
        const text = await response.text();
        // Providers may answer with JSON or plain text; keep whichever parses.
        const responseBody = (() => {
          try {
            return JSON.parse(text);
          } catch {
            return text;
          }
        })();
        // Non-2xx responses are treated as failures so the retry policy kicks in.
        if (!response.ok) {
          throw new Error(`HTTP ${response.status}: ${typeof responseBody === "string" ? responseBody : JSON.stringify(responseBody)}`);
        }
        const providerMessageId = extractProviderMessageId(responseBody);
        // Success: persist SENT status plus the full request/response audit
        // trail in rawJson. NOTE(review): this replaces rawJson wholesale —
        // confirm nothing else writes to it that must be preserved.
        await prisma.omniMessage.update({
          where: { id: msg.id },
          data: {
            status: "SENT",
            providerMessageId,
            rawJson: {
              queue: {
                queueName: OUTBOUND_DELIVERY_QUEUE_NAME,
                completedAt: new Date().toISOString(),
                attemptsMade: job.attemptsMade + 1,
              },
              deliveryRequest: {
                endpoint,
                method,
                channel: job.data.channel ?? null,
                provider: job.data.provider ?? null,
                startedAt: requestStartedAt,
                payload: job.data.payload ?? null,
              },
              deliveryResponse: {
                status: response.status,
                body: responseBody,
              },
            },
          },
        });
      } catch (error) {
        // Mark the message FAILED only once retries are exhausted; earlier
        // attempts rethrow so BullMQ schedules the next backoff retry.
        const isLastAttempt =
          typeof job.opts.attempts === "number" && job.attemptsMade + 1 >= job.opts.attempts;
        if (isLastAttempt) {
          await prisma.omniMessage.update({
            where: { id: msg.id },
            data: {
              status: "FAILED",
              rawJson: {
                queue: {
                  queueName: OUTBOUND_DELIVERY_QUEUE_NAME,
                  failedAt: new Date().toISOString(),
                  attemptsMade: job.attemptsMade + 1,
                },
                deliveryRequest: {
                  endpoint,
                  method,
                  channel: job.data.channel ?? null,
                  provider: job.data.provider ?? null,
                  startedAt: requestStartedAt,
                  payload: job.data.payload ?? null,
                },
                deliveryError: {
                  message: compactError(error),
                },
              },
            },
          });
        }
        // Rethrow so BullMQ records the attempt and retries (or fails the job).
        throw error;
      }
    },
    { connection: getRedis() },
  );
}

View File

@@ -1,92 +1,43 @@
import { Queue, Worker, JobsOptions } from "bullmq"; import type { JobsOptions } from "bullmq";
import { getRedis } from "../utils/redis";
import { prisma } from "../utils/prisma"; import { prisma } from "../utils/prisma";
import { telegramBotApi } from "../utils/telegram"; import { telegramApiBase, requireTelegramBotToken } from "../utils/telegram";
import { enqueueOutboundDelivery, startOutboundDeliveryWorker } from "./outboundDelivery";
export const TELEGRAM_SEND_QUEUE_NAME = "telegram:send";
type TelegramSendJob = { type TelegramSendJob = {
omniMessageId: string; omniMessageId: string;
}; };
export function telegramSendQueue() {
return new Queue<TelegramSendJob>(TELEGRAM_SEND_QUEUE_NAME, {
connection: getRedis(),
defaultJobOptions: {
removeOnComplete: { count: 1000 },
removeOnFail: { count: 5000 },
},
});
}
export async function enqueueTelegramSend(input: TelegramSendJob, opts?: JobsOptions) { export async function enqueueTelegramSend(input: TelegramSendJob, opts?: JobsOptions) {
const q = telegramSendQueue(); const msg = await prisma.omniMessage.findUnique({
return q.add("send", input, { where: { id: input.omniMessageId },
jobId: input.omniMessageId, // idempotency include: { thread: true },
attempts: 10,
backoff: { type: "exponential", delay: 1000 },
...opts,
}); });
} if (!msg) throw new Error(`omni message not found: ${input.omniMessageId}`);
if (msg.channel !== "TELEGRAM" || msg.direction !== "OUT") {
throw new Error(`Invalid omni message for telegram send: ${msg.id}`);
}
export function startTelegramSendWorker() { const token = requireTelegramBotToken();
return new Worker<TelegramSendJob>( const endpoint = `${telegramApiBase()}/bot${token}/sendMessage`;
TELEGRAM_SEND_QUEUE_NAME, const payload = {
async (job) => { chat_id: msg.thread.externalChatId,
const msg = await prisma.omniMessage.findUnique({ text: msg.text,
where: { id: job.data.omniMessageId }, ...(msg.thread.businessConnectionId ? { business_connection_id: msg.thread.businessConnectionId } : {}),
include: { thread: true }, };
});
if (!msg) return;
// Idempotency: if we already sent it, don't send twice. return enqueueOutboundDelivery(
if (msg.status === "SENT" && msg.providerMessageId) return; {
omniMessageId: msg.id,
if (msg.channel !== "TELEGRAM" || msg.direction !== "OUT") { endpoint,
throw new Error(`Invalid omni message for telegram send: ${msg.id}`); method: "POST",
} payload,
provider: "telegram_business",
const thread = msg.thread; channel: "TELEGRAM",
const chatId = thread.externalChatId;
const businessConnectionId = thread.businessConnectionId || undefined;
try {
const result = await telegramBotApi<any>("sendMessage", {
chat_id: chatId,
text: msg.text,
...(businessConnectionId ? { business_connection_id: businessConnectionId } : {}),
});
const providerMessageId = result?.message_id != null ? String(result.message_id) : null;
await prisma.omniMessage.update({
where: { id: msg.id },
data: {
status: "SENT",
providerMessageId: providerMessageId,
rawJson: result,
},
});
} catch (e: any) {
const isLastAttempt =
typeof job.opts.attempts === "number" && job.attemptsMade + 1 >= job.opts.attempts;
if (isLastAttempt) {
await prisma.omniMessage.update({
where: { id: msg.id },
data: {
status: "FAILED",
rawJson: {
error: String(e?.message || e),
attemptsMade: job.attemptsMade + 1,
},
},
});
}
throw e;
}
}, },
{ connection: getRedis() }, opts,
); );
} }
export function startTelegramSendWorker() {
return startOutboundDeliveryWorker();
}

View File

@@ -0,0 +1,35 @@
import { startOutboundDeliveryWorker } from "./outboundDelivery";
import { prisma } from "../utils/prisma";
import { getRedis } from "../utils/redis";
// Start the shared outbound-delivery worker for the "omni:outbound" queue.
const worker = startOutboundDeliveryWorker();
console.log("[delivery-worker] started queue omni:outbound");

// Guard so overlapping signals trigger exactly one teardown sequence.
let shuttingDown = false;

/**
 * Gracefully stop the worker and release shared connections, then exit.
 *
 * Idempotent: a second signal (e.g. SIGTERM while a SIGINT shutdown is
 * already draining, or a repeated Ctrl-C) is ignored instead of running
 * the close/quit/disconnect sequence concurrently a second time.
 *
 * @param signal - Name of the signal that initiated shutdown (for logging).
 */
async function shutdown(signal: string) {
  if (shuttingDown) return;
  shuttingDown = true;
  console.log(`[delivery-worker] shutting down by ${signal}`);
  try {
    // Stop accepting jobs and wait for in-flight jobs to settle.
    await worker.close();
  } catch {
    // ignore shutdown errors
  }
  try {
    // Close the shared Redis connection used by the queue.
    await getRedis().quit();
  } catch {
    // ignore shutdown errors
  }
  try {
    await prisma.$disconnect();
  } catch {
    // ignore shutdown errors
  }
  process.exit(0);
}

process.on("SIGINT", () => {
  void shutdown("SIGINT");
});
process.on("SIGTERM", () => {
  void shutdown("SIGTERM");
});

View File

@@ -0,0 +1,29 @@
import { Langfuse } from "langfuse";
let client: Langfuse | null = null;
/**
 * Interpret a string environment value as a boolean flag.
 * Accepts "1", "true", "yes", "on" (case-insensitive, whitespace-tolerant).
 */
function isTruthy(value: string | undefined) {
  const normalized = (value ?? "").trim().toLowerCase();
  return ["1", "true", "yes", "on"].includes(normalized);
}

/**
 * Langfuse tracing is considered enabled when both API keys are configured
 * and LANGFUSE_ENABLED — if set to a non-empty value — is not an explicit
 * "off" value.
 */
export function isLangfuseEnabled() {
  const flag = process.env.LANGFUSE_ENABLED;
  if (flag && !isTruthy(flag)) return false;
  const publicKey = (process.env.LANGFUSE_PUBLIC_KEY ?? "").trim();
  const secretKey = (process.env.LANGFUSE_SECRET_KEY ?? "").trim();
  return publicKey.length > 0 && secretKey.length > 0;
}
/**
 * Lazily construct and memoize the module-wide Langfuse client.
 *
 * Returns null whenever tracing is disabled via environment configuration;
 * otherwise the same client instance is reused for the process lifetime.
 */
export function getLangfuseClient() {
  if (!isLangfuseEnabled()) return null;
  if (client !== null) return client;
  const publicKey = (process.env.LANGFUSE_PUBLIC_KEY ?? "").trim();
  const secretKey = (process.env.LANGFUSE_SECRET_KEY ?? "").trim();
  const baseUrl = (process.env.LANGFUSE_BASE_URL ?? "http://langfuse-web:3000").trim();
  client = new Langfuse({ publicKey, secretKey, baseUrl, enabled: true });
  return client;
}

View File

@@ -0,0 +1,53 @@
// Input for a single Whisper transcription request.
type WhisperTranscribeInput = {
  // Raw audio samples to transcribe (assumed mono float PCM — confirm with callers).
  samples: Float32Array;
  // Sample rate of `samples`, in Hz.
  sampleRate: number;
  // Optional language hint; when omitted, CF_WHISPER_LANGUAGE (default "ru") is used.
  language?: string;
};
let whisperPipelinePromise: Promise<any> | null = null;
let transformersPromise: Promise<any> | null = null;
// Resolve the Whisper model id from CF_WHISPER_MODEL; blank/unset falls back to the default.
function getWhisperModelId() {
  const configured = (process.env.CF_WHISPER_MODEL ?? "").trim();
  return configured !== "" ? configured : "Xenova/whisper-small";
}
// Resolve the transcription language from CF_WHISPER_LANGUAGE; blank/unset defaults to "ru".
function getWhisperLanguage() {
  const configured = (process.env.CF_WHISPER_LANGUAGE ?? "").trim();
  return configured !== "" ? configured : "ru";
}
/**
 * Lazily import @xenova/transformers and build the ASR pipeline exactly once.
 *
 * Both the library import and the pipeline construction are memoized in
 * module-level promises so concurrent callers share a single load. If either
 * step rejects (e.g. a transient network failure while downloading model
 * weights), the corresponding cache slot is cleared so a later call can
 * retry instead of being stuck on a permanently rejected promise.
 *
 * @returns Promise resolving to the automatic-speech-recognition pipeline.
 */
async function getWhisperPipeline() {
  if (!transformersPromise) {
    transformersPromise = import("@xenova/transformers");
    // Reset the memo on failure so the import can be retried later.
    transformersPromise.catch(() => {
      transformersPromise = null;
    });
  }
  const { env, pipeline } = await transformersPromise;
  if (!whisperPipelinePromise) {
    // Allow remote model downloads plus a persistent on-disk cache.
    env.allowRemoteModels = true;
    env.allowLocalModels = true;
    env.cacheDir = "/app/.data/transformers";
    const modelId = getWhisperModelId();
    whisperPipelinePromise = pipeline("automatic-speech-recognition", modelId);
    // Reset the memo on failure so pipeline construction can be retried.
    whisperPipelinePromise.catch(() => {
      whisperPipelinePromise = null;
    });
  }
  return whisperPipelinePromise;
}
/**
 * Transcribe raw audio samples with the shared Whisper pipeline.
 *
 * @param input - Audio samples, their sample rate, and an optional language hint.
 * @returns The trimmed transcript text (empty string when nothing was recognized).
 */
export async function transcribeWithWhisper(input: WhisperTranscribeInput) {
  const transcriber = (await getWhisperPipeline()) as any;
  const options = {
    sampling_rate: input.sampleRate,
    language: (input.language ?? getWhisperLanguage()) || "ru",
    task: "transcribe",
    chunk_length_s: 20,
    stride_length_s: 5,
    return_timestamps: false,
  };
  const output = await transcriber(input.samples, options);
  return String((output as any)?.text ?? "").trim();
}

View File

@@ -13,7 +13,7 @@ services:
ports: ports:
- "3000:3000" - "3000:3000"
environment: environment:
DATABASE_URL: "file:../../.data/clientsflow-dev.db" DATABASE_URL: "postgresql://postgres:postgres@postgres:5432/clientsflow?schema=public"
REDIS_URL: "redis://redis:6379" REDIS_URL: "redis://redis:6379"
CF_AGENT_MODE: "langgraph" CF_AGENT_MODE: "langgraph"
OPENROUTER_API_KEY: "${OPENROUTER_API_KEY:-}" OPENROUTER_API_KEY: "${OPENROUTER_API_KEY:-}"
@@ -22,6 +22,12 @@ services:
OPENROUTER_HTTP_REFERER: "${OPENROUTER_HTTP_REFERER:-}" OPENROUTER_HTTP_REFERER: "${OPENROUTER_HTTP_REFERER:-}"
OPENROUTER_X_TITLE: "clientsflow" OPENROUTER_X_TITLE: "clientsflow"
OPENROUTER_REASONING_ENABLED: "${OPENROUTER_REASONING_ENABLED:-0}" OPENROUTER_REASONING_ENABLED: "${OPENROUTER_REASONING_ENABLED:-0}"
CF_WHISPER_MODEL: "${CF_WHISPER_MODEL:-Xenova/whisper-small}"
CF_WHISPER_LANGUAGE: "${CF_WHISPER_LANGUAGE:-ru}"
LANGFUSE_ENABLED: "${LANGFUSE_ENABLED:-true}"
LANGFUSE_BASE_URL: "${LANGFUSE_BASE_URL:-http://langfuse-web:3000}"
LANGFUSE_PUBLIC_KEY: "${LANGFUSE_PUBLIC_KEY:-pk-lf-local}"
LANGFUSE_SECRET_KEY: "${LANGFUSE_SECRET_KEY:-sk-lf-local}"
# Set this in your shell or a compose override: # Set this in your shell or a compose override:
# OPENROUTER_API_KEY: "..." # OPENROUTER_API_KEY: "..."
# GIGACHAT_AUTH_KEY: "..." (if you use GigaChat integration) # GIGACHAT_AUTH_KEY: "..." (if you use GigaChat integration)
@@ -31,6 +37,28 @@ services:
" "
depends_on: depends_on:
- redis - redis
- postgres
- langfuse-web
delivery-worker:
image: node:22-bookworm-slim
working_dir: /app/Frontend
volumes:
- ./Frontend:/app/Frontend
- clientsflow_data:/app/.data
- delivery_node_modules:/app/Frontend/node_modules
environment:
DATABASE_URL: "postgresql://postgres:postgres@postgres:5432/clientsflow?schema=public"
REDIS_URL: "redis://redis:6379"
TELEGRAM_API_BASE: "${TELEGRAM_API_BASE:-https://api.telegram.org}"
TELEGRAM_BOT_TOKEN: "${TELEGRAM_BOT_TOKEN:-}"
command: >
bash -lc "
bash ./scripts/compose-worker.sh
"
depends_on:
- redis
- postgres
redis: redis:
image: redis:7-alpine image: redis:7-alpine
@@ -39,9 +67,157 @@ services:
volumes: volumes:
- redis_data:/data - redis_data:/data
postgres:
image: postgres:16-alpine
ports:
- "5432:5432"
environment:
POSTGRES_DB: "clientsflow"
POSTGRES_USER: "postgres"
POSTGRES_PASSWORD: "postgres"
volumes:
- postgres_data:/var/lib/postgresql/data
langfuse-worker:
image: docker.io/langfuse/langfuse-worker:3
restart: always
depends_on:
langfuse-postgres:
condition: service_healthy
langfuse-minio:
condition: service_healthy
langfuse-redis:
condition: service_healthy
langfuse-clickhouse:
condition: service_healthy
environment: &langfuse_env
NEXTAUTH_URL: "http://localhost:3001"
DATABASE_URL: "postgresql://langfuse:langfuse@langfuse-postgres:5432/langfuse"
SALT: "clientsflow-local-salt"
ENCRYPTION_KEY: "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
TELEMETRY_ENABLED: "false"
CLICKHOUSE_MIGRATION_URL: "clickhouse://langfuse-clickhouse:9000"
CLICKHOUSE_URL: "http://langfuse-clickhouse:8123"
CLICKHOUSE_USER: "clickhouse"
CLICKHOUSE_PASSWORD: "clickhouse"
CLICKHOUSE_CLUSTER_ENABLED: "false"
LANGFUSE_S3_EVENT_UPLOAD_BUCKET: "langfuse"
LANGFUSE_S3_EVENT_UPLOAD_REGION: "auto"
LANGFUSE_S3_EVENT_UPLOAD_ACCESS_KEY_ID: "minio"
LANGFUSE_S3_EVENT_UPLOAD_SECRET_ACCESS_KEY: "miniosecret"
LANGFUSE_S3_EVENT_UPLOAD_ENDPOINT: "http://langfuse-minio:9000"
LANGFUSE_S3_EVENT_UPLOAD_FORCE_PATH_STYLE: "true"
LANGFUSE_S3_EVENT_UPLOAD_PREFIX: "events/"
LANGFUSE_S3_MEDIA_UPLOAD_BUCKET: "langfuse"
LANGFUSE_S3_MEDIA_UPLOAD_REGION: "auto"
LANGFUSE_S3_MEDIA_UPLOAD_ACCESS_KEY_ID: "minio"
LANGFUSE_S3_MEDIA_UPLOAD_SECRET_ACCESS_KEY: "miniosecret"
LANGFUSE_S3_MEDIA_UPLOAD_ENDPOINT: "http://langfuse-minio:9000"
LANGFUSE_S3_MEDIA_UPLOAD_FORCE_PATH_STYLE: "true"
LANGFUSE_S3_MEDIA_UPLOAD_PREFIX: "media/"
REDIS_HOST: "langfuse-redis"
REDIS_PORT: "6379"
REDIS_AUTH: "langfuse-redis"
REDIS_TLS_ENABLED: "false"
langfuse-web:
image: docker.io/langfuse/langfuse:3
restart: always
depends_on:
langfuse-postgres:
condition: service_healthy
langfuse-minio:
condition: service_healthy
langfuse-redis:
condition: service_healthy
langfuse-clickhouse:
condition: service_healthy
ports:
- "3001:3000"
environment:
<<: *langfuse_env
NEXTAUTH_SECRET: "clientsflow-local-nextauth-secret"
LANGFUSE_INIT_ORG_ID: "org-clientsflow"
LANGFUSE_INIT_ORG_NAME: "Clientsflow Local"
LANGFUSE_INIT_PROJECT_ID: "proj-clientsflow"
LANGFUSE_INIT_PROJECT_NAME: "clientsflow"
LANGFUSE_INIT_PROJECT_PUBLIC_KEY: "pk-lf-local"
LANGFUSE_INIT_PROJECT_SECRET_KEY: "sk-lf-local"
LANGFUSE_INIT_USER_EMAIL: "admin@clientsflow.local"
LANGFUSE_INIT_USER_NAME: "Local Admin"
LANGFUSE_INIT_USER_PASSWORD: "clientsflow-local-admin"
langfuse-clickhouse:
image: docker.io/clickhouse/clickhouse-server:latest
restart: always
user: "101:101"
environment:
CLICKHOUSE_DB: "default"
CLICKHOUSE_USER: "clickhouse"
CLICKHOUSE_PASSWORD: "clickhouse"
volumes:
- langfuse_clickhouse_data:/var/lib/clickhouse
- langfuse_clickhouse_logs:/var/log/clickhouse-server
healthcheck:
test: ["CMD-SHELL", "wget --no-verbose --tries=1 --spider http://localhost:8123/ping || exit 1"]
interval: 5s
timeout: 5s
retries: 20
start_period: 5s
langfuse-minio:
image: cgr.dev/chainguard/minio:latest
restart: always
entrypoint: sh
command: -c 'mkdir -p /data/langfuse && minio server --address ":9000" --console-address ":9001" /data'
environment:
MINIO_ROOT_USER: "minio"
MINIO_ROOT_PASSWORD: "miniosecret"
volumes:
- langfuse_minio_data:/data
healthcheck:
test: ["CMD", "mc", "ready", "local"]
interval: 2s
timeout: 5s
retries: 15
start_period: 5s
langfuse-redis:
image: docker.io/redis:7-alpine
restart: always
command: ["redis-server", "--requirepass", "langfuse-redis", "--maxmemory-policy", "noeviction"]
healthcheck:
test: ["CMD-SHELL", "redis-cli -a langfuse-redis ping | grep PONG"]
interval: 3s
timeout: 5s
retries: 20
start_period: 5s
langfuse-postgres:
image: postgres:16-alpine
restart: always
environment:
POSTGRES_DB: "langfuse"
POSTGRES_USER: "langfuse"
POSTGRES_PASSWORD: "langfuse"
volumes:
- langfuse_postgres_data:/var/lib/postgresql/data
healthcheck:
test: ["CMD-SHELL", "pg_isready -U langfuse -d langfuse"]
interval: 3s
timeout: 3s
retries: 20
start_period: 5s
volumes: volumes:
clientsflow_data: clientsflow_data:
frontend_node_modules: frontend_node_modules:
delivery_node_modules:
frontend_nuxt: frontend_nuxt:
frontend_output: frontend_output:
redis_data: redis_data:
postgres_data:
langfuse_postgres_data:
langfuse_clickhouse_data:
langfuse_clickhouse_logs:
langfuse_minio_data: