add backend hatchet worker for calendar predue sync

This commit is contained in:
Ruslan Bakiev
2026-03-08 19:15:30 +07:00
parent 0df426d5d6
commit e4870ce669
21 changed files with 1859 additions and 350 deletions

View File

@@ -7,6 +7,7 @@ Core CRM/omni-домен с единственной Prisma-базой.
- принимает входящие telegram-события через GraphQL mutation `ingestTelegramInbound`;
- создает исходящую задачу через GraphQL mutation `requestTelegramOutbound` (проксирует в `telegram_backend`, далее в Hatchet);
- принимает отчет о доставке через GraphQL mutation `reportTelegramOutbound`.
- выполняет sync календарных предзаписей через GraphQL mutation `syncCalendarPredueTimeline`.
## API
@@ -27,9 +28,13 @@ Core CRM/omni-домен с единственной Prisma-базой.
- `TELEGRAM_BACKEND_GRAPHQL_URL` (required для `requestTelegramOutbound`)
- `TELEGRAM_BACKEND_GRAPHQL_SHARED_SECRET` (optional)
- `DEFAULT_TEAM_ID` (optional fallback для inbound маршрутизации)
- `TIMELINE_EVENT_PREDUE_MINUTES` (default: `30`)
- `TIMELINE_EVENT_LOOKBACK_MINUTES` (default: `180`)
- `TIMELINE_EVENT_LOOKAHEAD_MINUTES` (default: `1440`)
- `TIMELINE_SCHEDULER_LOCK_KEY` (default: `603001`)
## Prisma policy
- Источник схемы: `Frontend/prisma/schema.prisma`.
- Источник схемы: `frontend/prisma/schema.prisma`.
- Локальная копия в `backend/prisma/schema.prisma` обновляется только через `scripts/prisma-sync.sh`.
- Миграции/`db push` выполняются только в `Frontend`.
- Миграции/`db push` выполняются только в `frontend`.

View File

@@ -4,6 +4,7 @@ import {
ingestTelegramInbound,
reportTelegramOutbound,
requestTelegramOutbound,
syncCalendarPredueTimeline,
type TelegramInboundEnvelope,
type TelegramOutboundReport,
type TelegramOutboundRequest,
@@ -31,6 +32,20 @@ const schema = buildSchema(`
omniMessageId: String
}
type SchedulerSyncResult {
ok: Boolean!
message: String!
now: String!
scanned: Int!
updated: Int!
skippedBeforeWindow: Int!
skippedLocked: Boolean!
preDueMinutes: Int!
lookbackMinutes: Int!
lookaheadMinutes: Int!
lockKey: Int!
}
input TelegramInboundInput {
version: Int!
idempotencyKey: String!
@@ -65,6 +80,7 @@ const schema = buildSchema(`
ingestTelegramInbound(input: TelegramInboundInput!): MutationResult!
reportTelegramOutbound(input: TelegramOutboundReportInput!): MutationResult!
requestTelegramOutbound(input: TelegramOutboundTaskInput!): MutationResult!
syncCalendarPredueTimeline: SchedulerSyncResult!
}
`);
@@ -172,6 +188,23 @@ const root = {
omniMessageId: null,
};
},
// Resolver for the `syncCalendarPredueTimeline` mutation: delegates to the
// domain function and maps its result field-by-field onto the GraphQL
// SchedulerSyncResult type (no auth logic here; the lock/skip semantics live
// in the domain layer).
syncCalendarPredueTimeline: async () => {
  const result = await syncCalendarPredueTimeline();
  return {
    ok: result.ok,
    message: result.message,
    now: result.now,
    scanned: result.scanned,
    updated: result.updated,
    skippedBeforeWindow: result.skippedBeforeWindow,
    skippedLocked: result.skippedLocked,
    preDueMinutes: result.preDueMinutes,
    lookbackMinutes: result.lookbackMinutes,
    lookaheadMinutes: result.lookaheadMinutes,
    lockKey: result.lockKey,
  };
},
};
export function startServer() {

View File

@@ -38,6 +38,20 @@ export type TelegramOutboundRequest = {
businessConnectionId?: string | null;
};
// Result envelope returned by syncCalendarPredueTimeline; mirrored by the
// GraphQL SchedulerSyncResult type in the server schema.
export type CalendarPredueSyncResult = {
  ok: boolean; // true on success and on a lock-busy skip (failures throw)
  message: string; // "calendar_predue_synced" | "lock_busy_skip"
  now: string; // ISO timestamp at which the sync run started
  scanned: number; // CalendarEvents found in the lookback/lookahead window
  updated: number; // ClientTimelineEntry rows upserted this run
  skippedBeforeWindow: number; // events whose pre-due moment is still in the future
  skippedLocked: boolean; // true when another run held the advisory lock
  preDueMinutes: number; // effective TIMELINE_EVENT_PREDUE_MINUTES
  lookbackMinutes: number; // effective TIMELINE_EVENT_LOOKBACK_MINUTES
  lookaheadMinutes: number; // effective TIMELINE_EVENT_LOOKAHEAD_MINUTES
  lockKey: number; // advisory-lock key (TIMELINE_SCHEDULER_LOCK_KEY)
};
function asString(value: unknown) {
if (typeof value !== "string") return null;
const v = value.trim();
@@ -54,6 +68,13 @@ function normalizeDirection(value: string): MessageDirection {
return value === "OUT" ? "OUT" : "IN";
}
// Parse an integer environment variable, falling back to `defaultValue`
// when the variable is unset/blank or does not parse as a base-10 integer.
function readIntEnv(name: string, defaultValue: number) {
  const raw = asString(process.env[name]);
  if (raw === null) return defaultValue;
  const parsed = Number.parseInt(raw, 10);
  if (Number.isFinite(parsed)) {
    return parsed;
  }
  return defaultValue;
}
async function resolveTeamId(envelope: TelegramInboundEnvelope) {
const n = envelope.payloadNormalized;
const bcId = asString(n.businessConnectionId);
@@ -510,3 +531,107 @@ export async function requestTelegramOutbound(input: TelegramOutboundRequest) {
return { ok: true, message: "outbound_enqueued", runId: result.runId ?? null };
}
/**
 * Sync CalendarEvent "pre-due" markers into ClientTimelineEntry.
 *
 * Scans non-archived events that have a contact within the window
 * [now - lookback, now + lookahead]; for each event whose pre-due moment
 * (startsAt - preDueMinutes) has already passed, upserts a CALENDAR_EVENT
 * timeline entry dated at that pre-due moment. Upserts are idempotent, so
 * repeated cron runs converge on the same state.
 *
 * A Postgres advisory lock guards concurrent runs: when the lock is busy the
 * run returns ok=true with message "lock_busy_skip" instead of doing work.
 *
 * NOTE(review): pg advisory locks are session-scoped. With a Prisma
 * connection pool larger than one, the lock and unlock $queryRaw calls are
 * not guaranteed to run on the same physical connection — confirm the pool
 * configuration, or move the body into a single interactive transaction.
 */
export async function syncCalendarPredueTimeline(): Promise<CalendarPredueSyncResult> {
  // Clamp config so the scan window always covers the pre-due horizon.
  const preDueMinutes = Math.max(1, readIntEnv("TIMELINE_EVENT_PREDUE_MINUTES", 30));
  const lookbackMinutes = Math.max(preDueMinutes, readIntEnv("TIMELINE_EVENT_LOOKBACK_MINUTES", 180));
  const lookaheadMinutes = Math.max(preDueMinutes, readIntEnv("TIMELINE_EVENT_LOOKAHEAD_MINUTES", 1440));
  const lockKey = readIntEnv("TIMELINE_SCHEDULER_LOCK_KEY", 603001);
  const now = new Date();
  const rangeStart = new Date(now.getTime() - lookbackMinutes * 60_000);
  const rangeEnd = new Date(now.getTime() + lookaheadMinutes * 60_000);
  // Non-blocking lock attempt; pg_try_advisory_lock returns immediately.
  const lockRows = await prisma.$queryRaw<Array<{ locked: boolean }>>`
    SELECT pg_try_advisory_lock(${lockKey}) AS locked
  `;
  const locked = Boolean(lockRows?.[0]?.locked);
  if (!locked) {
    return {
      ok: true,
      message: "lock_busy_skip",
      now: now.toISOString(),
      scanned: 0,
      updated: 0,
      skippedBeforeWindow: 0,
      skippedLocked: true,
      preDueMinutes,
      lookbackMinutes,
      lookaheadMinutes,
      lockKey,
    };
  }
  try {
    const events = await prisma.calendarEvent.findMany({
      where: {
        isArchived: false,
        contactId: { not: null },
        startsAt: {
          gte: rangeStart,
          lte: rangeEnd,
        },
      },
      orderBy: { startsAt: "asc" },
      select: {
        id: true,
        teamId: true,
        contactId: true,
        startsAt: true,
      },
    });
    let updated = 0;
    let skippedBeforeWindow = 0;
    for (const event of events) {
      // contactId is already filtered non-null in SQL; this narrows the TS type.
      if (!event.contactId) continue;
      const preDueAt = new Date(event.startsAt.getTime() - preDueMinutes * 60_000);
      if (now < preDueAt) {
        // Event is inside the lookahead window but its pre-due moment has
        // not arrived yet; a later run will pick it up.
        skippedBeforeWindow += 1;
        continue;
      }
      // Idempotent upsert keyed by (teamId, contentType, contentId).
      await prisma.clientTimelineEntry.upsert({
        where: {
          teamId_contentType_contentId: {
            teamId: event.teamId,
            contentType: "CALENDAR_EVENT",
            contentId: event.id,
          },
        },
        create: {
          teamId: event.teamId,
          contactId: event.contactId,
          contentType: "CALENDAR_EVENT",
          contentId: event.id,
          datetime: preDueAt,
        },
        update: {
          contactId: event.contactId,
          datetime: preDueAt,
        },
      });
      updated += 1;
    }
    return {
      ok: true,
      message: "calendar_predue_synced",
      now: now.toISOString(),
      scanned: events.length,
      updated,
      skippedBeforeWindow,
      skippedLocked: false,
      preDueMinutes,
      lookbackMinutes,
      lookaheadMinutes,
      lockKey,
    };
  } finally {
    // Best-effort unlock: never let an unlock failure mask the real error —
    // the lock is released anyway when the session ends. (The legacy
    // schedulers/run.mjs guarded this the same way.)
    await prisma
      .$queryRaw`SELECT pg_advisory_unlock(${lockKey})`
      .catch(() => undefined);
  }
}

13
backend_worker/Dockerfile Normal file
View File

@@ -0,0 +1,13 @@
# backend_worker runtime image: runs the Hatchet worker via tsx (no build step).
FROM node:22-alpine
WORKDIR /app
# Copy manifests first so dependency install is cached across src changes.
COPY package*.json ./
# npm ci runs before NODE_ENV=production is set, so devDependencies
# (tsx, typescript) are installed — required because "npm run start" uses tsx.
RUN npm ci
COPY src ./src
COPY tsconfig.json ./tsconfig.json
ENV NODE_ENV=production
CMD ["npm", "run", "start"]

24
backend_worker/README.md Normal file
View File

@@ -0,0 +1,24 @@
# backend_worker
Hatchet worker для периодических backend-задач.
## Назначение
- запускает cron workflow `backend-calendar-timeline-scheduler`;
- вызывает `backend` GraphQL mutation `syncCalendarPredueTimeline`;
- заменяет legacy `schedulers/` сервис для предзаписи календарных событий в `ClientTimelineEntry`.
## Переменные окружения
- `BACKEND_GRAPHQL_URL` (required)
- `BACKEND_GRAPHQL_SHARED_SECRET` (optional)
- `BACKEND_TIMELINE_SYNC_CRON` (default: `* * * * *`)
- `HATCHET_CLIENT_TOKEN` (required)
- `HATCHET_CLIENT_TLS_STRATEGY` (optional, например `none` для self-host без TLS)
- `HATCHET_CLIENT_HOST_PORT` (optional, например `hatchet-engine:7070`)
- `HATCHET_CLIENT_API_URL` (optional)
## Скрипты
- `npm run start` — запуск Hatchet worker.
- `npm run typecheck` — проверка TypeScript.

1456
backend_worker/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,17 @@
{
"name": "crm-backend-worker",
"private": true,
"type": "module",
"scripts": {
"start": "tsx src/hatchet/worker.ts",
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@hatchet-dev/typescript-sdk": "^1.15.2"
},
"devDependencies": {
"@types/node": "^22.13.9",
"tsx": "^4.20.5",
"typescript": "^5.9.2"
}
}

View File

@@ -0,0 +1,3 @@
import { HatchetClient } from "@hatchet-dev/typescript-sdk/v1";
// Shared Hatchet client for the worker process; presumably configured via the
// HATCHET_CLIENT_* environment variables listed in the README — TODO confirm.
export const hatchet = HatchetClient.init();

View File

@@ -0,0 +1,22 @@
import { hatchet } from "./client";
import { backendCalendarTimelineScheduler } from "./workflow";
import path from "node:path";
import { fileURLToPath } from "node:url";
// Boot a Hatchet worker that hosts the backend cron workflows and block on it.
async function main() {
  const worker = await hatchet.worker("backend-worker", {
    workflows: [backendCalendarTimelineScheduler],
  });
  await worker.start();
}
// Only start when this file is the process entry point (not when imported,
// e.g. during typechecking).
const entryPoint = process.argv[1];
const runDirectly =
  typeof entryPoint === "string" &&
  entryPoint.length > 0 &&
  path.resolve(entryPoint) === fileURLToPath(import.meta.url);
if (runDirectly) {
  main().catch((error) => {
    const message = error instanceof Error ? error.stack || error.message : String(error);
    console.error(`[backend_worker/hatchet] worker failed: ${message}`);
    process.exitCode = 1;
  });
}

View File

@@ -0,0 +1,119 @@
import { hatchet } from "./client";
// Shape of the `syncCalendarPredueTimeline` mutation payload, matching the
// backend's SchedulerSyncResult GraphQL type.
type SyncCalendarPredueResult = {
  syncCalendarPredueTimeline: {
    ok: boolean; // false means the backend considered the sync failed
    message: string; // e.g. "calendar_predue_synced" | "lock_busy_skip"
    now: string; // ISO timestamp of the backend sync run
    scanned: number; // events found in the scan window
    updated: number; // timeline entries upserted
    skippedBeforeWindow: number; // events whose pre-due moment is still ahead
    skippedLocked: boolean; // true when the backend skipped on a busy lock
    preDueMinutes: number;
    lookbackMinutes: number;
    lookaheadMinutes: number;
    lockKey: number;
  };
};
// Minimal GraphQL-over-HTTP response envelope.
type GraphqlResponse<T> = {
  data?: T;
  errors?: Array<{ message?: string }>;
};
// Return the trimmed string, or null for non-strings and values that are
// empty after trimming.
function asString(value: unknown) {
  if (typeof value !== "string") return null;
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : null;
}
// Fetch a mandatory environment variable; throws a descriptive error when it
// is missing or blank so the worker fails fast at startup.
function requiredEnv(name: string) {
  const value = asString(process.env[name]);
  if (value === null) {
    throw new Error(`${name} is required`);
  }
  return value;
}
/**
 * Execute the backend `syncCalendarPredueTimeline` GraphQL mutation and
 * return its payload.
 *
 * Throws when the HTTP call fails, the GraphQL layer returns errors, or the
 * mutation itself reports `ok: false` — any throw makes the Hatchet task
 * retry per its backoff policy.
 */
async function callBackendSyncMutation() {
  const url = requiredEnv("BACKEND_GRAPHQL_URL");
  const secret = asString(process.env.BACKEND_GRAPHQL_SHARED_SECRET);
  const headers: Record<string, string> = {
    "content-type": "application/json",
  };
  if (secret) {
    headers["x-graphql-secret"] = secret;
  }
  const query = `mutation SyncCalendarPredueTimeline {
  syncCalendarPredueTimeline {
    ok
    message
    now
    scanned
    updated
    skippedBeforeWindow
    skippedLocked
    preDueMinutes
    lookbackMinutes
    lookaheadMinutes
    lockKey
  }
}`;
  const response = await fetch(url, {
    method: "POST",
    headers,
    body: JSON.stringify({
      operationName: "SyncCalendarPredueTimeline",
      query,
      variables: {},
    }),
  });
  // Read the body as text first: error responses (proxies, crashed backends)
  // are often not JSON, and calling response.json() directly would throw a
  // SyntaxError that hides the HTTP status from the retry logs.
  const bodyText = await response.text();
  let payload: GraphqlResponse<SyncCalendarPredueResult> = {};
  try {
    payload = JSON.parse(bodyText) as GraphqlResponse<SyncCalendarPredueResult>;
  } catch {
    if (response.ok) {
      // 2xx with an unparseable body is a contract violation worth surfacing.
      throw new Error(`invalid JSON response: ${bodyText.slice(0, 200)}`);
    }
    // Non-2xx with a non-JSON body falls through to the HTTP-status error.
  }
  if (!response.ok || payload.errors?.length) {
    const message =
      payload.errors?.map((error) => error.message).filter(Boolean).join("; ") ||
      `HTTP ${response.status}`;
    throw new Error(message);
  }
  const result = payload.data?.syncCalendarPredueTimeline;
  if (!result?.ok) {
    throw new Error(result?.message || "syncCalendarPredueTimeline failed");
  }
  return result;
}
// Cron expression for the scheduler workflow; defaults to every minute.
const BACKEND_WORKER_CRON = asString(process.env.BACKEND_TIMELINE_SYNC_CRON) || "* * * * *";
// Hatchet workflow triggered on a cron schedule; it carries no input payload.
export const backendCalendarTimelineScheduler = hatchet.workflow({
  name: "backend-calendar-timeline-scheduler",
  on: {
    cron: BACKEND_WORKER_CRON,
  },
});
// Single task: call the backend mutation and let thrown errors trigger
// Hatchet retries (6 attempts, exponential backoff capped at 60 seconds).
backendCalendarTimelineScheduler.task({
  name: "sync-calendar-predue-timeline-in-backend",
  retries: 6,
  backoff: {
    factor: 2,
    maxSeconds: 60,
  },
  fn: async (_, ctx) => {
    const result = await callBackendSyncMutation();
    // NOTE(review): assumes ctx.logger.info is awaitable in this SDK
    // version — confirm against the Hatchet TypeScript SDK docs.
    await ctx.logger.info("backend timeline predue sync completed", {
      scanned: result.scanned,
      updated: result.updated,
      skippedBeforeWindow: result.skippedBeforeWindow,
      skippedLocked: result.skippedLocked,
      now: result.now,
    });
    return result;
  },
});

View File

@@ -0,0 +1,14 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "Bundler",
"strict": true,
"skipLibCheck": true,
"esModuleInterop": true,
"types": ["node"],
"resolveJsonModule": true,
"verbatimModuleSyntax": true
},
"include": ["src/**/*.ts"]
}

View File

@@ -3,6 +3,7 @@ version = 1
[services]
frontend = { deploy_mode = "dokploy_webhook", env_storage = "dokploy_ui" }
backend = { deploy_mode = "dokploy_webhook", env_storage = "dokploy_ui" }
backend_worker = { deploy_mode = "dokploy_webhook", env_storage = "dokploy_ui" }
telegram_backend = { deploy_mode = "dokploy_webhook", env_storage = "dokploy_ui" }
telegram_worker = { deploy_mode = "dokploy_webhook", env_storage = "dokploy_ui" }
hatchet = { deploy_mode = "dokploy_webhook", env_storage = "dokploy_ui", compose_path = "hatchet/docker-compose.yml" }

View File

@@ -5,10 +5,11 @@
## Контекст
Нужна минимальная и предсказуемая схема из 5 сервисов:
Нужна минимальная и предсказуемая схема из 6 сервисов:
- `frontend`
- `backend`
- `backend_worker`
- `telegram_backend`
- `telegram_worker`
- `hatchet`
@@ -42,7 +43,11 @@
- для outbound вызывает `telegram_backend /graphql` (`sendTelegramMessage`), затем `backend /graphql` (`reportTelegramOutbound`);
- не имеет собственной Prisma-базы.
4. `hatchet`
4. `backend_worker`
- исполняет периодические backend workflow в Hatchet;
- для cron-задач вызывает `backend /graphql` (без прямого доступа к Prisma).
5. `hatchet`
- единый оркестратор задач, ретраев и backoff-политик.
## Потоки
@@ -61,6 +66,12 @@
3. `telegram_worker` вызывает `telegram_backend.sendTelegramMessage`.
4. `telegram_worker` репортит итог в `backend.reportTelegramOutbound`.
### Calendar Predue (Backend cron)
1. Hatchet по cron запускает workflow в `backend_worker`.
2. `backend_worker` вызывает `backend.syncCalendarPredueTimeline`.
3. `backend` делает upsert `ClientTimelineEntry` для `CalendarEvent` в окне `startsAt - preDue`.
## Границы ответственности
`backend`:
@@ -75,19 +86,24 @@
- можно: исполнение задач, ретраи, orchestration шагов;
- нельзя: хранение CRM-состояния и прямой доступ к основной БД.
`backend_worker`:
- можно: периодические orchestration задачи через Hatchet;
- нельзя: прямой доступ к основной БД (только через backend GraphQL).
## Надежность
- webhook отвечает `200` только после успешной постановки задачи в Hatchet;
- при недоступности сервисов задача ретраится Hatchet;
- inbound обработка идемпотентна через `idempotencyKey` и provider identifiers в `backend`.
- календарный sync использует advisory-lock в `backend`, поэтому параллельные cron-run безопасны.
## Последствия
Плюсы:
- меньше сервисов и меньше скрытых связей;
- меньше скрытых связей;
- изоляция доменной БД в `backend`;
- единая точка ретраев/оркестрации (Hatchet).
Минусы:
- выше требования к стабильности GraphQL-контрактов между сервисами;
- нужна наблюдаемость по цепочке `telegram_backend -> hatchet -> telegram_worker -> backend`.
- нужна наблюдаемость по цепочкам `telegram_backend -> hatchet -> telegram_worker -> backend` и `hatchet -> backend_worker -> backend`.

View File

@@ -2,18 +2,18 @@
## Single source of truth
- Canonical Prisma schema: `Frontend/prisma/schema.prisma`.
- Canonical Prisma schema: `frontend/prisma/schema.prisma`.
- Service copy:
- `backend/prisma/schema.prisma`
## Update flow
1. Edit only `Frontend/prisma/schema.prisma`.
1. Edit only `frontend/prisma/schema.prisma`.
2. Run `./scripts/prisma-sync.sh`.
3. Run `./scripts/prisma-check.sh`.
4. Commit changed schema copy.
## Rollout policy
- Schema rollout (`prisma db push` / migrations) is allowed only in `Frontend`.
- Schema rollout (`prisma db push` / migrations) is allowed only in `frontend`.
- `backend` must use generated Prisma client only.

View File

@@ -1,12 +0,0 @@
FROM node:22-bookworm-slim
WORKDIR /app/scheduler
COPY package*.json ./
RUN npm ci --omit=dev
COPY run.mjs ./run.mjs
ENV NODE_ENV=production
CMD ["npm", "run", "start"]

View File

@@ -1,38 +0,0 @@
# Client Timeline Calendar Scheduler
Изолированный scheduler-сервис для актуализации `ClientTimelineEntry.datetime` за 30 минут до календарного события.
## Что делает
- Берет advisory lock в PostgreSQL, чтобы не было гонок между инстансами.
- Находит активные `CalendarEvent` с `contactId` в рабочем окне времени.
- Когда событие вошло в окно `start - 30m`, делает upsert в `ClientTimelineEntry`:
- `contentType = CALENDAR_EVENT`
- `datetime = startsAt - TIMELINE_EVENT_PREDUE_MINUTES`
## ENV
- `DATABASE_URL` (обязательно)
- `TIMELINE_EVENT_PREDUE_MINUTES` (default `30`)
- `TIMELINE_EVENT_LOOKBACK_MINUTES` (default `180`)
- `TIMELINE_EVENT_LOOKAHEAD_MINUTES` (default `1440`)
- `TIMELINE_SCHEDULER_LOCK_KEY` (default `603001`)
## Локально
```bash
cd schedulers
npm install
npm run start
```
## Docker
```bash
docker build -t client-timeline-calendar-scheduler .
docker run --rm -e DATABASE_URL="$DATABASE_URL" client-timeline-calendar-scheduler
```
## Dokploy
Сделай отдельный app/job и поставь schedule `* * * * *`.

View File

@@ -1,160 +0,0 @@
{
"name": "client-timeline-calendar-scheduler",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "client-timeline-calendar-scheduler",
"dependencies": {
"pg": "^8.16.3"
}
},
"node_modules/pg": {
"version": "8.18.0",
"resolved": "https://registry.npmjs.org/pg/-/pg-8.18.0.tgz",
"integrity": "sha512-xqrUDL1b9MbkydY/s+VZ6v+xiMUmOUk7SS9d/1kpyQxoJ6U9AO1oIJyUWVZojbfe5Cc/oluutcgFG4L9RDP1iQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"pg-connection-string": "^2.11.0",
"pg-pool": "^3.11.0",
"pg-protocol": "^1.11.0",
"pg-types": "2.2.0",
"pgpass": "1.0.5"
},
"engines": {
"node": ">= 16.0.0"
},
"optionalDependencies": {
"pg-cloudflare": "^1.3.0"
},
"peerDependencies": {
"pg-native": ">=3.0.1"
},
"peerDependenciesMeta": {
"pg-native": {
"optional": true
}
}
},
"node_modules/pg-cloudflare": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz",
"integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==",
"license": "MIT",
"optional": true
},
"node_modules/pg-connection-string": {
"version": "2.11.0",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.11.0.tgz",
"integrity": "sha512-kecgoJwhOpxYU21rZjULrmrBJ698U2RxXofKVzOn5UDj61BPj/qMb7diYUR1nLScCDbrztQFl1TaQZT0t1EtzQ==",
"license": "MIT"
},
"node_modules/pg-int8": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
"integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
"license": "ISC",
"engines": {
"node": ">=4.0.0"
}
},
"node_modules/pg-pool": {
"version": "3.11.0",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.11.0.tgz",
"integrity": "sha512-MJYfvHwtGp870aeusDh+hg9apvOe2zmpZJpyt+BMtzUWlVqbhFmMK6bOBXLBUPd7iRtIF9fZplDc7KrPN3PN7w==",
"license": "MIT",
"peerDependencies": {
"pg": ">=8.0"
}
},
"node_modules/pg-protocol": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.11.0.tgz",
"integrity": "sha512-pfsxk2M9M3BuGgDOfuy37VNRRX3jmKgMjcvAcWqNDpZSf4cUmv8HSOl5ViRQFsfARFn0KuUQTgLxVMbNq5NW3g==",
"license": "MIT"
},
"node_modules/pg-types": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
"integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
"license": "MIT",
"dependencies": {
"pg-int8": "1.0.1",
"postgres-array": "~2.0.0",
"postgres-bytea": "~1.0.0",
"postgres-date": "~1.0.4",
"postgres-interval": "^1.1.0"
},
"engines": {
"node": ">=4"
}
},
"node_modules/pgpass": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
"integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
"license": "MIT",
"dependencies": {
"split2": "^4.1.0"
}
},
"node_modules/postgres-array": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
"integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
"license": "MIT",
"engines": {
"node": ">=4"
}
},
"node_modules/postgres-bytea": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz",
"integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-date": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
"integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/postgres-interval": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
"integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
"license": "MIT",
"dependencies": {
"xtend": "^4.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/split2": {
"version": "4.2.0",
"resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
"integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
"license": "ISC",
"engines": {
"node": ">= 10.x"
}
},
"node_modules/xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
"integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
"license": "MIT",
"engines": {
"node": ">=0.4"
}
}
}
}

View File

@@ -1,11 +0,0 @@
{
"name": "client-timeline-calendar-scheduler",
"private": true,
"type": "module",
"scripts": {
"start": "node run.mjs"
},
"dependencies": {
"pg": "^8.16.3"
}
}

View File

@@ -1,118 +0,0 @@
import crypto from "node:crypto";
import { Pool } from "pg";
// Read an integer from the environment; fall back to defaultValue when the
// variable is unset, blank, or not parseable as a base-10 integer.
function readIntEnv(name, defaultValue) {
  const raw = String(process.env[name] ?? "").trim();
  if (raw === "") return defaultValue;
  const value = Number.parseInt(raw, 10);
  return Number.isNaN(value) ? defaultValue : value;
}
// Build a unique ClientTimelineEntry id: "ctle_" + base36 timestamp + 16 hex
// chars of randomness (8 random bytes).
function makeTimelineEntryId() {
  const timePart = Date.now().toString(36);
  const randomPart = crypto.randomBytes(8).toString("hex");
  return ["ctle", timePart, randomPart].join("_");
}
// One-shot sync: upsert ClientTimelineEntry rows for CalendarEvents whose
// pre-due moment (startsAt - TIMELINE_EVENT_PREDUE_MINUTES) has been reached,
// guarded by a Postgres advisory lock so concurrent runs are safe.
async function run() {
  const databaseUrl = String(process.env.DATABASE_URL ?? "").trim();
  if (!databaseUrl) {
    throw new Error("DATABASE_URL is required");
  }
  // Clamp config so the scan window always covers the pre-due horizon.
  const preDueMinutes = Math.max(1, readIntEnv("TIMELINE_EVENT_PREDUE_MINUTES", 30));
  const lookbackMinutes = Math.max(preDueMinutes, readIntEnv("TIMELINE_EVENT_LOOKBACK_MINUTES", 180));
  const lookaheadMinutes = Math.max(preDueMinutes, readIntEnv("TIMELINE_EVENT_LOOKAHEAD_MINUTES", 1440));
  const lockKey = readIntEnv("TIMELINE_SCHEDULER_LOCK_KEY", 603001);
  const now = new Date();
  const rangeStart = new Date(now.getTime() - lookbackMinutes * 60_000);
  const rangeEnd = new Date(now.getTime() + lookaheadMinutes * 60_000);
  const pool = new Pool({ connectionString: databaseUrl });
  // A single dedicated connection keeps the session-level advisory lock and
  // its unlock on the same session.
  const client = await pool.connect();
  try {
    // Non-blocking lock attempt: if another instance holds it, skip cleanly.
    const lockRes = await client.query("SELECT pg_try_advisory_lock($1) AS locked", [lockKey]);
    const locked = Boolean(lockRes.rows?.[0]?.locked);
    if (!locked) {
      console.log(`[timeline-calendar-scheduler] skipped: lock ${lockKey} is busy`);
      return;
    }
    try {
      // Active (non-archived) events with a contact inside the scan window.
      const eventsRes = await client.query(
        `
SELECT
"id",
"teamId",
"contactId",
"startsAt"
FROM "CalendarEvent"
WHERE
"isArchived" = FALSE
AND "contactId" IS NOT NULL
AND "startsAt" >= $1
AND "startsAt" <= $2
ORDER BY "startsAt" ASC
`,
        [rangeStart.toISOString(), rangeEnd.toISOString()],
      );
      let touched = 0;
      let skippedBeforeWindow = 0;
      for (const event of eventsRes.rows) {
        const contactId = String(event.contactId ?? "").trim();
        const teamId = String(event.teamId ?? "").trim();
        const contentId = String(event.id ?? "").trim();
        const startsAt = new Date(event.startsAt);
        // Defensive: skip malformed rows rather than inserting bad entries.
        if (!contactId || !teamId || !contentId || Number.isNaN(startsAt.getTime())) continue;
        const preDueAt = new Date(startsAt.getTime() - preDueMinutes * 60_000);
        if (now < preDueAt) {
          // Pre-due moment not reached yet; a later run will pick it up.
          skippedBeforeWindow += 1;
          continue;
        }
        // Idempotent upsert keyed by (teamId, contentType, contentId); the
        // fresh id is only used when the row does not exist yet.
        await client.query(
          `
INSERT INTO "ClientTimelineEntry" (
"id",
"teamId",
"contactId",
"contentType",
"contentId",
"datetime",
"createdAt",
"updatedAt"
)
VALUES ($1, $2, $3, 'CALENDAR_EVENT', $4, $5, NOW(), NOW())
ON CONFLICT ("teamId", "contentType", "contentId")
DO UPDATE SET
"contactId" = EXCLUDED."contactId",
"datetime" = EXCLUDED."datetime",
"updatedAt" = NOW()
`,
          [makeTimelineEntryId(), teamId, contactId, contentId, preDueAt.toISOString()],
        );
        touched += 1;
      }
      console.log(
        `[timeline-calendar-scheduler] done: scanned=${eventsRes.rowCount ?? 0} updated=${touched} skipped_before_window=${skippedBeforeWindow} at=${now.toISOString()}`,
      );
    } finally {
      // Best-effort unlock so a failed unlock never masks the real error.
      await client.query("SELECT pg_advisory_unlock($1)", [lockKey]).catch(() => undefined);
    }
  } finally {
    client.release();
    await pool.end();
  }
}
// Entry point: run once and exit non-zero on failure — designed for an
// external scheduler (per the README, e.g. a Dokploy job on "* * * * *").
run().catch((error) => {
  const message = error instanceof Error ? error.stack || error.message : String(error);
  console.error(`[timeline-calendar-scheduler] failed: ${message}`);
  process.exitCode = 1;
});

View File

@@ -2,7 +2,7 @@
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
CANONICAL_SCHEMA="$ROOT_DIR/Frontend/prisma/schema.prisma"
CANONICAL_SCHEMA="$ROOT_DIR/frontend/prisma/schema.prisma"
TARGETS=(
"$ROOT_DIR/backend/prisma/schema.prisma"
)

View File

@@ -2,7 +2,7 @@
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
CANONICAL_SCHEMA="$ROOT_DIR/Frontend/prisma/schema.prisma"
CANONICAL_SCHEMA="$ROOT_DIR/frontend/prisma/schema.prisma"
TARGETS=(
"$ROOT_DIR/backend/prisma/schema.prisma"
)