Compare commits
43 Commits
b510dd54d6
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
430cb1cff6 | ||
|
|
0bbe245e05 | ||
|
|
576a18a0e6 | ||
|
|
190a768d82 | ||
|
|
665aaf0922 | ||
|
|
9384566610 | ||
|
|
56a7734e8e | ||
|
|
e577b41a86 | ||
|
|
294f4077f0 | ||
|
|
52cbed91f8 | ||
|
|
31fc8cbc34 | ||
|
|
4ec6506633 | ||
|
|
e99cbf4882 | ||
|
|
3648366ebe | ||
|
|
eb73c5b1a1 | ||
|
|
f5f261ff89 | ||
|
|
443dc7fa5d | ||
|
|
09324bb25e | ||
|
|
387abf03e4 | ||
|
|
9db56c5edc | ||
|
|
ca01a91019 | ||
|
|
17081e13e4 | ||
|
|
0c19135c49 | ||
|
|
9ff7927463 | ||
|
|
81f86b6538 | ||
|
|
2e7f5e7863 | ||
|
|
64f7e4bdba | ||
|
|
40f7f66f83 | ||
|
|
56df2ab37b | ||
|
|
5363b113cf | ||
|
|
46c87c7caa | ||
|
|
27b05cf362 | ||
|
|
e342d68197 | ||
|
|
3a24f4a9cd | ||
|
|
0106c84daf | ||
|
|
596bdbf1c5 | ||
|
|
07f89ba5fb | ||
|
|
5112f52722 | ||
|
|
339db65514 | ||
|
|
b6f9b2d70b | ||
|
|
a3b0b5ff79 | ||
|
|
6084333704 | ||
|
|
8f1e3be129 |
25
.dockerignore
Normal file
25
.dockerignore
Normal file
@@ -0,0 +1,25 @@
|
||||
.git
|
||||
.gitignore
|
||||
.DS_Store
|
||||
*.log
|
||||
node_modules
|
||||
.pnpm-store
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
.env
|
||||
.env.*
|
||||
!.env.example
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
.venv
|
||||
venv
|
||||
.pytest_cache
|
||||
.mypy_cache
|
||||
coverage
|
||||
.coverage
|
||||
.nuxt
|
||||
.output
|
||||
dist
|
||||
build
|
||||
@@ -28,4 +28,4 @@ jobs:
|
||||
tags: gitea.dsrptlab.com/optovia/geo/geo:latest
|
||||
|
||||
- name: Deploy to Dokploy
|
||||
run: curl -X POST "https://dokploy.optovia.ru/api/deploy/_9J00xDYcVjW0E8jhI-jj"
|
||||
run: curl -k -X POST "https://dokploy.dsrptlab.com/api/deploy/nNkyHZyGCzk1bgwC0BYhR"
|
||||
|
||||
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules
|
||||
dist
|
||||
39
Dockerfile
39
Dockerfile
@@ -1,24 +1,31 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
NIXPACKS_POETRY_VERSION=2.2.1
|
||||
FROM node:22-alpine AS deps
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends build-essential curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
COPY package.json package-lock.json ./
|
||||
RUN npm ci
|
||||
|
||||
RUN python -m venv --copies /opt/venv
|
||||
ENV VIRTUAL_ENV=/opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
FROM deps AS builder
|
||||
|
||||
COPY . .
|
||||
COPY tsconfig.json ./
|
||||
COPY src ./src
|
||||
RUN npm run build
|
||||
|
||||
RUN pip install --no-cache-dir poetry==$NIXPACKS_POETRY_VERSION \
|
||||
&& poetry install --no-interaction --no-ansi
|
||||
FROM deps AS runtime-deps
|
||||
RUN npm prune --omit=dev
|
||||
|
||||
ENV PORT=8000
|
||||
FROM node:22-alpine AS runtime
|
||||
|
||||
CMD ["sh", "-c", "poetry run python manage.py collectstatic --noinput && poetry run python -m gunicorn geo.wsgi:application --bind 0.0.0.0:${PORT:-8000}"]
|
||||
RUN apk add --no-cache curl jq
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json ./
|
||||
COPY --from=runtime-deps /app/node_modules ./node_modules
|
||||
|
||||
COPY --from=builder /app/dist ./dist
|
||||
COPY scripts ./scripts
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["sh", "-c", ". /app/scripts/load-vault-env.sh && node dist/index.js"]
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
"""Geo Django project."""
|
||||
148
geo/settings.py
148
geo/settings.py
@@ -1,148 +0,0 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from dotenv import load_dotenv
|
||||
from infisical_sdk import InfisicalSDKClient
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
|
||||
load_dotenv()
|
||||
|
||||
INFISICAL_API_URL = os.environ["INFISICAL_API_URL"]
|
||||
INFISICAL_CLIENT_ID = os.environ["INFISICAL_CLIENT_ID"]
|
||||
INFISICAL_CLIENT_SECRET = os.environ["INFISICAL_CLIENT_SECRET"]
|
||||
INFISICAL_PROJECT_ID = os.environ["INFISICAL_PROJECT_ID"]
|
||||
INFISICAL_ENV = os.environ.get("INFISICAL_ENV", "prod")
|
||||
|
||||
client = InfisicalSDKClient(host=INFISICAL_API_URL)
|
||||
client.auth.universal_auth.login(
|
||||
client_id=INFISICAL_CLIENT_ID,
|
||||
client_secret=INFISICAL_CLIENT_SECRET,
|
||||
)
|
||||
|
||||
# Fetch secrets from /geo and /shared
|
||||
for secret_path in ["/geo", "/shared"]:
|
||||
secrets_response = client.secrets.list_secrets(
|
||||
environment_slug=INFISICAL_ENV,
|
||||
secret_path=secret_path,
|
||||
project_id=INFISICAL_PROJECT_ID,
|
||||
expand_secret_references=True,
|
||||
view_secret_value=True,
|
||||
)
|
||||
for secret in secrets_response.secrets:
|
||||
os.environ[secret.secretKey] = secret.secretValue
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', 'dev-secret-key-change-in-production')
|
||||
|
||||
DEBUG = os.getenv('DEBUG', 'False') == 'True'
|
||||
|
||||
# Sentry/GlitchTip configuration
|
||||
SENTRY_DSN = os.getenv('SENTRY_DSN', '')
|
||||
if SENTRY_DSN:
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
integrations=[DjangoIntegration()],
|
||||
auto_session_tracking=False,
|
||||
traces_sample_rate=0.01,
|
||||
release=os.getenv('RELEASE_VERSION', '1.0.0'),
|
||||
environment=os.getenv('ENVIRONMENT', 'production'),
|
||||
send_default_pii=False,
|
||||
debug=DEBUG,
|
||||
)
|
||||
|
||||
ALLOWED_HOSTS = ['*']
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = ['https://geo.optovia.ru']
|
||||
|
||||
INSTALLED_APPS = [
|
||||
'whitenoise.runserver_nostatic',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.staticfiles',
|
||||
'corsheaders',
|
||||
'graphene_django',
|
||||
'geo_app',
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
'corsheaders.middleware.CorsMiddleware',
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'geo.urls'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [],
|
||||
'APP_DIRS': True,
|
||||
'OPTIONS': {
|
||||
'context_processors': [
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.request',
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = 'geo.wsgi.application'
|
||||
|
||||
# No database - we use ArangoDB directly
|
||||
DATABASES = {}
|
||||
|
||||
# Internationalization
|
||||
LANGUAGE_CODE = 'ru-ru'
|
||||
TIME_ZONE = 'UTC'
|
||||
USE_I18N = True
|
||||
USE_TZ = True
|
||||
|
||||
# Static files
|
||||
STATIC_URL = '/static/'
|
||||
STATIC_ROOT = BASE_DIR / 'staticfiles'
|
||||
|
||||
# Default primary key field type
|
||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||
|
||||
# CORS
|
||||
CORS_ALLOW_ALL_ORIGINS = False
|
||||
CORS_ALLOWED_ORIGINS = ['https://optovia.ru']
|
||||
CORS_ALLOW_CREDENTIALS = True
|
||||
|
||||
# GraphQL
|
||||
GRAPHENE = {
|
||||
'SCHEMA': 'geo_app.schema.schema',
|
||||
}
|
||||
|
||||
# ArangoDB connection (internal M2M)
|
||||
ARANGODB_INTERNAL_URL = os.getenv('ARANGODB_INTERNAL_URL', 'localhost:8529')
|
||||
ARANGODB_DATABASE = os.getenv('ARANGODB_DATABASE', 'optovia_maps')
|
||||
ARANGODB_PASSWORD = os.getenv('ARANGODB_PASSWORD', '')
|
||||
|
||||
# Routing services (external APIs)
|
||||
GRAPHHOPPER_EXTERNAL_URL = os.getenv('GRAPHHOPPER_EXTERNAL_URL', 'https://graphhopper.optovia.ru')
|
||||
OPENRAILROUTING_EXTERNAL_URL = os.getenv('OPENRAILROUTING_EXTERNAL_URL', 'https://openrailrouting.optovia.ru')
|
||||
|
||||
# Logging
|
||||
LOGGING = {
|
||||
'version': 1,
|
||||
'disable_existing_loggers': False,
|
||||
'handlers': {
|
||||
'console': {
|
||||
'class': 'logging.StreamHandler',
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
'django.request': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
'geo_app': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,125 +0,0 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from dotenv import load_dotenv
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', 'dev-secret-key-change-in-production')
|
||||
|
||||
DEBUG = True
|
||||
|
||||
# Sentry/GlitchTip configuration
|
||||
SENTRY_DSN = os.getenv('SENTRY_DSN', '')
|
||||
if SENTRY_DSN:
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
integrations=[DjangoIntegration()],
|
||||
auto_session_tracking=False,
|
||||
traces_sample_rate=0.01,
|
||||
release=os.getenv('RELEASE_VERSION', '1.0.0'),
|
||||
environment=os.getenv('ENVIRONMENT', 'production'),
|
||||
send_default_pii=False,
|
||||
debug=DEBUG,
|
||||
)
|
||||
|
||||
ALLOWED_HOSTS = ['*']
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = ['https://geo.optovia.ru']
|
||||
|
||||
INSTALLED_APPS = [
|
||||
'whitenoise.runserver_nostatic',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.staticfiles',
|
||||
'corsheaders',
|
||||
'graphene_django',
|
||||
'geo_app',
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
'corsheaders.middleware.CorsMiddleware',
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'geo.urls'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [],
|
||||
'APP_DIRS': True,
|
||||
'OPTIONS': {
|
||||
'context_processors': [
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.request',
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = 'geo.wsgi.application'
|
||||
|
||||
# No database - we use ArangoDB directly
|
||||
DATABASES = {}
|
||||
|
||||
# Internationalization
|
||||
LANGUAGE_CODE = 'ru-ru'
|
||||
TIME_ZONE = 'UTC'
|
||||
USE_I18N = True
|
||||
USE_TZ = True
|
||||
|
||||
# Static files
|
||||
STATIC_URL = '/static/'
|
||||
STATIC_ROOT = BASE_DIR / 'staticfiles'
|
||||
|
||||
# Default primary key field type
|
||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||
|
||||
# CORS
|
||||
CORS_ALLOW_ALL_ORIGINS = False
|
||||
CORS_ALLOWED_ORIGINS = ['http://localhost:3000', 'https://optovia.ru']
|
||||
CORS_ALLOW_CREDENTIALS = True
|
||||
|
||||
# GraphQL
|
||||
GRAPHENE = {
|
||||
'SCHEMA': 'geo_app.schema.schema',
|
||||
}
|
||||
|
||||
# ArangoDB connection (internal M2M)
|
||||
ARANGODB_INTERNAL_URL = os.getenv('ARANGODB_INTERNAL_URL', 'localhost:8529')
|
||||
ARANGODB_DATABASE = os.getenv('ARANGODB_DATABASE', 'optovia_maps')
|
||||
ARANGODB_PASSWORD = os.getenv('ARANGODB_PASSWORD', '')
|
||||
|
||||
# Routing services (external APIs)
|
||||
GRAPHHOPPER_EXTERNAL_URL = os.getenv('GRAPHHOPPER_EXTERNAL_URL', 'https://graphhopper.optovia.ru')
|
||||
OPENRAILROUTING_EXTERNAL_URL = os.getenv('OPENRAILROUTING_EXTERNAL_URL', 'https://openrailrouting.optovia.ru')
|
||||
|
||||
# Logging
|
||||
LOGGING = {
|
||||
'version': 1,
|
||||
'disable_existing_loggers': False,
|
||||
'handlers': {
|
||||
'console': {
|
||||
'class': 'logging.StreamHandler',
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
'django.request': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
'geo_app': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
from django.urls import path
|
||||
from graphene_django.views import GraphQLView
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
|
||||
urlpatterns = [
|
||||
path('graphql/public/', csrf_exempt(GraphQLView.as_view(graphiql=True))),
|
||||
]
|
||||
@@ -1,5 +0,0 @@
|
||||
import os
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geo.settings')
|
||||
application = get_wsgi_application()
|
||||
@@ -1 +0,0 @@
|
||||
"""Geo app - logistics graph operations."""
|
||||
@@ -1,6 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class GeoAppConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'geo_app'
|
||||
@@ -1,49 +0,0 @@
|
||||
"""ArangoDB client singleton."""
|
||||
import logging
|
||||
from arango import ArangoClient
|
||||
from django.conf import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_db = None
|
||||
|
||||
|
||||
def get_db():
|
||||
"""Get ArangoDB database connection (singleton)."""
|
||||
global _db
|
||||
if _db is None:
|
||||
hosts = settings.ARANGODB_INTERNAL_URL
|
||||
if not hosts.startswith("http"):
|
||||
hosts = f"http://{hosts}"
|
||||
|
||||
client = ArangoClient(hosts=hosts)
|
||||
_db = client.db(
|
||||
settings.ARANGODB_DATABASE,
|
||||
username='root',
|
||||
password=settings.ARANGODB_PASSWORD,
|
||||
)
|
||||
logger.info(
|
||||
"Connected to ArangoDB: %s/%s",
|
||||
hosts,
|
||||
settings.ARANGODB_DATABASE,
|
||||
)
|
||||
return _db
|
||||
|
||||
|
||||
def ensure_graph():
|
||||
"""Ensure named graph exists for K_SHORTEST_PATHS queries."""
|
||||
db = get_db()
|
||||
graph_name = 'optovia_graph'
|
||||
|
||||
if db.has_graph(graph_name):
|
||||
return db.graph(graph_name)
|
||||
|
||||
logger.info("Creating graph: %s", graph_name)
|
||||
return db.create_graph(
|
||||
graph_name,
|
||||
edge_definitions=[{
|
||||
'edge_collection': 'edges',
|
||||
'from_vertex_collections': ['nodes'],
|
||||
'to_vertex_collections': ['nodes'],
|
||||
}],
|
||||
)
|
||||
@@ -1,122 +0,0 @@
|
||||
"""
|
||||
Server-side map clustering using Uber H3 hexagonal grid.
|
||||
|
||||
Maps zoom levels to h3 resolutions and groups nodes by cell.
|
||||
"""
|
||||
import logging
|
||||
import threading
|
||||
import h3
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Global cache for nodes
|
||||
_nodes_cache = {}
|
||||
_cache_lock = threading.Lock()
|
||||
|
||||
# Map zoom level to h3 resolution
|
||||
# Higher zoom = higher resolution = smaller cells
|
||||
ZOOM_TO_RES = {
|
||||
0: 0, 1: 0, 2: 1, 3: 1, 4: 2, 5: 2,
|
||||
6: 3, 7: 3, 8: 4, 9: 4, 10: 5, 11: 5,
|
||||
12: 6, 13: 7, 14: 8, 15: 9, 16: 10
|
||||
}
|
||||
|
||||
|
||||
def _fetch_nodes(db, transport_type=None):
|
||||
"""Fetch nodes from database with caching."""
|
||||
cache_key = f"nodes:{transport_type or 'all'}"
|
||||
|
||||
with _cache_lock:
|
||||
if cache_key not in _nodes_cache:
|
||||
aql = """
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'logistics' OR node.node_type == null
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
"""
|
||||
cursor = db.aql.execute(aql)
|
||||
all_nodes = list(cursor)
|
||||
|
||||
# Filter by transport type if specified
|
||||
if transport_type:
|
||||
all_nodes = [
|
||||
n for n in all_nodes
|
||||
if transport_type in (n.get('transport_types') or [])
|
||||
]
|
||||
|
||||
_nodes_cache[cache_key] = all_nodes
|
||||
logger.info("Cached %d nodes for %s", len(all_nodes), cache_key)
|
||||
|
||||
return _nodes_cache[cache_key]
|
||||
|
||||
|
||||
def get_clustered_nodes(db, west, south, east, north, zoom, transport_type=None):
|
||||
"""
|
||||
Get clustered nodes for given bounding box and zoom level.
|
||||
|
||||
Uses H3 hexagonal grid to group nearby nodes.
|
||||
"""
|
||||
resolution = ZOOM_TO_RES.get(int(zoom), 5)
|
||||
nodes = _fetch_nodes(db, transport_type)
|
||||
|
||||
if not nodes:
|
||||
return []
|
||||
|
||||
# Group nodes by h3 cell
|
||||
cells = {}
|
||||
for node in nodes:
|
||||
lat = node.get('latitude')
|
||||
lng = node.get('longitude')
|
||||
|
||||
# Skip nodes outside bounding box (rough filter)
|
||||
if lat < south or lat > north or lng < west or lng > east:
|
||||
continue
|
||||
|
||||
cell = h3.latlng_to_cell(lat, lng, resolution)
|
||||
if cell not in cells:
|
||||
cells[cell] = []
|
||||
cells[cell].append(node)
|
||||
|
||||
# Build results
|
||||
results = []
|
||||
for cell, nodes_in_cell in cells.items():
|
||||
count = len(nodes_in_cell)
|
||||
|
||||
if count == 1:
|
||||
# Single point — return actual node data
|
||||
node = nodes_in_cell[0]
|
||||
results.append({
|
||||
'id': node.get('_key'),
|
||||
'latitude': node.get('latitude'),
|
||||
'longitude': node.get('longitude'),
|
||||
'count': 1,
|
||||
'expansion_zoom': None,
|
||||
'name': node.get('name'),
|
||||
})
|
||||
else:
|
||||
# Cluster — return cell centroid
|
||||
lat, lng = h3.cell_to_latlng(cell)
|
||||
results.append({
|
||||
'id': f"cluster-{cell}",
|
||||
'latitude': lat,
|
||||
'longitude': lng,
|
||||
'count': count,
|
||||
'expansion_zoom': min(zoom + 2, 16),
|
||||
'name': None,
|
||||
})
|
||||
|
||||
logger.info("Returning %d clusters/points for zoom=%d res=%d", len(results), zoom, resolution)
|
||||
return results
|
||||
|
||||
|
||||
def invalidate_cache(transport_type=None):
|
||||
"""Invalidate node cache after data changes."""
|
||||
with _cache_lock:
|
||||
if transport_type:
|
||||
cache_key = f"nodes:{transport_type}"
|
||||
if cache_key in _nodes_cache:
|
||||
del _nodes_cache[cache_key]
|
||||
else:
|
||||
_nodes_cache.clear()
|
||||
|
||||
logger.info("Cluster cache invalidated")
|
||||
200
geo_app/route_engine.py
Normal file
200
geo_app/route_engine.py
Normal file
@@ -0,0 +1,200 @@
|
||||
"""Unified graph routing helpers."""
|
||||
import heapq
|
||||
|
||||
from .arango_client import ensure_graph
|
||||
|
||||
|
||||
def _allowed_next_phase(current_phase, transport_type):
|
||||
"""
|
||||
Phase-based routing: auto → rail* → auto.
|
||||
- end_auto: allow one auto, rail, or offer
|
||||
- end_auto_done: auto used — rail or offer
|
||||
- rail: any number of rail, then one auto or offer
|
||||
- start_auto_done: auto used — only offer
|
||||
"""
|
||||
if current_phase == 'end_auto':
|
||||
if transport_type == 'offer':
|
||||
return 'offer'
|
||||
if transport_type == 'auto':
|
||||
return 'end_auto_done'
|
||||
if transport_type == 'rail':
|
||||
return 'rail'
|
||||
return None
|
||||
if current_phase == 'end_auto_done':
|
||||
if transport_type == 'offer':
|
||||
return 'offer'
|
||||
if transport_type == 'rail':
|
||||
return 'rail'
|
||||
return None
|
||||
if current_phase == 'rail':
|
||||
if transport_type == 'offer':
|
||||
return 'offer'
|
||||
if transport_type == 'rail':
|
||||
return 'rail'
|
||||
if transport_type == 'auto':
|
||||
return 'start_auto_done'
|
||||
return None
|
||||
if current_phase == 'start_auto_done':
|
||||
if transport_type == 'offer':
|
||||
return 'offer'
|
||||
return None
|
||||
return None
|
||||
|
||||
|
||||
def _allowed_types_for_phase(phase):
|
||||
if phase == 'end_auto':
|
||||
return ['auto', 'rail', 'offer']
|
||||
if phase == 'end_auto_done':
|
||||
return ['rail', 'offer']
|
||||
if phase == 'rail':
|
||||
return ['rail', 'auto', 'offer']
|
||||
if phase == 'start_auto_done':
|
||||
return ['offer']
|
||||
return ['offer']
|
||||
|
||||
|
||||
def _fetch_neighbors(db, node_key, allowed_types):
|
||||
aql = """
|
||||
FOR edge IN edges
|
||||
FILTER edge.transport_type IN @types
|
||||
FILTER edge._from == @node_id OR edge._to == @node_id
|
||||
LET neighbor_id = edge._from == @node_id ? edge._to : edge._from
|
||||
LET neighbor = DOCUMENT(neighbor_id)
|
||||
FILTER neighbor != null
|
||||
RETURN {
|
||||
neighbor_key: neighbor._key,
|
||||
neighbor_doc: neighbor,
|
||||
from_id: edge._from,
|
||||
to_id: edge._to,
|
||||
transport_type: edge.transport_type,
|
||||
distance_km: edge.distance_km,
|
||||
travel_time_seconds: edge.travel_time_seconds
|
||||
}
|
||||
"""
|
||||
cursor = db.aql.execute(
|
||||
aql,
|
||||
bind_vars={'node_id': f"nodes/{node_key}", 'types': allowed_types},
|
||||
)
|
||||
return list(cursor)
|
||||
|
||||
|
||||
def graph_find_targets(db, start_uuid, target_predicate, route_builder, limit=10, max_expansions=20000):
|
||||
"""Unified graph traversal: auto → rail* → auto, returns routes for target nodes."""
|
||||
ensure_graph()
|
||||
|
||||
nodes_col = db.collection('nodes')
|
||||
start = nodes_col.get(start_uuid)
|
||||
if not start:
|
||||
return []
|
||||
|
||||
queue = []
|
||||
counter = 0
|
||||
heapq.heappush(queue, (0, counter, start_uuid, 'end_auto'))
|
||||
|
||||
visited = {}
|
||||
predecessors = {}
|
||||
node_docs = {start_uuid: start}
|
||||
found = []
|
||||
expansions = 0
|
||||
|
||||
while queue and len(found) < limit and expansions < max_expansions:
|
||||
cost, _, node_key, phase = heapq.heappop(queue)
|
||||
|
||||
if (node_key, phase) in visited and cost > visited[(node_key, phase)]:
|
||||
continue
|
||||
visited[(node_key, phase)] = cost
|
||||
|
||||
node_doc = node_docs.get(node_key)
|
||||
if node_doc and target_predicate(node_doc):
|
||||
path_edges = []
|
||||
state = (node_key, phase)
|
||||
current_key = node_key
|
||||
while state in predecessors:
|
||||
prev_state, edge_info = predecessors[state]
|
||||
prev_key = prev_state[0]
|
||||
path_edges.append((current_key, prev_key, edge_info))
|
||||
state = prev_state
|
||||
current_key = prev_key
|
||||
|
||||
route = route_builder(path_edges, node_docs) if route_builder else None
|
||||
distance_km = route.total_distance_km if route else None
|
||||
|
||||
found.append({
|
||||
'node': node_doc,
|
||||
'route': route,
|
||||
'distance_km': distance_km,
|
||||
'cost': cost,
|
||||
})
|
||||
continue
|
||||
|
||||
neighbors = _fetch_neighbors(db, node_key, _allowed_types_for_phase(phase))
|
||||
expansions += 1
|
||||
|
||||
for neighbor in neighbors:
|
||||
transport_type = neighbor.get('transport_type')
|
||||
next_phase = _allowed_next_phase(phase, transport_type)
|
||||
if next_phase is None:
|
||||
continue
|
||||
|
||||
travel_time = neighbor.get('travel_time_seconds')
|
||||
distance_km = neighbor.get('distance_km')
|
||||
neighbor_key = neighbor.get('neighbor_key')
|
||||
if not neighbor_key:
|
||||
continue
|
||||
|
||||
node_docs[neighbor_key] = neighbor.get('neighbor_doc')
|
||||
step_cost = travel_time if travel_time is not None else (distance_km or 0)
|
||||
new_cost = cost + step_cost
|
||||
|
||||
state_key = (neighbor_key, next_phase)
|
||||
if state_key in visited and new_cost >= visited[state_key]:
|
||||
continue
|
||||
|
||||
counter += 1
|
||||
heapq.heappush(queue, (new_cost, counter, neighbor_key, next_phase))
|
||||
predecessors[state_key] = ((node_key, phase), neighbor)
|
||||
|
||||
return found
|
||||
|
||||
|
||||
def snap_to_nearest_hub(db, lat, lon):
|
||||
aql = """
|
||||
FOR hub IN nodes
|
||||
FILTER hub.node_type == 'logistics' OR hub.node_type == null
|
||||
FILTER hub.product_uuid == null
|
||||
LET types = hub.transport_types != null ? hub.transport_types : []
|
||||
FILTER ('rail' IN types) OR ('sea' IN types)
|
||||
FILTER hub.latitude != null AND hub.longitude != null
|
||||
LET dist = DISTANCE(hub.latitude, hub.longitude, @lat, @lon) / 1000
|
||||
SORT dist ASC
|
||||
LIMIT 1
|
||||
RETURN hub
|
||||
"""
|
||||
cursor = db.aql.execute(aql, bind_vars={'lat': lat, 'lon': lon})
|
||||
hubs = list(cursor)
|
||||
return hubs[0] if hubs else None
|
||||
|
||||
|
||||
def resolve_start_hub(db, source_uuid=None, lat=None, lon=None):
|
||||
nodes_col = db.collection('nodes')
|
||||
|
||||
if source_uuid:
|
||||
node = nodes_col.get(source_uuid)
|
||||
if not node:
|
||||
return None
|
||||
|
||||
if node.get('node_type') in ('logistics', None):
|
||||
types = node.get('transport_types') or []
|
||||
if ('rail' in types) or ('sea' in types):
|
||||
return node
|
||||
|
||||
node_lat = node.get('latitude')
|
||||
node_lon = node.get('longitude')
|
||||
if node_lat is None or node_lon is None:
|
||||
return None
|
||||
return snap_to_nearest_hub(db, node_lat, node_lon)
|
||||
|
||||
if lat is None or lon is None:
|
||||
return None
|
||||
|
||||
return snap_to_nearest_hub(db, lat, lon)
|
||||
1068
geo_app/schema.py
1068
geo_app/schema.py
File diff suppressed because it is too large
Load Diff
17
manage.py
17
manage.py
@@ -1,17 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
"""Django's command-line utility for administrative tasks."""
|
||||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == '__main__':
|
||||
"""Run administrative tasks."""
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geo.settings')
|
||||
try:
|
||||
from django.core.management import execute_from_command_line
|
||||
except ImportError as exc:
|
||||
raise ImportError(
|
||||
"Couldn't import Django. Are you sure it's installed and "
|
||||
"available on your PYTHONPATH environment variable? Did you "
|
||||
"forget to activate a virtual environment?"
|
||||
) from exc
|
||||
execute_from_command_line(sys.argv)
|
||||
@@ -1,18 +0,0 @@
|
||||
providers = ["python"]
|
||||
|
||||
[build]
|
||||
|
||||
[phases.install]
|
||||
cmds = [
|
||||
"python -m venv --copies /opt/venv",
|
||||
". /opt/venv/bin/activate",
|
||||
"pip install poetry==$NIXPACKS_POETRY_VERSION",
|
||||
"poetry install --no-interaction --no-ansi"
|
||||
]
|
||||
|
||||
[start]
|
||||
cmd = "poetry run python manage.py collectstatic --noinput && poetry run python -m gunicorn geo.wsgi:application --bind 0.0.0.0:${PORT:-8000}"
|
||||
|
||||
[variables]
|
||||
# Set Poetry version to match local environment
|
||||
NIXPACKS_POETRY_VERSION = "2.2.1"
|
||||
3732
package-lock.json
generated
Normal file
3732
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
25
package.json
Normal file
25
package.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "geo",
|
||||
"version": "1.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/index.js",
|
||||
"dev": "tsx --watch src/index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/server": "^4.11.3",
|
||||
"@sentry/node": "^9.5.0",
|
||||
"arangojs": "^9.2.0",
|
||||
"cors": "^2.8.5",
|
||||
"express": "^5.0.1",
|
||||
"h3-js": "^4.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/node": "^22.13.0",
|
||||
"tsx": "^4.19.3",
|
||||
"typescript": "^5.7.3"
|
||||
}
|
||||
}
|
||||
@@ -1,27 +0,0 @@
|
||||
[project]
|
||||
name = "geo"
|
||||
version = "0.1.0"
|
||||
description = "Geo service - logistics graph and routing"
|
||||
authors = [
|
||||
{name = "Ruslan Bakiev",email = "572431+veikab@users.noreply.github.com"}
|
||||
]
|
||||
requires-python = "^3.11"
|
||||
dependencies = [
|
||||
"django (>=5.2.8,<6.0)",
|
||||
"graphene-django (>=3.2.3,<4.0.0)",
|
||||
"django-cors-headers (>=4.9.0,<5.0.0)",
|
||||
"python-arango (>=8.0.0,<9.0.0)",
|
||||
"python-dotenv (>=1.2.1,<2.0.0)",
|
||||
"infisicalsdk (>=1.0.12,<2.0.0)",
|
||||
"gunicorn (>=23.0.0,<24.0.0)",
|
||||
"whitenoise (>=6.7.0,<7.0.0)",
|
||||
"sentry-sdk (>=2.47.0,<3.0.0)",
|
||||
"h3 (>=4.0.0,<5.0.0)"
|
||||
]
|
||||
|
||||
[tool.poetry]
|
||||
package-mode = false
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
44
scripts/load-secrets.mjs
Normal file
44
scripts/load-secrets.mjs
Normal file
@@ -0,0 +1,44 @@
|
||||
import { InfisicalSDK } from "@infisical/sdk";
|
||||
import { writeFileSync } from "fs";
|
||||
|
||||
const INFISICAL_API_URL = process.env.INFISICAL_API_URL;
|
||||
const INFISICAL_CLIENT_ID = process.env.INFISICAL_CLIENT_ID;
|
||||
const INFISICAL_CLIENT_SECRET = process.env.INFISICAL_CLIENT_SECRET;
|
||||
const INFISICAL_PROJECT_ID = process.env.INFISICAL_PROJECT_ID;
|
||||
const INFISICAL_ENV = process.env.INFISICAL_ENV || "prod";
|
||||
const SECRET_PATHS = (process.env.INFISICAL_SECRET_PATHS || "/shared").split(",");
|
||||
|
||||
if (!INFISICAL_API_URL || !INFISICAL_CLIENT_ID || !INFISICAL_CLIENT_SECRET || !INFISICAL_PROJECT_ID) {
|
||||
process.stderr.write("Missing required Infisical environment variables\n");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const client = new InfisicalSDK({ siteUrl: INFISICAL_API_URL });
|
||||
|
||||
await client.auth().universalAuth.login({
|
||||
clientId: INFISICAL_CLIENT_ID,
|
||||
clientSecret: INFISICAL_CLIENT_SECRET,
|
||||
});
|
||||
|
||||
process.stderr.write(`Loading secrets from Infisical (env: ${INFISICAL_ENV})...\n`);
|
||||
|
||||
const envLines = [];
|
||||
|
||||
for (const secretPath of SECRET_PATHS) {
|
||||
const response = await client.secrets().listSecrets({
|
||||
projectId: INFISICAL_PROJECT_ID,
|
||||
environment: INFISICAL_ENV,
|
||||
secretPath: secretPath.trim(),
|
||||
expandSecretReferences: true,
|
||||
});
|
||||
|
||||
for (const secret of response.secrets) {
|
||||
const escapedValue = secret.secretValue.replace(/'/g, "'\\''");
|
||||
envLines.push(`export ${secret.secretKey}='${escapedValue}'`);
|
||||
}
|
||||
|
||||
process.stderr.write(` ${secretPath.trim()}: ${response.secrets.length} secrets loaded\n`);
|
||||
}
|
||||
|
||||
writeFileSync(".env.infisical", envLines.join("\n"));
|
||||
process.stderr.write("Secrets written to .env.infisical\n");
|
||||
60
scripts/load-vault-env.sh
Executable file
60
scripts/load-vault-env.sh
Executable file
@@ -0,0 +1,60 @@
|
||||
#!/bin/sh
|
||||
set -eu
|
||||
|
||||
log() {
|
||||
printf '%s\n' "$*" >&2
|
||||
}
|
||||
|
||||
VAULT_ENABLED="${VAULT_ENABLED:-auto}"
|
||||
if [ "$VAULT_ENABLED" = "false" ] || [ "$VAULT_ENABLED" = "0" ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ -z "${VAULT_ADDR:-}" ] || [ -z "${VAULT_TOKEN:-}" ]; then
|
||||
if [ "$VAULT_ENABLED" = "true" ] || [ "$VAULT_ENABLED" = "1" ]; then
|
||||
log "Vault bootstrap is required but VAULT_ADDR or VAULT_TOKEN is missing."
|
||||
exit 1
|
||||
fi
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if ! command -v curl >/dev/null 2>&1 || ! command -v jq >/dev/null 2>&1; then
|
||||
log "Vault bootstrap requires curl and jq."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VAULT_KV_MOUNT="${VAULT_KV_MOUNT:-secret}"
|
||||
|
||||
load_secret_path() {
|
||||
path="$1"
|
||||
source_name="$2"
|
||||
if [ -z "$path" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
url="${VAULT_ADDR%/}/v1/${VAULT_KV_MOUNT}/data/${path}"
|
||||
response="$(curl -fsS -H "X-Vault-Token: $VAULT_TOKEN" "$url")" || {
|
||||
log "Failed to load Vault path ${VAULT_KV_MOUNT}/${path}."
|
||||
return 1
|
||||
}
|
||||
|
||||
encoded_items="$(printf '%s' "$response" | jq -r '.data.data // {} | to_entries[]? | @base64')"
|
||||
if [ -z "$encoded_items" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
old_ifs="${IFS}"
|
||||
IFS='
|
||||
'
|
||||
for encoded_item in $encoded_items; do
|
||||
key="$(printf '%s' "$encoded_item" | base64 -d | jq -r '.key')"
|
||||
value="$(printf '%s' "$encoded_item" | base64 -d | jq -r '.value | tostring')"
|
||||
export "$key=$value"
|
||||
done
|
||||
IFS="${old_ifs}"
|
||||
|
||||
log "Loaded Vault ${source_name} secrets from ${VAULT_KV_MOUNT}/${path}."
|
||||
}
|
||||
|
||||
load_secret_path "${VAULT_SHARED_PATH:-}" "shared"
|
||||
load_secret_path "${VAULT_PROJECT_PATH:-}" "project"
|
||||
192
src/cluster.ts
Normal file
192
src/cluster.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import { latLngToCell, cellToLatLng } from 'h3-js'
|
||||
import { getDb } from './db.js'
|
||||
|
||||
const ZOOM_TO_RES: Record<number, number> = {
|
||||
0: 0, 1: 0, 2: 1, 3: 1, 4: 2, 5: 2,
|
||||
6: 3, 7: 3, 8: 4, 9: 4, 10: 5, 11: 5,
|
||||
12: 6, 13: 7, 14: 8, 15: 9, 16: 10,
|
||||
}
|
||||
|
||||
interface CachedNode {
|
||||
_key: string
|
||||
name?: string
|
||||
latitude?: number
|
||||
longitude?: number
|
||||
country?: string
|
||||
country_code?: string
|
||||
node_type?: string
|
||||
transport_types?: string[]
|
||||
}
|
||||
|
||||
const nodesCache = new Map<string, CachedNode[]>()
|
||||
|
||||
function fetchNodes(transportType?: string | null, nodeType?: string | null): CachedNode[] {
|
||||
const cacheKey = `nodes:${transportType || 'all'}:${nodeType || 'logistics'}`
|
||||
if (nodesCache.has(cacheKey)) return nodesCache.get(cacheKey)!
|
||||
|
||||
const db = getDb()
|
||||
let aql: string
|
||||
|
||||
if (nodeType === 'offer') {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'offer'
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
} else if (nodeType === 'supplier') {
|
||||
aql = `
|
||||
FOR offer IN nodes
|
||||
FILTER offer.node_type == 'offer'
|
||||
FILTER offer.supplier_uuid != null
|
||||
LET supplier = DOCUMENT(CONCAT('nodes/', offer.supplier_uuid))
|
||||
FILTER supplier != null
|
||||
FILTER supplier.latitude != null AND supplier.longitude != null
|
||||
COLLECT sup_uuid = offer.supplier_uuid INTO offers
|
||||
LET sup = DOCUMENT(CONCAT('nodes/', sup_uuid))
|
||||
RETURN {
|
||||
_key: sup_uuid,
|
||||
name: sup.name,
|
||||
latitude: sup.latitude,
|
||||
longitude: sup.longitude,
|
||||
country: sup.country,
|
||||
country_code: sup.country_code,
|
||||
node_type: 'supplier',
|
||||
offers_count: LENGTH(offers)
|
||||
}
|
||||
`
|
||||
} else {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'logistics' OR node.node_type == null
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
}
|
||||
|
||||
// arangojs query returns a cursor — we need async. Use a sync cache pattern with pre-fetching.
|
||||
// Since this is called from resolvers which are async, we'll use a different approach.
|
||||
// Store a promise instead.
|
||||
throw new Error('Use fetchNodesAsync instead')
|
||||
}
|
||||
|
||||
export async function fetchNodesAsync(transportType?: string | null, nodeType?: string | null): Promise<CachedNode[]> {
|
||||
const cacheKey = `nodes:${transportType || 'all'}:${nodeType || 'logistics'}`
|
||||
if (nodesCache.has(cacheKey)) return nodesCache.get(cacheKey)!
|
||||
|
||||
const db = getDb()
|
||||
let aql: string
|
||||
|
||||
if (nodeType === 'offer') {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'offer'
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
} else if (nodeType === 'supplier') {
|
||||
aql = `
|
||||
FOR offer IN nodes
|
||||
FILTER offer.node_type == 'offer'
|
||||
FILTER offer.supplier_uuid != null
|
||||
LET supplier = DOCUMENT(CONCAT('nodes/', offer.supplier_uuid))
|
||||
FILTER supplier != null
|
||||
FILTER supplier.latitude != null AND supplier.longitude != null
|
||||
COLLECT sup_uuid = offer.supplier_uuid INTO offers
|
||||
LET sup = DOCUMENT(CONCAT('nodes/', sup_uuid))
|
||||
RETURN {
|
||||
_key: sup_uuid,
|
||||
name: sup.name,
|
||||
latitude: sup.latitude,
|
||||
longitude: sup.longitude,
|
||||
country: sup.country,
|
||||
country_code: sup.country_code,
|
||||
node_type: 'supplier',
|
||||
offers_count: LENGTH(offers)
|
||||
}
|
||||
`
|
||||
} else {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'logistics' OR node.node_type == null
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
}
|
||||
|
||||
const cursor = await db.query(aql)
|
||||
let allNodes: CachedNode[] = await cursor.all()
|
||||
|
||||
if (transportType && (!nodeType || nodeType === 'logistics')) {
|
||||
allNodes = allNodes.filter(n => (n.transport_types || []).includes(transportType))
|
||||
}
|
||||
|
||||
nodesCache.set(cacheKey, allNodes)
|
||||
console.log(`Cached ${allNodes.length} nodes for ${cacheKey}`)
|
||||
return allNodes
|
||||
}
|
||||
|
||||
// One map marker produced by getClusteredNodes: either a single node
// (count === 1, id is the node's _key) or an aggregated cluster
// (id is "cluster-<h3cell>", positioned at the cell centroid).
export interface ClusterPoint {
  id: string                    // node _key, or "cluster-<cell>" for aggregates
  latitude: number
  longitude: number
  count: number                 // number of nodes this point represents
  expansion_zoom: number | null // zoom hint for expanding a cluster (presumably consumed by the map client — confirm); null for single nodes
  name: string | null           // node name for singles; null for clusters
}
|
||||
|
||||
export async function getClusteredNodes(
|
||||
west: number, south: number, east: number, north: number,
|
||||
zoom: number, transportType?: string | null, nodeType?: string | null,
|
||||
): Promise<ClusterPoint[]> {
|
||||
const resolution = ZOOM_TO_RES[Math.floor(zoom)] ?? 5
|
||||
const nodes = await fetchNodesAsync(transportType, nodeType)
|
||||
|
||||
if (!nodes.length) return []
|
||||
|
||||
const cells = new Map<string, CachedNode[]>()
|
||||
|
||||
for (const node of nodes) {
|
||||
const lat = node.latitude
|
||||
const lng = node.longitude
|
||||
if (lat == null || lng == null) continue
|
||||
if (lat < south || lat > north || lng < west || lng > east) continue
|
||||
|
||||
const cell = latLngToCell(lat, lng, resolution)
|
||||
if (!cells.has(cell)) cells.set(cell, [])
|
||||
cells.get(cell)!.push(node)
|
||||
}
|
||||
|
||||
const results: ClusterPoint[] = []
|
||||
|
||||
for (const [cell, nodesInCell] of cells) {
|
||||
if (nodesInCell.length === 1) {
|
||||
const node = nodesInCell[0]
|
||||
results.push({
|
||||
id: node._key,
|
||||
latitude: node.latitude!,
|
||||
longitude: node.longitude!,
|
||||
count: 1,
|
||||
expansion_zoom: null,
|
||||
name: node.name || null,
|
||||
})
|
||||
} else {
|
||||
const [lat, lng] = cellToLatLng(cell)
|
||||
results.push({
|
||||
id: `cluster-${cell}`,
|
||||
latitude: lat,
|
||||
longitude: lng,
|
||||
count: nodesInCell.length,
|
||||
expansion_zoom: Math.min(zoom + 2, 16),
|
||||
name: null,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
export function invalidateCache(): void {
|
||||
nodesCache.clear()
|
||||
console.log('Cluster cache invalidated')
|
||||
}
|
||||
27
src/db.ts
Normal file
27
src/db.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { Database } from 'arangojs'
|
||||
|
||||
// Connection settings, resolved once at module load.
// ARANGODB_URL takes precedence over ARANGODB_INTERNAL_URL; both fall back
// to a local default.
const ARANGODB_URL = process.env.ARANGODB_URL || process.env.ARANGODB_INTERNAL_URL || 'http://localhost:8529'
const ARANGODB_DATABASE = process.env.ARANGODB_DATABASE || 'optovia_maps'
// NOTE(review): defaults to an empty password — confirm auth is enforced in
// all deployment environments.
const ARANGODB_PASSWORD = process.env.ARANGODB_PASSWORD || ''

// Memoized database handle; created lazily by getDb().
let _db: Database | null = null
|
||||
|
||||
export function getDb(): Database {
|
||||
if (!_db) {
|
||||
const url = ARANGODB_URL.startsWith('http') ? ARANGODB_URL : `http://${ARANGODB_URL}`
|
||||
_db = new Database({ url, databaseName: ARANGODB_DATABASE, auth: { username: 'root', password: ARANGODB_PASSWORD } })
|
||||
console.log(`Connected to ArangoDB: ${url}/${ARANGODB_DATABASE}`)
|
||||
}
|
||||
return _db
|
||||
}
|
||||
|
||||
export async function ensureGraph(): Promise<void> {
|
||||
const db = getDb()
|
||||
const graphs = await db.listGraphs()
|
||||
if (graphs.some(g => g.name === 'optovia_graph')) return
|
||||
|
||||
console.log('Creating graph: optovia_graph')
|
||||
await db.createGraph('optovia_graph', [
|
||||
{ collection: 'edges', from: ['nodes'], to: ['nodes'] },
|
||||
])
|
||||
}
|
||||
90
src/helpers.ts
Normal file
90
src/helpers.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
/** Haversine distance in km. */
|
||||
export function distanceKm(lat1: number, lon1: number, lat2: number, lon2: number): number {
|
||||
const R = 6371
|
||||
const dLat = (lat2 - lat1) * Math.PI / 180
|
||||
const dLon = (lon2 - lon1) * Math.PI / 180
|
||||
const a =
|
||||
Math.sin(dLat / 2) ** 2 +
|
||||
Math.cos(lat1 * Math.PI / 180) * Math.cos(lat2 * Math.PI / 180) *
|
||||
Math.sin(dLon / 2) ** 2
|
||||
return R * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a))
|
||||
}
|
||||
|
||||
// Loosely typed ArangoDB document: raw documents are accessed defensively
// (optional chaining, fallbacks) throughout this module.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type ArangoDoc = Record<string, any>

// One leg of a route, built from consecutive edges sharing a transport type.
// Endpoint fields are null when the corresponding node document is missing
// from the lookup map passed to buildRouteFromEdges.
export interface RouteStage {
  fromUuid: string | null
  fromName: string | null
  fromLat: number | null
  fromLon: number | null
  toUuid: string | null
  toName: string | null
  toLat: number | null
  toLon: number | null
  distanceKm: number          // sum of distance_km over the stage's edges
  travelTimeSeconds: number   // sum of travel_time_seconds over the stage's edges
  transportType: string | null
}

// A complete route: ordered stages plus totals aggregated across them.
export interface RoutePath {
  totalDistanceKm: number
  totalTimeSeconds: number
  stages: RouteStage[]
}
|
||||
|
||||
function buildStage(fromDoc: ArangoDoc | undefined, toDoc: ArangoDoc | undefined, transportType: string, edges: ArangoDoc[]): RouteStage {
|
||||
const distance = edges.reduce((s, e) => s + (e.distance_km || 0), 0)
|
||||
const time = edges.reduce((s, e) => s + (e.travel_time_seconds || 0), 0)
|
||||
return {
|
||||
fromUuid: fromDoc?._key ?? null,
|
||||
fromName: fromDoc?.name ?? null,
|
||||
fromLat: fromDoc?.latitude ?? null,
|
||||
fromLon: fromDoc?.longitude ?? null,
|
||||
toUuid: toDoc?._key ?? null,
|
||||
toName: toDoc?.name ?? null,
|
||||
toLat: toDoc?.latitude ?? null,
|
||||
toLon: toDoc?.longitude ?? null,
|
||||
distanceKm: distance,
|
||||
travelTimeSeconds: time,
|
||||
transportType: transportType,
|
||||
}
|
||||
}
|
||||
|
||||
export function buildRouteFromEdges(pathEdges: [string, string, ArangoDoc][], nodeDocs: Map<string, ArangoDoc>): RoutePath | null {
|
||||
if (!pathEdges.length) return null
|
||||
|
||||
// Filter offer edges — not transport stages
|
||||
const filtered = pathEdges.filter(([, , e]) => e.transport_type !== 'offer')
|
||||
if (!filtered.length) return null
|
||||
|
||||
const stages: RouteStage[] = []
|
||||
let currentEdges: ArangoDoc[] = []
|
||||
let currentType: string | null = null
|
||||
let segmentStart: string | null = null
|
||||
|
||||
for (const [fromKey, , edge] of filtered) {
|
||||
const edgeType = edge.transport_type as string
|
||||
if (currentType === null) {
|
||||
currentType = edgeType
|
||||
currentEdges = [edge]
|
||||
segmentStart = fromKey
|
||||
} else if (edgeType === currentType) {
|
||||
currentEdges.push(edge)
|
||||
} else {
|
||||
stages.push(buildStage(nodeDocs.get(segmentStart!), nodeDocs.get(fromKey), currentType, currentEdges))
|
||||
currentType = edgeType
|
||||
currentEdges = [edge]
|
||||
segmentStart = fromKey
|
||||
}
|
||||
}
|
||||
|
||||
const lastTo = filtered[filtered.length - 1][1]
|
||||
stages.push(buildStage(nodeDocs.get(segmentStart!), nodeDocs.get(lastTo), currentType!, currentEdges))
|
||||
|
||||
return {
|
||||
totalDistanceKm: stages.reduce((s, st) => s + (st.distanceKm || 0), 0),
|
||||
totalTimeSeconds: stages.reduce((s, st) => s + (st.travelTimeSeconds || 0), 0),
|
||||
stages,
|
||||
}
|
||||
}
|
||||
22
src/index.ts
Normal file
22
src/index.ts
Normal file
@@ -0,0 +1,22 @@
|
||||
import express from 'express'
|
||||
import cors from 'cors'
|
||||
import { ApolloServer } from '@apollo/server'
|
||||
import { expressMiddleware } from '@apollo/server/express4'
|
||||
import { typeDefs, resolvers } from './schema.js'
|
||||
|
||||
// HTTP port; defaults to 8000 when PORT is unset.
const PORT = parseInt(process.env.PORT || '8000', 10)

const app = express()
// CORS restricted to the production origin, with credentials allowed.
app.use(cors({ origin: ['https://optovia.ru'], credentials: true }))

// GraphQL server. NOTE(review): introspection is enabled on this public,
// unauthenticated endpoint — confirm that is intentional in production.
const server = new ApolloServer({ typeDefs, resolvers, introspection: true })
await server.start()

// Public GraphQL endpoint (no auth). The double cast presumably bridges an
// express/apollo typings mismatch — confirm against the installed versions.
app.use('/graphql/public', express.json(), expressMiddleware(server) as unknown as express.RequestHandler)

// Liveness probe.
app.get('/health', (_, res) => { res.json({ status: 'ok' }) })

app.listen(PORT, '0.0.0.0', () => {
  console.log(`Geo server ready on port ${PORT}`)
  console.log(` /graphql/public - public (no auth)`)
})
|
||||
1047
src/schema.ts
Normal file
1047
src/schema.ts
Normal file
File diff suppressed because it is too large
Load Diff
19
tsconfig.json
Normal file
19
tsconfig.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "dist",
|
||||
"rootDir": "src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true
|
||||
},
|
||||
"include": ["src"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
Reference in New Issue
Block a user