Migrate geo backend from Django/Graphene to Express + Apollo Server + arangojs
All checks were successful
Build Docker Image / build (push) Successful in 1m5s
All checks were successful
Build Docker Image / build (push) Successful in 1m5s
Replace Python stack with TypeScript. All 30+ GraphQL queries preserved including phase-based routing (Dijkstra), H3 clustering, K_SHORTEST_PATHS, and external routing services (GraphHopper, OpenRailRouting). Single public endpoint, no auth.
This commit is contained in:
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules
|
||||
dist
|
||||
31
Dockerfile
31
Dockerfile
@@ -1,24 +1,23 @@
|
||||
FROM python:3.12-slim
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1 \
|
||||
PYTHONUNBUFFERED=1 \
|
||||
NIXPACKS_POETRY_VERSION=2.2.1
|
||||
FROM node:22-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends build-essential curl \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
COPY package.json ./
|
||||
RUN npm install
|
||||
|
||||
RUN python -m venv --copies /opt/venv
|
||||
ENV VIRTUAL_ENV=/opt/venv
|
||||
ENV PATH="/opt/venv/bin:$PATH"
|
||||
COPY tsconfig.json ./
|
||||
COPY src ./src
|
||||
RUN npm run build
|
||||
|
||||
COPY . .
|
||||
FROM node:22-alpine
|
||||
|
||||
RUN pip install --no-cache-dir poetry==$NIXPACKS_POETRY_VERSION \
|
||||
&& poetry install --no-interaction --no-ansi
|
||||
WORKDIR /app
|
||||
|
||||
ENV PORT=8000
|
||||
COPY package.json ./
|
||||
RUN npm install --omit=dev
|
||||
|
||||
CMD ["sh", "-c", "poetry run python manage.py collectstatic --noinput && poetry run python -m gunicorn geo.wsgi:application --bind 0.0.0.0:${PORT:-8000}"]
|
||||
COPY --from=builder /app/dist ./dist
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["node", "dist/index.js"]
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
"""Geo Django project."""
|
||||
148
geo/settings.py
148
geo/settings.py
@@ -1,148 +0,0 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from dotenv import load_dotenv
|
||||
from infisical_sdk import InfisicalSDKClient
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
|
||||
load_dotenv()
|
||||
|
||||
INFISICAL_API_URL = os.environ["INFISICAL_API_URL"]
|
||||
INFISICAL_CLIENT_ID = os.environ["INFISICAL_CLIENT_ID"]
|
||||
INFISICAL_CLIENT_SECRET = os.environ["INFISICAL_CLIENT_SECRET"]
|
||||
INFISICAL_PROJECT_ID = os.environ["INFISICAL_PROJECT_ID"]
|
||||
INFISICAL_ENV = os.environ.get("INFISICAL_ENV", "prod")
|
||||
|
||||
client = InfisicalSDKClient(host=INFISICAL_API_URL)
|
||||
client.auth.universal_auth.login(
|
||||
client_id=INFISICAL_CLIENT_ID,
|
||||
client_secret=INFISICAL_CLIENT_SECRET,
|
||||
)
|
||||
|
||||
# Fetch secrets from /geo and /shared
|
||||
for secret_path in ["/geo", "/shared"]:
|
||||
secrets_response = client.secrets.list_secrets(
|
||||
environment_slug=INFISICAL_ENV,
|
||||
secret_path=secret_path,
|
||||
project_id=INFISICAL_PROJECT_ID,
|
||||
expand_secret_references=True,
|
||||
view_secret_value=True,
|
||||
)
|
||||
for secret in secrets_response.secrets:
|
||||
os.environ[secret.secretKey] = secret.secretValue
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', 'dev-secret-key-change-in-production')
|
||||
|
||||
DEBUG = os.getenv('DEBUG', 'False') == 'True'
|
||||
|
||||
# Sentry/GlitchTip configuration
|
||||
SENTRY_DSN = os.getenv('SENTRY_DSN', '')
|
||||
if SENTRY_DSN:
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
integrations=[DjangoIntegration()],
|
||||
auto_session_tracking=False,
|
||||
traces_sample_rate=0.01,
|
||||
release=os.getenv('RELEASE_VERSION', '1.0.0'),
|
||||
environment=os.getenv('ENVIRONMENT', 'production'),
|
||||
send_default_pii=False,
|
||||
debug=DEBUG,
|
||||
)
|
||||
|
||||
ALLOWED_HOSTS = ['*']
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = ['https://geo.optovia.ru']
|
||||
|
||||
INSTALLED_APPS = [
|
||||
'whitenoise.runserver_nostatic',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.staticfiles',
|
||||
'corsheaders',
|
||||
'graphene_django',
|
||||
'geo_app',
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
'corsheaders.middleware.CorsMiddleware',
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'geo.urls'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [],
|
||||
'APP_DIRS': True,
|
||||
'OPTIONS': {
|
||||
'context_processors': [
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.request',
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = 'geo.wsgi.application'
|
||||
|
||||
# No database - we use ArangoDB directly
|
||||
DATABASES = {}
|
||||
|
||||
# Internationalization
|
||||
LANGUAGE_CODE = 'ru-ru'
|
||||
TIME_ZONE = 'UTC'
|
||||
USE_I18N = True
|
||||
USE_TZ = True
|
||||
|
||||
# Static files
|
||||
STATIC_URL = '/static/'
|
||||
STATIC_ROOT = BASE_DIR / 'staticfiles'
|
||||
|
||||
# Default primary key field type
|
||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||
|
||||
# CORS
|
||||
CORS_ALLOW_ALL_ORIGINS = False
|
||||
CORS_ALLOWED_ORIGINS = ['https://optovia.ru']
|
||||
CORS_ALLOW_CREDENTIALS = True
|
||||
|
||||
# GraphQL
|
||||
GRAPHENE = {
|
||||
'SCHEMA': 'geo_app.schema.schema',
|
||||
}
|
||||
|
||||
# ArangoDB connection (internal M2M)
|
||||
ARANGODB_INTERNAL_URL = os.getenv('ARANGODB_INTERNAL_URL', 'localhost:8529')
|
||||
ARANGODB_DATABASE = os.getenv('ARANGODB_DATABASE', 'optovia_maps')
|
||||
ARANGODB_PASSWORD = os.getenv('ARANGODB_PASSWORD', '')
|
||||
|
||||
# Routing services (external APIs)
|
||||
GRAPHHOPPER_EXTERNAL_URL = os.getenv('GRAPHHOPPER_EXTERNAL_URL', 'https://graphhopper.optovia.ru')
|
||||
OPENRAILROUTING_EXTERNAL_URL = os.getenv('OPENRAILROUTING_EXTERNAL_URL', 'https://openrailrouting.optovia.ru')
|
||||
|
||||
# Logging
|
||||
LOGGING = {
|
||||
'version': 1,
|
||||
'disable_existing_loggers': False,
|
||||
'handlers': {
|
||||
'console': {
|
||||
'class': 'logging.StreamHandler',
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
'django.request': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
'geo_app': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,125 +0,0 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from dotenv import load_dotenv
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
|
||||
BASE_DIR = Path(__file__).resolve().parent.parent
|
||||
|
||||
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', 'dev-secret-key-change-in-production')
|
||||
|
||||
DEBUG = True
|
||||
|
||||
# Sentry/GlitchTip configuration
|
||||
SENTRY_DSN = os.getenv('SENTRY_DSN', '')
|
||||
if SENTRY_DSN:
|
||||
sentry_sdk.init(
|
||||
dsn=SENTRY_DSN,
|
||||
integrations=[DjangoIntegration()],
|
||||
auto_session_tracking=False,
|
||||
traces_sample_rate=0.01,
|
||||
release=os.getenv('RELEASE_VERSION', '1.0.0'),
|
||||
environment=os.getenv('ENVIRONMENT', 'production'),
|
||||
send_default_pii=False,
|
||||
debug=DEBUG,
|
||||
)
|
||||
|
||||
ALLOWED_HOSTS = ['*']
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = ['https://geo.optovia.ru']
|
||||
|
||||
INSTALLED_APPS = [
|
||||
'whitenoise.runserver_nostatic',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.staticfiles',
|
||||
'corsheaders',
|
||||
'graphene_django',
|
||||
'geo_app',
|
||||
]
|
||||
|
||||
MIDDLEWARE = [
|
||||
'corsheaders.middleware.CorsMiddleware',
|
||||
'django.middleware.security.SecurityMiddleware',
|
||||
'whitenoise.middleware.WhiteNoiseMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
]
|
||||
|
||||
ROOT_URLCONF = 'geo.urls'
|
||||
|
||||
TEMPLATES = [
|
||||
{
|
||||
'BACKEND': 'django.template.backends.django.DjangoTemplates',
|
||||
'DIRS': [],
|
||||
'APP_DIRS': True,
|
||||
'OPTIONS': {
|
||||
'context_processors': [
|
||||
'django.template.context_processors.debug',
|
||||
'django.template.context_processors.request',
|
||||
],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
WSGI_APPLICATION = 'geo.wsgi.application'
|
||||
|
||||
# No database - we use ArangoDB directly
|
||||
DATABASES = {}
|
||||
|
||||
# Internationalization
|
||||
LANGUAGE_CODE = 'ru-ru'
|
||||
TIME_ZONE = 'UTC'
|
||||
USE_I18N = True
|
||||
USE_TZ = True
|
||||
|
||||
# Static files
|
||||
STATIC_URL = '/static/'
|
||||
STATIC_ROOT = BASE_DIR / 'staticfiles'
|
||||
|
||||
# Default primary key field type
|
||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||
|
||||
# CORS
|
||||
CORS_ALLOW_ALL_ORIGINS = False
|
||||
CORS_ALLOWED_ORIGINS = ['http://localhost:3000', 'https://optovia.ru']
|
||||
CORS_ALLOW_CREDENTIALS = True
|
||||
|
||||
# GraphQL
|
||||
GRAPHENE = {
|
||||
'SCHEMA': 'geo_app.schema.schema',
|
||||
}
|
||||
|
||||
# ArangoDB connection (internal M2M)
|
||||
ARANGODB_INTERNAL_URL = os.getenv('ARANGODB_INTERNAL_URL', 'localhost:8529')
|
||||
ARANGODB_DATABASE = os.getenv('ARANGODB_DATABASE', 'optovia_maps')
|
||||
ARANGODB_PASSWORD = os.getenv('ARANGODB_PASSWORD', '')
|
||||
|
||||
# Routing services (external APIs)
|
||||
GRAPHHOPPER_EXTERNAL_URL = os.getenv('GRAPHHOPPER_EXTERNAL_URL', 'https://graphhopper.optovia.ru')
|
||||
OPENRAILROUTING_EXTERNAL_URL = os.getenv('OPENRAILROUTING_EXTERNAL_URL', 'https://openrailrouting.optovia.ru')
|
||||
|
||||
# Logging
|
||||
LOGGING = {
|
||||
'version': 1,
|
||||
'disable_existing_loggers': False,
|
||||
'handlers': {
|
||||
'console': {
|
||||
'class': 'logging.StreamHandler',
|
||||
},
|
||||
},
|
||||
'loggers': {
|
||||
'django.request': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
'geo_app': {
|
||||
'handlers': ['console'],
|
||||
'level': 'DEBUG',
|
||||
'propagate': False,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
from django.urls import path
|
||||
from graphene_django.views import GraphQLView
|
||||
from django.views.decorators.csrf import csrf_exempt
|
||||
|
||||
urlpatterns = [
|
||||
path('graphql/public/', csrf_exempt(GraphQLView.as_view(graphiql=True))),
|
||||
]
|
||||
@@ -1,5 +0,0 @@
|
||||
import os
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geo.settings')
|
||||
application = get_wsgi_application()
|
||||
@@ -1 +0,0 @@
|
||||
"""Geo app - logistics graph operations."""
|
||||
@@ -1,6 +0,0 @@
|
||||
from django.apps import AppConfig
|
||||
|
||||
|
||||
class GeoAppConfig(AppConfig):
|
||||
default_auto_field = 'django.db.models.BigAutoField'
|
||||
name = 'geo_app'
|
||||
@@ -1,49 +0,0 @@
|
||||
"""ArangoDB client singleton."""
|
||||
import logging
|
||||
from arango import ArangoClient
|
||||
from django.conf import settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_db = None
|
||||
|
||||
|
||||
def get_db():
|
||||
"""Get ArangoDB database connection (singleton)."""
|
||||
global _db
|
||||
if _db is None:
|
||||
hosts = settings.ARANGODB_INTERNAL_URL
|
||||
if not hosts.startswith("http"):
|
||||
hosts = f"http://{hosts}"
|
||||
|
||||
client = ArangoClient(hosts=hosts)
|
||||
_db = client.db(
|
||||
settings.ARANGODB_DATABASE,
|
||||
username='root',
|
||||
password=settings.ARANGODB_PASSWORD,
|
||||
)
|
||||
logger.info(
|
||||
"Connected to ArangoDB: %s/%s",
|
||||
hosts,
|
||||
settings.ARANGODB_DATABASE,
|
||||
)
|
||||
return _db
|
||||
|
||||
|
||||
def ensure_graph():
|
||||
"""Ensure named graph exists for K_SHORTEST_PATHS queries."""
|
||||
db = get_db()
|
||||
graph_name = 'optovia_graph'
|
||||
|
||||
if db.has_graph(graph_name):
|
||||
return db.graph(graph_name)
|
||||
|
||||
logger.info("Creating graph: %s", graph_name)
|
||||
return db.create_graph(
|
||||
graph_name,
|
||||
edge_definitions=[{
|
||||
'edge_collection': 'edges',
|
||||
'from_vertex_collections': ['nodes'],
|
||||
'to_vertex_collections': ['nodes'],
|
||||
}],
|
||||
)
|
||||
@@ -1,236 +0,0 @@
|
||||
"""
|
||||
Server-side map clustering using Uber H3 hexagonal grid.
|
||||
|
||||
Maps zoom levels to h3 resolutions and groups nodes by cell.
|
||||
"""
|
||||
import logging
|
||||
import h3
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Map zoom level to h3 resolution
|
||||
# Higher zoom = higher resolution = smaller cells
|
||||
ZOOM_TO_RES = {
|
||||
0: 0, 1: 0, 2: 1, 3: 1, 4: 2, 5: 2,
|
||||
6: 3, 7: 3, 8: 4, 9: 4, 10: 5, 11: 5,
|
||||
12: 6, 13: 7, 14: 8, 15: 9, 16: 10
|
||||
}
|
||||
|
||||
|
||||
def _fetch_nodes(
|
||||
db,
|
||||
west,
|
||||
south,
|
||||
east,
|
||||
north,
|
||||
transport_type=None,
|
||||
node_type=None,
|
||||
product_uuid=None,
|
||||
hub_uuid=None,
|
||||
supplier_uuid=None,
|
||||
):
|
||||
"""Fetch nodes from database for a bounding box.
|
||||
|
||||
Args:
|
||||
db: Database connection
|
||||
west, south, east, north: Bounding box coordinates
|
||||
transport_type: Filter by transport type (auto, rail, sea, air)
|
||||
node_type: Type of nodes to fetch ('logistics', 'offer', 'supplier')
|
||||
"""
|
||||
bind_vars = {
|
||||
'west': west,
|
||||
'south': south,
|
||||
'east': east,
|
||||
'north': north,
|
||||
'product_uuid': product_uuid,
|
||||
'hub_uuid': hub_uuid,
|
||||
'supplier_uuid': supplier_uuid,
|
||||
}
|
||||
|
||||
# Select AQL query based on node_type
|
||||
if node_type == 'offer':
|
||||
aql = """
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'offer'
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
FILTER node.latitude >= @south AND node.latitude <= @north
|
||||
FILTER node.longitude >= @west AND node.longitude <= @east
|
||||
FILTER @product_uuid == null OR node.product_uuid == @product_uuid
|
||||
FILTER @supplier_uuid == null OR node.supplier_uuid == @supplier_uuid
|
||||
LET has_hub = @hub_uuid == null ? true : LENGTH(
|
||||
FOR edge IN edges
|
||||
FILTER edge.transport_type == 'offer'
|
||||
FILTER (
|
||||
(edge._from == CONCAT('nodes/', node._key) AND edge._to == CONCAT('nodes/', @hub_uuid)) OR
|
||||
(edge._to == CONCAT('nodes/', node._key) AND edge._from == CONCAT('nodes/', @hub_uuid))
|
||||
)
|
||||
LIMIT 1
|
||||
RETURN 1
|
||||
) > 0
|
||||
FILTER has_hub
|
||||
RETURN node
|
||||
"""
|
||||
elif node_type == 'supplier':
|
||||
# Get suppliers that have offers (aggregate through offers)
|
||||
aql = """
|
||||
FOR offer IN nodes
|
||||
FILTER offer.node_type == 'offer'
|
||||
FILTER offer.supplier_uuid != null
|
||||
FILTER @product_uuid == null OR offer.product_uuid == @product_uuid
|
||||
FILTER @supplier_uuid == null OR offer.supplier_uuid == @supplier_uuid
|
||||
LET has_hub = @hub_uuid == null ? true : LENGTH(
|
||||
FOR edge IN edges
|
||||
FILTER edge.transport_type == 'offer'
|
||||
FILTER (
|
||||
(edge._from == CONCAT('nodes/', offer._key) AND edge._to == CONCAT('nodes/', @hub_uuid)) OR
|
||||
(edge._to == CONCAT('nodes/', offer._key) AND edge._from == CONCAT('nodes/', @hub_uuid))
|
||||
)
|
||||
LIMIT 1
|
||||
RETURN 1
|
||||
) > 0
|
||||
FILTER has_hub
|
||||
LET supplier = DOCUMENT(CONCAT('nodes/', offer.supplier_uuid))
|
||||
FILTER supplier != null
|
||||
FILTER supplier.latitude != null AND supplier.longitude != null
|
||||
FILTER supplier.latitude >= @south AND supplier.latitude <= @north
|
||||
FILTER supplier.longitude >= @west AND supplier.longitude <= @east
|
||||
COLLECT sup_uuid = offer.supplier_uuid INTO offers
|
||||
LET sup = DOCUMENT(CONCAT('nodes/', sup_uuid))
|
||||
RETURN {
|
||||
_key: sup_uuid,
|
||||
name: sup.name,
|
||||
latitude: sup.latitude,
|
||||
longitude: sup.longitude,
|
||||
country: sup.country,
|
||||
country_code: sup.country_code,
|
||||
node_type: 'supplier',
|
||||
offers_count: LENGTH(offers)
|
||||
}
|
||||
"""
|
||||
else: # logistics (default)
|
||||
aql = """
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'logistics' OR node.node_type == null
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
FILTER node.latitude >= @south AND node.latitude <= @north
|
||||
FILTER node.longitude >= @west AND node.longitude <= @east
|
||||
FILTER @hub_uuid == null OR node._key == @hub_uuid
|
||||
LET has_offer = (@product_uuid == null AND @supplier_uuid == null) ? true : LENGTH(
|
||||
FOR edge IN edges
|
||||
FILTER edge.transport_type == 'offer'
|
||||
FILTER edge._from == CONCAT('nodes/', node._key) OR edge._to == CONCAT('nodes/', node._key)
|
||||
LET offer_id = edge._from == CONCAT('nodes/', node._key) ? edge._to : edge._from
|
||||
LET offer = DOCUMENT(offer_id)
|
||||
FILTER offer != null AND offer.node_type == 'offer'
|
||||
FILTER @product_uuid == null OR offer.product_uuid == @product_uuid
|
||||
FILTER @supplier_uuid == null OR offer.supplier_uuid == @supplier_uuid
|
||||
LIMIT 1
|
||||
RETURN 1
|
||||
) > 0
|
||||
FILTER has_offer
|
||||
RETURN node
|
||||
"""
|
||||
|
||||
cursor = db.aql.execute(aql, bind_vars=bind_vars)
|
||||
nodes = list(cursor)
|
||||
|
||||
# Filter by transport type if specified (only for logistics nodes)
|
||||
if node_type in (None, 'logistics'):
|
||||
if transport_type:
|
||||
nodes = [
|
||||
n for n in nodes
|
||||
if transport_type in (n.get('transport_types') or [])
|
||||
]
|
||||
else:
|
||||
# Default: only rail/sea hubs
|
||||
nodes = [
|
||||
n for n in nodes
|
||||
if ('rail' in (n.get('transport_types') or [])) or ('sea' in (n.get('transport_types') or []))
|
||||
]
|
||||
|
||||
return nodes
|
||||
|
||||
|
||||
def get_clustered_nodes(
|
||||
db,
|
||||
west,
|
||||
south,
|
||||
east,
|
||||
north,
|
||||
zoom,
|
||||
transport_type=None,
|
||||
node_type=None,
|
||||
product_uuid=None,
|
||||
hub_uuid=None,
|
||||
supplier_uuid=None,
|
||||
):
|
||||
"""
|
||||
Get clustered nodes for given bounding box and zoom level.
|
||||
|
||||
Uses H3 hexagonal grid to group nearby nodes.
|
||||
|
||||
Args:
|
||||
db: Database connection
|
||||
west, south, east, north: Bounding box coordinates
|
||||
zoom: Map zoom level
|
||||
transport_type: Filter by transport type (for logistics nodes)
|
||||
node_type: Type of nodes ('logistics', 'offer', 'supplier')
|
||||
"""
|
||||
resolution = ZOOM_TO_RES.get(int(zoom), 5)
|
||||
nodes = _fetch_nodes(
|
||||
db,
|
||||
west,
|
||||
south,
|
||||
east,
|
||||
north,
|
||||
transport_type,
|
||||
node_type,
|
||||
product_uuid,
|
||||
hub_uuid,
|
||||
supplier_uuid,
|
||||
)
|
||||
|
||||
if not nodes:
|
||||
return []
|
||||
|
||||
# Group nodes by h3 cell
|
||||
cells = {}
|
||||
for node in nodes:
|
||||
lat = node.get('latitude')
|
||||
lng = node.get('longitude')
|
||||
|
||||
cell = h3.latlng_to_cell(lat, lng, resolution)
|
||||
if cell not in cells:
|
||||
cells[cell] = []
|
||||
cells[cell].append(node)
|
||||
|
||||
# Build results
|
||||
results = []
|
||||
for cell, nodes_in_cell in cells.items():
|
||||
count = len(nodes_in_cell)
|
||||
|
||||
if count == 1:
|
||||
# Single point — return actual node data
|
||||
node = nodes_in_cell[0]
|
||||
results.append({
|
||||
'id': node.get('_key'),
|
||||
'latitude': node.get('latitude'),
|
||||
'longitude': node.get('longitude'),
|
||||
'count': 1,
|
||||
'expansion_zoom': None,
|
||||
'name': node.get('name'),
|
||||
})
|
||||
else:
|
||||
# Cluster — return cell centroid
|
||||
lat, lng = h3.cell_to_latlng(cell)
|
||||
results.append({
|
||||
'id': f"cluster-{cell}",
|
||||
'latitude': lat,
|
||||
'longitude': lng,
|
||||
'count': count,
|
||||
'expansion_zoom': min(zoom + 2, 16),
|
||||
'name': None,
|
||||
})
|
||||
|
||||
logger.info("Returning %d clusters/points for zoom=%d res=%d", len(results), zoom, resolution)
|
||||
return results
|
||||
1846
geo_app/schema.py
1846
geo_app/schema.py
File diff suppressed because it is too large
Load Diff
17
manage.py
17
manage.py
@@ -1,17 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
"""Django's command-line utility for administrative tasks."""
|
||||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == '__main__':
|
||||
"""Run administrative tasks."""
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'geo.settings')
|
||||
try:
|
||||
from django.core.management import execute_from_command_line
|
||||
except ImportError as exc:
|
||||
raise ImportError(
|
||||
"Couldn't import Django. Are you sure it's installed and "
|
||||
"available on your PYTHONPATH environment variable? Did you "
|
||||
"forget to activate a virtual environment?"
|
||||
) from exc
|
||||
execute_from_command_line(sys.argv)
|
||||
@@ -1,18 +0,0 @@
|
||||
providers = ["python"]
|
||||
|
||||
[build]
|
||||
|
||||
[phases.install]
|
||||
cmds = [
|
||||
"python -m venv --copies /opt/venv",
|
||||
". /opt/venv/bin/activate",
|
||||
"pip install poetry==$NIXPACKS_POETRY_VERSION",
|
||||
"poetry install --no-interaction --no-ansi"
|
||||
]
|
||||
|
||||
[start]
|
||||
cmd = "poetry run python manage.py collectstatic --noinput && poetry run python -m gunicorn geo.wsgi:application --bind 0.0.0.0:${PORT:-8000}"
|
||||
|
||||
[variables]
|
||||
# Set Poetry version to match local environment
|
||||
NIXPACKS_POETRY_VERSION = "2.2.1"
|
||||
3732
package-lock.json
generated
Normal file
3732
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
25
package.json
Normal file
25
package.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"name": "geo",
|
||||
"version": "1.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"start": "node dist/index.js",
|
||||
"dev": "tsx --watch src/index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@apollo/server": "^4.11.3",
|
||||
"@sentry/node": "^9.5.0",
|
||||
"arangojs": "^9.2.0",
|
||||
"cors": "^2.8.5",
|
||||
"express": "^5.0.1",
|
||||
"h3-js": "^4.2.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/node": "^22.13.0",
|
||||
"tsx": "^4.19.3",
|
||||
"typescript": "^5.7.3"
|
||||
}
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
[project]
|
||||
name = "geo"
|
||||
version = "0.1.0"
|
||||
description = "Geo service - logistics graph and routing"
|
||||
authors = [
|
||||
{name = "Ruslan Bakiev",email = "572431+veikab@users.noreply.github.com"}
|
||||
]
|
||||
requires-python = "^3.11"
|
||||
dependencies = [
|
||||
"django (>=5.2.8,<6.0)",
|
||||
"graphene-django (>=3.2.3,<4.0.0)",
|
||||
"django-cors-headers (>=4.9.0,<5.0.0)",
|
||||
"python-arango (>=8.0.0,<9.0.0)",
|
||||
"python-dotenv (>=1.2.1,<2.0.0)",
|
||||
"infisicalsdk (>=1.0.12,<2.0.0)",
|
||||
"gunicorn (>=23.0.0,<24.0.0)",
|
||||
"whitenoise (>=6.7.0,<7.0.0)",
|
||||
"sentry-sdk (>=2.47.0,<3.0.0)",
|
||||
"h3 (>=4.0.0,<5.0.0)"
|
||||
]
|
||||
|
||||
[tool.poetry]
|
||||
package-mode = false
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pytest = "^8.0.0"
|
||||
requests = "^2.32.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core>=2.0.0,<3.0.0"]
|
||||
build-backend = "poetry.core.masonry.api"
|
||||
12
pytest.ini
12
pytest.ini
@@ -1,12 +0,0 @@
|
||||
[pytest]
|
||||
testpaths = tests
|
||||
python_files = test_*.py
|
||||
python_classes = Test*
|
||||
python_functions = test_*
|
||||
addopts =
|
||||
-v
|
||||
--tb=short
|
||||
--strict-markers
|
||||
markers =
|
||||
slow: marks tests as slow (deselect with '-m "not slow"')
|
||||
integration: marks tests as integration tests
|
||||
36
run_tests.sh
36
run_tests.sh
@@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
# Run geo service GraphQL endpoint tests
|
||||
|
||||
set -e
|
||||
|
||||
cd "$(dirname "$0")"
|
||||
|
||||
echo "🧪 Running Geo Service GraphQL Tests"
|
||||
echo "===================================="
|
||||
echo ""
|
||||
|
||||
# Check if TEST_GEO_URL is set, otherwise use production
|
||||
if [ -z "$TEST_GEO_URL" ]; then
|
||||
export TEST_GEO_URL="https://geo.optovia.ru/graphql/public/"
|
||||
echo "📍 Testing against: $TEST_GEO_URL (production)"
|
||||
else
|
||||
echo "📍 Testing against: $TEST_GEO_URL"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
|
||||
# Install dependencies if needed
|
||||
if ! poetry run python -c "import pytest" 2>/dev/null; then
|
||||
echo "📦 Installing dependencies..."
|
||||
poetry install --with dev
|
||||
echo ""
|
||||
fi
|
||||
|
||||
# Run tests
|
||||
echo "🚀 Running tests..."
|
||||
echo ""
|
||||
|
||||
poetry run pytest tests/test_graphql_endpoints.py -v -s "$@"
|
||||
|
||||
echo ""
|
||||
echo "✅ Test run complete"
|
||||
192
src/cluster.ts
Normal file
192
src/cluster.ts
Normal file
@@ -0,0 +1,192 @@
|
||||
import { latLngToCell, cellToLatLng } from 'h3-js'
|
||||
import { getDb } from './db.js'
|
||||
|
||||
const ZOOM_TO_RES: Record<number, number> = {
|
||||
0: 0, 1: 0, 2: 1, 3: 1, 4: 2, 5: 2,
|
||||
6: 3, 7: 3, 8: 4, 9: 4, 10: 5, 11: 5,
|
||||
12: 6, 13: 7, 14: 8, 15: 9, 16: 10,
|
||||
}
|
||||
|
||||
interface CachedNode {
|
||||
_key: string
|
||||
name?: string
|
||||
latitude?: number
|
||||
longitude?: number
|
||||
country?: string
|
||||
country_code?: string
|
||||
node_type?: string
|
||||
transport_types?: string[]
|
||||
}
|
||||
|
||||
const nodesCache = new Map<string, CachedNode[]>()
|
||||
|
||||
function fetchNodes(transportType?: string | null, nodeType?: string | null): CachedNode[] {
|
||||
const cacheKey = `nodes:${transportType || 'all'}:${nodeType || 'logistics'}`
|
||||
if (nodesCache.has(cacheKey)) return nodesCache.get(cacheKey)!
|
||||
|
||||
const db = getDb()
|
||||
let aql: string
|
||||
|
||||
if (nodeType === 'offer') {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'offer'
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
} else if (nodeType === 'supplier') {
|
||||
aql = `
|
||||
FOR offer IN nodes
|
||||
FILTER offer.node_type == 'offer'
|
||||
FILTER offer.supplier_uuid != null
|
||||
LET supplier = DOCUMENT(CONCAT('nodes/', offer.supplier_uuid))
|
||||
FILTER supplier != null
|
||||
FILTER supplier.latitude != null AND supplier.longitude != null
|
||||
COLLECT sup_uuid = offer.supplier_uuid INTO offers
|
||||
LET sup = DOCUMENT(CONCAT('nodes/', sup_uuid))
|
||||
RETURN {
|
||||
_key: sup_uuid,
|
||||
name: sup.name,
|
||||
latitude: sup.latitude,
|
||||
longitude: sup.longitude,
|
||||
country: sup.country,
|
||||
country_code: sup.country_code,
|
||||
node_type: 'supplier',
|
||||
offers_count: LENGTH(offers)
|
||||
}
|
||||
`
|
||||
} else {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'logistics' OR node.node_type == null
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
}
|
||||
|
||||
// arangojs query returns a cursor — we need async. Use a sync cache pattern with pre-fetching.
|
||||
// Since this is called from resolvers which are async, we'll use a different approach.
|
||||
// Store a promise instead.
|
||||
throw new Error('Use fetchNodesAsync instead')
|
||||
}
|
||||
|
||||
export async function fetchNodesAsync(transportType?: string | null, nodeType?: string | null): Promise<CachedNode[]> {
|
||||
const cacheKey = `nodes:${transportType || 'all'}:${nodeType || 'logistics'}`
|
||||
if (nodesCache.has(cacheKey)) return nodesCache.get(cacheKey)!
|
||||
|
||||
const db = getDb()
|
||||
let aql: string
|
||||
|
||||
if (nodeType === 'offer') {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'offer'
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
} else if (nodeType === 'supplier') {
|
||||
aql = `
|
||||
FOR offer IN nodes
|
||||
FILTER offer.node_type == 'offer'
|
||||
FILTER offer.supplier_uuid != null
|
||||
LET supplier = DOCUMENT(CONCAT('nodes/', offer.supplier_uuid))
|
||||
FILTER supplier != null
|
||||
FILTER supplier.latitude != null AND supplier.longitude != null
|
||||
COLLECT sup_uuid = offer.supplier_uuid INTO offers
|
||||
LET sup = DOCUMENT(CONCAT('nodes/', sup_uuid))
|
||||
RETURN {
|
||||
_key: sup_uuid,
|
||||
name: sup.name,
|
||||
latitude: sup.latitude,
|
||||
longitude: sup.longitude,
|
||||
country: sup.country,
|
||||
country_code: sup.country_code,
|
||||
node_type: 'supplier',
|
||||
offers_count: LENGTH(offers)
|
||||
}
|
||||
`
|
||||
} else {
|
||||
aql = `
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'logistics' OR node.node_type == null
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`
|
||||
}
|
||||
|
||||
const cursor = await db.query(aql)
|
||||
let allNodes: CachedNode[] = await cursor.all()
|
||||
|
||||
if (transportType && (!nodeType || nodeType === 'logistics')) {
|
||||
allNodes = allNodes.filter(n => (n.transport_types || []).includes(transportType))
|
||||
}
|
||||
|
||||
nodesCache.set(cacheKey, allNodes)
|
||||
console.log(`Cached ${allNodes.length} nodes for ${cacheKey}`)
|
||||
return allNodes
|
||||
}
|
||||
|
||||
export interface ClusterPoint {
|
||||
id: string
|
||||
latitude: number
|
||||
longitude: number
|
||||
count: number
|
||||
expansion_zoom: number | null
|
||||
name: string | null
|
||||
}
|
||||
|
||||
export async function getClusteredNodes(
|
||||
west: number, south: number, east: number, north: number,
|
||||
zoom: number, transportType?: string | null, nodeType?: string | null,
|
||||
): Promise<ClusterPoint[]> {
|
||||
const resolution = ZOOM_TO_RES[Math.floor(zoom)] ?? 5
|
||||
const nodes = await fetchNodesAsync(transportType, nodeType)
|
||||
|
||||
if (!nodes.length) return []
|
||||
|
||||
const cells = new Map<string, CachedNode[]>()
|
||||
|
||||
for (const node of nodes) {
|
||||
const lat = node.latitude
|
||||
const lng = node.longitude
|
||||
if (lat == null || lng == null) continue
|
||||
if (lat < south || lat > north || lng < west || lng > east) continue
|
||||
|
||||
const cell = latLngToCell(lat, lng, resolution)
|
||||
if (!cells.has(cell)) cells.set(cell, [])
|
||||
cells.get(cell)!.push(node)
|
||||
}
|
||||
|
||||
const results: ClusterPoint[] = []
|
||||
|
||||
for (const [cell, nodesInCell] of cells) {
|
||||
if (nodesInCell.length === 1) {
|
||||
const node = nodesInCell[0]
|
||||
results.push({
|
||||
id: node._key,
|
||||
latitude: node.latitude!,
|
||||
longitude: node.longitude!,
|
||||
count: 1,
|
||||
expansion_zoom: null,
|
||||
name: node.name || null,
|
||||
})
|
||||
} else {
|
||||
const [lat, lng] = cellToLatLng(cell)
|
||||
results.push({
|
||||
id: `cluster-${cell}`,
|
||||
latitude: lat,
|
||||
longitude: lng,
|
||||
count: nodesInCell.length,
|
||||
expansion_zoom: Math.min(zoom + 2, 16),
|
||||
name: null,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
export function invalidateCache(): void {
|
||||
nodesCache.clear()
|
||||
console.log('Cluster cache invalidated')
|
||||
}
|
||||
27
src/db.ts
Normal file
27
src/db.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { Database } from 'arangojs'
|
||||
|
||||
const ARANGODB_URL = process.env.ARANGODB_INTERNAL_URL || 'http://localhost:8529'
|
||||
const ARANGODB_DATABASE = process.env.ARANGODB_DATABASE || 'optovia_maps'
|
||||
const ARANGODB_PASSWORD = process.env.ARANGODB_PASSWORD || ''
|
||||
|
||||
let _db: Database | null = null
|
||||
|
||||
export function getDb(): Database {
|
||||
if (!_db) {
|
||||
const url = ARANGODB_URL.startsWith('http') ? ARANGODB_URL : `http://${ARANGODB_URL}`
|
||||
_db = new Database({ url, databaseName: ARANGODB_DATABASE, auth: { username: 'root', password: ARANGODB_PASSWORD } })
|
||||
console.log(`Connected to ArangoDB: ${url}/${ARANGODB_DATABASE}`)
|
||||
}
|
||||
return _db
|
||||
}
|
||||
|
||||
export async function ensureGraph(): Promise<void> {
|
||||
const db = getDb()
|
||||
const graphs = await db.listGraphs()
|
||||
if (graphs.some(g => g.name === 'optovia_graph')) return
|
||||
|
||||
console.log('Creating graph: optovia_graph')
|
||||
await db.createGraph('optovia_graph', [
|
||||
{ collection: 'edges', from: ['nodes'], to: ['nodes'] },
|
||||
])
|
||||
}
|
||||
90
src/helpers.ts
Normal file
90
src/helpers.ts
Normal file
@@ -0,0 +1,90 @@
|
||||
/** Haversine distance in km. */
|
||||
export function distanceKm(lat1: number, lon1: number, lat2: number, lon2: number): number {
|
||||
const R = 6371
|
||||
const dLat = (lat2 - lat1) * Math.PI / 180
|
||||
const dLon = (lon2 - lon1) * Math.PI / 180
|
||||
const a =
|
||||
Math.sin(dLat / 2) ** 2 +
|
||||
Math.cos(lat1 * Math.PI / 180) * Math.cos(lat2 * Math.PI / 180) *
|
||||
Math.sin(dLon / 2) ** 2
|
||||
return R * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a))
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export type ArangoDoc = Record<string, any>
|
||||
|
||||
export interface RouteStage {
|
||||
from_uuid: string | null
|
||||
from_name: string | null
|
||||
from_lat: number | null
|
||||
from_lon: number | null
|
||||
to_uuid: string | null
|
||||
to_name: string | null
|
||||
to_lat: number | null
|
||||
to_lon: number | null
|
||||
distance_km: number
|
||||
travel_time_seconds: number
|
||||
transport_type: string | null
|
||||
}
|
||||
|
||||
export interface RoutePath {
|
||||
total_distance_km: number
|
||||
total_time_seconds: number
|
||||
stages: RouteStage[]
|
||||
}
|
||||
|
||||
function buildStage(fromDoc: ArangoDoc | undefined, toDoc: ArangoDoc | undefined, transportType: string, edges: ArangoDoc[]): RouteStage {
|
||||
const distance = edges.reduce((s, e) => s + (e.distance_km || 0), 0)
|
||||
const time = edges.reduce((s, e) => s + (e.travel_time_seconds || 0), 0)
|
||||
return {
|
||||
from_uuid: fromDoc?._key ?? null,
|
||||
from_name: fromDoc?.name ?? null,
|
||||
from_lat: fromDoc?.latitude ?? null,
|
||||
from_lon: fromDoc?.longitude ?? null,
|
||||
to_uuid: toDoc?._key ?? null,
|
||||
to_name: toDoc?.name ?? null,
|
||||
to_lat: toDoc?.latitude ?? null,
|
||||
to_lon: toDoc?.longitude ?? null,
|
||||
distance_km: distance,
|
||||
travel_time_seconds: time,
|
||||
transport_type: transportType,
|
||||
}
|
||||
}
|
||||
|
||||
export function buildRouteFromEdges(pathEdges: [string, string, ArangoDoc][], nodeDocs: Map<string, ArangoDoc>): RoutePath | null {
|
||||
if (!pathEdges.length) return null
|
||||
|
||||
// Filter offer edges — not transport stages
|
||||
const filtered = pathEdges.filter(([, , e]) => e.transport_type !== 'offer')
|
||||
if (!filtered.length) return null
|
||||
|
||||
const stages: RouteStage[] = []
|
||||
let currentEdges: ArangoDoc[] = []
|
||||
let currentType: string | null = null
|
||||
let segmentStart: string | null = null
|
||||
|
||||
for (const [fromKey, , edge] of filtered) {
|
||||
const edgeType = edge.transport_type as string
|
||||
if (currentType === null) {
|
||||
currentType = edgeType
|
||||
currentEdges = [edge]
|
||||
segmentStart = fromKey
|
||||
} else if (edgeType === currentType) {
|
||||
currentEdges.push(edge)
|
||||
} else {
|
||||
stages.push(buildStage(nodeDocs.get(segmentStart!), nodeDocs.get(fromKey), currentType, currentEdges))
|
||||
currentType = edgeType
|
||||
currentEdges = [edge]
|
||||
segmentStart = fromKey
|
||||
}
|
||||
}
|
||||
|
||||
const lastTo = filtered[filtered.length - 1][1]
|
||||
stages.push(buildStage(nodeDocs.get(segmentStart!), nodeDocs.get(lastTo), currentType!, currentEdges))
|
||||
|
||||
return {
|
||||
total_distance_km: stages.reduce((s, st) => s + (st.distance_km || 0), 0),
|
||||
total_time_seconds: stages.reduce((s, st) => s + (st.travel_time_seconds || 0), 0),
|
||||
stages,
|
||||
}
|
||||
}
|
||||
33
src/index.ts
Normal file
33
src/index.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import express from 'express'
|
||||
import cors from 'cors'
|
||||
import { ApolloServer } from '@apollo/server'
|
||||
import { expressMiddleware } from '@apollo/server/express4'
|
||||
import * as Sentry from '@sentry/node'
|
||||
import { typeDefs, resolvers } from './schema.js'
|
||||
|
||||
const PORT = parseInt(process.env.PORT || '8000', 10)
|
||||
const SENTRY_DSN = process.env.SENTRY_DSN || ''
|
||||
|
||||
if (SENTRY_DSN) {
|
||||
Sentry.init({
|
||||
dsn: SENTRY_DSN,
|
||||
tracesSampleRate: 0.01,
|
||||
release: process.env.RELEASE_VERSION || '1.0.0',
|
||||
environment: process.env.ENVIRONMENT || 'production',
|
||||
})
|
||||
}
|
||||
|
||||
const app = express()
|
||||
app.use(cors({ origin: ['https://optovia.ru'], credentials: true }))
|
||||
|
||||
const server = new ApolloServer({ typeDefs, resolvers, introspection: true })
|
||||
await server.start()
|
||||
|
||||
app.use('/graphql/public', express.json(), expressMiddleware(server) as unknown as express.RequestHandler)
|
||||
|
||||
app.get('/health', (_, res) => { res.json({ status: 'ok' }) })
|
||||
|
||||
app.listen(PORT, '0.0.0.0', () => {
|
||||
console.log(`Geo server ready on port ${PORT}`)
|
||||
console.log(` /graphql/public - public (no auth)`)
|
||||
})
|
||||
1027
src/schema.ts
Normal file
1027
src/schema.ts
Normal file
File diff suppressed because it is too large
Load Diff
130
tests/README.md
130
tests/README.md
@@ -1,130 +0,0 @@
|
||||
# Geo Service Tests
|
||||
|
||||
Comprehensive test suite for all GraphQL endpoints in the geo service.
|
||||
|
||||
## Test Coverage
|
||||
|
||||
### Basic Endpoints (4 tests)
|
||||
- `test_products_query` - List all unique products
|
||||
- `test_nodes_query_basic` - List hubs/nodes without filters
|
||||
- `test_nodes_query_with_filters` - Filter nodes by transport type and country
|
||||
- `test_nodes_query_with_bounds` - Filter nodes by geographic bounds
|
||||
- `test_clustered_nodes_query` - Map clustering for visualization
|
||||
|
||||
### Nearest Endpoints (6 tests)
|
||||
- `test_nearest_hubs` - Find hubs near coordinates
|
||||
- `test_nearest_hubs_with_product_filter` - Find hubs with specific product
|
||||
- `test_nearest_offers` - Find offers near coordinates
|
||||
- `test_nearest_offers_with_product_filter` - Find offers for specific product
|
||||
- `test_nearest_suppliers` - Find suppliers near coordinates
|
||||
- `test_nearest_suppliers_with_product_filter` - Find suppliers with product
|
||||
|
||||
### Routing Endpoints (3 tests)
|
||||
- `test_route_to_coordinate` - Multi-hop route from offer to destination
|
||||
- `test_auto_route` - Road route between coordinates (requires OSRM)
|
||||
- `test_rail_route` - Rail route between coordinates
|
||||
|
||||
### Edge Cases (3 tests)
|
||||
- `test_nearest_with_zero_radius` - Very small search radius
|
||||
- `test_invalid_coordinates` - Invalid lat/lon values
|
||||
- `test_nonexistent_uuid` - Non-existent offer UUID
|
||||
|
||||
**Total: 16 tests covering 8 main endpoints**
|
||||
|
||||
## Running Tests
|
||||
|
||||
### Local Testing (against production)
|
||||
|
||||
```bash
|
||||
cd backends/geo
|
||||
poetry install
|
||||
poetry run pytest tests/test_graphql_endpoints.py -v
|
||||
```
|
||||
|
||||
### Testing against different endpoint
|
||||
|
||||
```bash
|
||||
export TEST_GEO_URL=https://geo-staging.example.com/graphql/public/
|
||||
poetry run pytest tests/test_graphql_endpoints.py -v
|
||||
```
|
||||
|
||||
### Run specific test class
|
||||
|
||||
```bash
|
||||
poetry run pytest tests/test_graphql_endpoints.py::TestNearestEndpoints -v
|
||||
```
|
||||
|
||||
### Run single test
|
||||
|
||||
```bash
|
||||
poetry run pytest tests/test_graphql_endpoints.py::TestNearestEndpoints::test_nearest_offers -v
|
||||
```
|
||||
|
||||
### Show print output
|
||||
|
||||
```bash
|
||||
poetry run pytest tests/test_graphql_endpoints.py -v -s
|
||||
```
|
||||
|
||||
## CI Integration
|
||||
|
||||
Tests should be run on each deployment:
|
||||
|
||||
```yaml
|
||||
# .gitea/workflows/test.yml
|
||||
- name: Run geo endpoint tests
|
||||
run: |
|
||||
cd backends/geo
|
||||
poetry install
|
||||
export TEST_GEO_URL=https://geo.optovia.ru/graphql/public/
|
||||
poetry run pytest tests/test_graphql_endpoints.py -v
|
||||
```
|
||||
|
||||
## Test Data Requirements
|
||||
|
||||
Tests use real data from the production/staging database. Required data:
|
||||
- At least one product in `products` collection
|
||||
- At least one hub node with coordinates
|
||||
- At least one offer with coordinates
|
||||
- Graph edges for routing tests
|
||||
|
||||
## Expected Test Results
|
||||
|
||||
All tests should pass on production environment. Some tests may be skipped if:
|
||||
- No products exist: `test_nearest_hubs_with_product_filter`, `test_nearest_offers_with_product_filter`
|
||||
- No offers exist: `test_route_to_coordinate`
|
||||
- OSRM not configured: `test_auto_route`, `test_rail_route` (warnings, not failures)
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### All nearest* tests return 0 results
|
||||
|
||||
Check that nodes collection has documents with:
|
||||
- Valid `latitude` and `longitude` fields (not null)
|
||||
- Correct `node_type` field (`'hub'`, `'offer'`, `'supplier'`)
|
||||
|
||||
Query ArangoDB directly:
|
||||
|
||||
```javascript
|
||||
// Count offers with coordinates
|
||||
db._query(`
|
||||
FOR node IN nodes
|
||||
FILTER node.node_type == 'offer'
|
||||
FILTER node.latitude != null AND node.longitude != null
|
||||
RETURN node
|
||||
`).toArray().length
|
||||
```
|
||||
|
||||
### Test failures with 400 errors
|
||||
|
||||
Check GraphQL schema matches test queries. GraphQL validation errors indicate:
|
||||
- Missing required arguments
|
||||
- Wrong argument types
|
||||
- Invalid field names
|
||||
|
||||
### Connection errors
|
||||
|
||||
Verify:
|
||||
- TEST_GEO_URL points to correct endpoint
|
||||
- Endpoint is accessible (not behind VPN/firewall)
|
||||
- GraphQL endpoint is `/graphql/public/` not `/graphql/`
|
||||
@@ -1 +0,0 @@
|
||||
# Geo service tests
|
||||
Binary file not shown.
Binary file not shown.
@@ -1,844 +0,0 @@
|
||||
"""
|
||||
Comprehensive tests for all Geo GraphQL endpoints.
|
||||
Tests use real API calls to production/staging GraphQL endpoint.
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import requests
|
||||
import pytest
|
||||
|
||||
# GraphQL endpoint - override with TEST_GEO_URL env var
|
||||
GEO_URL = os.getenv('TEST_GEO_URL', 'https://geo.optovia.ru/graphql/public/')
|
||||
|
||||
|
||||
class TestBasicEndpoints:
|
||||
"""Test basic list/query endpoints."""
|
||||
|
||||
def test_products_query(self):
|
||||
"""Test products query - should return list of unique products."""
|
||||
query = """
|
||||
query GetProducts {
|
||||
products {
|
||||
uuid
|
||||
name
|
||||
offersCount
|
||||
}
|
||||
}
|
||||
"""
|
||||
response = requests.post(GEO_URL, json={'query': query})
|
||||
assert response.status_code == 200, f"Expected 200, got {response.status_code}: {response.text}"
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
assert 'data' in data
|
||||
assert 'products' in data['data']
|
||||
|
||||
products = data['data']['products']
|
||||
assert isinstance(products, list), "products should be a list"
|
||||
if len(products) > 0:
|
||||
product = products[0]
|
||||
assert 'uuid' in product
|
||||
assert 'name' in product
|
||||
assert 'offersCount' in product
|
||||
assert isinstance(product['offersCount'], int)
|
||||
|
||||
print(f"✓ products query: {len(products)} products found")
|
||||
|
||||
def test_nodes_query_basic(self):
|
||||
"""Test nodes query without filters."""
|
||||
query = """
|
||||
query GetNodes($limit: Int, $offset: Int) {
|
||||
nodes(limit: $limit, offset: $offset) {
|
||||
uuid
|
||||
name
|
||||
latitude
|
||||
longitude
|
||||
country
|
||||
transportTypes
|
||||
}
|
||||
nodesCount
|
||||
}
|
||||
"""
|
||||
variables = {'limit': 10, 'offset': 0}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200, f"Expected 200, got {response.status_code}: {response.text}"
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
nodes = data['data']['nodes']
|
||||
count = data['data']['nodesCount']
|
||||
|
||||
assert isinstance(nodes, list)
|
||||
assert isinstance(count, int)
|
||||
assert count > 0, "Should have at least some nodes in database"
|
||||
assert len(nodes) <= 10, "Should respect limit"
|
||||
|
||||
if len(nodes) > 0:
|
||||
node = nodes[0]
|
||||
assert 'uuid' in node
|
||||
assert 'name' in node
|
||||
# Coordinates might be null for some nodes
|
||||
assert 'latitude' in node
|
||||
assert 'longitude' in node
|
||||
|
||||
print(f"✓ nodes query: {len(nodes)}/{count} nodes found")
|
||||
|
||||
def test_nodes_query_with_filters(self):
|
||||
"""Test nodes query with transport type and country filters."""
|
||||
query = """
|
||||
query GetNodes($transportType: String, $country: String, $limit: Int) {
|
||||
nodes(transportType: $transportType, country: $country, limit: $limit) {
|
||||
uuid
|
||||
name
|
||||
country
|
||||
transportTypes
|
||||
}
|
||||
nodesCount(transportType: $transportType, country: $country)
|
||||
}
|
||||
"""
|
||||
variables = {'transportType': 'sea', 'limit': 5}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
nodes = data['data']['nodes']
|
||||
# All nodes should have 'sea' in their transportTypes
|
||||
for node in nodes:
|
||||
if node.get('transportTypes'):
|
||||
assert 'sea' in node['transportTypes'], f"Node {node['uuid']} missing 'sea' transport type"
|
||||
|
||||
print(f"✓ nodes query with filters: {len(nodes)} sea nodes found")
|
||||
|
||||
def test_nodes_query_with_bounds(self):
|
||||
"""Test nodes query with geographic bounds."""
|
||||
query = """
|
||||
query GetNodes($west: Float, $south: Float, $east: Float, $north: Float, $limit: Int) {
|
||||
nodes(west: $west, south: $south, east: $east, north: $north, limit: $limit) {
|
||||
uuid
|
||||
name
|
||||
latitude
|
||||
longitude
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Bounds for central Europe
|
||||
variables = {
|
||||
'west': 5.0,
|
||||
'south': 45.0,
|
||||
'east': 15.0,
|
||||
'north': 55.0,
|
||||
'limit': 20
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
nodes = data['data']['nodes']
|
||||
# Verify all nodes are within bounds
|
||||
for node in nodes:
|
||||
if node.get('latitude') and node.get('longitude'):
|
||||
lat = float(node['latitude'])
|
||||
lon = float(node['longitude'])
|
||||
assert variables['south'] <= lat <= variables['north'], \
|
||||
f"Node {node['uuid']} latitude {lat} outside bounds"
|
||||
assert variables['west'] <= lon <= variables['east'], \
|
||||
f"Node {node['uuid']} longitude {lon} outside bounds"
|
||||
|
||||
print(f"✓ nodes with bounds: {len(nodes)} nodes in central Europe")
|
||||
|
||||
def test_clustered_nodes_query(self):
|
||||
"""Test clusteredNodes query for map clustering."""
|
||||
query = """
|
||||
query GetClusteredNodes($west: Float!, $south: Float!, $east: Float!, $north: Float!, $zoom: Int!) {
|
||||
clusteredNodes(west: $west, south: $south, east: $east, north: $north, zoom: $zoom) {
|
||||
id
|
||||
latitude
|
||||
longitude
|
||||
count
|
||||
expansionZoom
|
||||
name
|
||||
}
|
||||
}
|
||||
"""
|
||||
# World view
|
||||
variables = {
|
||||
'west': -180.0,
|
||||
'south': -90.0,
|
||||
'east': 180.0,
|
||||
'north': 90.0,
|
||||
'zoom': 2
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
clusters = data['data']['clusteredNodes']
|
||||
assert isinstance(clusters, list)
|
||||
assert len(clusters) > 0, "Should have clusters at zoom level 2"
|
||||
|
||||
for cluster in clusters:
|
||||
assert 'id' in cluster
|
||||
assert 'latitude' in cluster
|
||||
assert 'longitude' in cluster
|
||||
assert 'count' in cluster
|
||||
assert cluster['count'] >= 1
|
||||
|
||||
print(f"✓ clusteredNodes: {len(clusters)} clusters/points at zoom 2")
|
||||
|
||||
|
||||
class TestNearestEndpoints:
|
||||
"""Test new coordinate-based 'nearest' endpoints."""
|
||||
|
||||
def test_nearest_hubs(self):
|
||||
"""Test nearestHubs query - find hubs near coordinates."""
|
||||
query = """
|
||||
query NearestHubs($lat: Float!, $lon: Float!, $radius: Float, $limit: Int) {
|
||||
nearestHubs(lat: $lat, lon: $lon, radius: $radius, limit: $limit) {
|
||||
uuid
|
||||
name
|
||||
latitude
|
||||
longitude
|
||||
country
|
||||
transportTypes
|
||||
distanceKm
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Rotterdam coordinates (major European port)
|
||||
variables = {
|
||||
'lat': 51.9244,
|
||||
'lon': 4.4777,
|
||||
'radius': 200,
|
||||
'limit': 5
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200, f"Status: {response.status_code}, Body: {response.text}"
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
hubs = data['data']['nearestHubs']
|
||||
assert isinstance(hubs, list)
|
||||
|
||||
# Should find some hubs in 200km radius of Rotterdam
|
||||
if len(hubs) > 0:
|
||||
hub = hubs[0]
|
||||
assert 'uuid' in hub
|
||||
assert 'name' in hub
|
||||
assert 'distanceKm' in hub
|
||||
assert hub['distanceKm'] <= 200, f"Hub {hub['uuid']} distance {hub['distanceKm']} exceeds radius"
|
||||
|
||||
# Verify hubs are sorted by distance
|
||||
distances = [h['distanceKm'] for h in hubs]
|
||||
assert distances == sorted(distances), "Hubs should be sorted by distance"
|
||||
|
||||
print(f"✓ nearestHubs: {len(hubs)} hubs near Rotterdam")
|
||||
|
||||
def test_nearest_hubs_with_product_filter(self):
|
||||
"""Test nearestHubs with product filter."""
|
||||
# First get a product UUID
|
||||
products_query = "query { products { uuid } }"
|
||||
prod_response = requests.post(GEO_URL, json={'query': products_query})
|
||||
products = prod_response.json()['data']['products']
|
||||
|
||||
if not products:
|
||||
pytest.skip("No products in database")
|
||||
|
||||
product_uuid = products[0]['uuid']
|
||||
|
||||
query = """
|
||||
query NearestHubs($lat: Float!, $lon: Float!, $radius: Float, $productUuid: String) {
|
||||
nearestHubs(lat: $lat, lon: $lon, radius: $radius, productUuid: $productUuid) {
|
||||
uuid
|
||||
name
|
||||
distanceKm
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {
|
||||
'lat': 50.0,
|
||||
'lon': 10.0,
|
||||
'radius': 1000,
|
||||
'productUuid': product_uuid
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
hubs = data['data']['nearestHubs']
|
||||
print(f"✓ nearestHubs with product filter: {len(hubs)} hubs for product {product_uuid[:8]}")
|
||||
|
||||
def test_nearest_offers(self):
|
||||
"""Test nearestOffers query - find offers near coordinates."""
|
||||
query = """
|
||||
query NearestOffers($lat: Float!, $lon: Float!, $radius: Float, $limit: Int) {
|
||||
nearestOffers(lat: $lat, lon: $lon, radius: $radius, limit: $limit) {
|
||||
uuid
|
||||
productUuid
|
||||
productName
|
||||
latitude
|
||||
longitude
|
||||
pricePerUnit
|
||||
currency
|
||||
distanceKm
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Central Europe
|
||||
variables = {
|
||||
'lat': 50.0,
|
||||
'lon': 10.0,
|
||||
'radius': 500,
|
||||
'limit': 10
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200, f"Status: {response.status_code}, Body: {response.text}"
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
offers = data['data']['nearestOffers']
|
||||
assert isinstance(offers, list)
|
||||
|
||||
if len(offers) > 0:
|
||||
offer = offers[0]
|
||||
assert 'uuid' in offer
|
||||
assert 'productUuid' in offer
|
||||
assert 'distanceKm' in offer
|
||||
assert offer['distanceKm'] <= 500
|
||||
|
||||
# Verify offers are sorted by distance
|
||||
distances = [o['distanceKm'] for o in offers]
|
||||
assert distances == sorted(distances), "Offers should be sorted by distance"
|
||||
|
||||
print(f"✓ nearestOffers: {len(offers)} offers in Central Europe")
|
||||
|
||||
def test_nearest_offers_with_product_filter(self):
|
||||
"""Test nearestOffers with product UUID filter."""
|
||||
# First get a product UUID
|
||||
products_query = "query { products { uuid name } }"
|
||||
prod_response = requests.post(GEO_URL, json={'query': products_query})
|
||||
products = prod_response.json()['data']['products']
|
||||
|
||||
if not products:
|
||||
pytest.skip("No products in database")
|
||||
|
||||
product_uuid = products[0]['uuid']
|
||||
product_name = products[0]['name']
|
||||
|
||||
query = """
|
||||
query NearestOffers($lat: Float!, $lon: Float!, $radius: Float, $productUuid: String) {
|
||||
nearestOffers(lat: $lat, lon: $lon, radius: $radius, productUuid: $productUuid) {
|
||||
uuid
|
||||
productUuid
|
||||
productName
|
||||
distanceKm
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Global search with large radius
|
||||
variables = {
|
||||
'lat': 0.0,
|
||||
'lon': 0.0,
|
||||
'radius': 20000,
|
||||
'productUuid': product_uuid
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
offers = data['data']['nearestOffers']
|
||||
# All offers should be for the requested product
|
||||
for offer in offers:
|
||||
assert offer['productUuid'] == product_uuid, \
|
||||
f"Offer {offer['uuid']} has wrong product UUID"
|
||||
|
||||
print(f"✓ nearestOffers with product: {len(offers)} offers for '{product_name}'")
|
||||
|
||||
def test_nearest_suppliers(self):
|
||||
"""Test nearestSuppliers query - find suppliers near coordinates."""
|
||||
query = """
|
||||
query NearestSuppliers($lat: Float!, $lon: Float!, $radius: Float, $limit: Int) {
|
||||
nearestSuppliers(lat: $lat, lon: $lon, radius: $radius, limit: $limit) {
|
||||
uuid
|
||||
name
|
||||
latitude
|
||||
longitude
|
||||
distanceKm
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {
|
||||
'lat': 52.52, # Berlin
|
||||
'lon': 13.405,
|
||||
'radius': 300,
|
||||
'limit': 10
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
suppliers = data['data']['nearestSuppliers']
|
||||
assert isinstance(suppliers, list)
|
||||
|
||||
if len(suppliers) > 0:
|
||||
supplier = suppliers[0]
|
||||
assert 'uuid' in supplier
|
||||
assert 'name' in supplier
|
||||
assert 'distanceKm' in supplier
|
||||
assert supplier['distanceKm'] <= 300
|
||||
|
||||
# Verify sorted by distance
|
||||
distances = [s['distanceKm'] for s in suppliers]
|
||||
assert distances == sorted(distances)
|
||||
|
||||
print(f"✓ nearestSuppliers: {len(suppliers)} suppliers near Berlin")
|
||||
|
||||
def test_nearest_suppliers_with_product_filter(self):
|
||||
"""Test nearestSuppliers with product filter."""
|
||||
# Get a product UUID
|
||||
products_query = "query { products { uuid } }"
|
||||
prod_response = requests.post(GEO_URL, json={'query': products_query})
|
||||
products = prod_response.json()['data']['products']
|
||||
|
||||
if not products:
|
||||
pytest.skip("No products in database")
|
||||
|
||||
product_uuid = products[0]['uuid']
|
||||
|
||||
query = """
|
||||
query NearestSuppliers($lat: Float!, $lon: Float!, $radius: Float, $productUuid: String) {
|
||||
nearestSuppliers(lat: $lat, lon: $lon, radius: $radius, productUuid: $productUuid) {
|
||||
uuid
|
||||
name
|
||||
distanceKm
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {
|
||||
'lat': 50.0,
|
||||
'lon': 10.0,
|
||||
'radius': 1000,
|
||||
'productUuid': product_uuid
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
suppliers = data['data']['nearestSuppliers']
|
||||
print(f"✓ nearestSuppliers with product: {len(suppliers)} suppliers for product {product_uuid[:8]}")
|
||||
|
||||
def test_nearest_offers_with_hub_uuid(self):
|
||||
"""Test nearestOffers with hubUuid - should return offers with calculated routes.
|
||||
|
||||
This tests the fix for the bug where resolve_offer_to_hub was called incorrectly
|
||||
(self was None in graphene resolvers).
|
||||
"""
|
||||
# First, get a hub UUID from the database
|
||||
hubs_query = """
|
||||
query {
|
||||
nearestHubs(lat: 0, lon: 0, radius: 20000, limit: 5) {
|
||||
uuid
|
||||
name
|
||||
latitude
|
||||
longitude
|
||||
}
|
||||
}
|
||||
"""
|
||||
hubs_response = requests.post(GEO_URL, json={'query': hubs_query})
|
||||
hubs_data = hubs_response.json()
|
||||
|
||||
if not hubs_data.get('data', {}).get('nearestHubs'):
|
||||
pytest.skip("No hubs found in database")
|
||||
|
||||
hub = hubs_data['data']['nearestHubs'][0]
|
||||
hub_uuid = hub['uuid']
|
||||
hub_lat = hub['latitude']
|
||||
hub_lon = hub['longitude']
|
||||
|
||||
# Now test nearestOffers with this hub UUID
|
||||
query = """
|
||||
query NearestOffers($lat: Float!, $lon: Float!, $radius: Float, $hubUuid: String, $limit: Int) {
|
||||
nearestOffers(lat: $lat, lon: $lon, radius: $radius, hubUuid: $hubUuid, limit: $limit) {
|
||||
uuid
|
||||
productUuid
|
||||
productName
|
||||
supplierUuid
|
||||
supplierName
|
||||
latitude
|
||||
longitude
|
||||
pricePerUnit
|
||||
currency
|
||||
distanceKm
|
||||
routes {
|
||||
totalDistanceKm
|
||||
totalTimeSeconds
|
||||
stages {
|
||||
fromUuid
|
||||
fromName
|
||||
fromLat
|
||||
fromLon
|
||||
toUuid
|
||||
toName
|
||||
toLat
|
||||
toLon
|
||||
distanceKm
|
||||
travelTimeSeconds
|
||||
transportType
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Search around the hub location with large radius
|
||||
variables = {
|
||||
'lat': float(hub_lat) if hub_lat else 0.0,
|
||||
'lon': float(hub_lon) if hub_lon else 0.0,
|
||||
'radius': 5000, # 5000km radius to find offers
|
||||
'hubUuid': hub_uuid,
|
||||
'limit': 10
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200, f"Status: {response.status_code}, Body: {response.text}"
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
offers = data['data']['nearestOffers']
|
||||
assert isinstance(offers, list), "nearestOffers should return a list"
|
||||
|
||||
# The key assertion: with hubUuid, we should get offers with routes calculated
|
||||
# (This was the bug - resolve_offer_to_hub was failing silently)
|
||||
print(f"✓ nearestOffers with hubUuid: {len(offers)} offers for hub '{hub['name']}'")
|
||||
|
||||
if len(offers) > 0:
|
||||
# Check first offer structure
|
||||
offer = offers[0]
|
||||
assert 'uuid' in offer
|
||||
assert 'productUuid' in offer
|
||||
assert 'routes' in offer, "Offer should have routes field when hubUuid is provided"
|
||||
|
||||
# If routes exist, verify structure
|
||||
if offer['routes'] and len(offer['routes']) > 0:
|
||||
route = offer['routes'][0]
|
||||
assert 'totalDistanceKm' in route
|
||||
assert 'totalTimeSeconds' in route
|
||||
assert 'stages' in route
|
||||
|
||||
if route['stages'] and len(route['stages']) > 0:
|
||||
stage = route['stages'][0]
|
||||
assert 'fromUuid' in stage
|
||||
assert 'toUuid' in stage
|
||||
assert 'transportType' in stage
|
||||
assert 'distanceKm' in stage
|
||||
print(f" Route has {len(route['stages'])} stages, total {route['totalDistanceKm']:.1f}km")
|
||||
|
||||
def test_nearest_offers_with_hub_and_product(self):
|
||||
"""Test nearestOffers with both hubUuid and productUuid filters."""
|
||||
# Get a product and hub
|
||||
products_query = "query { products { uuid name } }"
|
||||
prod_response = requests.post(GEO_URL, json={'query': products_query})
|
||||
products = prod_response.json().get('data', {}).get('products', [])
|
||||
|
||||
hubs_query = """
|
||||
query {
|
||||
nearestHubs(lat: 0, lon: 0, radius: 20000, limit: 1) {
|
||||
uuid
|
||||
name
|
||||
latitude
|
||||
longitude
|
||||
}
|
||||
}
|
||||
"""
|
||||
hubs_response = requests.post(GEO_URL, json={'query': hubs_query})
|
||||
hubs = hubs_response.json().get('data', {}).get('nearestHubs', [])
|
||||
|
||||
if not products or not hubs:
|
||||
pytest.skip("No products or hubs in database")
|
||||
|
||||
product = products[0]
|
||||
hub = hubs[0]
|
||||
|
||||
query = """
|
||||
query NearestOffers($lat: Float!, $lon: Float!, $radius: Float, $productUuid: String, $hubUuid: String, $limit: Int) {
|
||||
nearestOffers(lat: $lat, lon: $lon, radius: $radius, productUuid: $productUuid, hubUuid: $hubUuid, limit: $limit) {
|
||||
uuid
|
||||
productUuid
|
||||
productName
|
||||
routes {
|
||||
totalDistanceKm
|
||||
stages {
|
||||
transportType
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {
|
||||
'lat': float(hub['latitude']) if hub['latitude'] else 0.0,
|
||||
'lon': float(hub['longitude']) if hub['longitude'] else 0.0,
|
||||
'radius': 10000,
|
||||
'productUuid': product['uuid'],
|
||||
'hubUuid': hub['uuid'],
|
||||
'limit': 5 # Limit to avoid timeout when calculating routes
|
||||
}
|
||||
|
||||
# Use longer timeout as route calculation takes time
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables}, timeout=120)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
offers = data['data']['nearestOffers']
|
||||
|
||||
# All offers should be for the requested product
|
||||
for offer in offers:
|
||||
assert offer['productUuid'] == product['uuid'], \
|
||||
f"Offer has wrong productUuid: {offer['productUuid']} != {product['uuid']}"
|
||||
|
||||
print(f"✓ nearestOffers with hub+product: {len(offers)} offers for '{product['name']}' via hub '{hub['name']}'")
|
||||
|
||||
|
||||
class TestRoutingEndpoints:
|
||||
"""Test routing and pathfinding endpoints."""
|
||||
|
||||
def test_route_to_coordinate(self):
|
||||
"""Test routeToCoordinate query - find route from offer to coordinates."""
|
||||
# First, get an offer UUID with coordinates
|
||||
offers_query = """
|
||||
query {
|
||||
nearestOffers(lat: 50.0, lon: 10.0, radius: 1000, limit: 1) {
|
||||
uuid
|
||||
latitude
|
||||
longitude
|
||||
}
|
||||
}
|
||||
"""
|
||||
offers_response = requests.post(GEO_URL, json={'query': offers_query})
|
||||
offers_data = offers_response.json()
|
||||
|
||||
if not offers_data.get('data', {}).get('nearestOffers'):
|
||||
pytest.skip("No offers found for routing test")
|
||||
|
||||
offer = offers_data['data']['nearestOffers'][0]
|
||||
offer_uuid = offer['uuid']
|
||||
|
||||
query = """
|
||||
query RouteToCoordinate($offerUuid: String!, $lat: Float!, $lon: Float!) {
|
||||
routeToCoordinate(offerUuid: $offerUuid, lat: $lat, lon: $lon) {
|
||||
offerUuid
|
||||
distanceKm
|
||||
routes {
|
||||
totalDistanceKm
|
||||
totalTimeSeconds
|
||||
stages {
|
||||
fromUuid
|
||||
fromName
|
||||
fromLat
|
||||
fromLon
|
||||
toUuid
|
||||
toName
|
||||
toLat
|
||||
toLon
|
||||
distanceKm
|
||||
travelTimeSeconds
|
||||
transportType
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Route to Amsterdam
|
||||
variables = {
|
||||
'offerUuid': offer_uuid,
|
||||
'lat': 52.3676,
|
||||
'lon': 4.9041
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200, f"Status: {response.status_code}, Body: {response.text}"
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data, f"GraphQL errors: {data.get('errors')}"
|
||||
|
||||
route_data = data['data']['routeToCoordinate']
|
||||
assert route_data is not None
|
||||
assert route_data['offerUuid'] == offer_uuid
|
||||
|
||||
if route_data.get('routes'):
|
||||
route = route_data['routes'][0]
|
||||
assert 'totalDistanceKm' in route
|
||||
assert 'totalTimeSeconds' in route
|
||||
assert 'stages' in route
|
||||
assert len(route['stages']) > 0
|
||||
|
||||
# Verify each stage has required fields
|
||||
for stage in route['stages']:
|
||||
assert 'fromUuid' in stage
|
||||
assert 'toUuid' in stage
|
||||
assert 'distanceKm' in stage
|
||||
assert 'transportType' in stage
|
||||
|
||||
print(f"✓ routeToCoordinate: {len(route['stages'])} stages, {route['totalDistanceKm']:.1f}km")
|
||||
else:
|
||||
print(f"✓ routeToCoordinate: no routes found (offer may be isolated)")
|
||||
|
||||
def test_auto_route(self):
|
||||
"""Test autoRoute query - calculate road route between coordinates."""
|
||||
query = """
|
||||
query AutoRoute($fromLat: Float!, $fromLon: Float!, $toLat: Float!, $toLon: Float!) {
|
||||
autoRoute(fromLat: $fromLat, fromLon: $fromLon, toLat: $toLat, toLon: $toLon) {
|
||||
distanceKm
|
||||
geometry
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Route from Amsterdam to Rotterdam
|
||||
variables = {
|
||||
'fromLat': 52.3676,
|
||||
'fromLon': 4.9041,
|
||||
'toLat': 51.9244,
|
||||
'toLon': 4.4777
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
# Auto route might not be available (requires OSRM service)
|
||||
if 'errors' in data:
|
||||
print("⚠ autoRoute: service not available (expected if OSRM not configured)")
|
||||
else:
|
||||
route = data['data']['autoRoute']
|
||||
if route:
|
||||
assert 'distanceKm' in route
|
||||
assert route['distanceKm'] > 0
|
||||
print(f"✓ autoRoute: {route['distanceKm']:.1f}km Amsterdam → Rotterdam")
|
||||
else:
|
||||
print("⚠ autoRoute: returned null")
|
||||
|
||||
def test_rail_route(self):
|
||||
"""Test railRoute query - calculate rail route between coordinates."""
|
||||
query = """
|
||||
query RailRoute($fromLat: Float!, $fromLon: Float!, $toLat: Float!, $toLon: Float!) {
|
||||
railRoute(fromLat: $fromLat, fromLon: $fromLon, toLat: $toLat, toLon: $toLon) {
|
||||
distanceKm
|
||||
geometry
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {
|
||||
'fromLat': 52.3676,
|
||||
'fromLon': 4.9041,
|
||||
'toLat': 51.9244,
|
||||
'toLon': 4.4777
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
# Rail route might not be available
|
||||
if 'errors' in data:
|
||||
print("⚠ railRoute: service not available")
|
||||
else:
|
||||
route = data['data']['railRoute']
|
||||
if route:
|
||||
assert 'distanceKm' in route
|
||||
print(f"✓ railRoute: {route['distanceKm']:.1f}km")
|
||||
else:
|
||||
print("⚠ railRoute: returned null")
|
||||
|
||||
|
||||
class TestEdgeCases:
|
||||
"""Test edge cases and error handling."""
|
||||
|
||||
def test_nearest_with_zero_radius(self):
|
||||
"""Test nearest queries with very small radius."""
|
||||
query = """
|
||||
query NearestHubs($lat: Float!, $lon: Float!, $radius: Float) {
|
||||
nearestHubs(lat: $lat, lon: $lon, radius: $radius) {
|
||||
uuid
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {'lat': 50.0, 'lon': 10.0, 'radius': 0.001}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
assert 'errors' not in data
|
||||
# Should return empty list or very few results
|
||||
hubs = data['data']['nearestHubs']
|
||||
assert isinstance(hubs, list)
|
||||
print(f"✓ nearestHubs with tiny radius: {len(hubs)} hubs")
|
||||
|
||||
def test_invalid_coordinates(self):
|
||||
"""Test behavior with invalid latitude/longitude values."""
|
||||
query = """
|
||||
query NearestHubs($lat: Float!, $lon: Float!) {
|
||||
nearestHubs(lat: $lat, lon: $lon) {
|
||||
uuid
|
||||
}
|
||||
}
|
||||
"""
|
||||
# Latitude > 90 is invalid
|
||||
variables = {'lat': 100.0, 'lon': 10.0}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
# Should either return error or empty results
|
||||
assert response.status_code in [200, 400]
|
||||
print("✓ invalid coordinates handled")
|
||||
|
||||
def test_nonexistent_uuid(self):
|
||||
"""Test route query with non-existent offer UUID."""
|
||||
query = """
|
||||
query RouteToCoordinate($offerUuid: String!, $lat: Float!, $lon: Float!) {
|
||||
routeToCoordinate(offerUuid: $offerUuid, lat: $lat, lon: $lon) {
|
||||
offerUuid
|
||||
}
|
||||
}
|
||||
"""
|
||||
variables = {
|
||||
'offerUuid': 'nonexistent-uuid-12345',
|
||||
'lat': 50.0,
|
||||
'lon': 10.0
|
||||
}
|
||||
|
||||
response = requests.post(GEO_URL, json={'query': query, 'variables': variables})
|
||||
# Should handle gracefully (null result or error)
|
||||
assert response.status_code in [200, 400]
|
||||
print("✓ nonexistent UUID handled")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
pytest.main([__file__, '-v', '-s'])
|
||||
19
tsconfig.json
Normal file
19
tsconfig.json
Normal file
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"outDir": "dist",
|
||||
"rootDir": "src",
|
||||
"strict": true,
|
||||
"esModuleInterop": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"resolveJsonModule": true,
|
||||
"declaration": true,
|
||||
"declarationMap": true,
|
||||
"sourceMap": true
|
||||
},
|
||||
"include": ["src"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
Reference in New Issue
Block a user