Compare commits
4 Commits
e15976382e
...
b510dd54d6
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b510dd54d6 | ||
|
|
fd7e10c193 | ||
|
|
0330203a58 | ||
|
|
7efa753092 |
122
geo_app/cluster_index.py
Normal file
122
geo_app/cluster_index.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""
|
||||
Server-side map clustering using Uber H3 hexagonal grid.
|
||||
|
||||
Maps zoom levels to h3 resolutions and groups nodes by cell.
|
||||
"""
|
||||
import logging
|
||||
import threading
|
||||
import h3
|
||||
|
||||
logger = logging.getLogger(__name__)

# Global in-process cache of node lists, keyed by "nodes:<transport_type>"
# (or "nodes:all").  All reads/writes must hold _cache_lock.
_nodes_cache = {}
_cache_lock = threading.Lock()

# Map zoom level to h3 resolution
# Higher zoom = higher resolution = smaller cells
# NOTE(review): zoom levels above 16 are not listed here — callers are
# expected to clamp before lookup.
ZOOM_TO_RES = {
    0: 0, 1: 0, 2: 1, 3: 1, 4: 2, 5: 2,
    6: 3, 7: 3, 8: 4, 9: 4, 10: 5, 11: 5,
    12: 6, 13: 7, 14: 8, 15: 9, 16: 10
}
|
||||
|
||||
|
||||
def _fetch_nodes(db, transport_type=None):
    """Return map nodes from the database, memoising per transport type.

    Results are stored in the module-level ``_nodes_cache`` under the key
    ``nodes:<transport_type>`` (``nodes:all`` when no type is given); the
    lookup-and-fill happens under ``_cache_lock`` so concurrent resolvers
    share a single database fetch.
    """
    key = f"nodes:{transport_type or 'all'}"

    with _cache_lock:
        cached = _nodes_cache.get(key)
        if cached is None:
            aql = """
            FOR node IN nodes
                FILTER node.node_type == 'logistics' OR node.node_type == null
                FILTER node.latitude != null AND node.longitude != null
                RETURN node
            """
            fetched = list(db.aql.execute(aql))

            # Transport-type filtering is applied client-side, after the
            # base query has run.
            if transport_type:
                fetched = [
                    node for node in fetched
                    if transport_type in (node.get('transport_types') or [])
                ]

            _nodes_cache[key] = fetched
            logger.info("Cached %d nodes for %s", len(fetched), key)
            cached = fetched

    return cached
|
||||
|
||||
|
||||
def get_clustered_nodes(db, west, south, east, north, zoom, transport_type=None):
    """
    Get clustered nodes for given bounding box and zoom level.

    Uses the Uber H3 hexagonal grid to group nearby nodes: cells holding
    a single node are returned as that node, denser cells become cluster
    points at the cell centroid.

    Args:
        db: database handle passed through to _fetch_nodes.
        west, south, east, north: bounding box in degrees.  A box with
            west > east is treated as crossing the antimeridian.
        zoom: map zoom level; values outside 0-16 are clamped.
        transport_type: optional transport-type filter.

    Returns:
        list[dict] with keys id, latitude, longitude, count,
        expansion_zoom and name.
    """
    # Clamp zoom into the table's domain.  Previously any zoom > 16 fell
    # back to the default resolution 5 — *coarser* than zoom 12 — so fully
    # zoomed-in maps suddenly showed huge clusters.
    resolution = ZOOM_TO_RES[max(0, min(int(zoom), 16))]
    nodes = _fetch_nodes(db, transport_type)

    if not nodes:
        return []

    # A west edge east of the east edge means the box wraps around 180°.
    crosses_antimeridian = west > east

    # Group nodes by h3 cell
    cells = {}
    for node in nodes:
        lat = node.get('latitude')
        lng = node.get('longitude')

        # Skip nodes outside bounding box (rough filter)
        if lat < south or lat > north:
            continue
        if crosses_antimeridian:
            # Keep longitudes on either side of the antimeridian.
            if east < lng < west:
                continue
        elif lng < west or lng > east:
            continue

        cell = h3.latlng_to_cell(lat, lng, resolution)
        cells.setdefault(cell, []).append(node)

    # Build results
    results = []
    for cell, nodes_in_cell in cells.items():
        count = len(nodes_in_cell)

        if count == 1:
            # Single point — return actual node data
            node = nodes_in_cell[0]
            results.append({
                'id': node.get('_key'),
                'latitude': node.get('latitude'),
                'longitude': node.get('longitude'),
                'count': 1,
                'expansion_zoom': None,
                'name': node.get('name'),
            })
        else:
            # Cluster — return cell centroid
            lat, lng = h3.cell_to_latlng(cell)
            results.append({
                'id': f"cluster-{cell}",
                'latitude': lat,
                'longitude': lng,
                'count': count,
                'expansion_zoom': min(zoom + 2, 16),
                'name': None,
            })

    logger.info("Returning %d clusters/points for zoom=%d res=%d", len(results), zoom, resolution)
    return results
|
||||
|
||||
|
||||
def invalidate_cache(transport_type=None):
    """Drop cached node lists after the underlying data changes.

    When a transport_type is given only that type's entry is evicted;
    otherwise the whole cache is flushed.
    """
    with _cache_lock:
        if transport_type:
            # pop with a default is a no-op when the key is absent.
            _nodes_cache.pop(f"nodes:{transport_type}", None)
        else:
            _nodes_cache.clear()

    logger.info("Cluster cache invalidated")
|
||||
@@ -6,6 +6,7 @@ import requests
|
||||
import graphene
|
||||
from django.conf import settings
|
||||
from .arango_client import get_db, ensure_graph
|
||||
from .cluster_index import get_clustered_nodes, invalidate_cache
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -80,6 +81,16 @@ class ProductRouteOptionType(graphene.ObjectType):
|
||||
routes = graphene.List(RoutePathType)
|
||||
|
||||
|
||||
class ClusterPointType(graphene.ObjectType):
    """Cluster or individual point for map display."""
    # NOTE(review): the field set mirrors the dict keys produced by
    # cluster_index.get_clustered_nodes — keep the two in sync.
    id = graphene.String(description="UUID for points, 'cluster-N' for clusters")
    latitude = graphene.Float()
    longitude = graphene.Float()
    count = graphene.Int(description="1 for single point, >1 for cluster")
    expansion_zoom = graphene.Int(description="Zoom level to expand cluster")
    name = graphene.String(description="Node name (only for single points)")
|
||||
|
||||
|
||||
class Query(graphene.ObjectType):
|
||||
"""Root query."""
|
||||
MAX_EXPANSIONS = 20000
|
||||
@@ -161,6 +172,45 @@ class Query(graphene.ObjectType):
|
||||
description="Find routes from product offer nodes to destination",
|
||||
)
|
||||
|
||||
clustered_nodes = graphene.List(
|
||||
ClusterPointType,
|
||||
west=graphene.Float(required=True, description="Bounding box west longitude"),
|
||||
south=graphene.Float(required=True, description="Bounding box south latitude"),
|
||||
east=graphene.Float(required=True, description="Bounding box east longitude"),
|
||||
north=graphene.Float(required=True, description="Bounding box north latitude"),
|
||||
zoom=graphene.Int(required=True, description="Map zoom level (0-16)"),
|
||||
transport_type=graphene.String(description="Filter by transport type"),
|
||||
description="Get clustered nodes for map display (server-side clustering)",
|
||||
)
|
||||
|
||||
# Business-oriented queries for catalog navigation
|
||||
find_products_for_hub = graphene.List(
|
||||
graphene.String,
|
||||
hub_uuid=graphene.String(required=True),
|
||||
description="Find unique product UUIDs that can be delivered to this hub",
|
||||
)
|
||||
|
||||
find_hubs_for_product = graphene.List(
|
||||
NodeType,
|
||||
product_uuid=graphene.String(required=True),
|
||||
description="Find logistics hubs where this product can be delivered",
|
||||
)
|
||||
|
||||
find_supplier_product_hubs = graphene.List(
|
||||
NodeType,
|
||||
supplier_uuid=graphene.String(required=True),
|
||||
product_uuid=graphene.String(required=True),
|
||||
description="Find hubs where this supplier can deliver this product",
|
||||
)
|
||||
|
||||
find_offers_for_hub_by_product = graphene.List(
|
||||
ProductRouteOptionType,
|
||||
hub_uuid=graphene.String(required=True),
|
||||
product_uuid=graphene.String(required=True),
|
||||
limit_sources=graphene.Int(default_value=10),
|
||||
description="Find product offers that can be delivered to hub (with routes)",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _build_routes(db, from_uuid, to_uuid, limit):
|
||||
"""Shared helper to compute K shortest routes between two nodes."""
|
||||
@@ -740,6 +790,186 @@ class Query(graphene.ObjectType):
|
||||
|
||||
return found_routes
|
||||
|
||||
    def resolve_clustered_nodes(self, info, west, south, east, north, zoom, transport_type=None):
        """Get clustered nodes for map display.

        Clustering is performed server-side on the Uber H3 hexagonal grid
        (see cluster_index.get_clustered_nodes), not SuperCluster.
        """
        db = get_db()
        clusters = get_clustered_nodes(db, west, south, east, north, zoom, transport_type)
        return [ClusterPointType(**c) for c in clusters]
|
||||
|
||||
def resolve_find_products_for_hub(self, info, hub_uuid):
|
||||
"""
|
||||
Find unique product UUIDs that can be delivered to this hub.
|
||||
Uses reverse traversal from hub to find reachable offer nodes.
|
||||
"""
|
||||
db = get_db()
|
||||
ensure_graph()
|
||||
|
||||
# Check hub exists
|
||||
nodes_col = db.collection('nodes')
|
||||
hub = nodes_col.get(hub_uuid)
|
||||
if not hub:
|
||||
logger.info("Hub %s not found", hub_uuid)
|
||||
return []
|
||||
|
||||
# Find all offer nodes reachable from hub via graph traversal
|
||||
# Offer nodes have product_uuid field
|
||||
aql = """
|
||||
FOR v, e, p IN 1..10 ANY @hub_id GRAPH 'optovia_graph'
|
||||
FILTER v.product_uuid != null
|
||||
COLLECT product_uuid = v.product_uuid
|
||||
RETURN product_uuid
|
||||
"""
|
||||
try:
|
||||
cursor = db.aql.execute(
|
||||
aql,
|
||||
bind_vars={'hub_id': f'nodes/{hub_uuid}'},
|
||||
)
|
||||
product_uuids = list(cursor)
|
||||
logger.info("Found %d products for hub %s", len(product_uuids), hub_uuid)
|
||||
return product_uuids
|
||||
except Exception as e:
|
||||
logger.error("Error finding products for hub: %s", e)
|
||||
return []
|
||||
|
||||
def resolve_find_hubs_for_product(self, info, product_uuid):
|
||||
"""
|
||||
Find logistics hubs where this product can be delivered.
|
||||
Finds offer nodes with this product, then finds reachable hubs.
|
||||
"""
|
||||
db = get_db()
|
||||
ensure_graph()
|
||||
|
||||
# Find all offer nodes with this product_uuid
|
||||
aql_offers = """
|
||||
FOR node IN nodes
|
||||
FILTER node.product_uuid == @product_uuid
|
||||
RETURN node._key
|
||||
"""
|
||||
try:
|
||||
cursor = db.aql.execute(aql_offers, bind_vars={'product_uuid': product_uuid})
|
||||
offer_keys = list(cursor)
|
||||
except Exception as e:
|
||||
logger.error("Error finding offers for product: %s", e)
|
||||
return []
|
||||
|
||||
if not offer_keys:
|
||||
logger.info("No offers found for product %s", product_uuid)
|
||||
return []
|
||||
|
||||
# Find hubs reachable from these offer nodes
|
||||
aql_hubs = """
|
||||
LET offer_ids = @offer_ids
|
||||
FOR offer_id IN offer_ids
|
||||
FOR v, e, p IN 1..10 ANY CONCAT('nodes/', offer_id) GRAPH 'optovia_graph'
|
||||
FILTER (v.node_type == 'logistics' OR v.node_type == null)
|
||||
FILTER v.product_uuid == null
|
||||
COLLECT hub_key = v._key INTO hubs
|
||||
LET hub = FIRST(hubs).v
|
||||
RETURN hub
|
||||
"""
|
||||
try:
|
||||
cursor = db.aql.execute(aql_hubs, bind_vars={'offer_ids': offer_keys})
|
||||
hubs = list(cursor)
|
||||
|
||||
result = []
|
||||
for hub in hubs:
|
||||
if hub:
|
||||
result.append(NodeType(
|
||||
uuid=hub.get('_key'),
|
||||
name=hub.get('name'),
|
||||
latitude=hub.get('latitude'),
|
||||
longitude=hub.get('longitude'),
|
||||
country=hub.get('country'),
|
||||
country_code=hub.get('country_code'),
|
||||
synced_at=hub.get('synced_at'),
|
||||
transport_types=hub.get('transport_types') or [],
|
||||
edges=[],
|
||||
))
|
||||
|
||||
logger.info("Found %d hubs for product %s", len(result), product_uuid)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error finding hubs for product: %s", e)
|
||||
return []
|
||||
|
||||
def resolve_find_supplier_product_hubs(self, info, supplier_uuid, product_uuid):
|
||||
"""
|
||||
Find hubs where this supplier can deliver this product.
|
||||
Finds offer nodes matching both supplier and product, then finds reachable hubs.
|
||||
"""
|
||||
db = get_db()
|
||||
ensure_graph()
|
||||
|
||||
# Find offer nodes with this supplier_uuid AND product_uuid
|
||||
aql_offers = """
|
||||
FOR node IN nodes
|
||||
FILTER node.product_uuid == @product_uuid
|
||||
FILTER node.supplier_uuid == @supplier_uuid
|
||||
RETURN node._key
|
||||
"""
|
||||
try:
|
||||
cursor = db.aql.execute(
|
||||
aql_offers,
|
||||
bind_vars={'product_uuid': product_uuid, 'supplier_uuid': supplier_uuid}
|
||||
)
|
||||
offer_keys = list(cursor)
|
||||
except Exception as e:
|
||||
logger.error("Error finding supplier offers: %s", e)
|
||||
return []
|
||||
|
||||
if not offer_keys:
|
||||
logger.info("No offers found for supplier %s, product %s", supplier_uuid, product_uuid)
|
||||
return []
|
||||
|
||||
# Find hubs reachable from these offer nodes
|
||||
aql_hubs = """
|
||||
LET offer_ids = @offer_ids
|
||||
FOR offer_id IN offer_ids
|
||||
FOR v, e, p IN 1..10 ANY CONCAT('nodes/', offer_id) GRAPH 'optovia_graph'
|
||||
FILTER (v.node_type == 'logistics' OR v.node_type == null)
|
||||
FILTER v.product_uuid == null
|
||||
COLLECT hub_key = v._key INTO hubs
|
||||
LET hub = FIRST(hubs).v
|
||||
RETURN hub
|
||||
"""
|
||||
try:
|
||||
cursor = db.aql.execute(aql_hubs, bind_vars={'offer_ids': offer_keys})
|
||||
hubs = list(cursor)
|
||||
|
||||
result = []
|
||||
for hub in hubs:
|
||||
if hub:
|
||||
result.append(NodeType(
|
||||
uuid=hub.get('_key'),
|
||||
name=hub.get('name'),
|
||||
latitude=hub.get('latitude'),
|
||||
longitude=hub.get('longitude'),
|
||||
country=hub.get('country'),
|
||||
country_code=hub.get('country_code'),
|
||||
synced_at=hub.get('synced_at'),
|
||||
transport_types=hub.get('transport_types') or [],
|
||||
edges=[],
|
||||
))
|
||||
|
||||
logger.info("Found %d hubs for supplier %s product %s", len(result), supplier_uuid, product_uuid)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error("Error finding supplier product hubs: %s", e)
|
||||
return []
|
||||
|
||||
    def resolve_find_offers_for_hub_by_product(self, info, hub_uuid, product_uuid, limit_sources=10):
        """
        Find product offers that can be delivered to hub (with routes).

        Thin wrapper around resolve_find_product_routes (defined elsewhere
        in this class) with business-oriented naming; limit_routes is
        pinned to 1 so each offer comes back with a single route.
        """
        return self.resolve_find_product_routes(
            info,
            product_uuid=product_uuid,
            to_uuid=hub_uuid,
            limit_sources=limit_sources,
            limit_routes=1,
        )
|
||||
|
||||
|
||||
schema = graphene.Schema(query=Query)
|
||||
|
||||
@@ -767,6 +997,11 @@ def _build_route_from_edges(path_edges, node_docs):
|
||||
if not path_edges:
|
||||
return None
|
||||
|
||||
# Фильтруем offer edges - это не транспортные этапы, а связь оффера с локацией
|
||||
path_edges = [(f, t, e) for f, t, e in path_edges if e.get('transport_type') != 'offer']
|
||||
if not path_edges:
|
||||
return None
|
||||
|
||||
stages = []
|
||||
current_edges = []
|
||||
current_type = None
|
||||
|
||||
@@ -15,7 +15,8 @@ dependencies = [
|
||||
"infisicalsdk (>=1.0.12,<2.0.0)",
|
||||
"gunicorn (>=23.0.0,<24.0.0)",
|
||||
"whitenoise (>=6.7.0,<7.0.0)",
|
||||
"sentry-sdk (>=2.47.0,<3.0.0)"
|
||||
"sentry-sdk (>=2.47.0,<3.0.0)",
|
||||
"h3 (>=4.0.0,<5.0.0)"
|
||||
]
|
||||
|
||||
[tool.poetry]
|
||||
|
||||
Reference in New Issue
Block a user