|
|
@@ -1,21 +1,74 @@
|
|
|
from __future__ import annotations
|
|
|
|
|
|
from collections import defaultdict, deque
|
|
|
+from concurrent.futures import ThreadPoolExecutor
|
|
|
from datetime import datetime, timezone
|
|
|
+import json
|
|
|
from typing import Any
|
|
|
|
|
|
-from sqlalchemy import Index, Integer, String, Text, UniqueConstraint, delete, select
|
|
|
+from sqlalchemy import (
|
|
|
+ Index,
|
|
|
+ Integer,
|
|
|
+ String,
|
|
|
+ Text,
|
|
|
+ UniqueConstraint,
|
|
|
+ delete,
|
|
|
+ inspect,
|
|
|
+ select,
|
|
|
+ text,
|
|
|
+)
|
|
|
from sqlalchemy.orm import Mapped, Session, mapped_column
|
|
|
|
|
|
from .config_api import list_topologies_with_group as api_list_topologies_with_group
|
|
|
+from .config_api import list_locations as api_list_locations
|
|
|
+from .config_api import list_system_tree as api_list_system_tree
|
|
|
+from .config_api import list_systems as api_list_systems
|
|
|
+from .config_api import list_device_types as api_list_device_types
|
|
|
+from .config_api import list_meter_types as api_list_meter_types
|
|
|
+from .config_api import search_devices as api_search_devices
|
|
|
+from .config_api import search_meters as api_search_meters
|
|
|
+from .config_api import get_topology_data as api_get_topology_data
|
|
|
from .config_api import get_topology as api_get_topology
|
|
|
from .db import Base, sql_engine
|
|
|
|
|
|
|
|
|
# Object-type codes used in topology dimension/filter configs ("PT" object model).
# These integer codes appear in the stored dimension_config payloads.
PT_OBJ_TYPE_LOCATION = 11
PT_OBJ_TYPE_SYSTEMTYPE = 12
PT_OBJ_TYPE_SYSTEM = 13
PT_OBJ_TYPE_DEVICETYPE = 14
PT_OBJ_TYPE_DEVICE = 15
PT_OBJ_TYPE_METERTYPE = 16
PT_OBJ_TYPE_METERMODEL = 17
PT_OBJ_TYPE_METER = 18
PT_OBJ_TYPE_TOPOGROUP = 19
PT_OBJ_TYPE_TOPODIAGRAM = 20

# Human-readable (Chinese) display labels for each object-type code.
# Values are emitted verbatim in API payloads — do not translate.
OBJECT_TYPE_LABELS = {
    PT_OBJ_TYPE_LOCATION: "位置",  # location
    PT_OBJ_TYPE_SYSTEMTYPE: "系统类型",  # system type
    PT_OBJ_TYPE_SYSTEM: "系统",  # system
    PT_OBJ_TYPE_DEVICETYPE: "设备类型",  # device type
    PT_OBJ_TYPE_DEVICE: "设备",  # device
    PT_OBJ_TYPE_METERTYPE: "仪表类型",  # meter type
    PT_OBJ_TYPE_METERMODEL: "仪表型号",  # meter model
    PT_OBJ_TYPE_METER: "仪表",  # meter
    PT_OBJ_TYPE_TOPOGROUP: "拓扑图分组",  # topology diagram group
    PT_OBJ_TYPE_TOPODIAGRAM: "拓扑图",  # topology diagram
}

# Labels for grouping sort order: 1 = ascending (顺序), 2 = descending (逆序).
ORDER_LABELS = {1: "顺序", 2: "逆序"}
# Labels for filter combination: 1 = all conditions (所有), 2 = any (任一).
FILTER_TYPE_LABELS = {1: "所有", 2: "任一"}
# Labels for match operators: equals / not-equals / contains / not-contains.
MATCH_TYPE_LABELS = {1: "等于", 2: "不等于", 3: "包含", 4: "不包含"}
|
|
|
+
|
|
|
+
|
|
|
def _utc_now_iso() -> str:
|
|
|
return datetime.now(timezone.utc).isoformat()
|
|
|
|
|
|
|
|
|
+def _current_unix_ts() -> int:
|
|
|
+ return int(datetime.now().timestamp())
|
|
|
+
|
|
|
+
|
|
|
def _safe_int(raw_value: Any) -> int | None:
|
|
|
if raw_value is None:
|
|
|
return None
|
|
|
@@ -81,6 +134,10 @@ class TopologyRegistry(Base):
|
|
|
source_updated_time: Mapped[str] = mapped_column(
|
|
|
String(64), nullable=False, default=""
|
|
|
)
|
|
|
+ data_options_json: Mapped[str] = mapped_column(Text, nullable=False, default="")
|
|
|
+ dimension_config_json: Mapped[str] = mapped_column(
|
|
|
+ Text, nullable=False, default=""
|
|
|
+ )
|
|
|
refreshed_at: Mapped[str] = mapped_column(String(64), nullable=False)
|
|
|
is_active: Mapped[int] = mapped_column(Integer, nullable=False, default=1)
|
|
|
|
|
|
@@ -192,6 +249,340 @@ class TopologyEntityIndex(Base):
|
|
|
|
|
|
def ensure_topology_cache_tables() -> None:
    """Create the topology cache tables and migrate older schemas in place.

    ``Base.metadata.create_all`` only creates tables that are missing; it
    never alters existing ones.  The JSON columns added to
    ``topology_registry`` after its initial release are therefore
    back-filled here with explicit ``ALTER TABLE`` statements when absent.
    """
    # Reuse one engine for create_all, inspection, and migration instead of
    # resolving it three separate times.
    engine = sql_engine()
    Base.metadata.create_all(engine)

    existing_columns = {
        column["name"] for column in inspect(engine).get_columns("topology_registry")
    }
    # DEFAULT '' keeps rows that pre-date the migration consistent with the
    # ORM mapping, which declares these columns nullable=False, default="".
    migrations = {
        "data_options_json": (
            "ALTER TABLE topology_registry "
            "ADD COLUMN data_options_json TEXT DEFAULT ''"
        ),
        "dimension_config_json": (
            "ALTER TABLE topology_registry "
            "ADD COLUMN dimension_config_json TEXT DEFAULT ''"
        ),
    }
    pending = [
        ddl for column_name, ddl in migrations.items()
        if column_name not in existing_columns
    ]
    if not pending:
        return
    with engine.begin() as connection:
        for ddl in pending:
            connection.execute(text(ddl))
|
|
|
+
|
|
|
+
|
|
|
+def _dump_json_text(raw_value: Any) -> str:
|
|
|
+ if raw_value is None:
|
|
|
+ return ""
|
|
|
+ return json.dumps(raw_value, ensure_ascii=False, separators=(",", ":"))
|
|
|
+
|
|
|
+
|
|
|
def _load_json_text(raw_value: str) -> Any:
    """Parse stored JSON text back into Python data.

    Blank/empty input yields ``None``; text that is not valid JSON is
    returned unchanged as the cleaned string.
    """
    stored = _text(raw_value)
    if not stored:
        return None
    try:
        parsed = json.loads(stored)
    except json.JSONDecodeError:
        return stored
    return parsed
|
|
|
+
|
|
|
+
|
|
|
def _build_metric_definitions(data_options: Any) -> dict[str, dict[str, Any]]:
    """Index a topology's ``data_options`` metric lists by metric code.

    Returns ``{"instant": {code: meta}, "accu": {code: meta}}``; any input
    that is not the expected dict-of-lists shape produces empty maps.
    """
    result: dict[str, dict[str, Any]] = {"instant": {}, "accu": {}}
    if not isinstance(data_options, dict):
        return result
    for display in ("instant", "accu"):
        entries = data_options.get(display)
        if not isinstance(entries, list):
            continue
        by_code: dict[str, Any] = {}
        for entry in entries:
            if not isinstance(entry, dict):
                continue
            code = _text(entry.get("code"))
            if not code:
                # Entries without a code cannot be addressed later; drop them.
                continue
            by_code[code] = {
                "name": _text(entry.get("name")),
                "unit": _text(entry.get("unit")),
                "type": _safe_int(entry.get("type")),
                "display": bool(entry.get("display")),
                "value_key": _text(entry.get("value")),
            }
        result[display] = by_code
    return result
|
|
|
+
|
|
|
+
|
|
|
+def _extract_page_items(payload: Any) -> list[dict[str, Any]]:
|
|
|
+ if not isinstance(payload, dict):
|
|
|
+ return []
|
|
|
+ data = payload.get("data")
|
|
|
+ if isinstance(data, dict):
|
|
|
+ items = data.get("data")
|
|
|
+ if isinstance(items, list):
|
|
|
+ return [item for item in items if isinstance(item, dict)]
|
|
|
+ if isinstance(data, list):
|
|
|
+ return [item for item in data if isinstance(item, dict)]
|
|
|
+ return []
|
|
|
+
|
|
|
+
|
|
|
def _load_all_pages(fetch_page: Any) -> list[dict[str, Any]]:
    """Drain a paged endpoint by calling ``fetch_page(page_num)`` until done.

    Pagination stops when the envelope carries no dict ``data`` or when the
    reported ``total_page`` has been reached.
    """
    collected: list[dict[str, Any]] = []
    page_num = 1
    while True:
        payload = fetch_page(page_num)
        collected.extend(_extract_page_items(payload))
        data = payload.get("data") if isinstance(payload, dict) else None
        if not isinstance(data, dict):
            return collected
        # Missing/invalid total_page means "this is the only page".
        total_pages = _safe_int(data.get("total_page")) or page_num
        if page_num >= total_pages:
            return collected
        page_num += 1
|
|
|
+
|
|
|
+
|
|
|
+def _flatten_tree_items(items: list[dict[str, Any]]) -> list[dict[str, Any]]:
|
|
|
+ result: list[dict[str, Any]] = []
|
|
|
+ queue = deque(items)
|
|
|
+ while queue:
|
|
|
+ item = queue.popleft()
|
|
|
+ result.append(item)
|
|
|
+ children = item.get("children")
|
|
|
+ if isinstance(children, list):
|
|
|
+ queue.extend(child for child in children if isinstance(child, dict))
|
|
|
+ return result
|
|
|
+
|
|
|
+
|
|
|
def _to_id_name_map(items: list[dict[str, Any]]) -> dict[int, str]:
    """Map each item's integer ``id`` to its ``name``; items lacking a
    usable id are skipped."""
    mapping: dict[int, str] = {}
    for entry in items:
        entry_id = _safe_int(entry.get("id"))
        if entry_id is not None:
            mapping[entry_id] = _text(entry.get("name"))
    return mapping
|
|
|
+
|
|
|
+
|
|
|
def _load_location_name_map(project_key: str) -> dict[int, str]:
    """Fetch every location (all pages) and map location id -> name."""

    def fetch(page_num: int) -> Any:
        return api_list_locations(
            project_key, keyword="", page_size=100, page_num=page_num
        )

    return _to_id_name_map(_flatten_tree_items(_load_all_pages(fetch)))
|
|
|
+
|
|
|
+
|
|
|
def _load_system_type_name_map(project_key: str) -> dict[int, str]:
    """Map system-type id -> name from the system tree.

    Tree nodes with ``type == 1`` are the system-type nodes.
    """
    tree_items = _extract_page_items(api_list_system_tree(project_key))
    system_type_nodes = [
        node
        for node in _flatten_tree_items(tree_items)
        if _safe_int(node.get("type")) == 1
    ]
    return _to_id_name_map(system_type_nodes)
|
|
|
+
|
|
|
+
|
|
|
def _load_system_name_map(project_key: str) -> dict[int, str]:
    """Fetch every system (all pages, no type filter) and map id -> name."""

    def fetch(page_num: int) -> Any:
        return api_list_systems(
            project_key,
            page_size=100,
            page_num=page_num,
            system_type_id=0,
            show_below=True,
        )

    return _to_id_name_map(_load_all_pages(fetch))
|
|
|
+
|
|
|
+
|
|
|
def _load_device_type_name_map(project_key: str) -> dict[int, str]:
    """Map device-type id -> name from the device-type listing."""
    payload = api_list_device_types(project_key)
    return _to_id_name_map(_extract_page_items(payload))
|
|
|
+
|
|
|
+
|
|
|
def _load_meter_type_name_map(project_key: str) -> dict[int, str]:
    """Map meter-type id -> name from the meter-type listing."""
    payload = api_list_meter_types(project_key)
    return _to_id_name_map(_extract_page_items(payload))
|
|
|
+
|
|
|
+
|
|
|
def _load_device_name_map(project_key: str) -> dict[int, str]:
    """Fetch every device (all pages, unfiltered search) and map id -> name."""

    def fetch(page_num: int) -> Any:
        return api_search_devices(
            project_key,
            page_size=100,
            page_num=page_num,
            keyword="",
            location_id=0,
            show_below=True,
            system_ids=[],
            device_type_ids=[],
        )

    return _to_id_name_map(_load_all_pages(fetch))
|
|
|
+
|
|
|
+
|
|
|
def _load_meter_name_map(project_key: str) -> dict[int, str]:
    """Fetch every meter (all pages, unfiltered search) and map id -> name."""

    def fetch(page_num: int) -> Any:
        return api_search_meters(
            project_key,
            page_size=100,
            page_num=page_num,
            keyword="",
            location_id=0,
            show_below=True,
            meter_type_id=0,
            measurement_location_ids=[],
            measurement_system_ids=[],
            measurement_device_type_ids=[],
            status=None,
        )

    return _to_id_name_map(_load_all_pages(fetch))
|
|
|
+
|
|
|
+
|
|
|
def _load_topology_group_name_map(session: Session, project_key: str) -> dict[int, str]:
    """Map cached topology-group id -> group name for *project_key*."""
    rows = _load_group_rows(session, project_key)
    return {row.group_id: row.group_name for row in rows}
|
|
|
+
|
|
|
+
|
|
|
def _load_topology_name_map(session: Session, project_key: str) -> dict[int, str]:
    """Map cached topology id -> topology name for *project_key*."""
    rows = _load_registry_rows(session, project_key)
    return {row.topology_id: row.topology_name for row in rows}
|
|
|
+
|
|
|
+
|
|
|
def _object_type_label(type_code: int | None) -> str:
    """Display label for a PT object-type code; "" for unknown/None codes."""
    effective_code = type_code if type_code else 0
    return OBJECT_TYPE_LABELS.get(effective_code, "")
|
|
|
+
|
|
|
+
|
|
|
def _resolve_field_name_map(
    session: Session, project_key: str, type_code: int
) -> dict[int, str]:
    """Resolve id -> name for filter-condition fields of one object type.

    Group/diagram lookups come from the local cache (hence *session*);
    everything else is fetched from the project APIs.  Unknown codes yield
    an empty map.

    Raises:
        ValueError: for type 17 (meter model), whose API mapping is not
            yet confirmed.
    """
    if type_code == PT_OBJ_TYPE_METERMODEL:
        raise ValueError("type=17 (仪表型号) needs additional API mapping confirmation")
    if type_code == PT_OBJ_TYPE_TOPOGROUP:
        return _load_topology_group_name_map(session, project_key)
    if type_code == PT_OBJ_TYPE_TOPODIAGRAM:
        return _load_topology_name_map(session, project_key)
    # Remaining types all resolve through project APIs keyed by project only.
    api_loaders = {
        PT_OBJ_TYPE_LOCATION: _load_location_name_map,
        PT_OBJ_TYPE_SYSTEMTYPE: _load_system_type_name_map,
        PT_OBJ_TYPE_SYSTEM: _load_system_name_map,
        PT_OBJ_TYPE_DEVICETYPE: _load_device_type_name_map,
        PT_OBJ_TYPE_DEVICE: _load_device_name_map,
        PT_OBJ_TYPE_METERTYPE: _load_meter_type_name_map,
        PT_OBJ_TYPE_METER: _load_meter_name_map,
    }
    loader = api_loaders.get(type_code)
    if loader is None:
        return {}
    return loader(project_key)
|
|
|
+
|
|
|
+
|
|
|
def _resolve_field_items(
    session: Session, project_key: str, type_code: int, field_ids: list[int]
) -> list[dict[str, Any]]:
    """Pair each field id with its resolved name ("" when unresolvable)."""
    name_map = _resolve_field_name_map(session, project_key, type_code)
    resolved: list[dict[str, Any]] = []
    for field_id in field_ids:
        resolved.append({"id": field_id, "name": name_map.get(field_id, "")})
    return resolved
|
|
|
+
|
|
|
+
|
|
|
def _normalize_int_list(raw_value: Any) -> list[int]:
    """Coerce a raw list into ints via ``_safe_int``; non-lists yield []."""
    if not isinstance(raw_value, list):
        return []
    converted = (_safe_int(item) for item in raw_value)
    return [value for value in converted if value is not None]
|
|
|
+
|
|
|
+
|
|
|
def get_topology_group_config(project_key: str, topology_id: int) -> dict[str, Any]:
    """Return the grouping/filter configuration of a cached topology.

    Only ``topology_type == 2`` topologies carry a dimension config; other
    types get ``supported: False`` plus the raw stored config.  For the
    supported case each stored dimension becomes a labelled "grouping"
    entry and each filter condition is resolved to id/name pairs.

    Args:
        project_key: Project identifier; must be non-blank after cleaning.
        topology_id: Topology to look up in the local cache.

    Returns:
        A dict with ``supported``, ``raw_dimension_config``, ``groupings``
        and ``filter`` keys (plus ``mcp_note`` in the unsupported case).

    Raises:
        ValueError: when *project_key* is blank, or (via
            ``_resolve_field_items``) for unmapped object type 17.
    """
    project_key = _text(project_key)
    if not project_key:
        raise ValueError("project_key is required")

    ensure_topology_cache_tables()
    with Session(sql_engine()) as session:
        _require_topology_cache(session, project_key)
        # NOTE(review): group_path_map is computed but never used in this
        # function — candidate for removal; confirm _group_path_map has no
        # required side effects.
        group_path_map = _group_path_map(_load_group_rows(session, project_key))
        registry_row = _get_registry_or_error(session, project_key, topology_id)
        if registry_row.topology_type != 2:
            # Non-type-2 topologies have no group config; still surface the
            # raw stored value so callers can inspect it.
            return {
                "supported": False,
                "raw_dimension_config": _load_json_text(
                    registry_row.dimension_config_json
                ),
                "groupings": [],
                "filter": {
                    "filter_type": None,
                    "filter_type_label": "",
                    "conditions": [],
                },
                "mcp_note": "topology.get_group_config only applies to topology_type=2 topologies.",
            }

        raw_dimension_config = _load_json_text(registry_row.dimension_config_json)
        if not isinstance(raw_dimension_config, dict):
            raw_dimension_config = {}

        # Defensive shape checks: stored JSON may be missing or malformed.
        raw_dimensions = raw_dimension_config.get("dimensions")
        raw_filter = raw_dimension_config.get("filter")
        dimensions = raw_dimensions if isinstance(raw_dimensions, list) else []
        filter_payload = raw_filter if isinstance(raw_filter, dict) else {}

        # Each dimension entry becomes a labelled grouping level.
        groupings: list[dict[str, Any]] = []
        for item in dimensions:
            if not isinstance(item, dict):
                continue
            type_code = _safe_int(item.get("type"))
            level = _safe_int(item.get("level"))
            order = _safe_int(item.get("order"))
            groupings.append(
                {
                    "name": _text(item.get("name")),
                    "type": type_code,
                    "type_label": _object_type_label(type_code),
                    "level": level,
                    "order": order,
                    "order_label": ORDER_LABELS.get(order or 0, ""),
                }
            )

        # Filter conditions: resolve each referenced field id to a name.
        conditions_payload = filter_payload.get("conditions")
        conditions = conditions_payload if isinstance(conditions_payload, list) else []
        resolved_conditions: list[dict[str, Any]] = []
        for item in conditions:
            if not isinstance(item, dict):
                continue
            type_code = _safe_int(item.get("type")) or 0
            level = _safe_int(item.get("level"))
            match_type = _safe_int(item.get("match_type"))
            field_ids = _normalize_int_list(item.get("fields"))
            resolved_conditions.append(
                {
                    "type": type_code,
                    "type_label": _object_type_label(type_code),
                    "level": level,
                    "match_type": match_type,
                    "match_type_label": MATCH_TYPE_LABELS.get(match_type or 0, ""),
                    "fields": field_ids,
                    "field_items": _resolve_field_items(
                        session, project_key, type_code, field_ids
                    ),
                }
            )

        filter_type = _safe_int(filter_payload.get("filter_type"))
        return {
            "supported": True,
            "raw_dimension_config": raw_dimension_config,
            "groupings": groupings,
            "filter": {
                "filter_type": filter_type,
                "filter_type_label": FILTER_TYPE_LABELS.get(filter_type or 0, ""),
                "conditions": resolved_conditions,
            },
        }
|
|
|
|
|
|
|
|
|
def _collect_group_and_topology_refs(
|
|
|
@@ -616,6 +1007,10 @@ def refresh_topology_cache(
|
|
|
or topology_ref.get("group_id"),
|
|
|
"root_shape": root_shape,
|
|
|
"source_updated_time": _text(detail_data.get("updated_time")),
|
|
|
+ "data_options_json": _dump_json_text(detail_data.get("data_options")),
|
|
|
+ "dimension_config_json": _dump_json_text(
|
|
|
+ detail_data.get("dimension_config")
|
|
|
+ ),
|
|
|
"refreshed_at": refreshed_at,
|
|
|
"is_active": 1,
|
|
|
}
|
|
|
@@ -942,6 +1337,124 @@ def _node_payload(node_row: TopologyNode) -> dict[str, Any]:
|
|
|
}
|
|
|
|
|
|
|
|
|
+def _floor_hour_ts(ts: int) -> int:
|
|
|
+ return max(0, int(ts) - (int(ts) % 3600))
|
|
|
+
|
|
|
+
|
|
|
+def _floor_day_ts(ts: int) -> int:
|
|
|
+ current = datetime.fromtimestamp(int(ts)).astimezone()
|
|
|
+ return int(
|
|
|
+ current.replace(hour=0, minute=0, second=0, microsecond=0).timestamp()
|
|
|
+ )
|
|
|
+
|
|
|
+
|
|
|
def _hourly_window_timestamps(base_ts: int) -> list[int]:
    """Hour-start timestamps for the 12 hours before (excluding) base_ts's hour."""
    anchor = _floor_hour_ts(base_ts)
    return [anchor - offset * 3600 for offset in range(1, 13)]
|
|
|
+
|
|
|
+
|
|
|
def _daily_window_timestamps(base_ts: int) -> list[int]:
    """Day-start timestamps covering base_ts's day plus the 6 preceding days."""
    anchor = _floor_day_ts(base_ts)
    return [anchor - offset * 86400 for offset in range(7)]
|
|
|
+
|
|
|
+
|
|
|
+def _extract_topology_data_map(payload: Any) -> dict[str, Any]:
|
|
|
+ if not isinstance(payload, dict):
|
|
|
+ return {}
|
|
|
+ data = payload.get("data")
|
|
|
+ if not isinstance(data, dict):
|
|
|
+ return {}
|
|
|
+ return {str(node_id): node_values for node_id, node_values in data.items()}
|
|
|
+
|
|
|
+
|
|
|
def _fetch_topology_runtime_data(
    project_key: str, topology_id: int, *, base_ts: int | None = None
) -> dict[str, Any]:
    """Fetch a topology's runtime measurements: instant values plus hourly
    and daily accumulation windows, fanned out over a thread pool.

    Args:
        project_key: Project identifier passed through to the data API.
        topology_id: Topology whose node data is fetched.
        base_ts: Anchor Unix timestamp for the time windows; defaults to
            "now" when None.

    Returns:
        A bundle dict with ``base_ts``, ``instant`` (node-id -> values),
        ``hourly``/``daily`` (lists of {"ts", "data_map"}), and
        ``data_window`` listing the window timestamps.
    """
    effective_base_ts = int(base_ts if base_ts is not None else _current_unix_ts())
    hourly_timestamps = _hourly_window_timestamps(effective_base_ts)
    daily_timestamps = _daily_window_timestamps(effective_base_ts)

    def fetch_instant() -> dict[str, Any]:
        # Current (instantaneous) values for every node of the topology.
        return _extract_topology_data_map(
            api_get_topology_data(project_key, topology_id, display="instant")
        )

    def fetch_accu(accu_step: int, ts: int) -> tuple[int, dict[str, Any]]:
        # Accumulated values at one window timestamp.  accu_step=2 is used
        # with hourly windows and 3 with daily ones below — presumably the
        # API's hour/day step codes; TODO confirm against the API docs.
        payload = api_get_topology_data(
            project_key,
            topology_id,
            display="accu",
            accu_step=accu_step,
            ts=ts,
        )
        return ts, _extract_topology_data_map(payload)

    # One worker per request, capped at 8; the requests are independent
    # HTTP-style calls, so they can safely run concurrently.
    max_workers = max(1, min(8, 1 + len(hourly_timestamps) + len(daily_timestamps)))
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        instant_future = executor.submit(fetch_instant)
        hourly_futures = [
            executor.submit(fetch_accu, 2, ts_value) for ts_value in hourly_timestamps
        ]
        daily_futures = [
            executor.submit(fetch_accu, 3, ts_value) for ts_value in daily_timestamps
        ]

        # .result() re-raises any worker exception here, inside the pool's
        # context, so all other futures still get cleaned up.
        instant_map = instant_future.result()
        hourly_series = [future.result() for future in hourly_futures]
        daily_series = [future.result() for future in daily_futures]

    return {
        "base_ts": effective_base_ts,
        "instant": instant_map,
        "hourly": [
            {"ts": ts_value, "data_map": data_map} for ts_value, data_map in hourly_series
        ],
        "daily": [
            {"ts": ts_value, "data_map": data_map} for ts_value, data_map in daily_series
        ],
        "data_window": {
            "hourly_ts": hourly_timestamps,
            "daily_ts": daily_timestamps,
        },
    }
|
|
|
+
|
|
|
+
|
|
|
+def _attach_runtime_data_to_node(
|
|
|
+ node_payload: dict[str, Any], runtime_bundle: dict[str, Any]
|
|
|
+) -> dict[str, Any]:
|
|
|
+ node_id = str(node_payload.get("node_id") or "").strip()
|
|
|
+ instant_map = runtime_bundle.get("instant") or {}
|
|
|
+ hourly_series = runtime_bundle.get("hourly") or []
|
|
|
+ daily_series = runtime_bundle.get("daily") or []
|
|
|
+
|
|
|
+ node_payload["data"] = {
|
|
|
+ "instant": instant_map.get(node_id),
|
|
|
+ "accu": {
|
|
|
+ "hourly": [
|
|
|
+ {
|
|
|
+ "ts": item["ts"],
|
|
|
+ "values": item["data_map"].get(node_id),
|
|
|
+ }
|
|
|
+ for item in hourly_series
|
|
|
+ ],
|
|
|
+ "daily": [
|
|
|
+ {
|
|
|
+ "ts": item["ts"],
|
|
|
+ "values": item["data_map"].get(node_id),
|
|
|
+ }
|
|
|
+ for item in daily_series
|
|
|
+ ],
|
|
|
+ },
|
|
|
+ }
|
|
|
+ return node_payload
|
|
|
+
|
|
|
+
|
|
|
def _attach_runtime_data_to_nodes(
    nodes: list[dict[str, Any]], runtime_bundle: dict[str, Any]
) -> list[dict[str, Any]]:
    """Attach runtime data to every node payload in *nodes* (in place)."""
    attached: list[dict[str, Any]] = []
    for payload in nodes:
        attached.append(_attach_runtime_data_to_node(payload, runtime_bundle))
    return attached
|
|
|
+
|
|
|
+
|
|
|
def _load_node_map(
|
|
|
session: Session, project_key: str, topology_id: int
|
|
|
) -> dict[str, TopologyNode]:
|
|
|
@@ -1079,6 +1592,7 @@ def _collect_descendants(
|
|
|
def _topology_metadata_payload(
|
|
|
registry_row: TopologyRegistry, group_path_text: str
|
|
|
) -> dict[str, Any]:
|
|
|
+ data_options = _load_json_text(registry_row.data_options_json)
|
|
|
return {
|
|
|
"topology_id": registry_row.topology_id,
|
|
|
"topology_name": registry_row.topology_name,
|
|
|
@@ -1087,6 +1601,9 @@ def _topology_metadata_payload(
|
|
|
"group_id": registry_row.group_id,
|
|
|
"group_path_text": group_path_text,
|
|
|
"root_shape": registry_row.root_shape,
|
|
|
+ "data_options": data_options,
|
|
|
+ "metric_definitions": _build_metric_definitions(data_options),
|
|
|
+ "dimension_config": _load_json_text(registry_row.dimension_config_json),
|
|
|
}
|
|
|
|
|
|
|
|
|
@@ -1121,6 +1638,7 @@ def get_topology_node(
|
|
|
node_row = _get_node_or_error(
|
|
|
session, project_key, topology_id, resolved_node_id
|
|
|
)
|
|
|
+ runtime_bundle = _fetch_topology_runtime_data(project_key, topology_id)
|
|
|
|
|
|
parent_ids = parents_by_node.get(resolved_node_id, [])
|
|
|
child_ids = children_by_node.get(resolved_node_id, []) if include_children else []
|
|
|
@@ -1133,13 +1651,20 @@ def get_topology_node(
|
|
|
]
|
|
|
|
|
|
return {
|
|
|
+ "data_window": runtime_bundle["data_window"],
|
|
|
"topology": _topology_metadata_payload(
|
|
|
registry_row, group_path_map.get(registry_row.group_id or -1, "")
|
|
|
),
|
|
|
- "node": _node_payload(node_row),
|
|
|
- "parents": _node_list_payload(node_map, parent_ids),
|
|
|
- "children": _node_list_payload(node_map, child_ids),
|
|
|
- "siblings": _node_list_payload(node_map, sibling_ids),
|
|
|
+ "node": _attach_runtime_data_to_node(_node_payload(node_row), runtime_bundle),
|
|
|
+ "parents": _attach_runtime_data_to_nodes(
|
|
|
+ _node_list_payload(node_map, parent_ids), runtime_bundle
|
|
|
+ ),
|
|
|
+ "children": _attach_runtime_data_to_nodes(
|
|
|
+ _node_list_payload(node_map, child_ids), runtime_bundle
|
|
|
+ ),
|
|
|
+ "siblings": _attach_runtime_data_to_nodes(
|
|
|
+ _node_list_payload(node_map, sibling_ids), runtime_bundle
|
|
|
+ ),
|
|
|
}
|
|
|
|
|
|
|