You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 

125 rivejä
3.9 KiB

  1. import csv
  2. import logging
  3. from typing import Any, Dict, List, Optional
  4. import httpx
# Module-level logger, named after this module per the standard logging convention.
log = logging.getLogger(__name__)
  6. def _norm_mac(v: Any) -> str:
  7. if v is None:
  8. return ""
  9. return "".join(ch for ch in str(v).strip().lower() if ch.isalnum())
  10. def _none_if_empty(v: Any) -> Any:
  11. return None if v in ("", None, 0, "0") else v
  12. def _str_or_none(v: Any) -> Optional[str]:
  13. if v in ("", None):
  14. return None
  15. if isinstance(v, (int, float, bool)):
  16. return str(v)
  17. return v
  18. def _normalize_tracker(row: dict) -> dict:
  19. row = dict(row)
  20. row["floor"] = _none_if_empty(row.get("floor"))
  21. row["building"] = _none_if_empty(row.get("building"))
  22. row["battery"] = _str_or_none(row.get("battery"))
  23. row["temperature"] = _str_or_none(row.get("temperature"))
  24. row["acceleration"] = _str_or_none(row.get("acceleration"))
  25. row["heartRate"] = _str_or_none(row.get("heartRate"))
  26. return row
  27. async def _fetch_algorithm(core_base_url: str, timeout: float) -> Optional[str]:
  28. try:
  29. async with httpx.AsyncClient(timeout=timeout) as client:
  30. resp = await client.get(f"{core_base_url}/reslevis/settings")
  31. if 200 <= resp.status_code < 300:
  32. payload = resp.json()
  33. if isinstance(payload, list) and payload:
  34. value = payload[0].get("current_algorithm")
  35. if value is not None:
  36. return str(value).lower()
  37. except (httpx.RequestError, ValueError):
  38. pass
  39. return None
  40. async def _filter_mode_trackers(
  41. tracker_repo, core_base_url: str, timeout: float
  42. ) -> List[Dict[str, Any]]:
  43. try:
  44. async with httpx.AsyncClient(timeout=timeout) as client:
  45. resp = await client.get(f"{core_base_url}/reslevis/getTrackers")
  46. if 200 <= resp.status_code < 300:
  47. data = resp.json()
  48. if isinstance(data, list):
  49. normalized = [_normalize_tracker(r) for r in data if isinstance(r, dict)]
  50. tracker_repo._write_all(normalized)
  51. return normalized
  52. except (httpx.RequestError, ValueError):
  53. pass
  54. return tracker_repo.list()
  55. def _read_infer_positions(infer_csv_path: str) -> Dict[str, Dict[str, Optional[float]]]:
  56. positions: Dict[str, Dict[str, Optional[float]]] = {}
  57. try:
  58. with open(infer_csv_path, newline="") as f:
  59. reader = csv.DictReader(f, delimiter=";")
  60. for row in reader:
  61. mac = _norm_mac(row.get("mac"))
  62. if not mac:
  63. continue
  64. try:
  65. positions[mac] = {
  66. "x": int(row["x"]) if row.get("x") not in (None, "") else None,
  67. "y": int(row["y"]) if row.get("y") not in (None, "") else None,
  68. }
  69. except (KeyError, ValueError):
  70. continue
  71. except OSError:
  72. log.warning("BLE-AI infer CSV not found: %s", infer_csv_path)
  73. return positions
  74. def _ai_mode_trackers(
  75. tracker_repo, infer_csv_path: str
  76. ) -> List[Dict[str, Any]]:
  77. trackers = tracker_repo.list()
  78. positions = _read_infer_positions(infer_csv_path)
  79. if not positions:
  80. return trackers
  81. result = []
  82. for tracker in trackers:
  83. t = dict(tracker)
  84. mac = _norm_mac(t.get("mac"))
  85. if mac and mac in positions:
  86. t["x"] = positions[mac]["x"]
  87. t["y"] = positions[mac]["y"]
  88. result.append(t)
  89. return result
  90. async def get_mode_aware_trackers(
  91. tracker_repo,
  92. core_base_url: str,
  93. infer_csv_path: str,
  94. timeout: float,
  95. ) -> List[Dict[str, Any]]:
  96. algorithm = await _fetch_algorithm(core_base_url, timeout)
  97. if algorithm == "filter":
  98. return await _filter_mode_trackers(tracker_repo, core_base_url, timeout)
  99. if algorithm == "ai":
  100. return _ai_mode_trackers(tracker_repo, infer_csv_path)
  101. return tracker_repo.list()