// Package location wires together the Kafka consumers, the location
// inference algorithms, and the event loop of the location service.
package location

import (
	"context"
	"encoding/json"
	"log/slog"
	"sync"
	"time"

	"github.com/AFASystems/presence/internal/pkg/common/appcontext"
	"github.com/AFASystems/presence/internal/pkg/config"
	"github.com/AFASystems/presence/internal/pkg/kafkaclient"
	pkglocation "github.com/AFASystems/presence/internal/pkg/location"
	"github.com/AFASystems/presence/internal/pkg/logger"
	"github.com/AFASystems/presence/internal/pkg/model"
	"github.com/segmentio/kafka-go"
)

// LocationApp holds dependencies for the location service.
type LocationApp struct {
	Cfg          *config.Config
	KafkaManager *kafkaclient.KafkaManager
	AppState     *appcontext.AppState
	Inferencer   pkglocation.Inferencer
	ChRaw        chan appcontext.BeaconAdvertisement
	ChSettings   chan map[string]any
	Cleanup      func()
	wg           sync.WaitGroup
}

// New creates a LocationApp with Kafka readers (rawbeacons, settings) and
// writers (locevents, healthlocation), installs the service logger as the
// process default, and sizes the intake channels from the shared config
// constants. The returned Cleanup func releases the logger resources.
func New(cfg *config.Config) (*LocationApp, error) {
	state := appcontext.NewAppState()
	km := kafkaclient.InitKafkaManager()

	lg, closeLogger := logger.CreateLogger("location.log")
	slog.SetDefault(lg)

	consumeTopics := []string{"rawbeacons", "settings"}
	produceTopics := []string{"locevents", "healthlocation"}
	// Readers join the "location" consumer group; writers need no group id.
	km.PopulateKafkaManager(cfg.KafkaURL, "location", consumeTopics)
	km.PopulateKafkaManager(cfg.KafkaURL, "", produceTopics)

	slog.Info("location service initialized", "readers", consumeTopics, "writers", produceTopics)

	app := &LocationApp{
		Cfg:          cfg,
		KafkaManager: km,
		AppState:     state,
		Inferencer:   pkglocation.NewDefaultInferencer(cfg.TLSInsecureSkipVerify),
		ChRaw:        make(chan appcontext.BeaconAdvertisement, config.LARGE_CHANNEL_SIZE),
		ChSettings:   make(chan map[string]any, config.SMALL_CHANNEL_SIZE),
		Cleanup:      closeLogger,
	}
	return app, nil
}
func (a *LocationApp) Run(ctx context.Context) { a.wg.Add(2) go kafkaclient.Consume(a.KafkaManager.GetReader("rawbeacons"), a.ChRaw, ctx, &a.wg) go kafkaclient.Consume(a.KafkaManager.GetReader("settings"), a.ChSettings, ctx, &a.wg) locTicker := time.NewTicker(config.LARGE_TICKER_INTERVAL) defer locTicker.Stop() healthTicker := time.NewTicker(config.LARGE_TICKER_INTERVAL) defer healthTicker.Stop() for { select { case <-ctx.Done(): return case <-healthTicker.C: health, err := a.AppState.GetLocationHealth(a.KafkaManager) if err != nil { slog.Error("getting location health", "err", err) continue } m := kafka.Message{ Value: health, } if err := kafkaclient.Write(ctx, a.KafkaManager.GetWriter("healthlocation"), m); err != nil { slog.Error("writing location health", "err", err) continue } case <-locTicker.C: settings := a.AppState.GetSettings() slog.Info("current algorithm", "algorithm", settings.CurrentAlgorithm) switch settings.CurrentAlgorithm { case "filter": pkglocation.GetLikelyLocations(a.AppState, a.KafkaManager.GetWriter("locevents")) case "ai": inferred, err := a.Inferencer.Infer(ctx, a.Cfg) if err != nil { slog.Error("AI inference", "err", err) continue } for _, item := range inferred.Items { r := model.HTTPLocation{ Method: "AI", Y: item.Y, X: item.X, Z: item.Z, MAC: item.Mac, LastSeen: time.Now().Unix(), } js, err := json.Marshal(r) if err != nil { slog.Error("marshaling location", "err", err, "beacon_id", item.Mac) continue } if err := a.KafkaManager.GetWriter("locevents").WriteMessages(ctx, kafka.Message{Value: js}); err != nil { slog.Error("sending kafka location message", "err", err, "beacon_id", item.Mac) } } slog.Info("AI algorithm", "count", inferred.Count, "items", len(inferred.Items)) } case msg := <-a.ChRaw: pkglocation.AssignBeaconToList(msg, a.AppState) case msg := <-a.ChSettings: slog.Info("settings update", "msg", msg) a.AppState.UpdateSettings(msg) } } } // Shutdown waits for consumers and cleans up Kafka and logger. 
func (a *LocationApp) Shutdown() { a.wg.Wait() a.KafkaManager.CleanKafkaReaders() a.KafkaManager.CleanKafkaWriters() if a.Cleanup != nil { a.Cleanup() } slog.Info("location service shutdown complete") }