| @@ -71,35 +71,15 @@ func mqtthandler(writer *kafka.Writer, topic string, message []byte, appState *a | |||||
| break | break | ||||
| } | } | ||||
| } | } | ||||
| } else { | |||||
| s := strings.Split(string(message), ",") | |||||
| if len(s) < 6 { | |||||
| log.Printf("Messaggio CSV non valido: %s", msgStr) | |||||
| return | |||||
| } | |||||
| fmt.Println("this gateway is also sending data: ", s) | |||||
| } | } | ||||
| // } else { | |||||
| // s := strings.Split(string(message), ",") | |||||
| // if len(s) < 6 { | |||||
| // log.Printf("Messaggio CSV non valido: %s", msgStr) | |||||
| // return | |||||
| // } | |||||
| // rawdata := s[4] | |||||
| // buttonCounter := parseButtonState(rawdata) | |||||
| // if buttonCounter > 0 { | |||||
| // adv := model.BeaconAdvertisement{} | |||||
| // i, _ := strconv.ParseInt(s[3], 10, 64) | |||||
| // adv.Hostname = hostname | |||||
| // adv.BeaconType = "hb_button" | |||||
| // adv.MAC = s[1] | |||||
| // adv.RSSI = i | |||||
| // adv.Data = rawdata | |||||
| // adv.HSButtonCounter = buttonCounter | |||||
| // read_line := strings.TrimRight(string(s[5]), "\r\n") | |||||
| // it, err33 := strconv.Atoi(read_line) | |||||
| // if err33 != nil { | |||||
| // fmt.Println(it) | |||||
| // fmt.Println(err33) | |||||
| // os.Exit(2) | |||||
| // } | |||||
| // } | |||||
| // } | |||||
| } | } | ||||
| var messagePubHandler = func(msg mqtt.Message, writer *kafka.Writer, appState *appcontext.AppState) { | var messagePubHandler = func(msg mqtt.Message, writer *kafka.Writer, appState *appcontext.AppState) { | ||||
| @@ -118,6 +98,7 @@ func main() { | |||||
| // Load global context to init beacons and latest list | // Load global context to init beacons and latest list | ||||
| appState := appcontext.NewAppState() | appState := appcontext.NewAppState() | ||||
| cfg := config.Load() | cfg := config.Load() | ||||
| kafkaManager := kafkaclient.InitKafkaManager() | |||||
| // Set logger -> terminal and log file | // Set logger -> terminal and log file | ||||
| slog.SetDefault(logger.CreateLogger("bridge.log")) | slog.SetDefault(logger.CreateLogger("bridge.log")) | ||||
| @@ -126,13 +107,11 @@ func main() { | |||||
| ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT) | ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT) | ||||
| defer stop() | defer stop() | ||||
| // define kafka readers | |||||
| apiReader := appState.AddKafkaReader(cfg.KafkaURL, "apibeacons", "bridge-api") | |||||
| alertReader := appState.AddKafkaReader(cfg.KafkaURL, "alert", "bridge-alert") | |||||
| mqttReader := appState.AddKafkaReader(cfg.KafkaURL, "mqtt", "bridge-mqtt") | |||||
| readerTopics := []string{"apibeacons", "alert", "mqtt"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "bridge", readerTopics) | |||||
| // define kafka writer | |||||
| writer := appState.AddKafkaWriter(cfg.KafkaURL, "rawbeacons") | |||||
| writerTopics := []string{"rawbeacons"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "", writerTopics) | |||||
| slog.Info("Bridge initialized, subscribed to kafka topics") | slog.Info("Bridge initialized, subscribed to kafka topics") | ||||
| @@ -141,9 +120,9 @@ func main() { | |||||
| chMqtt := make(chan []model.Tracker, 200) | chMqtt := make(chan []model.Tracker, 200) | ||||
| wg.Add(3) | wg.Add(3) | ||||
| go kafkaclient.Consume(apiReader, chApi, ctx, &wg) | |||||
| go kafkaclient.Consume(alertReader, chAlert, ctx, &wg) | |||||
| go kafkaclient.Consume(mqttReader, chMqtt, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("apibeacons"), chApi, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("alert"), chAlert, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("mqtt"), chMqtt, ctx, &wg) | |||||
| opts := mqtt.NewClientOptions() | opts := mqtt.NewClientOptions() | ||||
| opts.AddBroker(fmt.Sprintf("tcp://%s:%d", cfg.MQTTHost, 1883)) | opts.AddBroker(fmt.Sprintf("tcp://%s:%d", cfg.MQTTHost, 1883)) | ||||
| @@ -154,7 +133,9 @@ func main() { | |||||
| opts.SetMaxReconnectInterval(600 * time.Second) | opts.SetMaxReconnectInterval(600 * time.Second) | ||||
| opts.SetCleanSession(false) | opts.SetCleanSession(false) | ||||
| opts.SetDefaultPublishHandler(func(c mqtt.Client, m mqtt.Message) { messagePubHandler(m, writer, appState) }) | |||||
| opts.SetDefaultPublishHandler(func(c mqtt.Client, m mqtt.Message) { | |||||
| messagePubHandler(m, kafkaManager.GetWriter("rawbeacons"), appState) | |||||
| }) | |||||
| opts.OnConnect = connectHandler | opts.OnConnect = connectHandler | ||||
| opts.OnConnectionLost = connectLostHandler | opts.OnConnectionLost = connectLostHandler | ||||
| client := mqtt.NewClient(opts) | client := mqtt.NewClient(opts) | ||||
| @@ -188,14 +169,12 @@ eventloop: | |||||
| slog.Info(lMsg) | slog.Info(lMsg) | ||||
| } | } | ||||
| case msg := <-chAlert: | case msg := <-chAlert: | ||||
| fmt.Printf("Alerts: %+v\n", msg) | |||||
| p, err := json.Marshal(msg) | p, err := json.Marshal(msg) | ||||
| if err != nil { | if err != nil { | ||||
| continue | continue | ||||
| } | } | ||||
| client.Publish("/alerts", 0, true, p) | client.Publish("/alerts", 0, true, p) | ||||
| case msg := <-chMqtt: | case msg := <-chMqtt: | ||||
| fmt.Printf("trackers: %+v\n", msg) | |||||
| p, err := json.Marshal(msg) | p, err := json.Marshal(msg) | ||||
| if err != nil { | if err != nil { | ||||
| continue | continue | ||||
| @@ -208,8 +187,8 @@ eventloop: | |||||
| wg.Wait() | wg.Wait() | ||||
| slog.Info("All go routines have stopped, Beggining to close Kafka connections") | slog.Info("All go routines have stopped, Beggining to close Kafka connections") | ||||
| appState.CleanKafkaReaders() | |||||
| appState.CleanKafkaWriters() | |||||
| kafkaManager.CleanKafkaReaders() | |||||
| kafkaManager.CleanKafkaWriters() | |||||
| client.Disconnect(250) | client.Disconnect(250) | ||||
| slog.Info("Closing connection to MQTT broker") | slog.Info("Closing connection to MQTT broker") | ||||
| @@ -1,5 +0,0 @@ | |||||
| DB_HOST=localhost | |||||
| DB_PORT=5432 | |||||
| DB_USER=postgres | |||||
| DB_PASSWORD=postgres | |||||
| DB_NAME=go_crud_db | |||||
| @@ -1,37 +0,0 @@ | |||||
| package main | |||||
| import ( | |||||
| "fmt" | |||||
| "log" | |||||
| "os" | |||||
| "github.com/joho/godotenv" | |||||
| "gorm.io/driver/postgres" | |||||
| "gorm.io/gorm" | |||||
| ) | |||||
| var DB *gorm.DB | |||||
| func main() { | |||||
| err := godotenv.Load() | |||||
| if err != nil { | |||||
| log.Fatal("Error loading .env file") | |||||
| } | |||||
| dsn := fmt.Sprintf( | |||||
| "host=%s user=%s password=%s dbname=%s port=%s sslmode=disable", | |||||
| os.Getenv("DB_HOST"), | |||||
| os.Getenv("DB_USER"), | |||||
| os.Getenv("DB_PASSWORD"), | |||||
| os.Getenv("DB_NAME"), | |||||
| os.Getenv("DB_PORT"), | |||||
| ) | |||||
| db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{}) | |||||
| if err != nil { | |||||
| log.Fatal("Failed to connect to the database:", err) | |||||
| } | |||||
| DB = db | |||||
| fmt.Println("Database connection established") | |||||
| } | |||||
| @@ -1,7 +0,0 @@ | |||||
| package models | |||||
| type Book struct { | |||||
| ID uint `json:"id" gorm:"primaryKey"` | |||||
| Title string `json:"title"` | |||||
| Author string `json:"author"` | |||||
| } | |||||
| @@ -1,22 +0,0 @@ | |||||
| [ | |||||
| { | |||||
| "name": "config1", | |||||
| "min": 10, | |||||
| "max": 15, | |||||
| "pattern": ["0x02", "0x01", "0x06"], | |||||
| "configs": { | |||||
| "battery": {"offset": 3, "length": 1}, | |||||
| "accX": {"offset": 4, "length": 2, "order": "bigendian"} | |||||
| } | |||||
| }, | |||||
| { | |||||
| "name": "config2", | |||||
| "min": 10, | |||||
| "max": 15, | |||||
| "pattern": ["0x02", "0x01", "0x06"], | |||||
| "configs": { | |||||
| "battery": {"offset": 3, "length": 1}, | |||||
| "accY": {"offset": 4, "length": 2, "order": "bigendian"} | |||||
| } | |||||
| } | |||||
| ] | |||||
| @@ -1,137 +0,0 @@ | |||||
| package main | |||||
| import ( | |||||
| "bytes" | |||||
| "encoding/binary" | |||||
| "encoding/json" | |||||
| "fmt" | |||||
| "io" | |||||
| "os" | |||||
| "sync" | |||||
| "github.com/AFASystems/presence/internal/pkg/model" | |||||
| ) | |||||
| type parserConfig struct { | |||||
| Length int `json:"length"` | |||||
| Offset int `json:"offset"` | |||||
| Order string `json:"order"` | |||||
| } | |||||
| type beaconParser struct { | |||||
| name string | |||||
| canParse func([]byte) bool | |||||
| configs map[string]parserConfig | |||||
| } | |||||
| type parserRegistry struct { | |||||
| parserList []beaconParser | |||||
| rw sync.RWMutex | |||||
| } | |||||
| type config struct { | |||||
| Name string `json:"name"` | |||||
| Min int `json:"min"` | |||||
| Max int `json:"max"` | |||||
| Pattern []string `json:"pattern"` | |||||
| Configs map[string]parserConfig `json:"configs"` | |||||
| } | |||||
| func (pc parserConfig) GetOrder() binary.ByteOrder { | |||||
| if pc.Order == "bigendian" { | |||||
| return binary.BigEndian | |||||
| } | |||||
| return binary.LittleEndian | |||||
| } | |||||
| func (p *parserRegistry) Register(name string, c config) { | |||||
| p.rw.Lock() | |||||
| defer p.rw.Unlock() | |||||
| b := beaconParser{ | |||||
| name: name, | |||||
| canParse: func(ad []byte) bool { | |||||
| return len(ad) >= c.Min && len(ad) <= c.Max && bytes.HasPrefix(ad, c.GetPatternBytes()) | |||||
| }, | |||||
| configs: c.Configs, | |||||
| } | |||||
| p.parserList = append(p.parserList, b) | |||||
| } | |||||
| func (b *beaconParser) Parse(ad []byte) (model.BeaconEvent, bool) { | |||||
| flag := false | |||||
| event := model.BeaconEvent{Type: b.name} | |||||
| if cfg, ok := b.configs["battery"]; ok { | |||||
| event.Battery = uint32(b.extract(ad, cfg)) | |||||
| flag = true | |||||
| } | |||||
| if cfg, ok := b.configs["accX"]; ok { | |||||
| event.AccX = int16(b.extract(ad, cfg)) | |||||
| flag = true | |||||
| } | |||||
| if cfg, ok := b.configs["accY"]; ok { | |||||
| event.AccY = int16(b.extract(ad, cfg)) | |||||
| flag = true | |||||
| } | |||||
| if cfg, ok := b.configs["accZ"]; ok { | |||||
| event.AccZ = int16(b.extract(ad, cfg)) | |||||
| flag = true | |||||
| } | |||||
| return event, flag | |||||
| } | |||||
| func (b *beaconParser) extract(ad []byte, pc parserConfig) uint16 { | |||||
| if len(ad) < pc.Offset+pc.Length { | |||||
| return 0 | |||||
| } | |||||
| data := ad[pc.Offset : pc.Offset+pc.Length] | |||||
| if pc.Length == 1 { | |||||
| return uint16(data[0]) | |||||
| } | |||||
| return pc.GetOrder().Uint16(data) | |||||
| } | |||||
| func (c config) GetPatternBytes() []byte { | |||||
| res := make([]byte, len(c.Pattern)) | |||||
| for i, s := range c.Pattern { | |||||
| fmt.Sscanf(s, "0x%02x", &res[i]) | |||||
| } | |||||
| return res | |||||
| } | |||||
| func main() { | |||||
| parserRegistry := parserRegistry{ | |||||
| parserList: make([]beaconParser, 0), | |||||
| } | |||||
| seq := []byte{0x02, 0x01, 0x06, 0x64, 0x01, 0xF4, 0x00, 0x0A, 0xFF, 0x05} | |||||
| jsonFile, err := os.Open("configs.json") | |||||
| if err != nil { | |||||
| fmt.Println(err) | |||||
| } | |||||
| fmt.Println("succesfully opened json file") | |||||
| b, _ := io.ReadAll(jsonFile) | |||||
| var configs []config | |||||
| json.Unmarshal(b, &configs) | |||||
| for _, config := range configs { | |||||
| parserRegistry.Register(config.Name, config) | |||||
| } | |||||
| for _, parser := range parserRegistry.parserList { | |||||
| if parser.canParse(seq) { | |||||
| event, ok := parser.Parse(seq) | |||||
| if ok { | |||||
| fmt.Printf("Device: %s | Battery: %d%% | AccX: %d | AccY: %d | AccZ: %d\n", event.Type, event.Battery, event.AccX, event.AccY, event.AccZ) | |||||
| } | |||||
| } | |||||
| } | |||||
| fmt.Printf("configs: %+v\n", configs) | |||||
| jsonFile.Close() | |||||
| } | |||||
| @@ -26,6 +26,7 @@ func main() { | |||||
| // Load global context to init beacons and latest list | // Load global context to init beacons and latest list | ||||
| appState := appcontext.NewAppState() | appState := appcontext.NewAppState() | ||||
| cfg := config.Load() | cfg := config.Load() | ||||
| kafkaManager := kafkaclient.InitKafkaManager() | |||||
| parserRegistry := model.ParserRegistry{ | parserRegistry := model.ParserRegistry{ | ||||
| ParserList: make(map[string]model.BeaconParser), | ParserList: make(map[string]model.BeaconParser), | ||||
| @@ -38,10 +39,11 @@ func main() { | |||||
| ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT) | ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT) | ||||
| defer stop() | defer stop() | ||||
| rawReader := appState.AddKafkaReader(cfg.KafkaURL, "rawbeacons", "gid-raw") | |||||
| parserReader := appState.AddKafkaReader(cfg.KafkaURL, "parser", "gid-parser") | |||||
| readerTopics := []string{"rawbeacons", "parser"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "decoder", readerTopics) | |||||
| alertWriter := appState.AddKafkaWriter(cfg.KafkaURL, "alertbeacons") | |||||
| writerTopics := []string{"alertbeacons"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "", writerTopics) | |||||
| slog.Info("Decoder initialized, subscribed to Kafka topics") | slog.Info("Decoder initialized, subscribed to Kafka topics") | ||||
| @@ -49,8 +51,8 @@ func main() { | |||||
| chParser := make(chan model.KafkaParser, 200) | chParser := make(chan model.KafkaParser, 200) | ||||
| wg.Add(3) | wg.Add(3) | ||||
| go kafkaclient.Consume(rawReader, chRaw, ctx, &wg) | |||||
| go kafkaclient.Consume(parserReader, chParser, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("rawbeacons"), chRaw, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("parser"), chParser, ctx, &wg) | |||||
| eventloop: | eventloop: | ||||
| for { | for { | ||||
| @@ -58,7 +60,7 @@ eventloop: | |||||
| case <-ctx.Done(): | case <-ctx.Done(): | ||||
| break eventloop | break eventloop | ||||
| case msg := <-chRaw: | case msg := <-chRaw: | ||||
| processIncoming(msg, appState, alertWriter, &parserRegistry) | |||||
| processIncoming(msg, appState, kafkaManager.GetWriter("alertbeacons"), &parserRegistry) | |||||
| case msg := <-chParser: | case msg := <-chParser: | ||||
| switch msg.ID { | switch msg.ID { | ||||
| case "add": | case "add": | ||||
| @@ -77,8 +79,8 @@ eventloop: | |||||
| wg.Wait() | wg.Wait() | ||||
| slog.Info("All go routines have stopped, Beggining to close Kafka connections") | slog.Info("All go routines have stopped, Beggining to close Kafka connections") | ||||
| appState.CleanKafkaReaders() | |||||
| appState.CleanKafkaWriters() | |||||
| kafkaManager.CleanKafkaReaders() | |||||
| kafkaManager.CleanKafkaWriters() | |||||
| } | } | ||||
| func processIncoming(adv model.BeaconAdvertisement, appState *appcontext.AppState, writer *kafka.Writer, parserRegistry *model.ParserRegistry) { | func processIncoming(adv model.BeaconAdvertisement, appState *appcontext.AppState, writer *kafka.Writer, parserRegistry *model.ParserRegistry) { | ||||
| @@ -25,6 +25,7 @@ func main() { | |||||
| // Load global context to init beacons and latest list | // Load global context to init beacons and latest list | ||||
| appState := appcontext.NewAppState() | appState := appcontext.NewAppState() | ||||
| cfg := config.Load() | cfg := config.Load() | ||||
| kafkaManager := kafkaclient.InitKafkaManager() | |||||
| // Set logger -> terminal and log file | // Set logger -> terminal and log file | ||||
| slog.SetDefault(logger.CreateLogger("location.log")) | slog.SetDefault(logger.CreateLogger("location.log")) | ||||
| @@ -33,10 +34,11 @@ func main() { | |||||
| ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT) | ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGTERM, syscall.SIGINT) | ||||
| defer stop() | defer stop() | ||||
| rawReader := appState.AddKafkaReader(cfg.KafkaURL, "rawbeacons", "gid-raw-loc") | |||||
| settingsReader := appState.AddKafkaReader(cfg.KafkaURL, "settings", "gid-settings-loc") | |||||
| readerTopics := []string{"rawbeacons", "settings"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "location", readerTopics) | |||||
| writer := appState.AddKafkaWriter(cfg.KafkaURL, "locevents") | |||||
| writerTopics := []string{"locevents"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "", writerTopics) | |||||
| slog.Info("Locations algorithm initialized, subscribed to Kafka topics") | slog.Info("Locations algorithm initialized, subscribed to Kafka topics") | ||||
| @@ -47,8 +49,8 @@ func main() { | |||||
| chSettings := make(chan map[string]any, 5) | chSettings := make(chan map[string]any, 5) | ||||
| wg.Add(3) | wg.Add(3) | ||||
| go kafkaclient.Consume(rawReader, chRaw, ctx, &wg) | |||||
| go kafkaclient.Consume(settingsReader, chSettings, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("rawbeacons"), chRaw, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("settings"), chSettings, ctx, &wg) | |||||
| eventLoop: | eventLoop: | ||||
| for { | for { | ||||
| @@ -60,7 +62,7 @@ eventLoop: | |||||
| fmt.Printf("Settings: %+v\n", settings) | fmt.Printf("Settings: %+v\n", settings) | ||||
| switch settings.CurrentAlgorithm { | switch settings.CurrentAlgorithm { | ||||
| case "filter": | case "filter": | ||||
| getLikelyLocations(appState, writer) | |||||
| getLikelyLocations(appState, kafkaManager.GetWriter("locevents")) | |||||
| case "ai": | case "ai": | ||||
| fmt.Println("AI algorithm selected") | fmt.Println("AI algorithm selected") | ||||
| } | } | ||||
| @@ -76,8 +78,8 @@ eventLoop: | |||||
| wg.Wait() | wg.Wait() | ||||
| slog.Info("All go routines have stopped, Beggining to close Kafka connections") | slog.Info("All go routines have stopped, Beggining to close Kafka connections") | ||||
| appState.CleanKafkaReaders() | |||||
| appState.CleanKafkaWriters() | |||||
| kafkaManager.CleanKafkaReaders() | |||||
| kafkaManager.CleanKafkaWriters() | |||||
| } | } | ||||
| func getLikelyLocations(appState *appcontext.AppState, writer *kafka.Writer) { | func getLikelyLocations(appState *appcontext.AppState, writer *kafka.Writer) { | ||||
| @@ -25,21 +25,16 @@ import ( | |||||
| "github.com/AFASystems/presence/internal/pkg/service" | "github.com/AFASystems/presence/internal/pkg/service" | ||||
| "github.com/gorilla/handlers" | "github.com/gorilla/handlers" | ||||
| "github.com/gorilla/mux" | "github.com/gorilla/mux" | ||||
| "github.com/gorilla/websocket" | |||||
| "github.com/segmentio/kafka-go" | "github.com/segmentio/kafka-go" | ||||
| ) | ) | ||||
| var upgrader = websocket.Upgrader{ | |||||
| ReadBufferSize: 1024, | |||||
| WriteBufferSize: 1024, | |||||
| } | |||||
| var _ io.Writer = (*os.File)(nil) | var _ io.Writer = (*os.File)(nil) | ||||
| var wg sync.WaitGroup | var wg sync.WaitGroup | ||||
| func main() { | func main() { | ||||
| cfg := config.Load() | cfg := config.Load() | ||||
| appState := appcontext.NewAppState() | appState := appcontext.NewAppState() | ||||
| kafkaManager := kafkaclient.InitKafkaManager() | |||||
| // Set logger -> terminal and log file | // Set logger -> terminal and log file | ||||
| slog.SetDefault(logger.CreateLogger("server.log")) | slog.SetDefault(logger.CreateLogger("server.log")) | ||||
| @@ -57,11 +52,9 @@ func main() { | |||||
| originsOk := handlers.AllowedOrigins([]string{"*"}) | originsOk := handlers.AllowedOrigins([]string{"*"}) | ||||
| methodsOk := handlers.AllowedMethods([]string{"GET", "HEAD", "POST", "PUT", "DELETE", "OPTIONS"}) | methodsOk := handlers.AllowedMethods([]string{"GET", "HEAD", "POST", "PUT", "DELETE", "OPTIONS"}) | ||||
| writer := appState.AddKafkaWriter(cfg.KafkaURL, "apibeacons") | |||||
| settingsWriter := appState.AddKafkaWriter(cfg.KafkaURL, "settings") | |||||
| alertWriter := appState.AddKafkaWriter(cfg.KafkaURL, "alert") | |||||
| parserWriter := appState.AddKafkaWriter(cfg.KafkaURL, "parser") | |||||
| mqttWriter := appState.AddKafkaWriter(cfg.KafkaURL, "mqtt") | |||||
| writerTopics := []string{"apibeacons", "alert", "mqtt", "settings", "parser"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "", writerTopics) | |||||
| slog.Info("Kafka writers topics: apibeacons, settings initialized") | slog.Info("Kafka writers topics: apibeacons, settings initialized") | ||||
| configFile, err := os.Open("/app/cmd/server/config.json") | configFile, err := os.Open("/app/cmd/server/config.json") | ||||
| @@ -86,25 +79,25 @@ func main() { | |||||
| Config: config, | Config: config, | ||||
| } | } | ||||
| if err := service.SendParserConfig(kp, parserWriter, ctx); err != nil { | |||||
| if err := service.SendParserConfig(kp, kafkaManager.GetWriter("parser"), ctx); err != nil { | |||||
| fmt.Printf("Unable to send parser config to kafka broker %v\n", err) | fmt.Printf("Unable to send parser config to kafka broker %v\n", err) | ||||
| } | } | ||||
| } | } | ||||
| if err := apiclient.UpdateDB(db, ctx, cfg, writer, appState); err != nil { | |||||
| if err := apiclient.UpdateDB(db, ctx, cfg, kafkaManager.GetWriter("apibeacons"), appState); err != nil { | |||||
| fmt.Printf("Error in getting token: %v\n", err) | fmt.Printf("Error in getting token: %v\n", err) | ||||
| } | } | ||||
| locationReader := appState.AddKafkaReader(cfg.KafkaURL, "locevents", "gid-loc-server") | |||||
| alertsReader := appState.AddKafkaReader(cfg.KafkaURL, "alertbeacons", "gid-alert-serv") | |||||
| readerTopics := []string{"locevents", "alertbeacons"} | |||||
| kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "server", readerTopics) | |||||
| slog.Info("Kafka readers topics: locevents, alertbeacons initialized") | slog.Info("Kafka readers topics: locevents, alertbeacons initialized") | ||||
| chLoc := make(chan model.HTTPLocation, 200) | chLoc := make(chan model.HTTPLocation, 200) | ||||
| chEvents := make(chan model.BeaconEvent, 500) | chEvents := make(chan model.BeaconEvent, 500) | ||||
| wg.Add(2) | wg.Add(2) | ||||
| go kafkaclient.Consume(locationReader, chLoc, ctx, &wg) | |||||
| go kafkaclient.Consume(alertsReader, chEvents, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("locevents"), chLoc, ctx, &wg) | |||||
| go kafkaclient.Consume(kafkaManager.GetReader("alertbeacons"), chEvents, ctx, &wg) | |||||
| r := mux.NewRouter() | r := mux.NewRouter() | ||||
| @@ -124,16 +117,16 @@ func main() { | |||||
| r.HandleFunc("/reslevis/updateTrackerZone", controller.TrackerZoneUpdateController(db)).Methods("PUT") | r.HandleFunc("/reslevis/updateTrackerZone", controller.TrackerZoneUpdateController(db)).Methods("PUT") | ||||
| r.HandleFunc("/reslevis/getTrackers", controller.TrackerList(db)).Methods("GET") | r.HandleFunc("/reslevis/getTrackers", controller.TrackerList(db)).Methods("GET") | ||||
| r.HandleFunc("/reslevis/postTracker", controller.TrackerAdd(db, writer, ctx)).Methods("POST") | |||||
| r.HandleFunc("/reslevis/removeTracker/{id}", controller.TrackerDelete(db, writer, ctx)).Methods("DELETE") | |||||
| r.HandleFunc("/reslevis/postTracker", controller.TrackerAdd(db, kafkaManager.GetWriter("apibeacons"), ctx)).Methods("POST") | |||||
| r.HandleFunc("/reslevis/removeTracker/{id}", controller.TrackerDelete(db, kafkaManager.GetWriter("apibeacons"), ctx)).Methods("DELETE") | |||||
| r.HandleFunc("/reslevis/updateTracker", controller.TrackerUpdate(db)).Methods("PUT") | r.HandleFunc("/reslevis/updateTracker", controller.TrackerUpdate(db)).Methods("PUT") | ||||
| r.HandleFunc("/configs/beacons", controller.ParserListController(db)).Methods("GET") | r.HandleFunc("/configs/beacons", controller.ParserListController(db)).Methods("GET") | ||||
| r.HandleFunc("/configs/beacons", controller.ParserAddController(db, parserWriter, ctx)).Methods("POST") | |||||
| r.HandleFunc("/configs/beacons/{id}", controller.ParserUpdateController(db, parserWriter, ctx)).Methods("PUT") | |||||
| r.HandleFunc("/configs/beacons/{id}", controller.ParserDeleteController(db, parserWriter, ctx)).Methods("DELETE") | |||||
| r.HandleFunc("/configs/beacons", controller.ParserAddController(db, kafkaManager.GetWriter("parser"), ctx)).Methods("POST") | |||||
| r.HandleFunc("/configs/beacons/{id}", controller.ParserUpdateController(db, kafkaManager.GetWriter("parser"), ctx)).Methods("PUT") | |||||
| r.HandleFunc("/configs/beacons/{id}", controller.ParserDeleteController(db, kafkaManager.GetWriter("parser"), ctx)).Methods("DELETE") | |||||
| r.HandleFunc("/reslevis/settings", controller.SettingsUpdateController(db, settingsWriter, ctx)).Methods("PATCH") | |||||
| r.HandleFunc("/reslevis/settings", controller.SettingsUpdateController(db, kafkaManager.GetWriter("settings"), ctx)).Methods("PATCH") | |||||
| r.HandleFunc("/reslevis/settings", controller.SettingsListController(db)).Methods("GET") | r.HandleFunc("/reslevis/settings", controller.SettingsListController(db)).Methods("GET") | ||||
| beaconTicker := time.NewTicker(2 * time.Second) | beaconTicker := time.NewTicker(2 * time.Second) | ||||
| @@ -156,7 +149,7 @@ eventLoop: | |||||
| case <-ctx.Done(): | case <-ctx.Done(): | ||||
| break eventLoop | break eventLoop | ||||
| case msg := <-chLoc: | case msg := <-chLoc: | ||||
| service.LocationToBeaconService(msg, db, alertWriter, ctx) | |||||
| service.LocationToBeaconService(msg, db, kafkaManager.GetWriter("alert"), ctx) | |||||
| case msg := <-chEvents: | case msg := <-chEvents: | ||||
| fmt.Printf("event: %+v\n", msg) | fmt.Printf("event: %+v\n", msg) | ||||
| id := msg.ID | id := msg.ID | ||||
| @@ -182,7 +175,7 @@ eventLoop: | |||||
| Value: eMsg, | Value: eMsg, | ||||
| } | } | ||||
| mqttWriter.WriteMessages(ctx, msg) | |||||
| kafkaManager.GetWriter("mqtt").WriteMessages(ctx, msg) | |||||
| } | } | ||||
| } | } | ||||
| @@ -196,8 +189,8 @@ eventLoop: | |||||
| wg.Wait() | wg.Wait() | ||||
| slog.Info("All go routines have stopped, Beggining to close Kafka connections\n") | slog.Info("All go routines have stopped, Beggining to close Kafka connections\n") | ||||
| appState.CleanKafkaReaders() | |||||
| appState.CleanKafkaWriters() | |||||
| kafkaManager.CleanKafkaReaders() | |||||
| kafkaManager.CleanKafkaWriters() | |||||
| slog.Info("All kafka clients shutdown, starting shutdown of valkey client") | slog.Info("All kafka clients shutdown, starting shutdown of valkey client") | ||||
| slog.Info("API server shutting down") | slog.Info("API server shutting down") | ||||
| @@ -1,86 +0,0 @@ | |||||
| package main | |||||
| import ( | |||||
| "bufio" | |||||
| "encoding/hex" | |||||
| "fmt" | |||||
| "log" | |||||
| "os" | |||||
| "strings" | |||||
| ) | |||||
| func main() { | |||||
| file, err := os.Open("save.txt") | |||||
| if err != nil { | |||||
| log.Fatalf("Failed to open file: %s", err) | |||||
| } | |||||
| defer file.Close() | |||||
| scanner := bufio.NewScanner(file) | |||||
| for scanner.Scan() { | |||||
| line := scanner.Text() | |||||
| decodeBeacon(line) | |||||
| } | |||||
| } | |||||
| func decodeBeacon(beacon string) { | |||||
| beacon = strings.TrimSpace(beacon) | |||||
| if beacon == "" { | |||||
| return | |||||
| } | |||||
| // convert to bytes for faster operations | |||||
| b, err := hex.DecodeString(beacon) | |||||
| if err != nil { | |||||
| fmt.Println("invalid line: ", beacon) | |||||
| return | |||||
| } | |||||
| // remove flag bytes - they hold no structural information | |||||
| if len(b) > 1 && b[1] == 0x01 { | |||||
| l := int(b[0]) | |||||
| if 1+l <= len(b) { | |||||
| b = b[1+l:] | |||||
| } | |||||
| } | |||||
| adBlockIndeces := parseADFast(b) | |||||
| for _, r := range adBlockIndeces { | |||||
| ad := b[r[0]:r[1]] | |||||
| if len(ad) >= 4 && | |||||
| ad[1] == 0x16 && | |||||
| ad[2] == 0xAA && | |||||
| ad[3] == 0xFE { | |||||
| // fmt.Println("Eddystone:", hex.EncodeToString(b)) | |||||
| return | |||||
| } | |||||
| if len(ad) >= 7 && | |||||
| ad[1] == 0xFF && | |||||
| ad[2] == 0x4C && ad[3] == 0x00 && | |||||
| ad[4] == 0x02 && ad[5] == 0x15 { | |||||
| // fmt.Println("iBeacon:", hex.EncodeToString(b)) | |||||
| return | |||||
| } | |||||
| } | |||||
| fmt.Println(hex.EncodeToString(b)) | |||||
| } | |||||
| func parseADFast(b []byte) [][2]int { | |||||
| var res [][2]int | |||||
| i := 0 | |||||
| for i < len(b) { | |||||
| l := int(b[i]) | |||||
| if l == 0 || i+1+l > len(b) { | |||||
| break | |||||
| } | |||||
| res = append(res, [2]int{i, i + 1 + l}) | |||||
| i += 1 + l | |||||
| } | |||||
| return res | |||||
| } | |||||
| @@ -1,161 +0,0 @@ | |||||
| package main | |||||
| import ( | |||||
| "context" | |||||
| "crypto/tls" | |||||
| "encoding/json" | |||||
| "fmt" | |||||
| "net/http" | |||||
| "net/url" | |||||
| "strings" | |||||
| "github.com/AFASystems/presence/internal/pkg/model" | |||||
| ) | |||||
| type response struct { | |||||
| A string `json:"access_token"` | |||||
| } | |||||
| func main() { | |||||
| ctx := context.Background() | |||||
| tr := &http.Transport{ | |||||
| TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, | |||||
| } | |||||
| client := &http.Client{Transport: tr} | |||||
| formData := url.Values{} | |||||
| formData.Set("grant_type", "password") | |||||
| formData.Set("client_id", "Fastapi") | |||||
| formData.Set("client_secret", "wojuoB7Z5xhlPFrF2lIxJSSdVHCApEgC") | |||||
| formData.Set("username", "core") | |||||
| formData.Set("password", "C0r3_us3r_Cr3d3nt14ls") | |||||
| formData.Set("audience", "Fastapi") | |||||
| req, err := http.NewRequest("POST", "https://10.251.0.30:10002/realms/API.Server.local/protocol/openid-connect/token", strings.NewReader(formData.Encode())) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| req.Header.Add("Content-Type", "application/x-www-form-urlencoded") | |||||
| req = req.WithContext(ctx) | |||||
| res, err := client.Do(req) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| var j response | |||||
| err = json.NewDecoder(res.Body).Decode(&j) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| token := j.A | |||||
| trackers, err := GetTrackers(token, client) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| fmt.Printf("trackers: %+v\n", trackers) | |||||
| gateways, err := getGateways(token, client) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| fmt.Printf("gateways: %+v\n", gateways) | |||||
| zones, err := GetZones(token, client) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| fmt.Printf("zones: %+v\n", zones) | |||||
| trackerZones, err := GetTrackerZones(token, client) | |||||
| if err != nil { | |||||
| panic(err) | |||||
| } | |||||
| fmt.Printf("tracker zones: %+v\n", trackerZones) | |||||
| } | |||||
| func GetTrackers(token string, client *http.Client) ([]model.Tracker, error) { | |||||
| res, err := getRequest(token, "getTrackers", client) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| var i []model.Tracker | |||||
| err = json.NewDecoder(res.Body).Decode(&i) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return i, nil | |||||
| } | |||||
| func getGateways(token string, client *http.Client) ([]model.Gateway, error) { | |||||
| res, err := getRequest(token, "getGateways", client) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| var i []model.Gateway | |||||
| err = json.NewDecoder(res.Body).Decode(&i) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return i, nil | |||||
| } | |||||
| func GetTrackerZones(token string, client *http.Client) ([]model.TrackerZones, error) { | |||||
| res, err := getRequest(token, "getTrackerZones", client) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| var i []model.TrackerZones | |||||
| err = json.NewDecoder(res.Body).Decode(&i) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return i, nil | |||||
| } | |||||
| func GetZones(token string, client *http.Client) ([]model.Zone, error) { | |||||
| res, err := getRequest(token, "getZones", client) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| var i []model.Zone | |||||
| err = json.NewDecoder(res.Body).Decode(&i) | |||||
| if err != nil { | |||||
| return nil, err | |||||
| } | |||||
| return i, nil | |||||
| } | |||||
// getRequest performs an authenticated GET against the given reslevis API
// route and returns the raw response. The caller is responsible for closing
// res.Body. NOTE(review): the host is hard-coded — confirm this is intended.
func getRequest(token, route string, client *http.Client) (*http.Response, error) {
	endpoint := fmt.Sprintf("https://10.251.0.30:5050/reslevis/%s", route)
	req, err := http.NewRequest("GET", endpoint, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Add("Authorization", "Bearer "+token)
	return client.Do(req)
}
| @@ -1,131 +0,0 @@ | |||||
| package main | |||||
| import ( | |||||
| "context" | |||||
| "encoding/json" | |||||
| "fmt" | |||||
| "reflect" | |||||
| "github.com/redis/go-redis/v9" | |||||
| ) | |||||
// Per is a small sample struct used to exercise JSON round-tripping.
type Per struct {
	Name string `json:"name"`
	Age  int    `json:"age"`
}
// Beacon is a sample beacon record used to exercise JSON round-tripping.
type Beacon struct {
	ID   string `json:"id"` // Use JSON tags to ensure correct field names
	Type string `json:"type"`
	Temp int    `json:"temp"`
	Name string `json:"name"`
}
// ConvertStructToMap converts an arbitrary value into a map[string]any by
// round-tripping it through JSON, so the value's json struct tags decide the
// resulting keys. Returns an error if the value cannot be marshaled or does
// not decode into a JSON object.
func ConvertStructToMap(obj any) (map[string]any, error) {
	encoded, err := json.Marshal(obj)
	if err != nil {
		return nil, err
	}
	result := make(map[string]any)
	if err := json.Unmarshal(encoded, &result); err != nil {
		return nil, err
	}
	return result, nil
}
| // func main() { ... } | |||||
| // client.HSet(ctx, "beacon:123", resultMap).Err() | |||||
| func main() { | |||||
| client := redis.NewClient(&redis.Options{ | |||||
| Addr: "127.0.0.1:6379", | |||||
| Password: "", | |||||
| }) | |||||
| ctx := context.Background() | |||||
| err := client.Set(ctx, "testkey", "hello world", 0).Err() | |||||
| if err != nil { | |||||
| fmt.Println("Ok") | |||||
| } | |||||
| val, err := client.Get(ctx, "testkey").Result() | |||||
| if err != nil { | |||||
| fmt.Println("Ok") | |||||
| } | |||||
| fmt.Println(val) | |||||
| err = client.SAdd(ctx, "myset", "b-1").Err() | |||||
| if err != nil { | |||||
| fmt.Println(err) | |||||
| } | |||||
| res, err := client.SMembers(ctx, "myset").Result() | |||||
| if err != nil { | |||||
| fmt.Println(err) | |||||
| } | |||||
| fmt.Println("res1: ", res) | |||||
| err = client.SAdd(ctx, "myset", "b-2").Err() | |||||
| if err != nil { | |||||
| fmt.Println(err) | |||||
| } | |||||
| res, err = client.SMembers(ctx, "myset").Result() | |||||
| if err != nil { | |||||
| fmt.Println(err) | |||||
| } | |||||
| fmt.Println("res1: ", res) | |||||
| err = client.SAdd(ctx, "myset", "b-1").Err() | |||||
| if err != nil { | |||||
| fmt.Println(err) | |||||
| } | |||||
| res, err = client.SMembers(ctx, "myset").Result() | |||||
| if err != nil { | |||||
| fmt.Println(err) | |||||
| } | |||||
| fmt.Println("res1: ", res) | |||||
| fmt.Println("type: ", reflect.TypeOf(res)) | |||||
| // b := Beacon{ | |||||
| // ID: "hello", | |||||
| // Type: "node", | |||||
| // Temp: 10, | |||||
| // Name: "Peter", | |||||
| // } | |||||
| // per := Per{ | |||||
| // Name: "Janez", | |||||
| // Age: 10, | |||||
| // } | |||||
| // bEncoded, err := ConvertStructToMap(b) | |||||
| // if err != nil { | |||||
| // fmt.Print("error\n") | |||||
| // } | |||||
| // perEncoded, err := ConvertStructToMap(per) | |||||
| // if err != nil { | |||||
| // fmt.Print("error\n") | |||||
| // } | |||||
| // err = client.HSet(ctx, "myhash", bEncoded).Err() | |||||
| // fmt.Println(err) | |||||
| // res, _ := client.HGetAll(ctx, "myhash").Result() | |||||
| // fmt.Println(res) | |||||
| // err = client.HSet(ctx, "myhash", perEncoded).Err() | |||||
| // fmt.Println(err) | |||||
| // res, _ = client.HGetAll(ctx, "myhash").Result() | |||||
| // fmt.Println(res) | |||||
| } | |||||
| @@ -2,25 +2,18 @@ package appcontext | |||||
| import ( | import ( | ||||
| "fmt" | "fmt" | ||||
| "maps" | |||||
| "strings" | |||||
| "time" | |||||
| "github.com/AFASystems/presence/internal/pkg/model" | "github.com/AFASystems/presence/internal/pkg/model" | ||||
| "github.com/mitchellh/mapstructure" | "github.com/mitchellh/mapstructure" | ||||
| "github.com/segmentio/kafka-go" | |||||
| ) | ) | ||||
| // AppState provides centralized access to application state | // AppState provides centralized access to application state | ||||
type AppState struct {
	beacons       model.BeaconsList     // tracked beacons, guarded by its own lock
	httpResults   model.HTTPResultList  // results of HTTP pushes
	settings      model.Settings        // application settings
	beaconEvents  model.BeaconEventList // per-beacon event state
	beaconsLookup map[string]string     // beacon ID -> value lookup; NOTE(review): accessed without a lock — confirm single-goroutine use
}
| // NewAppState creates a new application context AppState with default values | // NewAppState creates a new application context AppState with default values | ||||
| @@ -47,72 +40,7 @@ func NewAppState() *AppState { | |||||
| Beacons: make(map[string]model.BeaconEvent), | Beacons: make(map[string]model.BeaconEvent), | ||||
| }, | }, | ||||
| beaconsLookup: make(map[string]string), | beaconsLookup: make(map[string]string), | ||||
| latestList: model.LatestBeaconsList{ | |||||
| LatestList: make(map[string]model.Beacon), | |||||
| }, | |||||
| kafkaReadersList: model.KafkaReadersList{ | |||||
| KafkaReaders: make([]*kafka.Reader, 0), | |||||
| }, | |||||
| kafkaWritersList: model.KafkaWritersList{ | |||||
| KafkaWriters: make([]*kafka.Writer, 0), | |||||
| }, | |||||
| } | |||||
| } | |||||
| func (m *AppState) AddKafkaWriter(kafkaUrl, topic string) *kafka.Writer { | |||||
| kafkaWriter := &kafka.Writer{ | |||||
| Addr: kafka.TCP(kafkaUrl), | |||||
| Topic: topic, | |||||
| Balancer: &kafka.LeastBytes{}, | |||||
| Async: false, | |||||
| RequiredAcks: kafka.RequireAll, | |||||
| BatchSize: 100, | |||||
| BatchTimeout: 10 * time.Millisecond, | |||||
| } | |||||
| m.kafkaWritersList.KafkaWritersLock.Lock() | |||||
| m.kafkaWritersList.KafkaWriters = append(m.kafkaWritersList.KafkaWriters, kafkaWriter) | |||||
| m.kafkaWritersList.KafkaWritersLock.Unlock() | |||||
| return kafkaWriter | |||||
| } | |||||
| func (m *AppState) CleanKafkaWriters() { | |||||
| fmt.Println("shutdown of kafka readers starts") | |||||
| for _, r := range m.kafkaWritersList.KafkaWriters { | |||||
| if err := r.Close(); err != nil { | |||||
| fmt.Printf("Error in closing kafka writer %v", err) | |||||
| } | |||||
| } | |||||
| fmt.Println("Kafka writers graceful shutdown complete") | |||||
| } | |||||
| func (m *AppState) AddKafkaReader(kafkaUrl, topic, groupID string) *kafka.Reader { | |||||
| brokers := strings.Split(kafkaUrl, ",") | |||||
| kafkaReader := kafka.NewReader(kafka.ReaderConfig{ | |||||
| Brokers: brokers, | |||||
| GroupID: groupID, | |||||
| Topic: topic, | |||||
| MinBytes: 1, | |||||
| MaxBytes: 10e6, | |||||
| }) | |||||
| m.kafkaReadersList.KafkaReadersLock.Lock() | |||||
| m.kafkaReadersList.KafkaReaders = append(m.kafkaReadersList.KafkaReaders, kafkaReader) | |||||
| m.kafkaReadersList.KafkaReadersLock.Unlock() | |||||
| return kafkaReader | |||||
| } | |||||
| func (m *AppState) CleanKafkaReaders() { | |||||
| for _, r := range m.kafkaReadersList.KafkaReaders { | |||||
| if err := r.Close(); err != nil { | |||||
| fmt.Printf("Error in closing kafka reader %v", err) | |||||
| } | |||||
| } | } | ||||
| fmt.Println("Kafka readers graceful shutdown complete") | |||||
| } | } | ||||
| // GetBeacons returns thread-safe access to beacons list | // GetBeacons returns thread-safe access to beacons list | ||||
| @@ -135,11 +63,6 @@ func (m *AppState) GetBeaconsLookup() map[string]string { | |||||
| return m.beaconsLookup | return m.beaconsLookup | ||||
| } | } | ||||
| // GetLatestList returns thread-safe access to latest beacons list | |||||
| func (m *AppState) GetLatestList() *model.LatestBeaconsList { | |||||
| return &m.latestList | |||||
| } | |||||
| // AddBeaconToLookup adds a beacon ID to the lookup map | // AddBeaconToLookup adds a beacon ID to the lookup map | ||||
| func (m *AppState) AddBeaconToLookup(id, value string) { | func (m *AppState) AddBeaconToLookup(id, value string) { | ||||
| m.beaconsLookup[id] = value | m.beaconsLookup[id] = value | ||||
| @@ -223,23 +146,6 @@ func (m *AppState) UpdateBeaconEvent(id string, event model.BeaconEvent) { | |||||
| m.beaconEvents.Beacons[id] = event | m.beaconEvents.Beacons[id] = event | ||||
| } | } | ||||
| // GetLatestBeacon returns the latest beacon by ID (thread-safe) | |||||
| func (m *AppState) GetLatestBeacon(id string) (model.Beacon, bool) { | |||||
| m.latestList.Lock.RLock() | |||||
| defer m.latestList.Lock.RUnlock() | |||||
| beacon, exists := m.latestList.LatestList[id] | |||||
| return beacon, exists | |||||
| } | |||||
| // UpdateLatestBeacon updates the latest beacon in the list (thread-safe) | |||||
| func (m *AppState) UpdateLatestBeacon(id string, beacon model.Beacon) { | |||||
| m.latestList.Lock.Lock() | |||||
| defer m.latestList.Lock.Unlock() | |||||
| m.latestList.LatestList[id] = beacon | |||||
| } | |||||
| // GetAllBeacons returns a copy of all beacons | // GetAllBeacons returns a copy of all beacons | ||||
| func (m *AppState) GetAllBeacons() map[string]model.Beacon { | func (m *AppState) GetAllBeacons() map[string]model.Beacon { | ||||
| m.beacons.Lock.RLock() | m.beacons.Lock.RLock() | ||||
| @@ -264,16 +170,6 @@ func (m *AppState) GetAllHttpResults() map[string]model.HTTPResult { | |||||
| return beacons | return beacons | ||||
| } | } | ||||
| // GetAllLatestBeacons returns a copy of all latest beacons | |||||
| func (m *AppState) GetAllLatestBeacons() map[string]model.Beacon { | |||||
| m.latestList.Lock.RLock() | |||||
| defer m.latestList.Lock.RUnlock() | |||||
| beacons := make(map[string]model.Beacon) | |||||
| maps.Copy(beacons, m.latestList.LatestList) | |||||
| return beacons | |||||
| } | |||||
| // GetBeaconCount returns the number of tracked beacons | // GetBeaconCount returns the number of tracked beacons | ||||
| func (m *AppState) GetBeaconCount() int { | func (m *AppState) GetBeaconCount() int { | ||||
| m.beacons.Lock.RLock() | m.beacons.Lock.RLock() | ||||
| @@ -1,21 +0,0 @@ | |||||
| package kafkaclient | |||||
| import ( | |||||
| "strings" | |||||
| "github.com/segmentio/kafka-go" | |||||
| ) | |||||
| // Create Kafka reader | |||||
| // | |||||
| // Deprecated: Use context manager object instead | |||||
| func KafkaReader(kafkaURL, topic, groupID string) *kafka.Reader { | |||||
| brokers := strings.Split(kafkaURL, ",") | |||||
| return kafka.NewReader(kafka.ReaderConfig{ | |||||
| Brokers: brokers, | |||||
| GroupID: groupID, | |||||
| Topic: topic, | |||||
| MinBytes: 1, | |||||
| MaxBytes: 10e6, | |||||
| }) | |||||
| } | |||||
| @@ -1,22 +0,0 @@ | |||||
| package kafkaclient | |||||
| import ( | |||||
| "time" | |||||
| "github.com/segmentio/kafka-go" | |||||
| ) | |||||
| // Create Kafka writer | |||||
| // | |||||
| // Deprecated: Use context manager object instead | |||||
| func KafkaWriter(kafkaURL, topic string) *kafka.Writer { | |||||
| return &kafka.Writer{ | |||||
| Addr: kafka.TCP(kafkaURL), | |||||
| Topic: topic, | |||||
| Balancer: &kafka.LeastBytes{}, | |||||
| Async: false, | |||||
| RequiredAcks: kafka.RequireAll, | |||||
| BatchSize: 100, | |||||
| BatchTimeout: 10 * time.Millisecond, | |||||
| } | |||||
| } | |||||
| @@ -2,8 +2,6 @@ package model | |||||
| import ( | import ( | ||||
| "sync" | "sync" | ||||
| "github.com/segmentio/kafka-go" | |||||
| ) | ) | ||||
| // BeaconAdvertisement represents the JSON payload received from beacon advertisements. | // BeaconAdvertisement represents the JSON payload received from beacon advertisements. | ||||
| @@ -161,16 +159,6 @@ type ApiUpdate struct { | |||||
| MAC string | MAC string | ||||
| } | } | ||||
// KafkaReadersList holds every Kafka reader created by the application so
// they can be closed together on shutdown; the lock guards the slice.
type KafkaReadersList struct {
	KafkaReadersLock sync.RWMutex
	KafkaReaders     []*kafka.Reader
}
// KafkaWritersList holds every Kafka writer created by the application so
// they can be closed together on shutdown; the lock guards the slice.
type KafkaWritersList struct {
	KafkaWritersLock sync.RWMutex
	KafkaWriters     []*kafka.Writer
}
| type Alert struct { | type Alert struct { | ||||
| ID string `json:"id"` // tracker id | ID string `json:"id"` // tracker id | ||||
| Type string `json:"type"` // type of alert | Type string `json:"type"` // type of alert | ||||