Du kannst nicht mehr als 25 Themen auswählen Themen müssen entweder mit einem Buchstaben oder einer Ziffer beginnen. Sie können Bindestriche („-“) enthalten und bis zu 35 Zeichen lang sein.
 
 
 
 

111 Zeilen
3.4 KiB

  1. package decoder
  2. import (
  3. "context"
  4. "log/slog"
  5. "sync"
  6. "time"
  7. "github.com/AFASystems/presence/internal/pkg/common/appcontext"
  8. "github.com/AFASystems/presence/internal/pkg/config"
  9. "github.com/AFASystems/presence/internal/pkg/decoder"
  10. "github.com/AFASystems/presence/internal/pkg/kafkaclient"
  11. "github.com/AFASystems/presence/internal/pkg/logger"
  12. "github.com/AFASystems/presence/internal/pkg/model"
  13. "github.com/segmentio/kafka-go"
  14. )
// DecoderApp holds dependencies for the decoder service.
//
// It is constructed by New and driven by Run until the supplied context is
// cancelled, after which Shutdown releases Kafka readers/writers and the
// logger. The zero value is not usable; always go through New.
type DecoderApp struct {
	// Cfg is the service configuration (Kafka URL, channel sizes, intervals).
	Cfg *config.Config
	// KafkaManager owns the topic readers ("rawbeacons", "parser") and
	// writers ("alertbeacons", "healthdecoder").
	KafkaManager *kafkaclient.KafkaManager
	// AppState provides shared application state, including health reporting.
	AppState *appcontext.AppState
	// ParserRegistry maps parser names to their beacon-parser implementations.
	ParserRegistry *model.ParserRegistry
	// ChRaw receives raw beacon advertisements consumed from "rawbeacons".
	ChRaw chan appcontext.BeaconAdvertisement
	// ChParser receives add/update/delete parser commands from "parser".
	ChParser chan model.KafkaParser
	// Cleanup flushes/closes the service logger; invoked by Shutdown.
	Cleanup func()
	// wg tracks the two Consume goroutines started by Run.
	wg sync.WaitGroup
}
  26. // New creates a DecoderApp with Kafka readers (rawbeacons, parser) and writer (alertbeacons).
  27. func New(cfg *config.Config) (*DecoderApp, error) {
  28. appState := appcontext.NewAppState()
  29. kafkaManager := kafkaclient.InitKafkaManager()
  30. srvLogger, cleanup := logger.CreateLogger("decoder.log")
  31. slog.SetDefault(srvLogger)
  32. readerTopics := []string{"rawbeacons", "parser"}
  33. writerTopics := []string{"alertbeacons", "healthdecoder"}
  34. kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "decoder", readerTopics)
  35. kafkaManager.PopulateKafkaManager(cfg.KafkaURL, "", writerTopics)
  36. slog.Info("decoder service initialized", "readers", readerTopics, "writers", writerTopics)
  37. registry := &model.ParserRegistry{
  38. ParserList: make(map[string]model.BeaconParser),
  39. }
  40. return &DecoderApp{
  41. Cfg: cfg,
  42. KafkaManager: kafkaManager,
  43. AppState: appState,
  44. ParserRegistry: registry,
  45. ChRaw: make(chan appcontext.BeaconAdvertisement, config.LARGE_CHANNEL_SIZE),
  46. ChParser: make(chan model.KafkaParser, config.SMALL_CHANNEL_SIZE),
  47. Cleanup: cleanup,
  48. }, nil
  49. }
  50. // Run starts Kafka consumers and the event loop until ctx is cancelled.
  51. func (a *DecoderApp) Run(ctx context.Context) {
  52. a.wg.Add(2)
  53. go kafkaclient.Consume(a.KafkaManager.GetReader("rawbeacons"), a.ChRaw, ctx, &a.wg)
  54. go kafkaclient.Consume(a.KafkaManager.GetReader("parser"), a.ChParser, ctx, &a.wg)
  55. healthTicker := time.NewTicker(config.LARGE_TICKER_INTERVAL)
  56. defer healthTicker.Stop()
  57. for {
  58. select {
  59. case <-ctx.Done():
  60. return
  61. case <-healthTicker.C:
  62. health, err := a.AppState.GetDecoderHealth(a.KafkaManager)
  63. slog.Info("decoder health", "health", string(health))
  64. if err != nil {
  65. slog.Error("getting decoder health", "err", err)
  66. continue
  67. }
  68. m := kafka.Message{
  69. Value: health,
  70. }
  71. if err := kafkaclient.Write(ctx, a.KafkaManager.GetWriter("healthdecoder"), m); err != nil {
  72. slog.Error("writing decoder health", "err", err)
  73. continue
  74. }
  75. case msg := <-a.ChRaw:
  76. decoder.ProcessIncoming(msg, a.AppState, a.KafkaManager.GetWriter("alertbeacons"), a.ParserRegistry)
  77. case msg := <-a.ChParser:
  78. switch msg.ID {
  79. case "add":
  80. a.ParserRegistry.Register(msg.Config.Name, msg.Config)
  81. case "delete":
  82. a.ParserRegistry.Unregister(msg.Name)
  83. case "update":
  84. a.ParserRegistry.Register(msg.Config.Name, msg.Config)
  85. }
  86. }
  87. }
  88. }
  89. // Shutdown waits for consumers and cleans up.
  90. func (a *DecoderApp) Shutdown() {
  91. a.wg.Wait()
  92. a.KafkaManager.CleanKafkaReaders()
  93. a.KafkaManager.CleanKafkaWriters()
  94. if a.Cleanup != nil {
  95. a.Cleanup()
  96. }
  97. slog.Info("decoder service shutdown complete")
  98. }