package main

import (
	"context"
	"encoding/json"
	"io"
	"log"

	"saasems/database"
	"saasems/services"

	"github.com/segmentio/kafka-go"
)

// KafkaProcessor consumes raw messages from an input topic, runs them
// through services.ProcessData, persists the result, and republishes the
// processed payload to an output topic.
type KafkaProcessor struct {
	reader     *kafka.Reader        // consumer for the input topic (consumer group "data-processor")
	writer     *kafka.Writer        // producer for the output topic
	postgresDB *database.PostgresDB // relational store for processed records
	influxDB   *database.InfluxDB   // time-series store (write path not yet implemented in Start)
}

// NewKafkaProcessor wires up a processor that reads from inputTopic and
// writes to outputTopic on the given brokers, storing results via the
// supplied database handles.
func NewKafkaProcessor(brokers []string, inputTopic string, outputTopic string,
	postgresDB *database.PostgresDB, influxDB *database.InfluxDB) *KafkaProcessor {
	// Consumer: part of the "data-processor" group so partitions are
	// balanced across replicas of this service.
	consumer := kafka.NewReader(kafka.ReaderConfig{
		Brokers:  brokers,
		Topic:    inputTopic,
		GroupID:  "data-processor",
		MinBytes: 10e3,
		MaxBytes: 10e6,
	})

	// Producer: LeastBytes balancer routes each message to the partition
	// that has received the fewest bytes so far.
	producer := &kafka.Writer{
		Addr:     kafka.TCP(brokers...),
		Topic:    outputTopic,
		Balancer: &kafka.LeastBytes{},
	}

	return &KafkaProcessor{
		reader:     consumer,
		writer:     producer,
		postgresDB: postgresDB,
		influxDB:   influxDB,
	}
}

// Start runs the consume→process→store→produce loop until the underlying
// reader is closed. Per-message failures (bad JSON, storage or publish
// errors) are logged and the loop moves on to the next message.
//
// Fix over the original: when the reader is closed, ReadMessage returns
// io.EOF permanently; the old code hit `continue` and busy-spun, logging
// the same error forever. We now return on io.EOF so Start terminates
// cleanly on shutdown.
func (kp *KafkaProcessor) Start() {
	for {
		msg, err := kp.reader.ReadMessage(context.Background())
		if err != nil {
			if err == io.EOF {
				// Reader was closed — exit instead of spinning.
				log.Println("Kafka reader closed, stopping processor")
				return
			}
			log.Printf("Error reading from Kafka: %v", err)
			continue
		}

		var data map[string]interface{}
		if err := json.Unmarshal(msg.Value, &data); err != nil {
			log.Printf("Error parsing Kafka message: %v", err)
			continue
		}

		// Transform the raw payload.
		processedData := services.ProcessData(data)

		// Persist to PostgreSQL. A storage failure is logged but does not
		// stop the message from being republished below.
		if err := kp.postgresDB.GetDB().Create(processedData).Error; err != nil {
			log.Printf("Error storing data to PostgreSQL: %v", err)
		}

		// Persist to InfluxDB.
		// TODO: InfluxDB write path is not implemented yet.

		// Republish the processed record, preserving the original key so
		// downstream partitioning stays stable.
		jsonData, err := json.Marshal(processedData)
		if err != nil {
			log.Printf("Error marshaling processed data: %v", err)
			continue
		}

		err = kp.writer.WriteMessages(context.Background(),
			kafka.Message{
				Key:   msg.Key,
				Value: jsonData,
			},
		)
		if err != nil {
			log.Printf("Error writing to Kafka: %v", err)
		}
	}
}
