package kafka

import (
	"context"
	"encoding/json"
	"saasems/database"
	"saasems/services"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"

	"log"
	"time"

	"github.com/segmentio/kafka-go"
	"gorm.io/gorm"
)

// KafkaProcessor consumes messages from an input Kafka topic, routes each
// message to tenant-specific PostgreSQL and InfluxDB databases, and
// republishes the processed payload to an output topic.
type KafkaProcessor struct {
	reader            *kafka.Reader                         // consumer for the input topic (group "data-processor")
	writer            *kafka.Writer                         // producer for the output topic
	postgresDBFactory func(string) *database.PostgresDB     // resolves a tenant ID to its PostgreSQL handle
	influxDBFactory   func(string) *database.InfluxDB       // resolves a tenant ID to its InfluxDB handle
}

// LogEntry is a processing/audit record persisted via GORM. Entries are
// produced by Start on error paths and drained by processLogs.
type LogEntry struct {
	gorm.Model
	Timestamp   time.Time // when the event was recorded (distinct from gorm.Model.CreatedAt)
	TenantID    string    // tenant the message belonged to; empty for errors raised before parsing
	MessageType string    // event category, e.g. "kafka_read_error", "json_parse_error"
	// Raw message payload. NOTE(review): GORM does not marshal map types to
	// jsonb out of the box — confirm a serializer is registered, or this
	// column may fail to round-trip.
	Data  map[string]interface{} `gorm:"type:jsonb"`
	Error string // error detail; empty on success
}

// logChannel carries log entries from Start to processLogs for asynchronous
// persistence. It must be initialized: the previous bare declaration left it
// nil, and a send on a nil channel blocks forever, deadlocking Start on its
// first error. The buffer keeps a slow log database from stalling message
// processing.
var logChannel = make(chan LogEntry, 100)

// NewKafkaProcessor wires up a consumer for inputTopic and a producer for
// outputTopic against the given brokers, along with factories that resolve
// per-tenant PostgreSQL and InfluxDB handles.
func NewKafkaProcessor(brokers []string, inputTopic string, outputTopic string,
	postgresDBFactory func(string) *database.PostgresDB,
	influxDBFactory func(string) *database.InfluxDB) *KafkaProcessor {
	reader := kafka.NewReader(kafka.ReaderConfig{
		Brokers:  brokers,
		Topic:    inputTopic,
		GroupID:  "data-processor",
		MinBytes: 10e3, // 10 KB
		MaxBytes: 10e6, // 10 MB
	})
	writer := &kafka.Writer{
		Addr:     kafka.TCP(brokers...),
		Topic:    outputTopic,
		Balancer: &kafka.LeastBytes{},
	}
	return &KafkaProcessor{
		reader:            reader,
		writer:            writer,
		postgresDBFactory: postgresDBFactory,
		influxDBFactory:   influxDBFactory,
	}
}

// Start runs the processing loop forever: read a message, parse it, resolve
// the tenant's databases, process the payload, persist it to PostgreSQL and
// InfluxDB, then republish to the output topic. Errors are logged (and sent
// on logChannel) and the loop continues with the next message.
//
// NOTE(review): logChannel is declared but never initialized in this file;
// sending on a nil channel blocks forever — confirm it is made (and drained
// by processLogs) before Start runs.
func (kp *KafkaProcessor) Start() {
	for {
		msg, err := kp.reader.ReadMessage(context.Background())
		if err != nil {
			logChannel <- LogEntry{
				Timestamp:   time.Now(),
				MessageType: "kafka_read_error",
				Error:       err.Error(),
			}
			log.Printf("Error reading from Kafka: %v", err)
			continue
		}

		var data map[string]interface{}
		if err := json.Unmarshal(msg.Value, &data); err != nil {
			logChannel <- LogEntry{
				Timestamp:   time.Now(),
				MessageType: "json_parse_error",
				Error:       err.Error(),
			}
			log.Printf("Error parsing Kafka message: %v", err)
			continue
		}

		// Extract the tenant ID; messages without one are logged and skipped.
		tenantID, ok := data["tenant_id"].(string)
		if !ok {
			logChannel <- LogEntry{
				Timestamp:   time.Now(),
				MessageType: "missing_tenant_id",
				Error:       "tenant_id not found in message",
			}
			log.Println("Missing tenant_id in message")
			continue
		}

		// Resolve tenant-specific database connections.
		postgresDB := kp.postgresDBFactory(tenantID)
		influxDB := kp.influxDBFactory(tenantID)

		// Process the payload.
		processedData := services.ProcessData(data)

		// Store to the tenant's PostgreSQL database.
		if err := postgresDB.GetDB().Create(processedData).Error; err != nil {
			log.Printf("Error storing data to PostgreSQL: %v", err)
		}

		// Store to the tenant's InfluxDB database.
		// NOTE(review): the point carries no explicit timestamp, and WritePoint's
		// outcome is unchecked (errors surface only on the async error channel) —
		// confirm this fire-and-forget behavior is intended.
		writeAPI := influxDB.GetDB()
		writeAPI.WritePoint(influxdb2.NewPointWithMeasurement("device_data").
			AddTag("tenant_id", tenantID).
			AddField("data", processedData))
		writeAPI.Flush()

		// Republish the processed data to the output Kafka topic, preserving
		// the original message key (keeps per-key partition ordering).
		jsonData, err := json.Marshal(processedData)
		if err != nil {
			log.Printf("Error marshaling processed data: %v", err)
			continue
		}

		err = kp.writer.WriteMessages(context.Background(),
			kafka.Message{
				Key:   msg.Key,
				Value: jsonData,
			},
		)
		if err != nil {
			log.Printf("Error writing to Kafka: %v", err)
		}
	}
}

func processLogs() {
	for entry := range logChannel {
		// 将日志存储到数据库
		db := database.GetDB() // Changed from GetLogDB to GetDB
		if err := db.Create(&entry).Error; err != nil {
			log.Printf("Error saving log entry: %v", err)
		}
	}
}


// QueryLogs returns every log entry for the given tenant whose timestamp
// falls within [startTime, endTime] (BETWEEN is inclusive on both ends).
func QueryLogs(tenantID string, startTime, endTime time.Time) ([]LogEntry, error) {
	var entries []LogEntry
	result := database.GetDB().
		Where("tenant_id = ? AND timestamp BETWEEN ? AND ?", tenantID, startTime, endTime).
		Find(&entries)
	return entries, result.Error
}
