package kafka

import (
	"context"
	"errors"
	"fmt"
	"io"
	"time"

	"gitee.com/xwengf/go-event/v2/event"

	"github.com/segmentio/kafka-go"
	"github.com/segmentio/kafka-go/sasl/plain"
)

// Compile-time checks that the concrete types satisfy the event
// interfaces; these fail the build if a method set drifts.
var (
	_ event.Sender   = (*kafkaSender)(nil)
	_ event.Receiver = (*kafkaReceiver)(nil)
	_ event.Event    = (*Message)(nil)
)

// Message is the Kafka-backed implementation of event.Event: an
// immutable key/value pair carried through the broker.
type Message struct {
	key   string
	value []byte
}

// Key returns the message key used for partitioning.
func (m *Message) Key() string { return m.key }

// Value returns the raw message payload.
func (m *Message) Value() []byte { return m.value }

// NewMessage wraps a key/value pair in an event.Event suitable for
// passing to kafkaSender.Send.
func NewMessage(key string, value []byte) event.Event {
	m := &Message{key: key, value: value}
	return m
}

// kafkaSender publishes events through a shared kafka.Writer.
type kafkaSender struct {
	writer *kafka.Writer
	// NOTE(review): topic is never read or written anywhere in this
	// file — presumably dead; confirm no other file in the package
	// uses it before removing.
	topic  string
}

// Send publishes message to the given topic, blocking until the write
// is acknowledged (per the writer's configuration) or ctx is canceled.
// The event's key becomes the Kafka message key and drives partitioning
// via the writer's balancer.
func (s *kafkaSender) Send(ctx context.Context, topic string, message event.Event) error {
	// WriteMessages already returns nil on success, so the result can
	// be returned directly instead of branching on err.
	return s.writer.WriteMessages(ctx, kafka.Message{
		Topic: topic,
		Key:   []byte(message.Key()),
		Value: message.Value(),
	})
}

// Close flushes any buffered messages and releases the writer's
// resources. Safe to call once; further Sends will fail afterwards.
func (s *kafkaSender) Close() error {
	// The wrapped error needs no inspection here — return it directly.
	return s.writer.Close()
}

// NewKafkaSender builds an event.Sender backed by a kafka.Writer that
// connects to the given brokers using SASL/PLAIN credentials. The
// topic is chosen per message in Send, not fixed at construction.
func NewKafkaSender(address []string, username, password string) (event.Sender, error) {
	dialer := &kafka.Dialer{
		Timeout:   10 * time.Second,
		DualStack: true,
		SASLMechanism: plain.Mechanism{
			Username: username,
			Password: password,
		},
	}

	writer := kafka.NewWriter(kafka.WriterConfig{
		Brokers:  address,
		Balancer: &kafka.LeastBytes{},
		Dialer:   dialer,
	})

	return &kafkaSender{writer: writer}, nil
}

// kafkaReceiver consumes messages from a consumer group and dispatches
// each one to an event.Handler via Receive.
type kafkaReceiver struct {
	reader *kafka.Reader
	// topics is kept for reference; the actual subscription lives in
	// the reader's GroupTopics configuration.
	topics []string
	// close signals Receive's loop to stop; see Close.
	close  chan struct{}
}

// Receive runs a blocking fetch/handle/commit loop until ctx is
// canceled or a close signal arrives on k.close. The reader is closed
// on exit. Handler and commit errors are logged and swallowed, so the
// loop keeps running; only unexpected fetch errors abort it.
//
// NOTE(review): a signal on k.close is only observed between fetches —
// it cannot interrupt a FetchMessage call that is already blocked
// waiting for data.
func (k *kafkaReceiver) Receive(ctx context.Context, handler event.Handler) error {
	defer func() {
		fmt.Println("[kafka] receive stopped!")
		_ = k.reader.Close()
	}()
	for {
		select {
		case <-ctx.Done():
			fmt.Println("Receive: context canceled")
			return ctx.Err()
		case <-k.close:
			fmt.Println("Receive: k.close")
			return nil
		default:
			msg, err := k.reader.FetchMessage(ctx)
			if err != nil {
				if ctx.Err() != nil {
					// Context canceled mid-fetch: treat as a normal
					// shutdown rather than an error.
					fmt.Println("FetchMessage stopped by context")
					return nil
				}
				// EOF is treated as transient (e.g. broker connection
				// dropped): back off briefly and retry.
				if errors.Is(err, io.EOF) {
					fmt.Printf("Kafka fetch error: %v, retrying...\n", err)
					time.Sleep(2 * time.Second)
					continue
				}
				return fmt.Errorf("fetch error: %w", err)
			}

			// Handler failures are logged but do not stop the loop.
			if err := handler(ctx, msg.Topic, &Message{
				key:   string(msg.Key),
				value: msg.Value,
			}); err != nil {
				fmt.Println("message handling exception:", err)
			}

			// NOTE(review): the offset is committed even when the
			// handler errored above, so a failed message is never
			// redelivered — confirm this at-most-once handling is
			// intended.
			if err := k.reader.CommitMessages(ctx, msg); err != nil {
				fmt.Printf("failed to commit messages: %v \n", err)
			}
		}
	}
}

// Close signals Receive to stop and always returns nil.
//
// The previous implementation did a blocking send on the unbuffered
// close channel, which deadlocks whenever Receive is not parked at its
// select — e.g. while blocked inside FetchMessage, or after it has
// already returned. Closing the channel instead delivers a persistent,
// non-blocking signal that Receive picks up on its next iteration.
// The select guard makes repeated Close calls safe (idempotent) for
// sequential callers; concurrent Close calls should be avoided.
//
// NOTE(review): closing the channel still cannot interrupt an
// in-flight FetchMessage; Receive observes the signal only between
// fetches.
func (k *kafkaReceiver) Close() error {
	fmt.Println("[kafka] receiver closing")
	select {
	case <-k.close:
		// Channel already closed: nothing to do.
	default:
		close(k.close)
	}
	return nil
}

// NewKafkaReceiver builds an event.Receiver that joins the given
// consumer group, subscribes to topics on the given brokers with
// SASL/PLAIN credentials, and starts from the earliest offset when the
// group has no committed position.
func NewKafkaReceiver(address []string, username, password string, topics []string, groupID string) (event.Receiver, error) {
	fmt.Println("创建kafka消费者：", address)
	fmt.Println("Group id:", groupID)

	dialer := &kafka.Dialer{
		Timeout:   10 * time.Second,
		DualStack: true,
		SASLMechanism: plain.Mechanism{
			Username: username,
			Password: password,
		},
	}

	reader := kafka.NewReader(kafka.ReaderConfig{
		Brokers:     address,
		GroupID:     groupID,
		GroupTopics: topics,
		MinBytes:    10e3, // 10KB
		MaxBytes:    10e6, // 10MB
		StartOffset: kafka.FirstOffset,
		Dialer:      dialer,
	})

	return &kafkaReceiver{
		reader: reader,
		topics: topics,
		close:  make(chan struct{}),
	}, nil
}
