package middleware

import (
	"log"

	"github.com/confluentinc/confluent-kafka-go/kafka"
)

// KafkaConsumer wraps a confluent-kafka-go consumer together with the
// configuration it was created from.
type KafkaConsumer struct {
	consumer *kafka.Consumer // underlying librdkafka-backed consumer
	cfg      *KafkaConfig    // connection/auth settings used at init time
}

// doInitConsumer builds and returns a *kafka.Consumer from cfg.
//
// It applies common tuning options, then connection/auth settings driven by
// cfg.SecurityProtocol ("plaintext", "sasl_ssl", or "sasl_plaintext").
// It panics on an unknown protocol or on any configuration/creation error,
// matching the fail-fast startup convention of this package.
func doInitConsumer(cfg *KafkaConfig) *kafka.Consumer {
	log.Print("init kafka consumer, it may take a few seconds to init the connection\n")
	//common arguments
	var kafkaconf = &kafka.ConfigMap{
		"api.version.request":       "true",
		"auto.offset.reset":         "latest",
		"heartbeat.interval.ms":     3000,
		"session.timeout.ms":        30000,
		"max.poll.interval.ms":      120000,
		"fetch.max.bytes":           1024000,
		"max.partition.fetch.bytes": 256000}

	// SetKey returns an error; ignoring it can silently drop settings such as
	// credentials. Keep the function's existing panic-on-error convention.
	mustSet := func(key string, value kafka.ConfigValue) {
		if err := kafkaconf.SetKey(key, value); err != nil {
			panic(err)
		}
	}

	mustSet("bootstrap.servers", cfg.BootstrapServers)
	mustSet("group.id", cfg.GroupId)

	switch cfg.SecurityProtocol {
	case "plaintext":
		mustSet("security.protocol", "plaintext")
	case "sasl_ssl":
		mustSet("security.protocol", "sasl_ssl")
		// NOTE(review): CA path is hard-coded and relative to the working
		// directory — consider moving it into KafkaConfig.
		mustSet("ssl.ca.location", "./conf/ca-cert.pem")
		mustSet("sasl.username", cfg.SaslUsername)
		mustSet("sasl.password", cfg.SaslPassword)
		mustSet("sasl.mechanism", cfg.SaslMechanism)
	case "sasl_plaintext":
		mustSet("security.protocol", "sasl_plaintext")
		mustSet("sasl.username", cfg.SaslUsername)
		mustSet("sasl.password", cfg.SaslPassword)
		mustSet("sasl.mechanism", cfg.SaslMechanism)
	default:
		panic(kafka.NewError(kafka.ErrUnknownProtocol, "unknown protocol", true))
	}

	consumer, err := kafka.NewConsumer(kafkaconf)
	if err != nil {
		panic(err)
	}
	return consumer
}

// NewKafkaConsumer creates a consumer connected to brokerList and subscribes
// it to the topic from the config produced by NewConfig. It panics if the
// consumer cannot be created or the subscription fails, matching the
// fail-fast convention of doInitConsumer.
func NewKafkaConsumer(brokerList string) *KafkaConsumer {
	cfg := NewConfig(brokerList)
	consumer := doInitConsumer(cfg)
	// A failed subscription would otherwise yield a consumer that silently
	// never receives messages — surface it immediately.
	if err := consumer.SubscribeTopics([]string{cfg.Topic}, nil); err != nil {
		panic(err)
	}

	log.Printf("Start Kafka Consumer, brokerList: %s\n", brokerList)
	return &KafkaConsumer{
		consumer: consumer,
		cfg:      cfg,
	}
}

// Consume blocks forever, reading messages one at a time and logging each
// payload with its topic/partition. Read errors are logged and the loop
// continues; the client recovers from transient errors on its own.
//
// NOTE(review): there is no cancellation path — callers that need a bounded
// lifetime must manage it externally (e.g. run Consume in a goroutine and
// Close the consumer to unblock it).
func (c *KafkaConsumer) Consume() {
	for {
		msg, err := c.consumer.ReadMessage(-1)
		if err != nil {
			// The client will automatically try to recover from all errors.
			log.Printf("Fail to receive Kafka error: %v (%v)\n", err, msg)
			continue
		}
		log.Printf("Receive Kafka message on %s: %s\n", msg.TopicPartition, string(msg.Value))
	}
}

// Close shuts down the underlying Kafka consumer, committing final offsets
// and leaving the consumer group. Any error is logged rather than silently
// discarded; Close itself never fails from the caller's perspective.
func (c *KafkaConsumer) Close() {
	if err := c.consumer.Close(); err != nil {
		log.Printf("Fail to close Kafka consumer: %v\n", err)
	}
}
