package middleware

import (
	"log"
	"os"

	"github.com/confluentinc/confluent-kafka-go/kafka"
)

// Names of the environment variables that supply the Kafka security
// settings (read by NewConfig via os.Getenv).
const (
	KafkaProtocol  = "KAFKA_PROTOCOL"
	KafkaMechanism = "KAFKA_MECHANISM"
	KafkaUserName  = "KAFKA_USERNAME"
	KafkaPassword  = "KAFKA_PASSWORD"
)

// KafkaConfig holds the connection and security settings used to build a
// producer. The JSON tags mirror librdkafka configuration property names.
type KafkaConfig struct {
	Topic            string `json:"topic"`             // topic messages are produced to
	GroupId          string `json:"group.id"`          // consumer group id (unused by the producer path in this file)
	BootstrapServers string `json:"bootstrap.servers"` // comma-separated broker list
	SecurityProtocol string `json:"security.protocol"` // "plaintext", "sasl_ssl", or "sasl_plaintext"
	SslCaLocation    string `json:"ssl.ca.location"`   // NOTE(review): never populated here; doInitProducer hard-codes the CA path
	SaslMechanism    string `json:"sasl.mechanism"`
	SaslUsername     string `json:"sasl.username"`
	SaslPassword     string `json:"sasl.password"`
}

// KafkaProducer pairs a connected kafka.Producer with the config it was
// built from, so Visit can read the target topic later.
type KafkaProducer struct {
	producer *kafka.Producer
	cfg      *KafkaConfig
}

// NewConfig returns a KafkaConfig with a fixed topic and consumer group,
// the given broker list, and security settings taken from the KAFKA_*
// environment variables.
func NewConfig(brokerList string) *KafkaConfig {
	cfg := &KafkaConfig{
		Topic:            "container-monitor",
		GroupId:          "test-cg",
		BootstrapServers: brokerList,
	}
	cfg.SecurityProtocol = os.Getenv(KafkaProtocol)
	cfg.SaslMechanism = os.Getenv(KafkaMechanism)
	cfg.SaslUsername = os.Getenv(KafkaUserName)
	cfg.SaslPassword = os.Getenv(KafkaPassword)
	return cfg
}

// doInitProducer builds a connected *kafka.Producer from cfg. It applies
// common tuning options, then the security settings selected by
// cfg.SecurityProtocol. It panics on an unknown protocol or if the
// underlying producer cannot be created.
func doInitProducer(cfg *KafkaConfig) *kafka.Producer {
	log.Print("init kafka producer, it may take a few seconds to init the connection\n")
	// Common arguments shared by every security protocol.
	var kafkaconf = &kafka.ConfigMap{
		"batch.size":          1,
		"api.version.request": "true",
		"message.max.bytes":   1000000,
		"linger.ms":           10,
		"retries":             30,
		"retry.backoff.ms":    1000,
		"acks":                "1"}
	kafkaconf.SetKey("bootstrap.servers", cfg.BootstrapServers)

	switch cfg.SecurityProtocol {
	case "plaintext":
		kafkaconf.SetKey("security.protocol", "plaintext")
	case "sasl_ssl":
		kafkaconf.SetKey("security.protocol", "sasl_ssl")
		kafkaconf.SetKey("ssl.ca.location", "conf/ca-cert.pem")
		kafkaconf.SetKey("sasl.username", cfg.SaslUsername)
		kafkaconf.SetKey("sasl.password", cfg.SaslPassword)
		kafkaconf.SetKey("sasl.mechanism", cfg.SaslMechanism)
	case "sasl_plaintext":
		// BUG FIX: the original set "sasl.mechanism" twice — first a
		// hard-coded "PLAIN", then cfg.SaslMechanism — so the first write
		// was dead code. Keep only the config-driven value, matching the
		// sasl_ssl branch (final effective value is unchanged).
		kafkaconf.SetKey("security.protocol", "sasl_plaintext")
		kafkaconf.SetKey("sasl.username", cfg.SaslUsername)
		kafkaconf.SetKey("sasl.password", cfg.SaslPassword)
		kafkaconf.SetKey("sasl.mechanism", cfg.SaslMechanism)
	default:
		panic(kafka.NewError(kafka.ErrUnknownProtocol, "unknown protocol", true))
	}

	producer, err := kafka.NewProducer(kafkaconf)
	if err != nil {
		panic(err)
	}
	return producer
}

// NewKafkaProducer creates a producer for brokerList and starts a
// background goroutine that logs delivery reports. The goroutine exits
// when the producer's event channel is closed by KafkaProducer.Close.
//
// Choose the correct protocol via the KAFKA_PROTOCOL env var:
//   - 9092 for PLAINTEXT
//   - 9093 for SASL_SSL, need to provide sasl.username and sasl.password
//   - 9094 for SASL_PLAINTEXT, need to provide sasl.username and sasl.password
func NewKafkaProducer(brokerList string) KafkaProducer {
	cfg := NewConfig(brokerList)
	producer := doInitProducer(cfg)
	log.Printf("Start Kafka Producer, brokerList: %s\n", brokerList)
	// Delivery report handler for produced messages.
	go func() {
		for e := range producer.Events() {
			switch ev := e.(type) {
			case *kafka.Message:
				if ev.TopicPartition.Error != nil {
					// BUG FIX: corrected "Kafaka" typo in the log message.
					log.Printf("Fail to send Kafka message: %v\n", ev.TopicPartition)
				} else {
					log.Printf("Send Kafka Message to %v\n", ev.TopicPartition)
				}
			}
		}
	}()

	return KafkaProducer{
		producer: producer,
		cfg:      cfg,
	}
}

// Visit produces one "hello kafka" message to the configured topic
// (asynchronously); the delivery outcome is reported on the producer's
// event channel and logged by the goroutine started in NewKafkaProducer.
func (p KafkaProducer) Visit() {
	topic := p.cfg.Topic
	err := p.producer.Produce(&kafka.Message{
		TopicPartition: kafka.TopicPartition{Topic: &topic, Partition: kafka.PartitionAny},
		Value:          []byte("hello kafka"),
	}, nil)
	// BUG FIX: the original discarded the enqueue error with `_`, so a full
	// local queue or an invalid message was silently lost. Log it instead.
	if err != nil {
		log.Printf("Fail to enqueue Kafka message: %v\n", err)
	}
}

// Close waits up to 500ms for in-flight messages to be delivered, then
// shuts the producer down (which also closes its event channel, ending
// the delivery-report goroutine).
func (p KafkaProducer) Close() {
	// BUG FIX: Flush returns the number of still-undelivered messages; the
	// original ignored it, hiding message loss on shutdown. Log it.
	if remaining := p.producer.Flush(500); remaining > 0 {
		log.Printf("Close Kafka producer with %d undelivered message(s)\n", remaining)
	}
	p.producer.Close()
}
