package kafka

import (
	"context"
	"github.com/Shopify/sarama"
	"github.com/pkg/errors"
)

// ProducerMessage abstracts the underlying Kafka client's message type so
// callers do not depend on sarama directly.
type ProducerMessage struct {
	Topic string // destination Kafka topic; required by Send/SendMsg
	Key   []byte // optional partitioning key
	Value []byte // message payload
}

// Producer is the user-facing contract for emitting events to Kafka.
//
// Fix: the interface previously declared Send([]ProducerMessage) error, which
// neither concrete implementation satisfies; it now matches the actual
// Send(topic, messages) signature used by both the sync and async producers.
type Producer interface {
	// caller should run the returned function in a goroutine, and consume
	// the returned error channel until it's closed at shutdown.
	// Background() (func(), chan error)

	// Send emits every key/value pair of each map in messages to topic.
	Send(topic string, messages []map[string]string) error

	// Close shuts the producer down and releases its resources.
	Close() error
}

// kafkaSyncProducer is the internal type implementing the synchronous side of
// the Producer contract on top of sarama.SyncProducer.
type kafkaSyncProducer struct {
	producer sarama.SyncProducer // underlying sarama sync producer
}

// kafkaAsyncProducer is the internal type implementing the asynchronous side
// of the Producer contract on top of sarama.AsyncProducer.
//
// NOTE(review): storing a context in a struct is discouraged (contexts should
// flow through call sites), but NewAsyncProducer's exported signature depends
// on it, so it is kept for compatibility.
type kafkaAsyncProducer struct {
	producer sarama.AsyncProducer // underlying sarama async producer
	ctx      context.Context      // cancellation: Done() triggers shutdown and message drop
	errors   chan error           // sarama errors proxied to the caller; closed at shutdown
}

// NewSyncProducer creates a synchronous Kafka producer connected to the given
// broker addresses. The caller owns the returned producer and must Close() it.
func NewSyncProducer(addrs []string, conf *sarama.Config) (*kafkaSyncProducer, error) {
	producer, err := sarama.NewSyncProducer(addrs, conf)
	if err != nil {
		// errors.Wrap, not Wrapf: the message contains no format verbs.
		return nil, errors.Wrap(err, "failed to create Kafka sync producer")
	}

	return &kafkaSyncProducer{
		producer: producer,
	}, nil
}

// Send emits every key/value pair of each map in messages to topic,
// synchronously, as one batch via SendMessages. It returns an error when the
// topic is empty, when no key/value pairs are supplied, or when a pair has
// both an empty Key and an empty Value.
//
// Fix: the per-message validation previously rejected any message with an
// empty Value, contradicting its own error text ("at least one of message
// fields Key or Value is required"); it now rejects only when both are empty.
func (ksp *kafkaSyncProducer) Send(topic string, messages []map[string]string) error {
	var msgs []ProducerMessage
	for _, msg := range messages {
		for k, v := range msg {
			msgs = append(msgs, ProducerMessage{
				Topic: topic,
				Key:   []byte(k),
				Value: []byte(v),
			})
		}
	}

	if len(msgs) == 0 {
		return errors.New("message is required")
	}

	// topic is uniform across the batch, so validate it once up front rather
	// than per message inside the loop.
	if len(topic) == 0 {
		return errors.New("message Topic is required")
	}

	kmsg := make([]*sarama.ProducerMessage, 0, len(msgs))
	for _, msg := range msgs {
		if len(msg.Key) == 0 && len(msg.Value) == 0 {
			return errors.New("at least one of message fields Key or Value is required")
		}

		kmsg = append(kmsg, &sarama.ProducerMessage{
			Topic: msg.Topic,
			Key:   sarama.ByteEncoder(msg.Key),
			Value: sarama.ByteEncoder(msg.Value),
		})
	}
	return ksp.producer.SendMessages(kmsg)
}

// SendMsg synchronously emits a single key/value pair from messages to topic
// and returns the partition and offset assigned by the broker.
//
// NOTE(review): if messages contains more than one entry, Go's random map
// iteration order means an arbitrary entry wins — presumably callers always
// pass a single-entry map; confirm against call sites.
//
// Fix: an empty messages map previously fell through to the misleading
// "message Topic is required" error; it is now rejected explicitly. The
// Key/Value check also rejected empty-Value messages despite its error text;
// it now rejects only when both Key and Value are empty.
func (ksp *kafkaSyncProducer) SendMsg(topic string, messages map[string]string) (int32, int64, error) {
	if len(messages) == 0 {
		return 0, 0, errors.New("message is required")
	}

	var msg ProducerMessage
	for k, v := range messages {
		msg = ProducerMessage{
			Topic: topic,
			Key:   []byte(k),
			Value: []byte(v),
		}
	}

	if len(msg.Topic) == 0 {
		return 0, 0, errors.New("message Topic is required")
	}

	if len(msg.Key) == 0 && len(msg.Value) == 0 {
		return 0, 0, errors.New("at least one of message fields Key or Value is required")
	}

	kmsg := &sarama.ProducerMessage{
		Topic: msg.Topic,
		Key:   sarama.ByteEncoder(msg.Key),
		Value: sarama.ByteEncoder(msg.Value),
	}

	return ksp.producer.SendMessage(kmsg)
}

// Close shuts down the underlying sarama sync producer. Calling it on a nil
// receiver is a harmless no-op.
func (ksp *kafkaSyncProducer) Close() error {
	if ksp != nil {
		return ksp.producer.Close()
	}
	return nil
}

// NewAsyncProducer creates an asynchronous Kafka producer connected to the
// given broker addresses. The caller can cancel ctx to initiate shutdown.
//
// NOTE(review): by convention ctx should be the first parameter; the current
// order is kept so existing callers keep compiling.
func NewAsyncProducer(addrs []string, conf *sarama.Config, ctx context.Context) (*kafkaAsyncProducer, error) {
	producer, err := sarama.NewAsyncProducer(addrs, conf)
	if err != nil {
		// errors.Wrap, not Wrapf: the message contains no format verbs.
		return nil, errors.Wrap(err, "failed to create Kafka async producer")
	}

	return &kafkaAsyncProducer{
		ctx:      ctx,
		producer: producer,
		// buffered so the error-forwarding goroutine rarely blocks the producer
		errors: make(chan error, errorQueueSize),
	}, nil
}

// Send queues every key/value pair of each map in messages for asynchronous
// delivery to topic. If the producer's context is cancelled mid-send, the
// remaining messages are dropped and the context error is returned.
//
// Fix: the per-message validation previously rejected any message with an
// empty Value, contradicting its own error text; it now rejects only when
// both Key and Value are empty. errors.Wrapf with no format verbs is replaced
// by errors.Wrap.
func (kap *kafkaAsyncProducer) Send(topic string, messages []map[string]string) error {
	var msgs []ProducerMessage
	for _, msg := range messages {
		for k, v := range msg {
			msgs = append(msgs, ProducerMessage{
				Topic: topic,
				Key:   []byte(k),
				Value: []byte(v),
			})
		}
	}

	if len(msgs) == 0 {
		return errors.New("message is required")
	}

	// topic is uniform across the batch, so validate it once up front rather
	// than per message inside the loop.
	if len(topic) == 0 {
		return errors.New("message Topic is required")
	}

	for _, msg := range msgs {
		if len(msg.Key) == 0 && len(msg.Value) == 0 {
			return errors.New("at least one of message fields Key or Value is required")
		}

		kmsg := &sarama.ProducerMessage{
			Topic: msg.Topic,
			Key:   sarama.ByteEncoder(msg.Key),
			Value: sarama.ByteEncoder(msg.Value),
		}

		// if shutdown is triggered, drop the message
		select {
		case <-kap.ctx.Done():
			return errors.Wrap(kap.ctx.Err(), "message lost: shutdown triggered during send")

		case kap.producer.Input() <- kmsg:
			// fall through, msg has been queued for write
		}
	}

	return nil
}

// Background returns a shutdown function and the producer's error channel.
// The caller should run the returned function in a goroutine, and consume
// the returned error channel until it's closed at shutdown.
//
// Fix: the error-forwarding goroutine could still be sending into kap.errors
// while the shutdown closure closed it, panicking with "send on closed
// channel". A done handshake now guarantees the forwarder has drained
// producer.Errors() before kap.errors is closed. A nil Close() result is no
// longer sent into the channel.
func (kap *kafkaAsyncProducer) Background() (func(), chan error) {
	// proxy all Sarama errors to the caller; done signals the forwarder has
	// finished, so kap.errors can be closed without racing an in-flight send.
	done := make(chan struct{})
	go func() {
		defer close(done)
		for err := range kap.producer.Errors() {
			kap.errors <- err
		}
	}()

	return func() {
		<-kap.ctx.Done()

		// Closing the producer flushes pending messages and closes
		// producer.Errors(), which lets the forwarding goroutine exit.
		err := kap.producer.Close()
		<-done
		if err != nil {
			kap.errors <- err
		}
		close(kap.errors)
	}, kap.errors
}

// Close shuts down the underlying sarama async producer. Calling it on a nil
// receiver is a harmless no-op.
//
// Fix: this method previously closed kap.errors before closing the producer,
// which (a) races with the error-forwarding goroutine started by Background()
// — a send on a closed channel panics — and (b) double-closes the channel if
// Background()'s shutdown function also runs. Closing kap.errors is left to
// that shutdown function, the channel's sole sender-side owner.
func (kap *kafkaAsyncProducer) Close() error {
	if kap == nil {
		return nil
	}
	return kap.producer.Close()
}