package comsumer

import (
	"encoding/json"
	"errors"
	"github.com/Shopify/sarama"

	"github.com/upmio/horus-polymerizer/log"
	"github.com/upmio/horus-polymerizer/queue"
	"strings"
	"sync"
	"time"
)

//KafkaMetricComsumer is exported
type KafkaMetricComsumer struct {
	metricTopic string
	eventTopic  string
	addrs       string

	curnum int
	maxnum int
	lock   *sync.Mutex
}

// NewKafkaMetricComsumer builds a consumer that publishes to the given event
// and metric topics through the comma-separated broker list in addrs. The
// slot counter starts at zero with room for exactly one producer goroutine.
func NewKafkaMetricComsumer(addrs, eventTopic, metricTopic string) *KafkaMetricComsumer {
	c := &KafkaMetricComsumer{
		addrs:       addrs,
		eventTopic:  eventTopic,
		metricTopic: metricTopic,
		maxnum:      1, // curnum is left at its zero value
		lock:        &sync.Mutex{},
	}
	return c
}

// addComsumer reserves one producer slot. It increments curnum under the
// lock and returns an error when the new count exceeds maxnum.
//
// NOTE: curnum stays incremented even on error; the caller (asyncProducer)
// always balances it with a deferred delComsumer, so do not roll back here.
func (c *KafkaMetricComsumer) addComsumer() error {
	c.lock.Lock()
	defer c.lock.Unlock()
	c.curnum++

	if c.curnum > c.maxnum {
		return errors.New("bigger than maxnum")
	}

	return nil
}

// delComsumer releases one producer slot by decrementing curnum under the lock.
func (c *KafkaMetricComsumer) delComsumer() {
	c.lock.Lock()
	c.curnum--
	c.lock.Unlock()
}

// StartManager blocks forever, sweeping every two minutes and launching one
// asyncProducer goroutine for every missing slot (maxnum - curnum), so
// producers that died from a panic are replaced automatically.
//
// FIX: the original read c.curnum without holding c.lock, racing with
// addComsumer/delComsumer which mutate it under the lock; the counters are
// now snapshotted inside the critical section.
func (c *KafkaMetricComsumer) StartManager() {

	for {
		c.lock.Lock()
		missing := c.maxnum - c.curnum
		c.lock.Unlock()

		for i := 0; i < missing; i++ {
			log.Info("restart one KafkaMetric  AsyncProducer")
			go c.asyncProducer()
		}

		time.Sleep(2 * time.Minute)
	}
}

// asyncProducer reserves a consumer slot, connects an async Kafka producer,
// and forwards events and metrics from the default queues to their topics.
// It never returns normally: the tail loop runs until a panic, which the
// deferred recover absorbs before stopping the helper goroutines and
// releasing the slot, letting StartManager relaunch a replacement.
//
// FIX: receiver renamed kc -> c for consistency with the type's other
// methods, and the inner variable that shadowed the stdlib "errors" package
// is renamed. Logic and all log/message strings are unchanged.
func (c *KafkaMetricComsumer) asyncProducer() {

	// Closed by the deferred cleanup to stop the two helper goroutines below.
	stopC := make(chan int, 1)

	defer func() {
		if err := recover(); err != nil {
			log.Error("KafkaMetricComsumer asyncProducer occur panic : ", err)
		}
		close(stopC)
		// Runs even when addComsumer fails: addComsumer leaves curnum
		// incremented on error and relies on this decrement to balance it.
		c.delComsumer()

	}()

	if err := c.addComsumer(); err != nil {
		log.Warn("start KafkaMetric  asyncProducer  conflict:", err, " curlen:", c.curnum, "maxlen:", c.maxnum)
		return
	}

	config := sarama.NewConfig()
	config.Producer.Return.Successes = true
	config.Producer.Timeout = 5 * time.Second
	config.Producer.Partitioner = sarama.NewRandomPartitioner

	p, err := sarama.NewAsyncProducer(strings.Split(c.addrs, ","), config)
	if err != nil {
		log.Errorf("NewAsyncProducer fail:%s", err.Error())
		return
	}

	defer p.Close()

	// Drain the producer's error and success channels; Return.Successes is
	// enabled above, so leaving Successes unread would stall the producer.
	go func(p sarama.AsyncProducer) {
		errC := p.Errors() // was named "errors", shadowing the stdlib package
		succC := p.Successes()
		for {
			select {
			case err := <-errC:
				if err != nil {
					log.Errorf("AsyncProducer err:%s", err.Error())
				}
			case <-succC:

			case <-stopC:
				log.Info("get stopC chan")
				return
			}
		}
	}(p)

	// Event pump: marshal queued events and feed them to the producer until
	// stopC is closed.
	go func(p sarama.AsyncProducer) {
		for {

			select {
			case <-stopC:
				log.Info("get stopC chan")
				return
			default:
			}

			events, _ := queue.GetDefaultKafkaEventQue().GetEventSlice()

			for _, event := range events {
				eventByte, err := json.Marshal(*event)
				if err != nil {
					log.Errorf("%s: marshal fail:%s", event, err.Error())
					continue
				}

				msg := &sarama.ProducerMessage{
					Topic: c.eventTopic,
					Value: sarama.ByteEncoder(eventByte),
				}
				p.Input() <- msg
				log.Debugf("send event(%s) to kafka ", string(eventByte))
			}

		}

	}(p)

	// Metric pump on the current goroutine; loops until a panic unwinds it
	// into the deferred cleanup above.
	for {

		metrics, _ := queue.GetDefaultKafkaMetricQue().GetMetric()

		for _, metric := range metrics {
			metricByte, err := json.Marshal(*metric)
			if err != nil {
				log.Errorf("%s: marshal fail:%s", metric, err.Error())
				continue
			}

			msg := &sarama.ProducerMessage{
				Topic: c.metricTopic,
				Value: sarama.ByteEncoder(metricByte),
			}
			p.Input() <- msg
			log.Debugf("send metric(%s) to kafka ", string(metricByte))
		}

	}

}
