package job

import (
	"context"
	"fmt"
	"github.com/Shopify/sarama"
	"week13/log"
	"os"
	"week13/configs"
	"week13/errors"
)

const (
	// TOPIC_DEAL_FINISH is the Kafka topic published when a deal finishes.
	// NOTE(review): Go convention is MixedCaps (TopicDealFinish), but renaming
	// these exported constants would break external callers.
	TOPIC_DEAL_FINISH = "deal.finish"
	// TOPIC_ITEM_CHANGE is the Kafka topic published when an item changes.
	TOPIC_ITEM_CHANGE = "item.change"
)
// KafkaConfig holds the broker host/port for this package, loaded once in init().
var KafkaConfig configs.KafkaConfig
// init loads the Kafka configuration from a JSON file at package startup.
// NOTE(review): the path is hard-coded to a developer machine; consider an
// env var or flag so the package works outside that environment.
func init() {
	dir := "C:\\Users\\03\\Desktop\\training-camp\\week13\\configs\\json\\kafka.json"
	f, err := os.Open(dir)
	if err != nil {
		// Fail fast: nothing in this package can work without broker config.
		log.Fatal("init kafkaconfig err:", err)
	}
	// Close only after a successful Open (original deferred before the check,
	// closing an invalid file handle on the error path).
	defer f.Close()
	KafkaConfig = configs.LoadKafkaConfig(f)
}
// SendJobMessage is the interface for publishing a JSON-encoded message to MQ.
type SendJobMessage interface {
	// Send publishes jsonData and returns a non-nil error on failure.
	Send(jsonData []byte) error
}
// ConsumeMessage is the interface for consuming MQ messages.
type ConsumeMessage interface {
	// Consume blocks and processes messages until ctx is cancelled or an error occurs.
	Consume(ctx context.Context) error
}
// KafkaConsumer consumes messages from Kafka as part of a consumer group.
type KafkaConsumer struct {
	group string // consumer-group ID passed to sarama.NewConsumerGroup
	topic []string // topics to subscribe to
	handler sarama.ConsumerGroupHandler // callback invoked for each claimed partition
}
// NewKafkaConsumer builds a KafkaConsumer for the given consumer group,
// topic list, and message handler.
func NewKafkaConsumer(group string, topic []string, handler sarama.ConsumerGroupHandler) *KafkaConsumer {
	consumer := KafkaConsumer{
		group:   group,
		topic:   topic,
		handler: handler,
	}
	return &consumer
}
// Consume joins the consumer group and processes messages from the configured
// topics until ctx is cancelled or a consume error occurs. It blocks the caller.
func (cm *KafkaConsumer) Consume(ctx context.Context) error {
	addr := fmt.Sprintf("%s:%s", KafkaConfig.Host, KafkaConfig.Port)
	config := sarama.NewConfig()
	// Without this flag sarama never delivers on group.Errors(), making the
	// draining goroutine below dead code in the original.
	config.Consumer.Return.Errors = true
	group, err := sarama.NewConsumerGroup([]string{addr}, cm.group, config)
	if err != nil {
		return errors.NewExecuteFail(err.Error())
	}
	defer func() { _ = group.Close() }()
	// Drain asynchronous consumer errors; the channel is closed by group.Close(),
	// which also ends this goroutine. (The original additionally spawned a
	// goroutine that called group.Close() on ctx.Done(), racing with the
	// deferred Close above; group.Consume already returns on cancellation,
	// so that goroutine is removed.)
	go func() {
		for e := range group.Errors() {
			log.Info("consumer group error:", e)
		}
	}()
	// Consume must be called in a loop: it returns nil on every rebalance
	// and must be re-entered to keep consuming.
	for {
		if err := group.Consume(ctx, cm.topic, cm.handler); err != nil {
			// Includes context cancellation: Consume surfaces ctx.Err() here.
			return errors.NewExecuteFail(err.Error())
		}
		// Defensive: stop re-entering once the context is done.
		if ctx.Err() != nil {
			return errors.NewExecuteFail(ctx.Err().Error())
		}
	}
}
// KafkaSender publishes messages to a single Kafka topic.
type KafkaSender struct {
	topic string // destination topic for every Send call
}

// NewKafkaSender returns a sender bound to the given topic.
func NewKafkaSender(topic string) *KafkaSender {
	sender := KafkaSender{topic: topic}
	return &sender
}
// Send publishes jsonData to the sender's topic via a synchronous producer
// and returns a project error on dial or send failure.
// NOTE(review): a new producer is dialed and torn down on every call; for
// any real throughput, hold one SyncProducer on the struct instead.
func (kf *KafkaSender) Send(jsonData []byte) error {
	config := sarama.NewConfig()
	config.Producer.RequiredAcks = sarama.WaitForAll          // require leader and followers to ack
	config.Producer.Partitioner = sarama.NewRandomPartitioner // pick a random partition per message
	config.Producer.Return.Successes = true                   // mandatory for SyncProducer
	addr := fmt.Sprintf("%s:%s", KafkaConfig.Host, KafkaConfig.Port)
	client, err := sarama.NewSyncProducer([]string{addr}, config)
	if err != nil {
		return errors.NewDialFail("connect kafka error:" + err.Error())
	}
	defer func() {
		_ = client.Close()
	}()
	msg := &sarama.ProducerMessage{}
	msg.Topic = kf.topic
	msg.Value = sarama.StringEncoder(jsonData)
	pid, offset, err := client.SendMessage(msg)
	if err != nil {
		// The connection already succeeded above; report this as a send
		// failure (original mislabeled it "connect kafka error").
		return errors.NewDialFail("send kafka message error:" + err.Error())
	}
	// Log only after a confirmed send so failures are not logged as successes.
	log.Info("client topic:", msg.Topic, "data:", string(jsonData))
	fmt.Printf("pid:%v offset:%v\n", pid, offset)
	return nil
}

// IncomeCreateMessege is the payload for income-creation messages.
// NOTE(review): the name has a typo ("Messege") and DealId should be DealID
// per Go initialism convention, but renaming the exported identifier would
// break callers; fix in a coordinated change.
type IncomeCreateMessege struct {
	DealId int64 // no json tag: marshals as "DealId" — confirm consumers expect that key
}
// ItemChangeMessage is the payload published on TOPIC_ITEM_CHANGE.
type ItemChangeMessage struct {
	ItemId int `json:"itemid"` // changed item's ID
	Mtime  string `json:"mtime"` // modification time; format not shown here — verify with the producer
}
