package kafka

import (
	"context"
	"encoding/json"
	"fmt"
	"gitee.com/qzcsu/go-web-study/client"
	"github.com/Shopify/sarama"
	"github.com/gin-gonic/gin"
	"log"
	"net/http"
	"os"
	"os/signal"
)

// SendMsg is the request payload bound from the HTTP body by the producer
// handlers below; the whole struct is JSON-encoded and used as the Kafka
// message value.
type SendMsg struct {
	Topic string // target Kafka topic (suffixed with "-1"/"-2" in KafkaSyncMultiSend)
	Id    int64
	Name  string
	Addr  string
	Num   int // number of messages to produce in KafkaSyncMultiSend
}

// KafkaSyncSend publishes a single message synchronously via the shared
// client.KSyncProducer. The request body is bound into a SendMsg, the whole
// struct is JSON-encoded and sent to param.Topic, and the response reports
// the partition and offset assigned by the broker.
func KafkaSyncSend(c *gin.Context) {
	param := &SendMsg{}

	if err := c.ShouldBind(param); err != nil {
		log.Printf("param bind err:%v\n", err)
		// Respond explicitly so the client is not left with an empty 200.
		c.JSON(http.StatusBadRequest, gin.H{"message": "bind failed", "err": err.Error()})
		return
	}

	msgJson, err := json.Marshal(param)
	if err != nil {
		log.Printf("param Marshal err:%v\n", err)
		c.JSON(http.StatusInternalServerError, gin.H{"message": "marshal failed", "err": err.Error()})
		return
	}

	message := &sarama.ProducerMessage{Topic: param.Topic, Value: sarama.StringEncoder(msgJson)}
	// SendMessage returns (partition, offset, err); the first value is the
	// partition the broker wrote to, not the message itself.
	partition, offset, err := client.KSyncProducer.SendMessage(message)
	if err != nil {
		// The original logged "send success" and returned 200 even on error.
		log.Printf("msg:%+v send err:%v\n", string(msgJson), err)
		c.JSON(http.StatusInternalServerError, gin.H{"message": "send failed", "err": err.Error()})
		return
	}
	log.Printf("msg:%+v send success\n", string(msgJson))
	c.JSON(http.StatusOK, gin.H{"message": "success", "msg": param, "partition": partition, "offset": offset})
}

// KafkaSyncMultiSend publishes param.Num copies of the JSON-encoded payload
// synchronously in one batch, alternating between the topics
// param.Topic+"-1" (even indices) and param.Topic+"-2" (odd indices).
func KafkaSyncMultiSend(c *gin.Context) {
	param := &SendMsg{}

	if err := c.ShouldBind(param); err != nil {
		log.Printf("param bind err:%v\n", err)
		// Respond explicitly so the client is not left with an empty 200.
		c.JSON(http.StatusBadRequest, gin.H{"message": "bind failed", "err": err.Error()})
		return
	}
	if param.Num <= 0 {
		c.JSON(http.StatusBadRequest, gin.H{"message": "Num must be positive"})
		return
	}

	msgJson, err := json.Marshal(param)
	if err != nil {
		log.Printf("param Marshal err:%v\n", err)
		c.JSON(http.StatusInternalServerError, gin.H{"message": "marshal failed", "err": err.Error()})
		return
	}

	msgs := make([]*sarama.ProducerMessage, 0, param.Num)
	for i := 0; i < param.Num; i++ {
		topicName := param.Topic + "-1"
		if i%2 != 0 {
			topicName = param.Topic + "-2"
		}
		msgs = append(msgs, &sarama.ProducerMessage{Topic: topicName, Value: sarama.StringEncoder(msgJson)})
	}

	// SendMessages batches all messages in one synchronous call; the batch
	// may span different topics.
	if err := client.KSyncProducer.SendMessages(msgs); err != nil {
		// The original always returned "success" even when the batch failed.
		log.Printf("send err:%v \n", err)
		c.JSON(http.StatusInternalServerError, gin.H{"message": "send failed", "err": err.Error()})
		return
	}
	c.JSON(http.StatusOK, gin.H{"message": "success", "msg": msgs})
}

// KafkaAsyncSend enqueues one message on the shared async producer's Input
// channel and returns immediately; delivery is not confirmed here.
// NOTE(review): whoever owns client.KAsyncProducer must drain its
// Successes()/Errors() channels or the producer will eventually block —
// confirm against the client package.
func KafkaAsyncSend(c *gin.Context) {
	param := &SendMsg{}

	if err := c.ShouldBind(param); err != nil {
		log.Printf("param bind err:%v\n", err)
		// Respond explicitly so the client is not left with an empty 200.
		c.JSON(http.StatusBadRequest, gin.H{"message": "bind failed", "err": err.Error()})
		return
	}

	msgJson, err := json.Marshal(param)
	if err != nil {
		log.Printf("param Marshal err:%v\n", err)
		c.JSON(http.StatusInternalServerError, gin.H{"message": "marshal failed", "err": err.Error()})
		return
	}

	message := &sarama.ProducerMessage{Topic: param.Topic, Value: sarama.StringEncoder(msgJson)}
	// Fire-and-forget: the send happens in the producer's background goroutine.
	client.KAsyncProducer.Input() <- message

	c.JSON(http.StatusOK, gin.H{"message": "success", "data": message})
}

// KConsumer runs a plain (non-group) consumer on partition 0 of
// "qinzhe-test-topic", logging each message until the process receives
// SIGINT, then reports how many messages were consumed.
func KConsumer() {
	consumer, err := sarama.NewConsumer(client.EnvConf.Dsn, sarama.NewConfig())
	if err != nil {
		log.Fatalf("kafka NewConsumer fail err:%v", err)
	}

	defer func() {
		// Printf, not Fatalf: Fatalf calls os.Exit and would abort the
		// program mid-cleanup.
		if err := consumer.Close(); err != nil {
			log.Printf("defer consumer.Close() err:%v\n", err)
		}
	}()

	// OffsetOldest starts from the earliest offset the broker still retains;
	// the previous hard-coded 0 fails with ErrOffsetOutOfRange once the log
	// has been truncated past offset 0.
	partitionConsumer, err := consumer.ConsumePartition("qinzhe-test-topic", 0, sarama.OffsetOldest)
	if err != nil {
		log.Fatalf("kafka NewConsumer.ConsumePartition err:%v", err)
	}

	defer func() {
		// Printf for the same reason as above: keep remaining defers alive.
		if err := partitionConsumer.Close(); err != nil {
			log.Printf("defer partitionConsumer.Close() err:%v\n", err)
		}
	}()

	// Trap SIGINT to trigger a shutdown.
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt)

	consumed := 0
ConsumerLoop:
	for {
		select {
		case msg := <-partitionConsumer.Messages():
			log.Printf("msg: partition:%v offset %d topic:%v, value:%v \n", msg.Partition, msg.Offset, msg.Topic, string(msg.Value))
			consumed++
		case <-signals:
			break ConsumerLoop
		}
	}

	log.Printf("Consumed: %d\n", consumed)
}

// exampleConsumerGroupHandler implements sarama.ConsumerGroupHandler so that
// KConsumerGroup can consume via a consumer group; all per-message logic
// lives in ConsumeClaim.
type exampleConsumerGroupHandler struct{}

// Setup runs before consuming starts; nothing to initialize here.
func (exampleConsumerGroupHandler) Setup(_ sarama.ConsumerGroupSession) error { return nil }

// Cleanup runs after the session ends; nothing to tear down.
func (exampleConsumerGroupHandler) Cleanup(_ sarama.ConsumerGroupSession) error { return nil }

// ConsumeClaim drains one partition claim, printing each message and marking
// it consumed so the offset is committed for the group.
func (h exampleConsumerGroupHandler) ConsumeClaim(session sarama.ConsumerGroupSession, claim sarama.ConsumerGroupClaim) error {
	incoming := claim.Messages()
	for m := range incoming {
		fmt.Printf("group Message consumer => topic:%q partition:%d offset:%d, msg:%v \n ", m.Topic, m.Partition, m.Offset, string(m.Value))
		session.MarkMessage(m, "")
	}
	return nil
}

// KConsumerGroup consumes the topics "go-kafka-test-1" and "go-kafka-test-2"
// as consumer group "my-group", dispatching messages to
// exampleConsumerGroupHandler. It blocks until Consume fails or the context
// is cancelled.
func KConsumerGroup() {
	kconfig := sarama.NewConfig()
	kconfig.Version = sarama.V2_0_0_0 // specify appropriate version
	kconfig.Consumer.Return.Errors = true

	group, err := sarama.NewConsumerGroup(client.EnvConf.Dsn, "my-group", kconfig)
	if err != nil {
		log.Fatalf("NewConsumerGroup err:%v\n", err)
	}
	defer func() { _ = group.Close() }()

	// Track errors. Printf, not Fatalf: a transient consumer error must not
	// kill the whole process (Fatalf calls os.Exit, skipping the deferred
	// group.Close above).
	go func() {
		for err := range group.Errors() {
			log.Printf("KConsumerGroup go func track err:%v\n", err)
		}
	}()

	ctx := context.Background()
	// Topics and handler are loop-invariant; build them once.
	topics := []string{"go-kafka-test-1", "go-kafka-test-2"}
	handler := exampleConsumerGroupHandler{}

	// `Consume` must be called inside a loop: when a server-side rebalance
	// happens the consumer session ends and a new one has to be created.
	for {
		if err := group.Consume(ctx, topics, handler); err != nil {
			// Return instead of Fatalf so the deferred Close still runs.
			log.Printf("KConsumerGroup consume err:%v\n", err)
			return
		}
		// Stop looping once the context is cancelled.
		if ctx.Err() != nil {
			return
		}
	}
}
