package biz

import (
	"context"
	"fmt"
	"github.com/go-kratos/kratos/v2/log"
	transportBroker "github.com/tx7do/kratos-transport/broker"
	"github.com/tx7do/kratos-transport/broker/kafka"
	"hash"
	"hash/fnv"
	v1 "kratos_kafka_new/api/helloworld/v1"
	"kratos_kafka_new/internal/broker"
	"kratos_kafka_new/internal/conf"
	"kratos_kafka_new/internal/utils/mq_kafka"
)

// Greeter is a Greeter model.
type Greeter struct {
	// Hello is the greeting payload carried by the model.
	Hello string
}

// GreeterRepo is a Greeter repo.
// Save persists the given Greeter and returns the stored value.
type GreeterRepo interface {
	Save(context.Context, *Greeter) (*Greeter, error)
}

// GreeterUsecase is a Greeter usecase.
type GreeterUsecase struct {
	repo GreeterRepo
	log  *log.Helper
	// Notice: broker bundles the message-queue clients (Kafka is used below to publish).
	broker *broker.Broker
	// Notice: all of the project's YAML configuration items live here.
	yamlConf *conf.Bootstrap
}

// NewGreeterUsecase constructs a GreeterUsecase wired with its repository,
// logger, message broker, and the project's YAML configuration.
func NewGreeterUsecase(repo GreeterRepo, logger log.Logger, broker *broker.Broker, yamlConf *conf.Bootstrap) *GreeterUsecase {
	uc := new(GreeterUsecase)
	uc.repo = repo
	uc.log = log.NewHelper(logger)
	uc.broker = broker
	uc.yamlConf = yamlConf
	return uc
}

// CreateGreeter publishes one demo message per user ID to the configured
// Kafka topic. Messages sharing a user ID are routed to the same partition
// via the hash balancer plus the message key, preserving per-user ordering.
//
// On a publish failure it returns the wrapped error (instead of panicking)
// so the caller/transport layer can decide how to respond.
func (uc *GreeterUsecase) CreateGreeter(ctx context.Context, req *v1.SayHelloReq) error {

	userIDs := []string{"user1", "user1", "user1", "user1", "user2", "user3", "user4"}
	uc.log.Infof("publishing demo messages for userIds: %v", userIDs)

	// 发送消息
	for idx, userID := range userIDs {
		currStr := fmt.Sprintf("hello-%v-%v", idx, userID)
		msg := &transportBroker.Message{
			Headers: transportBroker.Headers{
				"userId": userID,
			},
			Body: currStr,
		}
		err := uc.broker.Kafka.Publish(
			uc.yamlConf.MessageQueue.Kafka.MyTopic,
			msg,
			// Notice 负载均衡器～根据用户id的hash指定分区发送消息
			// (hash balancer: same key -> same partition)
			kafka.WithHashBalancer(hashFunc(userID)),
			// Notice 同时需要将用户id使用WithMessageKey注入进去
			// (the user ID must also be injected as the message key)
			kafka.WithMessageKey([]byte(userID)),
		)
		if err != nil {
			// A failed publish is a recoverable runtime error, not a
			// programmer bug — return it instead of panicking.
			return fmt.Errorf("publishing message %d for user %q: %w", idx, userID, err)
		}
	}

	/*
		currEnv := mq_kafka.ArticleData{
			Title:            "title1",
			Desc:             "desc1",
			Tags:             []string{"tag1", "tag2", "tag3"},
			PublishTime:      time.Now(),
			PublishTimeStamp: "123123",
		}

		// Notice 新版本必须发结构体!
		errPublish := uc.broker.Kafka.Publish(
			uc.yamlConf.MessageQueue.Kafka.ArticleTopic,
			currEnv,
		)
		if errPublish != nil {
			fmt.Println("往kafka中发送数据出现错误了！！", errPublish)
		} else {
			fmt.Printf("成功往kafka中写入了数据! topic: %v, msg: %v \n", uc.yamlConf.MessageQueue.Kafka.ArticleTopic, gconv.String(currEnv))
		}
	*/
	return nil
}

// hashFunc returns a 32-bit FNV-1a hasher pre-seeded with s; it is handed
// to the Kafka hash balancer for partition selection.
func hashFunc(s string) hash.Hash32 {
	hasher := fnv.New32a()
	_, _ = hasher.Write([]byte(s)) // fnv's Write is documented to never fail
	return hasher
}

// AutoHandleArticleData receives ArticleData payloads consumed from Kafka,
// validates them, and acknowledges the event on success.
//
// Invalid payloads are logged and skipped (nil is returned so the broker
// does not redeliver a permanently-bad message).
func (uc *GreeterUsecase) AutoHandleArticleData(ctx context.Context, event transportBroker.Event, msg *mq_kafka.ArticleData) error {

	// Guard nil on its own first: the original combined check went on to read
	// msg.Title/msg.Tags in the log call even when msg was nil, which panics.
	if msg == nil {
		uc.log.Warn("AutoHandleArticleData kafka中ArticleData的数据不正确！msg is nil")
		return nil
	}
	// len(nil slice) == 0, so a separate msg.Tags == nil check is redundant.
	if msg.Title == "" || len(msg.Tags) < 1 {
		uc.log.Warn("AutoHandleArticleData kafka中ArticleData的数据不正确！", msg.Title, msg.Tags)
		return nil
	}

	fmt.Println("AutoHandleArticleData1 从kafka中接收到正确的articleData数据: ", msg.Title, msg.Desc, msg.Tags, msg.PublishTime)

	event.Ack()

	return nil
}

// AutoHandleArticleData2 is a second consumer of ArticleData payloads from
// Kafka. It deliberately does NOT ack, to demonstrate that without a manual
// Ack the consumer offset is unchanged and the message is re-consumed after
// a restart.
//
// Invalid payloads are logged and skipped (nil is returned).
func (uc *GreeterUsecase) AutoHandleArticleData2(ctx context.Context, event transportBroker.Event, msg *mq_kafka.ArticleData) error {

	// Guard nil on its own first: the original combined check went on to read
	// msg.Title/msg.Tags in the log call even when msg was nil, which panics.
	if msg == nil {
		uc.log.Warn("AutoHandleArticleData2 kafka中ArticleData的数据不正确！msg is nil")
		return nil
	}
	// len(nil slice) == 0, so a separate msg.Tags == nil check is redundant.
	if msg.Title == "" || len(msg.Tags) < 1 {
		uc.log.Warn("AutoHandleArticleData2 kafka中ArticleData的数据不正确！", msg.Title, msg.Tags)
		return nil
	}

	fmt.Println("AutoHandleArticleData2 从kafka中接收到正确的articleData数据: ", msg.Title, msg.Desc, msg.Tags, msg.PublishTime, msg.PublishTimeStamp)
	fmt.Println(">>>>>>> ", msg.PublishTimeStamp)

	// Notice manual Ack is intentionally commented out — the consume offset
	// is not advanced, so after a restart consumption resumes from the old offset.
	//event.Ack()

	return nil
}
