package mq

import (
	"MessageSendService/app/api/etc"
	"MessageSendService/app/common/models"
	"context"
	"encoding/json"
	"fmt"
	"github.com/segmentio/kafka-go"
	"time"
)

var (
	// topic is the single Kafka topic this package publishes to.
	topic    = "sujinke"
	// Producer is the shared package-level Kafka writer.
	// It is nil until Init is called; SendMessage requires Init first.
	Producer *kafka.Writer
)

// Init builds the package-level Kafka Producer from the configured broker
// address. It must be called once before SendMessage is used.
//
// Fields left unset on kafka.Writer keep their library defaults; only the
// deliberate deviations are spelled out below.
func Init() {
	Producer = &kafka.Writer{
		// kafka.TCP is variadic; pass several addresses to target a cluster.
		Addr:  kafka.TCP(etc.Config.KafkaUrl),
		Topic: topic,
		// Hash the message key to choose the destination partition, so all
		// messages sharing a key land on the same partition (ordering).
		Balancer: &kafka.Hash{},
		// Kafka may be under heavy load and slow to accept writes; give up
		// after one second. Acceptable here because messages may be dropped.
		WriteTimeout: time.Second,
		// Return without waiting for any broker acknowledgement — fastest,
		// but offers no delivery guarantee.
		RequiredAcks: kafka.RequireNone,
		// Auto-create the topic on first publish. Convenient for local/dev
		// use; should be disabled in production environments.
		AllowAutoTopicCreation: true,
	}
}

// SendMessage serializes taskInfo to JSON and publishes it to Kafka via the
// package-level Producer (Init must have been called first).
//
// The message key is taskInfo.BusinessId, so the writer's hash balancer
// routes all messages of one business to the same partition.
// It returns any marshalling or write error; on a marshal failure the
// message is NOT sent (previously a broken payload was still written).
func SendMessage(ctx context.Context, taskInfo models.TaskInfo) error {
	msgContent, err := json.Marshal(taskInfo)
	if err != nil {
		// Do not attempt to publish an unserializable payload.
		return fmt.Errorf("marshal taskInfo %+v: %w", taskInfo, err)
	}

	msg := kafka.Message{
		// Key drives partition selection through the writer's hash balancer.
		Key:   []byte(fmt.Sprintf("%d", taskInfo.BusinessId)),
		Value: msgContent,
		// Topic is inherited from the Writer; setting it here as well would
		// make kafka-go reject the message.
	}

	if err := Producer.WriteMessages(ctx, msg); err != nil {
		fmt.Printf("write to kafka failed, taskInfo:%v, err:%v\n", taskInfo, err)
		return err
	}
	return nil
}
