package kafka

import (
	"context"
	"encoding/json"
	"fmt"
	"github.com/segmentio/kafka-go"
	"im/models"
	"log"
)

// IMProduce is the package-level singleton producer, initialized by
// NewImProduce. It is nil until NewImProduce has been called.
var IMProduce *IMProduceClient

// Producer is the interface for sending IM messages to Kafka.
// IMProduceClient is the concrete implementation.
type Producer interface {
	// Send serializes and publishes the given messages; nil entries are skipped.
	Send(ctx context.Context, messages ...*models.CustomKafkaMsg) error
	// Close releases the underlying writer and its connections.
	Close() error
}

// IMProduceClient wraps a kafka-go Writer that publishes to the "im" topic.
type IMProduceClient struct {
	writer *kafka.Writer
}

func NewImProduce() {
	w := kafka.NewWriter(kafka.WriterConfig{
		Brokers:  []string{"localhost:9092"},
		Topic:    "im",
		Balancer: &kafka.LeastBytes{},
	})
	IMProduce = &IMProduceClient{
		writer: w,
	}
}
// Send serializes each non-nil message to JSON and publishes the whole batch
// to the writer's topic in a single WriteMessages call. It returns the first
// serialization error encountered (nothing is written in that case), or the
// write error from the underlying Kafka writer, both wrapped with context.
func (c *IMProduceClient) Send(ctx context.Context, messages ...*models.CustomKafkaMsg) error {
	kafkaMsgs := make([]kafka.Message, 0, len(messages))
	for _, msg := range messages {
		if msg == nil {
			continue
		}
		// msg is a pointer; marshaling it directly is safe — no element
		// copy is needed here (the previous `now := msg` only copied the
		// pointer and protected nothing).
		kafkaValue, err := json.Marshal(msg)
		if err != nil {
			return fmt.Errorf("serializing message: %w", err)
		}
		kafkaMsgs = append(kafkaMsgs, kafka.Message{
			Value: kafkaValue,
		})
		log.Println("IMProduce.Send 开始生产,msg:", msg.Data)
	}
	// All inputs were nil: nothing to publish, skip the broker round trip.
	if len(kafkaMsgs) == 0 {
		return nil
	}
	if err := c.writer.WriteMessages(ctx, kafkaMsgs...); err != nil {
		// Previously this branch built a message with fmt.Sprintf and
		// discarded it; wrap the error with context instead.
		return fmt.Errorf("writing messages to kafka: %w", err)
	}
	return nil
}

// Close shuts down the underlying Kafka writer, flushing pending messages
// and releasing its connections. It returns any error from the writer.
func (c *IMProduceClient) Close() error {
	err := c.writer.Close()
	return err
}
