package ka

import (
	"context"
	"github.com/Shopify/sarama"
	"github.com/gogf/gf/encoding/gjson"
	"github.com/gogf/gf/frame/g"
	"go.opentelemetry.io/contrib"
	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/codes"
	"go.opentelemetry.io/otel/label"
	"go.opentelemetry.io/otel/semconv"
	"go.opentelemetry.io/otel/trace"
	"strconv"
	"time"
)

// NewProducer creates a Producer for the given broker list using the
// package defaults: 60s TCP keep-alive, success and error reporting
// enabled, and batched flushes (at most every second or 10 messages).
func NewProducer(brokers []string) (*Producer, error) {
	c := sarama.NewConfig()
	c.Version = ver
	c.Net.KeepAlive = 60 * time.Second
	c.Producer.Return.Successes = true
	c.Producer.Return.Errors = true
	c.Producer.Flush.MaxMessages = 10
	c.Producer.Flush.Frequency = time.Second
	return newProducerWithCfg(brokers, c)
}

// newProducerWithCfg wraps a sarama async producer built from brokers and
// cfg, wires up the OpenTelemetry tracer, and starts the background
// goroutine that drains the producer's success/error channels.
func newProducerWithCfg(brokers []string, cfg *sarama.Config) (*Producer, error) {
	ap, err := sarama.NewAsyncProducer(brokers, cfg)
	if err != nil {
		return nil, err
	}
	p := &Producer{
		producer: ap,
		config:   cfg,
		tracer: otel.GetTracerProvider().Tracer(gProducerTracerName,
			trace.WithInstrumentationVersion(contrib.SemVersion())),
		metas: g.MapAnyAny{},
	}
	go p.handle()
	return p, nil
}

// handle drains the async producer's Errors and Successes channels,
// invoking the registered callbacks (p.e / p.s) and finishing the tracing
// span that PushEvent stored in p.metas for each message.
//
// Both channels are closed by sarama when the producer is closed; a closed
// channel is set to nil so the select stops firing on it, and the goroutine
// returns once both are drained. (Previously the loop spun forever on the
// closed channels, pinning a CPU core and leaking the goroutine after Close.)
//
// NOTE(review): p.metas is also written by PushEvent from caller
// goroutines; that unsynchronized map access is a data race — the Producer
// struct (declared elsewhere) should guard metas with a mutex.
func (p *Producer) handle() {
	errCh := p.producer.Errors()
	okCh := p.producer.Successes()
	for errCh != nil || okCh != nil {
		select {
		case err, ok := <-errCh:
			if !ok {
				errCh = nil // channel closed: stop selecting on it
				continue
			}
			if p.e != nil {
				p.e(err)
			}
			key := err.Msg.Metadata
			if span, ok := p.metas[key]; ok {
				delete(p.metas, key)
				p.finishProducerSpan(span.(trace.Span), err.Msg.Partition, err.Msg.Offset, err.Err)
			}
		case msg, ok := <-okCh:
			if !ok {
				okCh = nil // channel closed: stop selecting on it
				continue
			}
			if p.s != nil {
				p.s(msg)
			}
			key := msg.Metadata
			if span, ok := p.metas[key]; ok {
				delete(p.metas, key)
				p.finishProducerSpan(span.(trace.Span), msg.Partition, msg.Offset, nil)
			}
		}
	}
}

// Close shuts down the underlying async producer. Sarama closes the
// Successes and Errors channels as part of this, which also lets the
// handle goroutine observe the shutdown.
func (p *Producer) Close() error {
	return p.producer.Close()
}

// HandleError registers fn as the callback invoked by the background
// goroutine for every delivery error reported by the producer.
func (p *Producer) HandleError(fn HandleErrorFunc) {
	p.e = fn
}

// HandleSucceed registers fn as the callback invoked by the background
// goroutine for every successfully delivered message.
func (p *Producer) HandleSucceed(fn HandleSucceedFunc) {
	p.s = fn
}

// PushEvent serializes e into an xevent-compatible envelope and sends it
// asynchronously to topic, starting a producer tracing span that is
// finished by the handle goroutine once delivery succeeds or fails.
func (p *Producer) PushEvent(ctx context.Context, topic, data string, e interface{}) {
	// xevent compatibility forces a double serialization: once for the
	// Data payload here and once more for the whole envelope below.
	eb, _ := gjson.Encode(e)
	event := &eventProducer{
		Type:     data,
		Time:     time.Now().Format("2006-01-02 15:04:05"),
		From:     localAddr,
		Hostname: hostname,
		Data:     eb,
		Kv:       g.MapStrStr{},
	}
	span := p.startProducerSpan(ctx, topic, event)

	b, _ := gjson.Encode(event)
	msg := &sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.ByteEncoder(b),
	}
	// SpanID keys the in-flight span so handle() can find and finish it.
	msg.Metadata = span.SpanContext().SpanID

	// BUG FIX: register the span BEFORE handing the message to the async
	// producer. The previous order (send, then register) let the success/
	// error callback in handle() fire first, miss the span in p.metas, and
	// leave it un-ended forever.
	// NOTE(review): p.metas is also mutated from the handle goroutine, so
	// this map write is still a data race — the Producer struct should
	// guard metas with a mutex.
	p.metas[msg.Metadata] = span
	p.producer.Input() <- msg
}

// startProducerSpan starts a producer-kind span for a message bound for
// topic, injects the trace context into the outgoing event (so consumers
// can continue the trace), and returns the span for later completion.
func (p *Producer) startProducerSpan(ctx context.Context, topic string, e *eventProducer) trace.Span {
	attrs := []label.KeyValue{
		semconv.MessagingSystemKey.String("kafka"),
		semconv.MessagingDestinationKindKeyTopic,
		// BUG FIX: the topic name belongs under messaging.destination; the
		// original wrote it under MessagingDestinationKindKey, clobbering
		// the "topic" kind attribute set just above with the topic name.
		semconv.MessagingDestinationKey.String(topic),
	}
	opts := []trace.SpanOption{
		trace.WithAttributes(attrs...),
		trace.WithSpanKind(trace.SpanKindProducer),
	}
	ctxNew, span := p.tracer.Start(ctx, "kafka.produce", opts...)
	// NOTE(review): xtrace is not in this file's import block — confirm it
	// is imported elsewhere or switch to otel.GetTextMapPropagator().
	xtrace.GetDefaultTextMapPropagator().Inject(ctxNew, e)
	return span
}

// finishProducerSpan records the delivery outcome on span — the assigned
// partition and offset, plus an error status when delivery failed — and
// then ends it.
func (p *Producer) finishProducerSpan(span trace.Span, partition int32, offset int64, err error) {
	defer span.End()
	span.SetAttributes(
		semconv.MessagingMessageIDKey.String(strconv.FormatInt(offset, 10)),
		gKafkaPartitionKey.Int32(partition),
	)
	if err == nil {
		return
	}
	span.SetStatus(codes.Error, err.Error())
}
