package kafka

import (
	"fmt"
	"github.com/Shopify/sarama"
	Cluster "github.com/bsm/sarama-cluster"
)

// Package-level helpers dedicated to producing log messages to Kafka
// and consuming them via a consumer group.

var (
	client sarama.SyncProducer // global synchronous Kafka producer, set by InitProducer
	Consumer *Cluster.Consumer // global consumer-group consumer, set by InitConsumer

)

// InitConsumer initializes the package-level Consumer as a member of the
// consumer group groupId, subscribed to topic, against the given broker
// addresses. It panics if the consumer cannot be created (callers treat a
// missing consumer as fatal at startup).
//
// Offsets start at the newest message and partitions are assigned with the
// range balance strategy.
func InitConsumer(addrs []string, topic, groupId string) {
	config := Cluster.NewConfig()
	config.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
	config.Consumer.Offsets.Initial = sarama.OffsetNewest

	var err error
	Consumer, err = Cluster.NewConsumer(addrs, groupId, []string{topic}, config)
	if err != nil {
		fmt.Printf("Consumer init failed -> %v \n", err)
		panic(err.Error())
	}
	// Defensive check: NewConsumer should never return (nil, nil), but a nil
	// Consumer here would only fail later with a confusing panic elsewhere.
	if Consumer == nil {
		panic(fmt.Sprintf("Consumer is nil. kafka info -> {brokers:%v, topic: %v, group: %v}", addrs, topic, groupId))
	}
	fmt.Println("Consumer init succeed !")
}

// InitProducer connects the package-level synchronous producer to the Kafka
// brokers at addrs. It must be called before SendtoKafka. A non-nil error is
// returned when the connection cannot be established.
func InitProducer(addrs []string) (err error) {
	config := sarama.NewConfig()
	// Wait for the full ISR (leader and followers) to acknowledge each message.
	config.Producer.RequiredAcks = sarama.WaitForAll
	// Pick a random partition for every message.
	config.Producer.Partitioner = sarama.NewRandomPartitioner
	// SyncProducer requires successes to be returned on the success channel.
	config.Producer.Return.Successes = true

	client, err = sarama.NewSyncProducer(addrs, config)
	if err != nil {
		// The original message said "producer closed", which was misleading:
		// this is an initialization failure, not a shutdown.
		fmt.Println("producer init failed, err:", err)
		return
	}
	fmt.Println("Connect to kafka succeed!")
	return
}

// SendtoKafka produces data as a single string-encoded message on topic using
// the package-level producer (InitProducer must have been called first).
//
// It now reports the send error to the caller instead of silently swallowing
// it. Existing statement-style calls (`SendtoKafka(t, d)`) remain valid Go and
// keep the previous logging behavior, so this change is backward-compatible.
func SendtoKafka(topic, data string) error {
	msg := &sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.StringEncoder(data),
	}
	if _, _, err := client.SendMessage(msg); err != nil {
		fmt.Println("send msg failed, err:", err)
		return err
	}
	return nil
}
