package main

import (
	"log"
	"net/http"
	"os"
	"runtime"
	"strconv"
	"strings"
	"syscall"

	"encoding/json"
	"io/ioutil"
	"os/signal"

	"github.com/confluentinc/confluent-kafka-go/kafka"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	"gopkg.in/yaml.v2"
)

// BeatInfo carries the shipper ("beat") metadata embedded in each log record.
type BeatInfo struct {
	// HostName is the hostname of the machine that shipped the log line.
	HostName string `json:"hostname"`
}

// Info is the decoded shape of one nginx access-log record consumed from
// Kafka. Only the fields used for Prometheus labels/values are mapped.
type Info struct {
	// HttpHost is the request Host header; may include a ":port" suffix.
	HttpHost string `json:"http_host"`
	// Response is the HTTP status code as a string.
	Response string `json:"response"`
	// UpstreamAddr is the upstream backend address; may be a comma-separated
	// list when nginx retried several upstreams.
	UpstreamAddr string `json:"upstream_addr"`
	// RequestTime is the request duration in seconds, as a decimal string.
	RequestTime string `json:"request_time"`
	// Uri is the request URI (currently unused by msg2Prom).
	Uri string `json:"uri"`
	// Beat identifies the host that shipped this record.
	Beat BeatInfo `json:"beat"`
}


// conf holds the parsed config.yml contents; populated once in main
// before the consumer goroutines start, then read-only afterwards.
var conf Config

// msg2Prom consumes nginx access-log records from msgChan, filters them
// against the configured domain whitelist (conf.Domains), and records each
// request duration into the nginxHistogram Prometheus histogram, labeled by
// shipper hostname, http host, response code, and upstream address.
func msg2Prom(msgChan chan *kafka.Message) {
	for msg := range msgChan {
		str := string(msg.Value)
		if conf.Global.LogLevel == "debug" {
			log.Printf("msg is: %s", str)
		}

		// Decode into a fresh Info each iteration so fields missing from one
		// message never carry over stale values from a previous message.
		var info Info
		if err := json.Unmarshal([]byte(str), &info); err != nil {
			log.Printf("skip malformed message: %v", err)
			continue
		}

		// upstream_addr may list several addresses ("a, b") when nginx
		// retried upstreams; keep only the first one for the label.
		// (Previously this split info.HttpHost by mistake.)
		upaddr := info.UpstreamAddr
		if strings.Contains(upaddr, ",") {
			upaddr = strings.Split(upaddr, ",")[0]
		}

		// http_host may carry a ":port" suffix; strip it before matching
		// against the whitelist.
		HttpHost := strings.Split(info.HttpHost, ":")[0]
		if !IfInArrary(HttpHost, conf.Domains) {
			log.Println(HttpHost)
			continue
		}

		// request_time arrives as a decimal string; skip records that do
		// not parse rather than recording a bogus zero observation.
		RequestTime, err := strconv.ParseFloat(info.RequestTime, 64)
		if err != nil {
			continue
		}
		nginxHistogram.WithLabelValues(info.Beat.HostName, HttpHost, info.Response, upaddr).Observe(RequestTime)
	}
}

// kafkaConsumer creates a Kafka consumer from conf.Kafka, subscribes to the
// configured topic, and forwards every received *kafka.Message onto msgChan
// for msg2Prom to process. It loops forever; unrecoverable broker errors
// terminate the process via log.Fatalf.
func kafkaConsumer(msgChan chan *kafka.Message) {
	KafkaServer := conf.Kafka.KafkaServer
	KafkaTopic := conf.Kafka.KafkaTopic
	KafkaGroup := conf.Kafka.KafkaGroup

	c, err := kafka.NewConsumer(&kafka.ConfigMap{
		"bootstrap.servers":     KafkaServer,
		"broker.address.family": "v4",
		"group.id":              KafkaGroup,
		"session.timeout.ms":    30000,
		"auto.offset.reset":     "latest"})
	if err != nil {
		log.Fatalf("Failed to create consumer: %s\n", err)
	}
	// Not reached during normal operation (the poll loop never exits),
	// but keeps the consumer tidy if the loop is ever given an exit path.
	defer c.Close()

	log.Printf("create Consumer %v \n", c)
	log.Println([]string{KafkaTopic})
	if err = c.SubscribeTopics([]string{KafkaTopic}, nil); err != nil {
		// The original logged os.Stderr with %s, printing the *os.File
		// value instead of anything useful; report the topic and error.
		log.Fatalf("failed to subscribe to topic %s: %s", KafkaTopic, err)
	}

	for {
		// Poll with a 100ms timeout; nil means no event this round.
		ev := c.Poll(100)
		if ev == nil {
			continue
		}
		switch e := ev.(type) {
		case *kafka.Message:
			// Forward the raw message; JSON decoding happens in msg2Prom.
			msgChan <- e
			if e.Headers != nil {
				log.Printf("%% Headers: %v\n", e.Headers)
			}
		case kafka.Error:
			log.Printf("%% Error: %v: %v\n", e.Code(), e)
			if e.Code() == kafka.ErrAllBrokersDown {
				log.Fatalf("kafka broker down")
			}
		default:
			log.Printf("Ignore err %v , Groutine Num : %d \n", e, runtime.NumGoroutine())
		}
	}
}

// main loads config.yml, starts the Kafka consumer and the metric-recording
// goroutine, installs a signal handler, and serves /metrics until exit.
func main() {
	raw, err := ioutil.ReadFile("config.yml")
	if err != nil {
		log.Fatalf("yamlFile. Get err #%v", err)
	}
	if err = yaml.Unmarshal(raw, &conf); err != nil {
		log.Fatalf("Unmarshal, %v", err)
	}
	log.Println("yamlfile: ", conf)

	// Use every available CPU core.
	runtime.GOMAXPROCS(runtime.NumCPU())

	// Terminate the process on SIGINT/SIGTERM.
	sigCh := make(chan os.Signal, 1)
	signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM)
	go func() {
		log.Fatalf("exit with receive signal %v", <-sigCh)
	}()

	// Buffered channel decouples Kafka polling from metric recording.
	msgChan := make(chan *kafka.Message, 1000)
	go kafkaConsumer(msgChan)
	go msg2Prom(msgChan)

	// Expose the Prometheus scrape endpoint.
	http.Handle("/metrics", promhttp.Handler())
	log.Fatal(http.ListenAndServe(conf.Global.Listen, nil))
}

// init registers this application's collectors with the default Prometheus
// registry before main runs.
func init() {
	// Histogram of nginx request durations (defined elsewhere in the package).
	prometheus.MustRegister(nginxHistogram)
	// Go build/module information about the running binary.
	prometheus.MustRegister(prometheus.NewBuildInfoCollector())
}



