package util

import (
	"cloud.base.utils-go.sdk/httplib"
	"cloud.hubbleye.appalert.service_r/internal/middleware"
	"cloud.hubbleye.appalert.service_r/internal/model"
	"encoding/json"
	"fmt"
	"github.com/Shopify/sarama"
	"net/url"
	"path"
	"regexp"
	"strconv"
	"strings"
	"sync"
	"time"
)

var (
	// wg tracks the per-partition consumer goroutines spawned by
	// Read_kafka_singal_topic and Read_kafka_singal_topic2.
	// NOTE(review): both consumers share this single package-level
	// WaitGroup and both call wg.Wait(); if the two run concurrently
	// each will also wait on the other's goroutines — confirm intended.
	wg sync.WaitGroup
)

// KafkaValue is the JSON payload consumed from the application-alert Kafka
// topic (see Read_kafka_singal_topic). Tag names mirror the producer's
// mixed-case field naming, so they must not be normalized.
type KafkaValue struct {
	Id                string  `json:"id"`
	SystemAlias       string  `json:"SystemAlias"` // alias of the application that produced the log
	CreatedDate       int64   `json:"createdDate"` // used as EventOccurTimestamp; presumably epoch millis — TODO confirm with producer
	Topic             string  `json:"Topic"`
	Title             string  `json:"Title"`
	Content           string  `json:"Content"` // becomes AlertContent in BuildAlertEvent
	Ip                string  `json:"Ip"`      // source host IP
	Level             int     `json:"Level"`   // severity; only Level > 2 triggers an alert
	Timestamp         string  `json:"Timestamp"`
	Tag               string  `json:"Tag"`
	EventId           string  `json:"EventId"`
	MetricValue       float64 `json:"MetricValue"`
	SpanDepth         int     `json:"SpanDepth"`
	SpanId            string  `json:"SpanId"`
	ParentSpanId      string  `json:"ParentSpanId"`
	TraceId           string  `json:"TraceId"` // used as the alert's EventId
	RequestId         string  `json:"RequestId"`
	LogType           string  `json:"LogType"`
	RemoteSystemAlias string  `json:"RemoteSystemAlias"` // callee system, mapped to TargetSystemAlias
	RemoteIpAddress   string  `json:"RemoteIpAddress"`   // callee IP, mapped to TargetIP
	IndexName         string  `json:"-"`                 // local-only; never (de)serialized
}

// BuildAlertEvent maps a decoded KafkaValue onto an alert-center event:
// datasource "app", layer "1", category "logError", environment "prd".
// The trace ID doubles as the event ID so alerts can be correlated back
// to the originating request.
func BuildAlertEvent(value *KafkaValue) *model.AlertEvent {
	return &model.AlertEvent{
		SystemAlias:         value.SystemAlias,
		Env:                 "prd",
		EventDatasource:     "app",
		EventDatasourceName: "应用告警",
		EventOccurTimestamp: value.CreatedDate,
		EventOccurIp:        value.Ip,
		AlertContent:        value.Content,
		EventLayer:          "1",
		EventCategory:       "logError",
		EventId:             value.TraceId,
		TargetSystemAlias:   value.RemoteSystemAlias,
		TargetIP:            value.RemoteIpAddress,
	}
}

// KafkaBuildAlertEvent builds an alert-center event from raw log-line
// fields extracted in Read_kafka_singal_topic2: datasource "appError",
// layer "3", category "logError", environment "prd". Target fields are
// left empty because raw log lines carry no callee information.
func KafkaBuildAlertEvent(SystemAlias, EventOccurIp, AlertContent, EventId string, EventOccurTimestamp int64) *model.AlertEvent {
	return &model.AlertEvent{
		SystemAlias:         SystemAlias,
		Env:                 "prd",
		EventDatasource:     "appError",
		EventDatasourceName: "应用告警",
		EventOccurTimestamp: EventOccurTimestamp,
		EventOccurIp:        EventOccurIp,
		AlertContent:        AlertContent,
		EventLayer:          "3",
		EventCategory:       "logError",
		EventId:             EventId,
		TargetSystemAlias:   "",
		TargetIP:            "",
	}
}

// ReadKafka checks that this_topic exists on the kafka_qun brokers and,
// when found, starts a background consumer for it. kafka_brokers selects
// the consumer implementation: 1 = JSON KafkaValue payloads
// (Read_kafka_singal_topic), 2 = raw log lines (Read_kafka_singal_topic2).
// Any panic is recovered so a broker hiccup cannot crash the process.
func ReadKafka(kafka_qun []string, this_topic string, kafka_brokers int) {
	defer func() {
		if err := recover(); err != nil {
			fmt.Println("Pannic ReadKafka error")
		}
	}()
	config := sarama.NewConfig()
	client, err := sarama.NewClient(kafka_qun, config)
	if err != nil {
		// Printf, not Println: the message contains a %v verb. Return early —
		// the original fell through and dereferenced a nil client, relying
		// on the recover above.
		fmt.Printf("ERROR: Unable to create kafka client, err=[%v]\n", err)
		return
	}
	defer client.Close()
	topics, err := client.Topics()
	if err != nil {
		fmt.Printf("ERROR: Unable to list kafka topics, err=[%v]\n", err)
		return
	}
	for i, topic := range topics {
		if topic == this_topic {
			fmt.Println(i, topic)
			switch kafka_brokers {
			case 1:
				go Read_kafka_singal_topic(topic, kafka_qun)
			case 2:
				go Read_kafka_singal_topic2(topic, kafka_qun)
			}
		}
	}
}
// Read_kafka_singal_topic consumes every partition of topic from the given
// brokers, decodes each message as a JSON KafkaValue, and forwards messages
// with Level > 2 to the alert center. It blocks until all partition
// goroutines exit (their message channels close) and recovers from panics.
func Read_kafka_singal_topic(topic string, kafka_qun []string) {
	defer func() {
		if err := recover(); err != nil {
			fmt.Println("Pannic Read_kafka_singal_topic error")
		}
	}()
	consumer, err := sarama.NewConsumer(kafka_qun, nil)
	if err != nil {
		fmt.Println("sarama.NewConsumer error----->", err)
		// Nothing to consume; the original fell through and dereferenced nil.
		return
	}
	defer consumer.Close()
	partitionList, err := consumer.Partitions(topic)
	if err != nil {
		fmt.Println("consumer.Partitions error----->", err)
		return
	}
	// Partitions returns partition IDs. Consume each ID — the original
	// ranged over slice indices, which is only correct while IDs happen
	// to be 0..n-1.
	for _, partition := range partitionList {
		pc, err := consumer.ConsumePartition(topic, partition, sarama.OffsetNewest)
		if err != nil {
			fmt.Println("consumer.ConsumePartition error----->", err)
			continue
		}
		wg.Add(1)
		// Pass pc explicitly (the original declared an unnamed parameter
		// and silently captured the loop variable instead).
		go func(pc sarama.PartitionConsumer) {
			defer wg.Done()
			// Close this partition's consumer when its loop ends, not at
			// function exit as the original's defer-in-loop did.
			defer pc.AsyncClose()
			for msg := range pc.Messages() {
				var this_kafka_value KafkaValue
				if err := json.Unmarshal(msg.Value, &this_kafka_value); err != nil {
					// Skip malformed payloads rather than alerting on a zero value.
					continue
				}
				if this_kafka_value.Level > 2 {
					fmt.Println(this_kafka_value.CreatedDate, this_kafka_value.Level, this_kafka_value.SystemAlias)
					// 发送告警数据 -> send the alert event to the alert center.
					if err := Alert([]*model.AlertEvent{BuildAlertEvent(&this_kafka_value)}); err != nil {
						fmt.Println("Alert error----->", err)
					}
					middleware.Logger.Info("NSQL/KAFKA1->alertEvent success")
				}
			}
		}(pc)
	}
	wg.Wait()
}

// Read_kafka_singal_topic2 consumes raw (non-JSON) log lines from topic.
// Lines containing "[ERROR]" are parsed with regexes — source IP,
// application alias, and timestamp are extracted from the text after the
// "[source_ip]" marker — and forwarded to the alert center as "appError"
// events. Malformed lines are skipped instead of panicking (the original
// indexed Split results unchecked, and one bad line killed the partition's
// message loop via the goroutine's recover).
func Read_kafka_singal_topic2(topic string, kafka_qun []string) {
	defer func() {
		if err := recover(); err != nil {
			fmt.Println("Pannic Read_kafka_singal_topic error")
		}
	}()

	consumer, err := sarama.NewConsumer(kafka_qun, nil)
	if err != nil {
		fmt.Println("sarama.NewConsumer error----->", err)
		return
	}
	defer consumer.Close()
	partitionList, err := consumer.Partitions(topic)
	if err != nil {
		fmt.Println("consumer.Partitions error----->", err)
		return
	}

	// Extraction patterns, compiled once per call. MustCompile is safe:
	// the patterns are constants known to be valid.
	regIp := regexp.MustCompile(`\d+\.\d+\.\d+\.\d+`)       // source IP address
	regStr := regexp.MustCompile(`\w+\.\w+\.\w+\.\w+`)      // dotted application alias
	regTime := regexp.MustCompile(`[0-9]{4}\-[0-9]{2}\-[0-9]{2}\s[0-9]{2}\:[0-9]{2}\:[0-9]{2}`) // "2006-01-02 15:04:05"

	// Log timestamps are wall-clock Shanghai time; fall back to a fixed
	// +8 zone if the tz database is unavailable on the host.
	loc, err := time.LoadLocation("Asia/Shanghai")
	if err != nil {
		loc = time.FixedZone("CST", 8*3600)
	}

	for _, partition := range partitionList {
		pc, err := consumer.ConsumePartition(topic, partition, sarama.OffsetNewest)
		if err != nil {
			fmt.Println("consumer.ConsumePartition error----->", err)
			continue
		}
		wg.Add(1)
		go func(pc sarama.PartitionConsumer) {
			defer func() {
				if err := recover(); err != nil {
					fmt.Println("Pannic Read_kafka_singal_topic error", err)
				}
			}()
			defer wg.Done()
			defer pc.AsyncClose()
			for msg := range pc.Messages() {
				raw := string(msg.Value)
				if !strings.Contains(raw, "[ERROR]") {
					continue
				}
				parts := strings.Split(raw, "[source_ip]")
				if len(parts) < 2 {
					continue // no [source_ip] marker: nothing to parse
				}
				payload := parts[1]
				this_ip := regIp.FindString(payload)
				apps := regStr.FindAllString(payload, -1)
				contents := strings.Split(payload, "] -")
				if len(apps) < 2 || len(contents) < 2 {
					continue // need the 2nd alias match and the "] -"-separated content
				}
				// Best-effort timestamp: 0 on parse failure (the original
				// silently produced a large negative value from the zero time).
				var alertMillis int64
				if tt, err := time.ParseInLocation("2006-01-02 15:04:05", regTime.FindString(payload), loc); err == nil {
					alertMillis = tt.Unix() * 1000
				}
				event := KafkaBuildAlertEvent(apps[1], this_ip, contents[1], strconv.FormatInt(alertMillis, 10), alertMillis)
				// 发送告警中心 -> post to the alert center.
				if err := Alert([]*model.AlertEvent{event}); err != nil {
					fmt.Println("Alert error----->", err)
				}
				middleware.Logger.Info("KAFKA2->alertEvent success")
			}
		}(pc)
	}
	wg.Wait()
}

// Alert posts the given alert events to the "/api/alert/events" endpoint
// of every configured alert-center base URL and returns the post error.
func Alert(alertInfo []*model.AlertEvent) error {
	endpoints := CombineUri(middleware.ThisNacosData.AlertCfgurls, "/api/alert/events")
	_, err := httplib.Posts(endpoints, alertInfo)
	return err
}

// CombineUri joins urirelative onto each base address in uribase and
// returns the resulting URLs. Bases with no scheme get "http://"
// prepended; entries that fail to parse are skipped.
func CombineUri(uribase []string, urirelative string) []string {
	urireal := make([]string, 0, len(uribase))
	for _, str := range uribase {
		str = strings.TrimSpace(str)
		// Only add a scheme when none is present. The original checked
		// Index(str, "http://") != 0, which mangled "https://..." bases
		// into "http://https://...".
		if !strings.HasPrefix(str, "http://") && !strings.HasPrefix(str, "https://") {
			str = "http://" + str
		}
		u, err := url.Parse(str)
		if err != nil {
			// Malformed base: skip it instead of dereferencing a nil *url.URL.
			continue
		}
		u.Path = path.Join(u.Path, urirelative)
		urireal = append(urireal, u.String())
	}
	return urireal
}
