package report

import (
	"context"
	"encoding/json"
	"os"
	"strings"
	"time"

	"github.com/Shopify/sarama"
	"github.com/go-logr/logr"

	nodev1 "node-operator/api/v1"

	"k8s.io/apimachinery/pkg/labels"
	cmdutil "k8s.io/kubectl/pkg/cmd/util"
	metricsclientset "k8s.io/metrics/pkg/client/clientset/versioned"
)

// KafkaReporter periodically publishes node metrics to a Kafka topic.
// Start must be called before the reporter does any work; the zero
// value is not usable (producer is created in Start).
type KafkaReporter struct {
	// KafkaProducerConfig is the sarama configuration used to build the producer.
	KafkaProducerConfig *sarama.Config
	// KafkaBrokers is a comma-separated list of broker addresses.
	KafkaBrokers        string
	// KafkaTopic is the topic all metric payloads are published to.
	KafkaTopic          string
	// producer is created in Start and closed on context cancellation.
	producer            sarama.SyncProducer
	// Factory provides access to the Kubernetes REST config and clients.
	Factory             cmdutil.Factory
	Logger              logr.Logger
}

// Start creates the Kafka producer and launches the metrics-reporting
// loop in a background goroutine. The process exits if the producer
// cannot be created. When ctx is cancelled the producer is closed and
// the loop stops.
func (r *KafkaReporter) Start(ctx context.Context) {
	var err error
	brokers := strings.Split(r.KafkaBrokers, ",")
	r.producer, err = sarama.NewSyncProducer(brokers, r.KafkaProducerConfig)
	if err != nil {
		r.Logger.Error(err, "Parse kafka config error")
		os.Exit(1)
	}

	go r.updateNodeMetrics(ctx)
	go func() {
		// Block until cancellation, then release the producer.
		// (A one-case select around this receive is redundant.)
		<-ctx.Done()
		if err := r.producer.Close(); err != nil {
			r.Logger.Error(err, "Close kafka producer error")
		}
		r.Logger.Info("Stop reportNodeMetrics & updateNodeMetrics")
	}()
}

// publish marshals nothing itself; it sends one pre-encoded payload to
// the configured Kafka topic, logging (but not propagating) send errors.
func (r *KafkaReporter) publish(payload []byte) {
	msg := &sarama.ProducerMessage{
		Topic: r.KafkaTopic,
		Value: sarama.ByteEncoder(payload),
	}
	if _, _, err := r.producer.SendMessage(msg); err != nil {
		r.Logger.Error(err, "Kafka send error")
	}
}

// updateNodeMetrics runs until ctx is cancelled. Each pass it either
// drains a node add/remove event from the shared channels, or collects
// node metrics from the metrics API, publishes the healthy nodes to
// Kafka, and re-publishes any deleted nodes marked as not ready.
// Fatal setup failures (missing metrics API, unusable config/clients)
// terminate the process.
func (r *KafkaReporter) updateNodeMetrics(ctx context.Context) {
	if !nodev1.MetricsAPIAvailable(r.Factory) {
		r.Logger.Error(nil, "Metrics API not available")
		os.Exit(1)
	}

	config, err := r.Factory.ToRESTConfig()
	if err != nil {
		r.Logger.Error(err, "Get k8s rest config error")
		os.Exit(1)
	}

	selector := labels.Everything()
	metricsClient, err := metricsclientset.NewForConfig(config)
	if err != nil {
		// BUG FIX: this error was previously overwritten by the next
		// assignment before ever being checked.
		r.Logger.Error(err, "Get metrics client error")
		os.Exit(1)
	}
	clientset, err := r.Factory.KubernetesClientSet()
	if err != nil {
		r.Logger.Error(err, "Get k8s client error")
		os.Exit(1)
	}
	nodeClient := clientset.CoreV1()

	for {
		select {
		case <-ctx.Done():
			r.Logger.Info("UpdateNodeMetrics loop stop")
			// BUG FIX: a bare break here only exited the select, not
			// the for loop, so the goroutine never actually stopped.
			return
		case node := <-DeletedNodeReportChan:
			// Record the deleted node so it keeps being reported as down.
			DeletedNodes = append(DeletedNodes, *node)
		case node := <-UpdateNodeReportChan:
			// The node is healthy again: remove it from DeletedNodes.
			for i, n := range DeletedNodes {
				if n.Name == node.Name {
					DeletedNodes = append(DeletedNodes[:i], DeletedNodes[i+1:]...)
					break
				}
			}
		default:
			metrics, err := nodev1.GetNodeMetricsFromMetricsAPI(metricsClient, "", selector)
			if err != nil {
				r.Logger.Error(err, "Get node metrics error")
			} else {
				nodes, err := calculateNodeMetrics(nodeClient, metrics)
				if err != nil {
					r.Logger.Error(err, "Calculate node metrics error")
				} else {
					// Publish the healthy nodes to Kafka.
					// BUG FIX: on Marshal failure, skip the send instead
					// of publishing a nil payload.
					if payload, err := json.Marshal(nodes); err != nil {
						r.Logger.Error(err, "Marshal nodes err")
					} else {
						r.publish(payload)
					}
					// Publish the deleted (unhealthy) nodes, if any.
					if len(DeletedNodes) > 0 {
						now := time.Now().UnixNano() / 1e6 // ms since epoch
						for i := range DeletedNodes {
							// BUG FIX: ranging by value mutated a copy;
							// index into the slice so the updates stick.
							DeletedNodes[i].Status.Ready = "False"
							DeletedNodes[i].InK8s = "False"
							DeletedNodes[i].Status.Time = now
						}
						if payload, err := json.Marshal(DeletedNodes); err != nil {
							r.Logger.Error(err, "Marshal nodes err")
						} else {
							r.publish(payload)
						}
					}
				}
			}

			time.Sleep(10 * time.Second)
		}
	}
}
