package com.sui.bigdata.flink.sql.function.metric;

import com.alibaba.fastjson.JSON;
import org.apache.flink.metrics.*;
import org.apache.flink.metrics.reporter.AbstractReporter;
import org.apache.flink.metrics.reporter.Scheduled;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.Map;

/**
 * @author YongChen
 * @date 2019/9/26 17:21
 * @description
 * @email yong_chen@sui.com
 */
/**
 * Flink metric reporter that periodically serializes jobmanager-scoped gauges
 * to a JSON document and publishes it to a Kafka topic.
 *
 * <p>Configured via {@link MetricConfig} with the keys {@link #BOOTSTRAPSERVERS}
 * and {@link #TOPIC}. Only gauge metrics whose identifier contains
 * {@code "jobmanager"} are reported; counters, meters and histograms are ignored.
 *
 * <p>Not thread-safe beyond what Flink's reporter lifecycle guarantees
 * (open → periodic report → close on a single scheduler).
 */
public class KafkaReporter extends AbstractReporter implements Scheduled {
    private static final Logger logger = LoggerFactory.getLogger(KafkaReporter.class);

    /** Config key: comma-separated Kafka bootstrap servers. */
    public static final String BOOTSTRAPSERVERS = "bootstrapServers";

    /** Config key: destination Kafka topic. */
    public static final String TOPIC = "topic";

    /** Config key reserved for a status flag (not read by this class). */
    public static final String STATUS = "status";

    /** Destination topic, resolved in {@link #open(MetricConfig)}. */
    public String topic;

    // NOTE(review): renamed from "KafkaProducer", which shadowed the type name,
    // and parameterized instead of raw. Flink instantiates reporters reflectively,
    // so no external code references this field by name.
    public KafkaProducer<String, String> kafkaProducer;

    /** Bootstrap servers, resolved in {@link #open(MetricConfig)}. */
    public String bootstrapServers;

    /** Job name extracted from the metric identifier; may stay null if never found. */
    public String jobName;

    /**
     * Collects all jobmanager gauges into one map and sends it to Kafka as JSON.
     * Nothing is sent when no jobmanager gauge is registered.
     */
    @Override
    public void report() {
        Map<String, Object> allMetric = new HashMap<>();

        // First pass: derive the job name from a jobmanager "fullRestarts" gauge.
        // Assumes the identifier is dot-separated with the job name at index 2
        // (e.g. "<host>.jobmanager.<jobName>...") -- TODO confirm against the
        // configured metrics scope format.
        for (Map.Entry<Gauge<?>, String> metric : gauges.entrySet()) {
            String metricName = metric.getValue();
            if (metricName.contains("jobmanager") && metricName.contains("fullRestarts")) {
                String[] parts = metricName.split("\\.");
                if (parts.length > 2) { // guard against unexpected identifier shapes
                    jobName = parts[2];
                    allMetric.put("jobName", jobName);
                }
            }
        }

        // Second pass: collect every jobmanager gauge under its shortened name
        // (jobName must already be known so getMetricName can strip it).
        for (Map.Entry<Gauge<?>, String> metric : gauges.entrySet()) {
            String metricName = metric.getValue();
            if (metricName.contains("jobmanager")) {
                allMetric.put(getMetricName(metricName), metric.getKey().getValue());
            }
        }

        if (!allMetric.isEmpty()) {
            allMetric.put("running.status", 1);
            try {
                kafkaProducer.send(new ProducerRecord<>(topic, "", JSON.toJSONString(allMetric)));
            } catch (Exception e) {
                // Keep the stack trace; e.getMessage() alone loses the cause chain.
                logger.error("metric report error", e);
            }
        }
    }

    /**
     * Reads the Kafka connection settings from the reporter configuration and
     * initializes the producer. A failed initialization is logged, not thrown,
     * so the reporter does not take down the JobManager.
     *
     * @param metricConfig Flink reporter configuration
     */
    @Override
    public void open(MetricConfig metricConfig) {
        bootstrapServers = metricConfig.getString(BOOTSTRAPSERVERS, null);
        topic = metricConfig.getString(TOPIC, null);

        logger.info("topic is: {}, bootstrap is: {}", topic, bootstrapServers);
        try {
            kafkaProducer = getProducer(bootstrapServers);
        } catch (Exception e) {
            logger.error("metric collect, init producer error.", e);
        }
    }

    /** Releases the Kafka producer, swallowing (but logging) close failures. */
    @Override
    public void close() {
        if (kafkaProducer != null) {
            try {
                kafkaProducer.close();
            } catch (Exception e) {
                logger.error("metric collect, close producer error.", e);
            }
        }
    }

    /**
     * Builds a String/String Kafka producer with small-batch, low-latency defaults.
     *
     * @param bootstrapServers comma-separated broker list
     * @return a ready-to-use producer (caller owns closing it)
     */
    public KafkaProducer<String, String> getProducer(String bootstrapServers) {
        Map<String, Object> props = new HashMap<>();
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("bootstrap.servers", bootstrapServers);
        props.put("acks", "1");
        props.put("retries", "3");
        props.put("batch.size", "16384");
        props.put("linger.ms", "10");
        props.put("buffer.memory", "33554432");
        return new KafkaProducer<>(props);
    }

    /** No character filtering: metric names pass through unchanged. */
    @Override
    public String filterCharacters(String s) {
        return s;
    }

    /**
     * Shortens a full metric identifier: drops the first two dot-separated
     * components (host and "jobmanager"), then strips the "Status." prefix
     * and the job-name segment.
     *
     * @param s full dot-separated metric identifier
     * @return shortened metric name
     */
    public String getMetricName(String s) {
        String[] arr = s.split("\\.");
        StringBuilder builder = new StringBuilder();
        for (int i = 2; i < arr.length; i++) {
            if (builder.length() > 0) {
                builder.append('.');
            }
            builder.append(arr[i]);
        }
        String name = builder.toString().replace("Status.", "");
        // Guard: if no fullRestarts gauge was ever seen, jobName is still null and
        // the original code would have stripped the literal "null." here.
        return jobName == null ? name : name.replace(jobName + ".", "");
    }

}
