package org.apache.flink.metrics.kafka;

import org.apache.flink.metrics.Metric;
import org.apache.flink.metrics.MetricConfig;
import org.apache.flink.metrics.MetricGroup;
import org.apache.flink.metrics.reporter.AbstractReporter;
import org.apache.flink.metrics.reporter.MetricReporter;
import org.apache.flink.metrics.reporter.Scheduled;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;

/**
 * {@link MetricReporter} that exports {@link Metric Metrics} via {@link KafkaProducer}.
 *
 * @author jasonlee
 */
public class KafkaReporter extends AbstractReporter implements Scheduled {

    private static final Logger LOGGER = LoggerFactory.getLogger(KafkaReporter.class);

    // Scope variable keys defined by Flink's metric scope formats.
    private static final String JOB_ID_VARIABLE = "<job_id>";
    private static final String JOB_NAME_VARIABLE = "<job_name>";
    private static final String HOST_VARIABLE = "<host>";
    private static final String TM_ID_VARIABLE = "<tm_id>";
    private static final String SUBTASK_INDEX_VARIABLE = "<subtask_index>";

    // Defaults used when the reporter configuration does not override them.
    private static final String BROKER_LIST =
            "172.16.1.41:9092,172.16.1.42:9092,172.16.1.43:9092,172.16.1.44:9092";
    private static final String TOPIC = "fdm_lend_audit_record_test";
    private static final String FLINK_VERSION = "1.14.4";
    private static final int DEFAULT_CHUNK_SIZE = 5;

    private KafkaProducer<String, String> kafkaProducer;
    // Metric names to report; empty means "report everything".
    private final List<String> metricsFilter = new ArrayList<>();
    // Maximum number of metric entries per Kafka record; always > 0 after open().
    private int chunkSize;
    private String topic;
    private String jobName;
    private String jobId;
    private String host;
    private String containerId;
    private String subtaskIndex;
    // "jobmanager" or "taskmanager", derived lazily from the first metric name seen.
    private String containerType;

    /**
     * Reads the reporter configuration ({@code bootstrapServers}, {@code topic},
     * {@code filter}, {@code chunkSize}) and creates the shared {@link KafkaProducer}.
     *
     * @param metricConfig reporter configuration supplied by Flink
     */
    @Override
    public void open(MetricConfig metricConfig) {
        String bootstrapServers = metricConfig.getString("bootstrapServers", BROKER_LIST);
        this.topic = metricConfig.getString("topic", TOPIC);
        String filter = metricConfig.getString("filter", "");

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", bootstrapServers);
        properties.setProperty("acks", "all");
        properties.setProperty(
                "key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.setProperty(
                "value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // The Kafka client resolves serializer classes through the context class loader;
        // clearing it makes the client fall back to its own class loader, avoiding
        // conflicts with Flink's user-code class loader. Restore the original loader
        // even if the constructor throws, so the thread is not left with a null loader.
        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
        Thread.currentThread().setContextClassLoader(null);
        try {
            kafkaProducer = new KafkaProducer<>(properties);
        } finally {
            Thread.currentThread().setContextClassLoader(contextClassLoader);
        }

        if (StringUtils.isNotEmpty(filter)) {
            this.metricsFilter.addAll(Arrays.asList(filter.split(",")));
        }

        // A non-positive or unparsable chunk size would make the chunking loop in
        // tryReport() spin forever (start += chunkSize); fall back to the default.
        int configuredChunkSize;
        try {
            configuredChunkSize =
                    Integer.parseInt(
                            metricConfig.getString(
                                    "chunkSize", String.valueOf(DEFAULT_CHUNK_SIZE)));
        } catch (NumberFormatException e) {
            LOGGER.warn("Invalid chunkSize configuration, falling back to {}",
                    DEFAULT_CHUNK_SIZE, e);
            configuredChunkSize = DEFAULT_CHUNK_SIZE;
        }
        this.chunkSize = configuredChunkSize > 0 ? configuredChunkSize : DEFAULT_CHUNK_SIZE;
    }

    /**
     * Captures job/host/container identity from the first metric group that carries each
     * scope variable, then registers the metric if it passes the name filter.
     *
     * <p>Synchronized so the metric maps inherited from {@link AbstractReporter} are not
     * mutated while {@link #report()} iterates over them.
     */
    @Override
    public synchronized void notifyOfAddedMetric(
            Metric metric, String metricName, MetricGroup group) {
        Map<String, String> allVariables = group.getAllVariables();
        this.jobId = firstNonNull(this.jobId, allVariables.get(JOB_ID_VARIABLE));
        this.jobName = firstNonNull(this.jobName, allVariables.get(JOB_NAME_VARIABLE));
        this.host = firstNonNull(this.host, allVariables.get(HOST_VARIABLE));
        this.containerId = firstNonNull(this.containerId, allVariables.get(TM_ID_VARIABLE));
        this.subtaskIndex =
                firstNonNull(this.subtaskIndex, allVariables.get(SUBTASK_INDEX_VARIABLE));

        // Debug level: this callback fires once per registered metric, which is far too
        // chatty for info.
        LOGGER.debug(
                "job id: {}, job name: {}, host :{}, container_id :{}, subtaskIndex :{}",
                this.jobId,
                this.jobName,
                this.host,
                this.containerId,
                this.subtaskIndex);
        LOGGER.debug("add metric, metric name : {}", metricName);

        if (shouldReport(metricName)) {
            super.notifyOfAddedMetric(metric, metricName, group);
        }
    }

    /** Deregisters the metric; synchronized for the same reason as the add callback. */
    @Override
    public synchronized void notifyOfRemovedMetric(
            Metric metric, String metricName, MetricGroup group) {
        if (shouldReport(metricName)) {
            super.notifyOfRemovedMetric(metric, metricName, group);
        }
    }

    /**
     * If no filter is configured every metric is reported; otherwise only metrics whose
     * name is listed in the filter.
     */
    private boolean shouldReport(String metricName) {
        return this.metricsFilter.isEmpty() || this.metricsFilter.contains(metricName);
    }

    /** Returns {@code current} unless it is null, in which case {@code fallback}. */
    private static String firstNonNull(String current, String fallback) {
        return current != null ? current : fallback;
    }

    @Override
    public void close() {
        if (kafkaProducer != null) {
            kafkaProducer.close();
        }
    }

    /**
     * Scheduled entry point. Synchronizes with the notify* callbacks so the metric maps
     * are stable during iteration, and swallows (but logs) all exceptions so one failed
     * report does not kill the reporter schedule.
     */
    @Override
    public void report() {
        synchronized (this) {
            try {
                tryReport();
            } catch (Exception e) {
                // Log with the full stack trace instead of printStackTrace()/toString().
                LOGGER.error("KafkaReporter report error", e);
            }
        }
    }

    /** Serializes all registered metrics to JSON and sends them to Kafka in chunks. */
    private void tryReport() {
        Map<String, Object> metricMap = new LinkedHashMap<>();
        metricMap.put("job_id", this.jobId);
        metricMap.put("job_name", this.jobName);
        metricMap.put("host_name", this.host);
        metricMap.put("container_type", this.containerType);
        metricMap.put("container_id", this.containerId);
        metricMap.put("subtask_index", this.subtaskIndex);
        metricMap.put("flink_version", FLINK_VERSION);

        JSONArray jsonArray = new JSONArray();
        gauges.forEach(
                (gauge, metricName) ->
                        jsonArray.add(generateJsonObject(metricName, gauge.getValue(), "Gauge")));
        counters.forEach(
                (counter, metricName) ->
                        jsonArray.add(
                                generateJsonObject(metricName, counter.getCount(), "Counter")));
        histograms.forEach(
                (histogram, metricName) ->
                        jsonArray.add(
                                generateJsonObject(metricName, histogram.getCount(), "Histogram")));
        meters.forEach(
                (meter, metricName) ->
                        jsonArray.add(generateJsonObject(metricName, meter.getCount(), "Meter")));

        // Split the metrics into chunks of at most chunkSize entries so a single Kafka
        // record does not grow unbounded.
        List<Object> objects = new ArrayList<>(jsonArray);
        for (int start = 0; start < objects.size(); start += chunkSize) {
            int end = Math.min(objects.size(), start + chunkSize);
            // toJSONString serializes immediately, so reusing metricMap across sends is safe.
            metricMap.put("metrics", objects.subList(start, end));
            ProducerRecord<String, String> record =
                    new ProducerRecord<>(
                            this.topic, this.jobId, JSONObject.toJSONString(metricMap));
            kafkaProducer.send(record);
            LOGGER.debug("The metrics report was successful");
        }
    }

    @Override
    public String filterCharacters(String input) {
        return input;
    }

    /**
     * Wraps one metric reading as a JSON object. Also lazily derives {@link #containerType}
     * from the first metric name seen ("jobmanager" vs. "taskmanager").
     *
     * @param metricName fully scoped metric name
     * @param value current metric reading
     * @param type metric kind label ("Gauge", "Counter", "Histogram", "Meter")
     */
    private <T> JSONObject generateJsonObject(String metricName, T value, String type) {
        JSONObject jsonObject = new JSONObject();
        if (this.containerType == null) {
            this.containerType = metricName.contains("jobmanager") ? "jobmanager" : "taskmanager";
        }
        jsonObject.put("metric_name", metricName);
        jsonObject.put("value", value);
        jsonObject.put("type", type);
        return jsonObject;
    }
}
