package com.sui.bigdata.flink.sql.core.util;

import com.alibaba.fastjson.JSON;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.net.InetAddress;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

/**
 * @author YongChen
 * @date 2020/3/6 14:47
 * @description
 * @email yong_chen@sui.com
 */
/**
 * Utility for reporting records that failed downstream processing to a dedicated
 * Kafka recall topic so they can be replayed later.
 *
 * <p>Reporting is a best-effort side channel: it only runs in the production
 * environment, and any failure to report is logged and swallowed so it never
 * breaks the main job. Callers must set {@link #jobName} before reporting.
 *
 * @author YongChen
 * @date 2020/3/6 14:47
 * @description
 * @email yong_chen@sui.com
 */
public class DealFailDataUtil {

    private static final Logger logger = LoggerFactory.getLogger(DealFailDataUtil.class);

    /** Kafka cluster that hosts the recall topic. */
    private static final String BOOTSTRAP_SERVERS = "kafka-mjq.suishoushuju.internal:9093";
    /** Topic the failure reports are published to. */
    private static final String TOPIC = "datateam.riskctrl.flink_fail_data_recall";
    /** DateTimeFormatter is immutable and thread-safe, so it is cached once. */
    private static final DateTimeFormatter TIME_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");

    /** Name of the current job; must be assigned by the caller before reporting. */
    public static String jobName;

    /** Utility class — not meant to be instantiated. */
    private DealFailDataUtil() {
    }

    /**
     * Publishes a failure report for a single record to the recall topic.
     * No-op outside the production environment; any send error is logged
     * and swallowed (best effort).
     *
     * @param type      failure category (e.g. "join")
     * @param storage   target storage system (e.g. "hbase")
     * @param tableName table the record was destined for
     * @param data      the failed record payload
     * @param exception description of the error that caused the failure
     */
    public static void reportFailData(String type, String storage, String tableName,
                                      String data, String exception) {
        if (!isProductEnv()) {
            return;
        }
        Map<String, String> msg = new HashMap<>();
        msg.put("jobName", jobName);
        msg.put("type", type);
        msg.put("storage", storage);
        msg.put("tableName", tableName);
        msg.put("data", data);
        msg.put("exception", exception);
        msg.put("reportTime", getCurrentTime());

        // Producer implements Closeable; try-with-resources guarantees cleanup
        // even if send() throws. close() also flushes the buffered record.
        try (Producer<String, String> producer = getProducer()) {
            producer.send(new ProducerRecord<>(TOPIC, "", JSON.toJSONString(msg)));
        } catch (Exception e) {
            logger.error("report data fail.", e);
        }
    }

    /**
     * Builds a String/String Kafka producer for the recall cluster, tuned via
     * the shared {@code ConfigConstrant} settings.
     *
     * @return a new producer; the caller is responsible for closing it
     */
    public static Producer<String, String> getProducer() {
        Map<String, Object> props = new HashMap<>();
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("bootstrap.servers", BOOTSTRAP_SERVERS);
        props.put("acks", ConfigConstrant.KAFKA_ACKS);
        props.put("retries", ConfigConstrant.KAFKA_RETRIES);
        props.put("batch.size", ConfigConstrant.KAFKA_BATCH_SIZE);
        props.put("linger.ms", ConfigConstrant.KAFKA_LINGER_MS);
        props.put("buffer.memory", ConfigConstrant.KAFKA_BUFFER_MEMORY);

        return new KafkaProducer<>(props);
    }

    /**
     * Decides whether this process is running in production by inspecting the
     * local host IP. Falls back to {@code false} if the address cannot be read.
     *
     * <p>NOTE(review): hosts whose IP starts with "10.201.7" or "172.22" are
     * treated as NON-production here — confirm those prefixes really are the
     * test/staging subnets, as the ternary reads inverted at first glance.
     *
     * @return {@code Boolean.TRUE} when on a production host
     */
    public static Boolean isProductEnv() {
        try {
            InetAddress addr = InetAddress.getLocalHost();
            String ip = addr.getHostAddress();
            return ip.startsWith("10.201.7") || ip.startsWith("172.22") ? Boolean.FALSE : Boolean.TRUE;
        } catch (Exception e) {
            logger.error("get current host exception ", e);
        }
        return Boolean.FALSE;
    }

    /**
     * @return the current local time formatted as {@code yyyy-MM-dd HH:mm:ss}
     */
    public static String getCurrentTime() {
        return LocalDateTime.now().format(TIME_FORMAT);
    }

    /** Manual smoke test: sends one dummy failure report. */
    public static void main(String[] args) {
        DealFailDataUtil.jobName = "jobtest";
        reportFailData("join", "hbase", "test", "{1,2}", "error");
    }
}
