package com.navinfo.platform.trip.analysis.flink.sink;

import com.navinfo.platform.trip.analysis.flink.config.SystemConfig;
import com.navinfo.platform.trip.analysis.pojo.DF_OuterStatisticData;
import com.navinfo.platform.trip.common.arithmetic.data.OuterEventData;
import com.navinfo.platform.trip.common.util.JsonUtils;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.Serializable;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.Properties;

/**
 * Factory for the Flink Kafka producer sinks used by the trip-analysis job.
 *
 * <p>Loads the shared producer configuration from {@code kafka_sink.properties}
 * and builds sinks for the trip-statistic and trip-event output topics. Both
 * sinks serialize elements to UTF-8 JSON; a record that fails to serialize is
 * replaced with an empty JSON object so a single bad element never fails the job.
 *
 * @author web
 **/
public class KafkaProducer implements Serializable {
    private static final long serialVersionUID = 1L;
    private static final Logger logger = LoggerFactory.getLogger(KafkaProducer.class);

    /**
     * Output topic for trip statistic results.
     */
    private final String topicTripStatistic;

    /**
     * Output topic for trip event results.
     */
    private final String topicTripEvent;

    /**
     * Kafka producer configuration shared by every sink created by this factory.
     */
    private final Properties producerConf;

    /**
     * Reads {@code kafka_sink.properties} (resolved relative to the jar path) and
     * assembles the producer configuration and output topic names.
     *
     * @param args command-line arguments forwarded to the configuration loader
     * @throws IOException            if the properties file cannot be read
     * @throws URISyntaxException     if the jar path cannot be resolved
     * @throws IllegalStateException  if {@code kafka.bootstrap.servers} is not configured
     */
    public KafkaProducer(String[] args) throws IOException, URISyntaxException {
        ParameterTool parameterTool = SystemConfig.loadJarPathConfig(args, "kafka_sink.properties");

        // kafka.bootstrap.servers has no sensible default; fail fast with a clear
        // message instead of the bare NPE that Properties.put(null) would throw.
        String bootstrapServers = parameterTool.get("kafka.bootstrap.servers");
        if (bootstrapServers == null) {
            throw new IllegalStateException("Missing required config: kafka.bootstrap.servers");
        }

        producerConf = new Properties();
        producerConf.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        producerConf.put(ProducerConfig.ACKS_CONFIG,  parameterTool.get("kafka.producer.acks", "1"));
        producerConf.put(ProducerConfig.RETRIES_CONFIG,  parameterTool.getInt("kafka.producer.retries", 5));
        producerConf.put(ProducerConfig.RETRY_BACKOFF_MS_CONFIG, parameterTool.getInt("kafka.producer.retry.backoff.ms",1000));
        producerConf.put(ProducerConfig.COMPRESSION_TYPE_CONFIG,  parameterTool.get("kafka.producer.compression.type", "none"));
        producerConf.put(ProducerConfig.BUFFER_MEMORY_CONFIG, parameterTool.getInt("kafka.producer.buffer.memory",33554432));
        producerConf.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, parameterTool.getInt("kafka.producer.max.block.ms",60000));
        producerConf.put(ProducerConfig.BATCH_SIZE_CONFIG, parameterTool.getInt("kafka.producer.batch.size",16384));
        producerConf.put(ProducerConfig.LINGER_MS_CONFIG, parameterTool.getInt("kafka.producer.linger.ms", 0));
        producerConf.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, parameterTool.getInt("kafka.producer.max.request.size",3145728));
        producerConf.put(ProducerConfig.SEND_BUFFER_CONFIG,  parameterTool.getInt("kafka.producer.send.buffer.bytes", 131072));
        producerConf.put(ProducerConfig.RECEIVE_BUFFER_CONFIG,  parameterTool.getInt("kafka.producer.receive.buffer.bytes", 65536));

        topicTripStatistic = parameterTool.get("kafka.producer.topic.trip.statistic", "trip_statistic");
        topicTripEvent = parameterTool.get("kafka.producer.topic.trip.event", "trip_event");

        logger.info("初始化Kafka生产者完成...");
    }

    /**
     * Builds the Kafka sink for trip statistic results.
     *
     * <p>Records are keyed by {@code tid} and valued with the element serialized
     * to UTF-8 JSON. Serialization failures are logged (with the cause) and the
     * value falls back to {@code "{}"} so the sink keeps running.
     *
     * @return an AT_LEAST_ONCE Flink producer for the trip-statistic topic
     */
    public FlinkKafkaProducer<DF_OuterStatisticData> tripStatistic() {
        FlinkKafkaProducer<DF_OuterStatisticData> kafkaProducer = new FlinkKafkaProducer<>(topicTripStatistic,
                (KafkaSerializationSchema<DF_OuterStatisticData>) (element, timestamp) -> {
                    if (null != element) {
                        byte[] value;
                        try {
                            value = JsonUtils.toJson(element).getBytes(StandardCharsets.UTF_8);
                        } catch (Exception e) {
                            // Pass the exception as the last argument so SLF4J records the stack trace.
                            logger.error("行程统计信息序列化为JSON数据失败，{}，{}", element.getTid(), element.getTripId(), e);
                            value = "{}".getBytes(StandardCharsets.UTF_8);
                        }

                        // A null tid must not fail the sink; Kafka accepts a null key.
                        byte[] key = null == element.getTid()
                                ? null
                                : element.getTid().toString().getBytes(StandardCharsets.UTF_8);
                        return new ProducerRecord<>(topicTripStatistic, key, value);
                    }

                    return null;
                }, producerConf, FlinkKafkaProducer.Semantic.AT_LEAST_ONCE);

        kafkaProducer.setWriteTimestampToKafka(true);
        logger.info("创建Kafka主题{}生产者完成...", topicTripStatistic);
        return kafkaProducer;
    }

    /**
     * Builds the Kafka sink for trip event results.
     *
     * <p>Records are keyed by {@code tid} and valued with the element serialized
     * to UTF-8 JSON. Serialization failures are logged (with the cause) and the
     * value falls back to {@code "{}"} so the sink keeps running.
     *
     * @return an AT_LEAST_ONCE Flink producer for the trip-event topic
     */
    public FlinkKafkaProducer<OuterEventData> tripEvent() {
        FlinkKafkaProducer<OuterEventData> kafkaProducer = new FlinkKafkaProducer<>(topicTripEvent,
                (KafkaSerializationSchema<OuterEventData>) (element, timestamp) -> {
                    if (null != element) {
                        byte[] value;
                        try {
                            value = JsonUtils.toJson(element).getBytes(StandardCharsets.UTF_8);
                        } catch (Exception e) {
                            // Pass the exception as the last argument so SLF4J records the stack trace.
                            logger.error("行程事件信息序列化为JSON数据失败，{}，{}", element.getTid(), element.getEvent(), e);
                            value = "{}".getBytes(StandardCharsets.UTF_8);
                        }

                        // A null tid must not fail the sink; Kafka accepts a null key.
                        byte[] key = null == element.getTid()
                                ? null
                                : element.getTid().toString().getBytes(StandardCharsets.UTF_8);
                        return new ProducerRecord<>(topicTripEvent, key, value);
                    }

                    return null;
                }, producerConf, FlinkKafkaProducer.Semantic.AT_LEAST_ONCE);

        kafkaProducer.setWriteTimestampToKafka(true);
        logger.info("创建Kafka主题{}生产者完成...", topicTripEvent);
        return kafkaProducer;
    }
}
