package cn.texous.demo.dsj.stream;

import cn.texous.demo.dsj.model.EventsVo;
import cn.texous.demo.dsj.util.JsonUtils;
import com.google.common.base.CharMatcher;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetFileWriter;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.Types;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka010.ConsumerStrategies;
import org.apache.spark.streaming.kafka010.KafkaUtils;
import org.apache.spark.streaming.kafka010.LocationStrategies;
import sun.misc.BASE64Decoder;

import java.io.IOException;
import java.io.Serializable;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Base64;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.apache.spark.sql.types.DataTypes.LongType;
import static org.apache.spark.sql.types.DataTypes.StringType;

/**
 *
 */
@Slf4j
public class SparkKafkaStreamTask implements Serializable {

    private static final long serialVersionUID = 1L;
    private static final String APP_NAME = "transfer App";

    private static final MessageType EVENT_MESSAGE_TYPE;
    private static final String PATH_NAME = "/tmp/parquet/";
    private static final BASE64Decoder DECODER = new BASE64Decoder();

    //    private static final Config CONFIG = ConfigurationFactory.loadEnv();


    public static void main(String[] args) {
        log.info("start to init SparkEventsTask!!!");
        SparkConf conf = new SparkConf();
        conf.setAppName(APP_NAME);
        conf.setMaster("local[4]");
        //优雅的关闭
        conf.set("spark.streaming.stopGracefullyOnShutdown", Boolean.TRUE.toString());
        conf.set("spark.default.parallelism", "6");

        // 配置 安全链接
        // String jaasTemplate = "org.apache.kafka.common.security.plain.PlainLoginModule"
        //         + " required username=\"%s\" password=\"%s\";";
        // String jaasCfg = String.format(jaasTemplate,
        //         CONFIG.getProducer().getUser(), CONFIG.getProducer().getPassword());
        JavaStreamingContext jssc
                = new JavaStreamingContext(conf, Durations.seconds(10));
        // 初始化Spark Conf.
        final SQLContext sqlContext = new SQLContext(jssc.sparkContext());
        //连接pipelines-common中的配置的配置
        Map<String, Object> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "192.168.0.68:9092");
        kafkaParams.put("key.deserializer", StringDeserializer.class);
        kafkaParams.put("value.deserializer", StringDeserializer.class);
        //从头开始消费
        kafkaParams.put("group.id", "testGroup");
        kafkaParams.put("auto.offset.reset", "earliest");
        kafkaParams.put("enable.auto.commit", Boolean.TRUE.toString());
        kafkaParams.put("auto.commit.interval", "100");

        //        kafkaParams.put("security.protocol", "SASL_SSL");
        //        kafkaParams.put("sasl.mechanism", "PLAIN");
        //        kafkaParams.put("sasl.jaas.config", jaasCfg);
        //        kafkaParams.put("ssl.endpoint.identification.algorithm", "https");

        Collection<String> topic0 = Collections.singletonList("test1");
        List<Collection<String>> topics = Collections.singletonList(topic0);
        List<JavaDStream<ConsumerRecord<String, String>>> kafkaStreams
                = new ArrayList<>(topics.size());
        for (Collection<String> topic : topics) {
            kafkaStreams.add(KafkaUtils.createDirectStream(
                    jssc, LocationStrategies.PreferConsistent(),
                    ConsumerStrategies.Subscribe(topic, kafkaParams)));
        }
        JavaDStream<ConsumerRecord<String, String>> stream = jssc.union(kafkaStreams.get(0),
                kafkaStreams.subList(1, kafkaStreams.size()));
        //        stream.foreachRDD((rdd) ->
        //              rdd.foreachPartition(SparkKafkaStreamTask::patchTransfer));
        stream.foreachRDD(rdd -> {
            JavaRDD<EventsVo> events = rdd.map(
                    new Function<ConsumerRecord<String, String>, EventsVo>() {
                        @Override
                        public EventsVo call(ConsumerRecord<String, String>
                                                     stringStringConsumerRecord)
                                throws Exception {
                            String msg = CharMatcher.is('\"')
                                    .trimFrom(stringStringConsumerRecord.value());
                            log.info(String.format("consume SparkEventsTask msg:%s", msg));
                            String decodeMsg = new String(
                                    DECODER.decodeBuffer(msg), StandardCharsets.UTF_8);
                            log.info(String.format(
                                    "consume SparkEventsTask decodeMsg:%s", decodeMsg));
                            return JsonUtils.deserialize(decodeMsg, EventsVo.class);
                        }
                    });
            writeSqlContext(sqlContext, events);
        });
        jssc.start();

        try {
            jssc.awaitTermination();
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
        log.info("succ to init SparkEventsTask!!!");
    }

    private static void writeSqlContext(SQLContext sqlContext, JavaRDD<EventsVo> javaRDD) {
        String fileName = generatorFileName();
        String pathName = PATH_NAME + "test1";
        sqlContext.createDataFrame(javaRDD, EventsVo.class)
                .repartition(1).write().mode(SaveMode.Append).parquet(pathName);
    }

    private static void patchTransfer(Iterator<ConsumerRecord<String, String>> crs) {
        log.info("=============start SparkEventsTask events========");
        if (crs == null || !crs.hasNext()) {
            log.info("crs(SparkEventsTask events) is null or empty!!!");
            return;
        }
        ParquetWriter<Group> writer = null;
        try {
            String fileName = generatorFileName();
            final String pathName = PATH_NAME + fileName;
            Configuration configuration = new Configuration();
            GroupWriteSupport.setSchema(EVENT_MESSAGE_TYPE, configuration);
            // 3. 写数据
            // 2. 声明parquetWriter
            Path path = new Path(pathName);

            writer = ExampleParquetWriter.builder(path)
                    .withWriteMode(ParquetFileWriter.Mode.CREATE)
                    .withCompressionCodec(CompressionCodecName.UNCOMPRESSED)
                    .withConf(configuration)
                    .build();

            BASE64Decoder decoder = new BASE64Decoder();
            while (crs.hasNext()) {
                ConsumerRecord<String, String> record = crs.next();
                String msg = CharMatcher.is('\"').trimFrom(record.value());
                log.info(String.format("consume SparkEventsTask msg:%s", msg));
                String decodeMsg = new String(decoder.decodeBuffer(msg), StandardCharsets.UTF_8);
                log.info(String.format("consume SparkEventsTask decodeMsg:%s", decodeMsg));

                Group group = msg2Group(decodeMsg, EVENT_MESSAGE_TYPE);
                writer.write(group);
            }
            //            System.currentTimeMillis() / (1000L * 60)
            // 上传文件
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        log.info("============end SparkEventsTask events===============");
    }

    private static Group msg2Group(String msg, MessageType messageType) {
        try {
            EventsVo events = JsonUtils.deserialize(msg, EventsVo.class);
            Group group = new SimpleGroupFactory(messageType)
                    .newGroup().append("event_time",
                            events.getEventTime() == null ? "" : events.getEventTime())
                    .append("create_id",
                            events.getCreateId() == null ? "" : events.getCreateId())
                    .append("camp",
                            events.getCamp() == null ? "" : events.getCamp())
                    .append("app_id",
                            events.getAppId() == null ? "" : events.getAppId())
                    .append("ad_set_id",
                            events.getAdSetId() == null ? "" : events.getAdSetId())
                    .append("ch_id",
                            events.getChId() == null ? "" : events.getChId())
                    .append("user_id",
                            events.getUserId() == null ? "" : events.getUserId())
                    .append("offer_id",
                            events.getOfferId() == null ? "" : events.getOfferId())
                    .append("creative_id",
                            events.getCreativeId() == null ? "" : events.getCreativeId())
                    .append("event_type",
                            events.getEventType() == null ? "" : events.getEventType())
                    .append("geo",
                            events.getGeo() == null ? "" : events.getGeo())
                    .append("device",
                            events.getDevice() == null ? "" : events.getDevice())
                    .append("os_version",
                            events.getOsVersion() == null ? "" : events.getOsVersion())
                    .append("device_ip",
                            events.getDeviceIp() == null ? "" : events.getDeviceIp())
                    .append("ua",
                            events.getUa() == null ? "" : events.getUa())
                    .append("sdkImpId",
                            events.getSdkClickId() == null ? "" : events.getSdkClickId());
            return group;
        } catch (IOException e) {
            log.error("解析 msg 异常：exception={}, msd={}", e.getMessage(), msg);
        }
        return null;
    }

    private static String generatorFileName() {
        return "events" + System.currentTimeMillis();
    }

    private static void getStr() {
        StructType eventsSchema = new StructType()
                .add("event_time", DataTypes.TimestampType)
                .add("event_time_ts", LongType)
                .add("request_id", StringType)
                .add("app_id", StringType)
                .add("ad_set_id", StringType)
                .add("ch_id", StringType)
                .add("user_id", StringType)
                .add("offer_id", StringType)
                .add("creative_id", StringType)
                .add("event_id", StringType)
                .add("event_type", StringType)
                .add("geo", StringType)
                .add("device", StringType)
                .add("os_version", StringType)
                .add("device_ip", StringType)
                .add("ua", StringType)
                .add("placement_id", StringType);
    }

    static {
        EVENT_MESSAGE_TYPE =
                Types.buildMessage()
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("event_time")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("create_id")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("camp")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("app_id")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("ad_set_id")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("ch_id")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("user_id")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("offer_id")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("creative_id")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("event_type")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("geo")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("device")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("os_version")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("device_ip")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("ua")
                        .required(PrimitiveType.PrimitiveTypeName.BINARY)
                        .as(OriginalType.UTF8).named("sdkImpId")
                        .named("trigger");
    }


}
