package cn.texous.demo.dsj.stream;

import cn.texous.demo.dsj.util.JsonUtils;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.CharMatcher;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.hadoop.ParquetFileWriter;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.ExampleParquetWriter;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.hadoop.metadata.CompressionCodecName;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.Types;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;
import java.util.Properties;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;

/**
 * Pulls base64-encoded JSON events from Kafka, appends them to per-minute
 * Parquet files staged under /tmp/parquet, and periodically uploads finished
 * files to AWS S3 under an hourly year/month/day/hour partition prefix.
 *
 * @author Showa.L
 * @since 2019/9/18 16:36
 */
public class KafkaToAwsSingle {

    /** Parquet schema shared by every writer; built in the static initializer. */
    private static final MessageType EVENT_MESSAGE_TYPE;
    /** Local staging directory for parquet files awaiting upload to S3. */
    private static final String PATH_NAME = "/tmp/parquet/";

    /**
     * Minute-bucket key (epoch millis / {@link #ONE_MINUTE_MILLIS}, as a String)
     * mapped to the open ParquetWriter that receives all records for that minute.
     */
    private static final ConcurrentHashMap<String, ParquetWriter<Group>>
        PARQUET_WRITER_CONCURRENT_HASH_MAP = new ConcurrentHashMap<>();
    private static final long ONE_MINUTE_MILLIS = 60 * 1000L;
    /**
     * Main-loop switch; pulling stops once this flips to false. Nothing in this
     * file flips it -- an external stop hook must call {@code PULL_DATA.set(false)}.
     */
    private static final AtomicBoolean PULL_DATA = new AtomicBoolean(true);
    // Daemon timer that pre-creates next minute's writer and closes the previous one.
    private static Timer GENERATOR_PARQUETWRITE_TIMER = null;
    // Daemon timer that uploads finished parquet files to S3.
    private static Timer UPLOAD_FILE_TIMER = null;

    private static AmazonS3 s3;
    // AWS S3 configuration.
    // NOTE(review): built with the no-args constructor, so every field is null
    // until populated elsewhere -- TODO load credentials and bucket from config.
    private static AWSConfig clientConfig = new AWSConfig();

    public static void main(String[] args) throws Exception {
        start();
    }

    /** Boots the background tasks, then blocks pulling from Kafka. */
    private static void start() throws Exception {
        // Upload files left over from a previous run.
        startUploadOldFile();
        // Periodically rotate the per-minute parquet writers.
        startGeneratorParquetWriterTask();
        // Eagerly create writers for the current and the next minute.
        startInitFirstAndSecondParquetWriter();
        // Pull from Kafka until PULL_DATA goes false (blocks here).
        startPullKafkaDataAndUploadToS3();
    }

    /** Uploads (and deletes) every finished file already in the staging dir. */
    private static void startUploadOldFile() {
        listFilesAndOperatorAfterDelete(KafkaToAwsSingle::uploadFileToS3);
    }

    /** Starts the daemon timer that rotates parquet writers every 10 seconds. */
    private static void startGeneratorParquetWriterTask() {
        GENERATOR_PARQUETWRITE_TIMER = new Timer(true);
        GENERATOR_PARQUETWRITE_TIMER.schedule(
                new GeneratorParquetWriterTimerTask(), 10000L, 10000L);
    }

    /** Creates writers for the current minute and the next one up front. */
    private static void startInitFirstAndSecondParquetWriter() throws Exception {
        long currentTime = System.currentTimeMillis();
        String firstKey = String.valueOf(getKey(currentTime));
        String nextKey = String.valueOf(getNextKey(currentTime));
        PARQUET_WRITER_CONCURRENT_HASH_MAP
                .put(firstKey, generatorParquetWriter(getFileName(firstKey)));
        PARQUET_WRITER_CONCURRENT_HASH_MAP
                .put(nextKey, generatorParquetWriter(getFileName(nextKey)));
    }

    /**
     * Main loop: consumes base64 JSON events from Kafka and appends each one to
     * the current minute's parquet file until {@link #PULL_DATA} goes false,
     * then stops the timers, closes all writers and uploads the remainder.
     */
    private static void startPullKafkaDataAndUploadToS3() throws IOException {
        // Start the periodic upload of finished files.
        startUploadFileTask();

        Properties properties = new Properties();
        properties.put("bootstrap.servers", "192.168.0.68:9092");
        properties.put("group.id", "kafka-to-s3");
        properties.put("enable.auto.commit", "true");
        properties.put("auto.commit.interval.ms", "1000");
        properties.put("auto.offset.reset", "earliest");
        properties.put("session.timeout.ms", "30000");
        properties.put("key.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");
        properties.put("value.deserializer",
                "org.apache.kafka.common.serialization.StringDeserializer");

        // FIX: try-with-resources closes the consumer (it leaked in the original).
        try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties)) {
            kafkaConsumer.subscribe(Arrays.asList("demo-kafka-connect"));
            while (PULL_DATA.get()) {
                ConsumerRecords<String, String> records = kafkaConsumer.poll(100);
                String key = String.valueOf(getKey(System.currentTimeMillis()));
                for (ConsumerRecord<String, String> record : records) {
                    // Values arrive as base64 text wrapped in literal double quotes.
                    String msg = CharMatcher.is('\"').trimFrom(record.value());
                    String decodeMsg = new String(
                            Base64.getDecoder().decode(msg), StandardCharsets.UTF_8);
                    writerDataToParquetFile(key, decodeMsg);
                }
            }
        }
        // Stop both background timers.
        stopGeneratorParquetWriterTask();
        stopUploadFileTask();
        // Close and drop every remaining writer (remove-during-forEach is
        // well-defined on ConcurrentHashMap).
        PARQUET_WRITER_CONCURRENT_HASH_MAP.forEach((k, v) -> {
            try {
                v.close();
                PARQUET_WRITER_CONCURRENT_HASH_MAP.remove(k);
            } catch (IOException e) {
                e.printStackTrace();
            }
        });
        // Upload whatever files the timer had not shipped yet.
        startUploadOldFile();
    }

    /** Starts the daemon timer that uploads finished files every 10 seconds. */
    private static void startUploadFileTask() {
        UPLOAD_FILE_TIMER = new Timer(true);
        UPLOAD_FILE_TIMER.schedule(new UploadFileTimerTask(), 0L, 10000L);
    }

    private static void stopGeneratorParquetWriterTask() {
        GENERATOR_PARQUETWRITE_TIMER.cancel();
    }

    private static void stopUploadFileTask() {
        UPLOAD_FILE_TIMER.cancel();
    }

    /**
     * Appends one decoded JSON message to the writer for the given minute key.
     * FIX: the original dereferenced both the map lookup and the parsed group
     * unconditionally and could NPE; a missing writer or unparseable message is
     * now dropped with a diagnostic instead of crashing the poll loop.
     */
    private static void writerDataToParquetFile(String key, String msg) throws IOException {
        ParquetWriter<Group> parquetWriter = PARQUET_WRITER_CONCURRENT_HASH_MAP.get(key);
        if (parquetWriter == null) {
            System.err.println("no parquet writer for key " + key + ", message dropped");
            return;
        }
        Group group = msg2Group(msg, EVENT_MESSAGE_TYPE);
        if (group != null) {
            parquetWriter.write(group);
        }
    }

    /**
     * Builds a ParquetWriter staging to {@code PATH_NAME + fileName}.
     * CREATE mode: fails if the target file already exists.
     */
    private static ParquetWriter<Group> generatorParquetWriter(String fileName) throws IOException {
        Configuration configuration = new Configuration();
        GroupWriteSupport.setSchema(EVENT_MESSAGE_TYPE, configuration);
        return ExampleParquetWriter.builder(new Path(PATH_NAME + fileName))
            .withWriteMode(ParquetFileWriter.Mode.CREATE)
            .withCompressionCodec(CompressionCodecName.UNCOMPRESSED)
            .withConf(configuration)
            .build();
    }

    /**
     * Applies {@code operation} to every regular file in the staging directory
     * and deletes each file for which it returns true.
     */
    private static void listFilesAndOperatorAfterDelete(Function<File, Boolean> operation) {
        File fileDir = new File(PATH_NAME);
        if (!fileDir.isDirectory()) {
            return;
        }
        File[] files = fileDir.listFiles();
        if (files == null) {
            return;
        }
        for (File file : files) {
            if (file != null && file.isFile()) {
                Boolean delete = operation.apply(file);
                if (delete != null && delete) {
                    file.delete();
                }
            }
        }
    }

    /**
     * Uploads one staged file to S3. Returns true when the caller may delete
     * the file (uploaded, or junk that will never become uploadable) and false
     * when it must be kept for a later retry.
     * FIX: the original returned true even while the file's minute-bucket still
     * had an OPEN writer, so the upload timer deleted in-progress parquet files
     * that had never been uploaded (data loss). Such files are now kept.
     */
    private static boolean uploadFileToS3(File file) {
        try {
            String filename = file.getName();
            if (file.length() > 0 && filename.endsWith(".parquet")) {
                // Staged names look like "events-<key>-<suffix>.parquet".
                String[] sfn = filename.split("-");
                if (sfn.length > 1) {
                    String key = sfn[1];
                    if (PARQUET_WRITER_CONCURRENT_HASH_MAP.containsKey(key)) {
                        // Writer still open: keep the file, retry next tick.
                        return false;
                    }
                    long timeMillis = getTimeMillis(key);
                    uploadFile(clientConfig.getBucketName(), file,
                            getPartionPrefix(timeMillis) + filename);
                }
            }
            // Uploaded, or empty/non-parquet/unparseable name: safe to delete.
            return true;
        } catch (Exception e) {
            e.printStackTrace();
            return false;
        }
    }

    /** Epoch millis -> one-minute bucket key. */
    private static long getKey(long timeMillis) {
        return timeMillis / ONE_MINUTE_MILLIS;
    }

    /** Inverse of {@link #getKey(long)}: key -> epoch millis at minute start. */
    private static long getTimeMillis(String key) {
        return Long.parseLong(key) * ONE_MINUTE_MILLIS;
    }

    private static long getNextKey(long timeMillis) {
        return getKey(timeMillis) + 1;
    }

    private static long getPreKey(long timeMillis) {
        return getKey(timeMillis) - 1;
    }

    /**
     * Every tick: ensure the next minute's writer exists, then close and remove
     * the previous minute's writer so its file becomes eligible for upload.
     */
    static class GeneratorParquetWriterTimerTask extends TimerTask {

        @Override
        public void run() {
            try {
                long currentTimeMillis = System.currentTimeMillis();
                String nextKey = String.valueOf(getNextKey(currentTimeMillis));
                String preKey = String.valueOf(getPreKey(currentTimeMillis));
                if (!PARQUET_WRITER_CONCURRENT_HASH_MAP.containsKey(nextKey)) {
                    PARQUET_WRITER_CONCURRENT_HASH_MAP
                            .put(nextKey, generatorParquetWriter(getFileName(nextKey)));
                }
                // FIX: atomic remove instead of containsKey+remove (check-then-act).
                ParquetWriter<Group> preParquetWriter =
                        PARQUET_WRITER_CONCURRENT_HASH_MAP.remove(preKey);
                if (preParquetWriter != null) {
                    preParquetWriter.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

    }

    /** Every tick: upload (then delete) finished files in the staging dir. */
    static class UploadFileTimerTask extends TimerTask {
        @Override
        public void run() {
            try {
                listFilesAndOperatorAfterDelete(KafkaToAwsSingle::uploadFileToS3);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }

    }

    /** Uploads a single file to S3 under the given object key. */
    private static boolean uploadFile(String bucketName, File file, String filename) {
        System.out.println("uploadFile " + filename);
        s3.putObject(bucketName, filename, file);
        System.out.println("uploadFile " + filename + " succeed!");
        return true;
    }

    /**
     * Formats the partition start of {@code timestamp} using {@code pathFormat}
     * in the given time zone.
     *
     * @param partitionDurationMs partition width in milliseconds
     * @param pathFormat          Joda-Time pattern, e.g. "'year='yyyy/..."
     * @param timeZoneString      Joda time-zone id, e.g. "Asia/Shanghai"
     * @param timestamp           epoch millis to partition
     * @return formatted partition path fragment
     */
    public static String encodeTimestamp(long partitionDurationMs,
                                         String pathFormat,
                                         String timeZoneString,
                                         long timestamp) {
        DateTimeZone timeZone = DateTimeZone.forID(timeZoneString);
        DateTimeFormatter formatter = DateTimeFormat.forPattern(pathFormat).withZone(timeZone);
        DateTime partition = new DateTime(getPartition(partitionDurationMs, timestamp, timeZone));
        return partition.toString(formatter);
    }

    /** Floors {@code timestamp} to the partition boundary in local (zoned) time. */
    private static long getPartition(long timeGranularityMs,
                                     long timestamp,
                                     DateTimeZone timeZone) {
        long adjustedTimeStamp = timeZone.convertUTCToLocal(timestamp);
        long partitionedTime = (adjustedTimeStamp / timeGranularityMs) * timeGranularityMs;
        return timeZone.convertLocalToUTC(partitionedTime, false);
    }

    /** Hourly "year=.../month=.../day=.../hour=.../" S3 prefix for a timestamp. */
    private static String getPartionPrefix(long timeMillis) {
        long partionDuration = 60 * 60 * 1000L;
        String pathPattern = "'year='yyyy/'month='MM/'day='dd/'hour='HH/";
        String dateTimeZoneString = "Asia/Shanghai";
        return encodeTimestamp(partionDuration, pathPattern, dateTimeZoneString, timeMillis);
    }

    /**
     * Parses a JSON event and maps it onto the parquet schema; null fields
     * become empty strings (every column is required BINARY). Returns null
     * when the message cannot be parsed.
     */
    private static Group msg2Group(String msg, MessageType messageType) {
        try {
            EventsVo events = JsonUtils.deserialize(msg, EventsVo.class);
            Group group = new SimpleGroupFactory(messageType).newGroup();
            group.append("event_time", nullToEmpty(events.getEventTime()))
                .append("create_id", nullToEmpty(events.getCreateId()))
                .append("camp", nullToEmpty(events.getCamp()))
                .append("app_id", nullToEmpty(events.getAppId()))
                .append("ad_set_id", nullToEmpty(events.getAdSetId()))
                .append("ch_id", nullToEmpty(events.getChId()))
                .append("user_id", nullToEmpty(events.getUserId()))
                .append("offer_id", nullToEmpty(events.getOfferId()))
                .append("creative_id", nullToEmpty(events.getCreativeId()))
                .append("event_type", nullToEmpty(events.getEventType()))
                .append("geo", nullToEmpty(events.getGeo()))
                .append("device", nullToEmpty(events.getDevice()))
                .append("os_version", nullToEmpty(events.getOsVersion()))
                .append("device_ip", nullToEmpty(events.getDeviceIp()))
                .append("ua", nullToEmpty(events.getUa()))
                // FIX: the "sdkImpId" column was filled from getSdkClickId()
                // while the sdk_imp_id field went unused; use the matching field.
                .append("sdkImpId", nullToEmpty(events.getSdkImpId()));
            return group;
        } catch (IOException e) {
            // FIX: the original swallowed parse failures silently.
            System.err.println("failed to parse event json: " + e.getMessage());
            return null;
        }
    }

    /** Maps null to "" so required BINARY columns always receive a value. */
    private static String nullToEmpty(String value) {
        return value == null ? "" : value;
    }

    /**
     * Staged file name "events-&lt;key&gt;-&lt;suffix&gt;.parquet"; the suffix
     * disambiguates writers created for the same minute across restarts.
     * NOTE(review): only 100 possible suffixes -- a collision makes the
     * CREATE-mode writer fail; consider a wider suffix.
     */
    private static String getFileName(String key) {
        long suffix = System.currentTimeMillis() % 100;
        return "events-" + key + "-" + suffix + ".parquet";
    }

    static {
        // All columns are required UTF-8 strings; order matches msg2Group.
        Types.MessageTypeBuilder builder = Types.buildMessage();
        for (String column : new String[] {
                "event_time", "create_id", "camp", "app_id", "ad_set_id", "ch_id",
                "user_id", "offer_id", "creative_id", "event_type", "geo",
                "device", "os_version", "device_ip", "ua", "sdkImpId"}) {
            builder.required(PrimitiveType.PrimitiveTypeName.BINARY)
                    .as(OriginalType.UTF8).named(column);
        }
        EVENT_MESSAGE_TYPE = builder.named("trigger");

        // Field initializers (clientConfig) run before this block, so the
        // config object exists -- but its fields are still null; see NOTE above.
        AWSCredentials credentials = new BasicAWSCredentials(
                clientConfig.getAccessKeyId(), clientConfig.getSecretAccessKey());
        s3 = new AmazonS3Client(credentials);
    }

    /** S3 connection settings (populated externally; Lombok generates accessors). */
    @Data
    @NoArgsConstructor
    @AllArgsConstructor
    private static class AWSConfig {
        private String secretAccessKey;
        private String accessKeyId;
        private String bucketName;
    }

    /** JSON shape of one Kafka event (Lombok generates getters/setters). */
    @Data
    private static class EventsVo {
        @JsonProperty("event_time")
        private String eventTime;
        @JsonProperty("create_id")
        private String createId;
        @JsonProperty("app_id")
        private String appId;
        @JsonProperty("ad_set_id")
        private String adSetId;
        @JsonProperty("ch_id")
        private String chId;
        @JsonProperty("user_id")
        private String userId;
        @JsonProperty("offer_id")
        private String offerId;
        @JsonProperty("creative_id")
        private String creativeId;
        @JsonProperty("event_id")
        private String eventId;
        @JsonProperty("event_type")
        private String eventType;
        @JsonProperty("geo")
        private String geo;
        @JsonProperty("device")
        private String device;
        @JsonProperty("os_version")
        private String osVersion;
        @JsonProperty("device_ip")
        private String deviceIp;
        @JsonProperty("ua")
        private String ua;
        @JsonProperty("camp")
        private String camp;
        @JsonProperty("sdk_click_id")
        private String sdkClickId;
        @JsonProperty("sdk_imp_id")
        private String sdkImpId;
    }

}
