package com.htiiot.action;

import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.JSONObject;
import com.htiiot.base.EndHook;
import com.htiiot.bean.LogBean;
import com.htiiot.config.Configs;
import com.htiiot.tools.Cmd;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Consumes JSON log records from a Kafka topic and appends them, one per line,
 * to a date-partitioned file in HDFS ({@code <root>/<yyyy-MM-dd>/log.txt}).
 * Records whose {@code createTime} falls before today's midnight are routed to
 * yesterday's file instead, via a lazily (re)opened stream that is closed again
 * after a 5-minute idle window.
 *
 * <p>Runs as a standalone daemon; the poll loop never exits normally.
 */
public class Kafka2Hdfs {
    private static final Logger log = Logger.getLogger(Kafka2Hdfs.class);
    // Shared HDFS handle; initialized once in main(), used by createFile().
    private static FileSystem fs = null;
    // Milliseconds in one calendar day, used to derive yesterday's directory name.
    private static final long ONE_DAY_MS = 24L * 60 * 60 * 1000;

    public static void main(String[] args) throws Exception {
        Cmd.parseCmd(args);
        System.setProperty("HADOOP_USER_NAME", "hdfs");
        log.info("\r\n" + Configs.getInfo());

        Properties props = new Properties();
        props.put("bootstrap.servers", Configs.KafkaConfig.BOOTSTRAP);
        props.put("group.id", Configs.KafkaConfig.GROUPID);
        props.put("enable.auto.commit", "true");
        props.put("session.timeout.ms", "30000");
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(Configs.KafkaConfig.TOPIC));

        // SimpleDateFormat is not thread-safe, but it is confined to this thread.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        String rootDir = Configs.HdfsConfig.HDFS_ROOT_PATH;
        FSDataOutputStream todayS = null;
        FSDataOutputStream yesterdayS = null;
        // NOTE(review): the hook captures the values of todayS/yesterdayS at this point,
        // which are both null — streams opened later are never visible to EndHook.
        // Confirm EndHook's contract; as written this hook cannot close the real streams.
        Runtime.getRuntime().addShutdownHook(new Thread(new EndHook(todayS, yesterdayS)));
        try {
            // Day currently being written ("yyyy-MM-dd"); empty forces an open on first pass.
            String currentDay = "";
            // Epoch millis of today's midnight; records older than this go to yesterday's file.
            long todayZero = 0L;
            Configuration conf = new Configuration();
            conf.setBoolean("dfs.support.append", true);
            conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
            conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);
            fs = FileSystem.get(new URI(Configs.HdfsConfig.HDFS_PATH), conf);
            while (true) {
                String today = sdf.format(new Date());
                // Roll to a new file exactly once per calendar day. (The original compared
                // the day string against the full file path, which never matched, so the
                // stream was closed and re-appended on every poll cycle.)
                if (!today.equals(currentDay)) {
                    currentDay = today;
                    if (todayS != null) {
                        todayS.close();
                    }
                    Path p = new Path(rootDir + "/" + today + "/log.txt");
                    createFile(p);
                    todayS = fs.append(p);
                    if (yesterdayS != null) {
                        yesterdayS.close();
                        yesterdayS = null; // avoid double-close in finally / late-record handler
                    }
                    todayZero = sdf.parse(today).getTime();
                }
                ConsumerRecords<String, String> records = consumer.poll(1000);
                for (ConsumerRecord<String, String> r : records) {
                    // One record per line; encode explicitly as UTF-8 (the original
                    // round-tripped through the platform charset, corrupting non-ASCII).
                    String value = r.value() + "\r\n";
                    byte[] bytes = value.getBytes(StandardCharsets.UTF_8);
                    try {
                        LogBean b = JSONObject.parseObject(value, LogBean.class);
                        if (b.getCreateTime() < todayZero && b.getCreateTime() != 0) {
                            // Late record from before today's midnight: write to yesterday's file.
                            log.info("This record create time before today\r\n" + value);
                            try {
                                yesterdayS.write(bytes);
                            } catch (Exception e) {
                                // yesterdayS is null, or delayClose() already closed it:
                                // reopen yesterday's file, write, and schedule a fresh close.
                                log.warn(e);
                                log.warn("Yesterday OutStream has been closed or null,now try to create new stream to write...");
                                // Derive "yesterday" from the current time, not process start
                                // time, so the path stays correct after multi-day uptime.
                                String fileYes = rootDir + "/"
                                        + sdf.format(new Date(System.currentTimeMillis() - ONE_DAY_MS))
                                        + "/log.txt";
                                Path yesPath = new Path(fileYes);
                                createFile(yesPath);
                                yesterdayS = fs.append(yesPath);
                                yesterdayS.write(bytes);
                                delayClose(yesterdayS);
                            }
                        } else {
                            todayS.write(bytes);
                        }
                    } catch (JSONException e) {
                        // Skip the malformed record but keep consuming.
                        log.error("JSON Parse Exception.The JSON record is:" + r, e);
                    }
                }
            }
        } catch (Exception e) {
            log.error(e, e);
        } finally {
            consumer.close();
            if (todayS != null) {
                todayS.close();
            }
            if (yesterdayS != null) {
                yesterdayS.close();
            }
        }
    }

    /**
     * Creates an empty file at {@code p} (including nothing else) if it does not
     * already exist, so that a subsequent {@link FileSystem#append} succeeds.
     *
     * @throws IOException if the existence check or creation fails
     */
    public static void createFile(Path p) throws IOException {
        if (!fs.exists(p)) {
            fs.createNewFile(p);
        }
    }

    /**
     * Closes {@code fos} after a 5-minute delay, giving late records a window to
     * reuse yesterday's stream before it is released.
     *
     * <p>The timer thread is a daemon and cancels itself after firing, so repeated
     * calls do not accumulate live threads or block JVM shutdown (the original
     * leaked one non-daemon timer thread per call).
     */
    public static void delayClose(final FSDataOutputStream fos) {
        final Timer timer = new Timer(true); // daemon: never blocks JVM exit
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                try {
                    fos.close();
                } catch (IOException e) {
                    log.error(e, e);
                } finally {
                    timer.cancel(); // release the timer thread once the task has run
                }
            }
        }, 5 * 60 * 1000);
    }

}
