package com.epic;

import com.alibaba.fastjson.JSONObject;
import java.io.IOException;
import java.time.Duration;
import java.util.Arrays;
import java.util.HashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class Job {
    public static final Log logger = LogFactory.getLog(Job.class);

    /**
     * Moves a file within the configured HDFS filesystem: copies the source to
     * the destination and deletes the source ({@code FileUtil.copy} with
     * {@code deleteSource=true}), i.e. a move rather than a plain copy.
     *
     * @param conf    Hadoop configuration used to resolve the FileSystem
     * @param srcFile source path string
     * @param dstFile destination path string
     * @return true if the move succeeded, false on I/O failure
     */
    public static boolean RenameFile(Configuration conf, String srcFile, String dstFile) {
        try {
            FileSystem fs = FileSystem.get(conf);
            // deleteSource=true turns the copy into a move.
            return FileUtil.copy(fs, new Path(srcFile), fs, new Path(dstFile), true, conf);
        } catch (IOException e) {
            // Log through the class logger (with cause) instead of
            // printStackTrace so failures appear in the job's log stream.
            logger.error("Failed to move " + srcFile + " to " + dstFile, e);
            return false;
        }
    }

    /**
     * Entry point: consumes JSON messages from the "hive" Kafka topic, moves the
     * referenced HDFS file into the warehouse layout
     * {@code <table>/ns_date=<d>/ns_hour=<h>/}, and registers the matching Hive
     * partition via the metastore client. Runs until the process is killed.
     */
    public static void main(String[] args) {
        // Dedup cache of "table + partition" keys already registered in the
        // metastore, bounded at 20 entries (cleared when full).
        HashMap<String, String> knownPartitions = new HashMap<>();
        String brokerUrl = "esp1:9092,esp2:9092,esp3:9092";
        String topic = "hive";
        KafkaConsumer<String, String> kafkaConsumer = KafkaConsumerUtils.getConsumer(brokerUrl, "hive");
        kafkaConsumer.subscribe(Arrays.asList(topic));
        HiveConf hiveConf = new HiveConf();
        hiveConf.addResource(new Path("/opt/xxx/yyy/deps/hive/conf/hive-site.xml"));
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://esp1:9820");
        String warehouse = "/user/hive/warehouse/epic.db/";
        HiveMetaStoreClient client = null;
        try {
            client = new HiveMetaStoreClient((Configuration) hiveConf);
            while (true) {
                try {
                    logger.info("start ....");
                    ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofSeconds(10L));
                    long startTime = System.currentTimeMillis();
                    for (ConsumerRecord<String, String> record : records) {
                        try {
                            // Expected message schema: {"filepath":..., "table":...,
                            // "ns_date":..., "ns_hour":...} — TODO confirm against producer.
                            JSONObject jb = JSONObject.parseObject(record.value());
                            String srcPath = jb.get("filepath").toString();
                            String table = jb.get("table").toString();
                            String nsDate = jb.get("ns_date").toString();
                            String nsHour = jb.get("ns_hour").toString();
                            // Third path component is taken as the file name
                            // (assumes a fixed source-path layout — verify).
                            String name = srcPath.split("/")[2];
                            String dstPath = warehouse + table + "/ns_date=" + nsDate + "/ns_hour=" + nsHour + "/" + name;
                            String partName = "ns_date=" + nsDate + "/ns_hour=" + nsHour;
                            logger.info("load " + dstPath);
                            // Check the move result instead of ignoring it: a failed
                            // move must not register a partition for a missing file.
                            if (!RenameFile(conf, srcPath, dstPath)) {
                                logger.error("move failed, skip partition registration: " + srcPath);
                                continue;
                            }
                            String cacheKey = table + partName;
                            if (knownPartitions.containsKey(cacheKey)) {
                                logger.warn("cur partitons exist : " + partName);
                                continue;
                            }
                            client.appendPartition("epic", table, partName);
                            knownPartitions.put(cacheKey, "exist");
                            // Crude size bound: clearing only risks a redundant
                            // appendPartition, which the per-record catch tolerates.
                            if (knownPartitions.size() == 20) {
                                knownPartitions.clear();
                            }
                        } catch (Exception e) {
                            // Per-record failures are logged and skipped so one bad
                            // message cannot stall the whole consumer.
                            logger.warn("KafkaConsumers one data error ", e);
                        }
                    }
                    long endTime = System.currentTimeMillis();
                    logger.info("log load speeds : " + (endTime - startTime));
                    logger.info("end ....\n");
                } catch (Exception e) {
                    logger.error("KafkaConsumers error msg", e);
                }
            }
        } catch (MetaException e) {
            // Keep the full exception (cause + stack), not just getMessage().
            logger.error("failed to connect to Hive metastore", e);
        } finally {
            // client stays null when the metastore connection fails — the
            // original unconditional close() would NPE here.
            if (client != null) {
                client.close();
            }
            // Release the consumer so the group rebalances promptly.
            kafkaConsumer.close();
        }
    }
}

