package com.zetatech.bdp.writer.hive;

import com.zetatech.bdp.entity.ColumnDef;
import com.zetatech.bdp.entity.HiveWriter;
import com.zetatech.bdp.entity.Writer;
import com.zetatech.bdp.writer.WriterHandler;
import com.zetatech.bdp.writer.hive.utils.DBUtils;
import lombok.val;
import org.apache.commons.lang.time.FastDateFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import javax.sql.DataSource;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.sql.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import static com.zetatech.bdp.writer.hive.utils.ConstantClassField.*;

/**
 * {@link WriterHandler} implementation for Hive: serializes the incoming rows
 * to delimiter-separated text, uploads the text to a temporary HDFS file, and
 * then issues a JDBC {@code LOAD DATA} statement to move it into the target
 * Hive table.
 */
public class HiveWriterHandler implements WriterHandler {

    /** Hive JDBC driver class name. */
    private static final String HIVE_DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";

    /**
     * Timestamp pattern for generated HDFS file names.
     * {@link FastDateFormat} is thread-safe, so it is cached once instead of
     * being re-created on every call.
     */
    private static final FastDateFormat TIMESTAMP_FORMAT = FastDateFormat.getInstance("yyyyMMddHHmmss");

    /**
     * Serializes {@code datas} into delimited lines, uploads them to HDFS and
     * loads the resulting file into Hive, printing rough timing information.
     *
     * @param writer     writer configuration; must be a {@link HiveWriter}
     * @param columnDefs column definitions giving the output column order
     * @param datas      rows to write, each map keyed by column name
     */
    @Override
    public void write(Writer writer, List<ColumnDef> columnDefs, List<Map<String, String>> datas) {
        HiveWriter hiveWriter = (HiveWriter) writer;

        // Column names in schema order; used to pull values out of each row map.
        List<String> columnNames = new ArrayList<>(columnDefs.size());
        for (ColumnDef column : columnDefs) {
            columnNames.add(column.getName());
        }

        // Hive field delimiter; fall back to the default when not configured.
        String fieldDelim = hiveWriter.getConf().get("fieldelim");
        if (fieldDelim == null || fieldDelim.isEmpty()) {
            fieldDelim = DEFAULT_HIVE_FIELD_DELIM;
        }

        StringBuilder totalData = new StringBuilder();
        StringBuilder lineData = new StringBuilder();
        for (Map<String, String> line : datas) {
            lineData.setLength(0);
            for (String key : columnNames) {
                lineData.append(line.get(key)).append(fieldDelim);
            }
            // Strip the trailing delimiter. The previous deleteCharAt(lastIndexOf(...))
            // removed only ONE character even for multi-character delimiters,
            // corrupting the last field of every row in that case.
            if (lineData.length() >= fieldDelim.length()) {
                lineData.setLength(lineData.length() - fieldDelim.length());
            }
            totalData.append(lineData).append('\n');
        }

        long startHdfsTime = System.currentTimeMillis();

        // Target HDFS location for the temporary data file.
        String path = generateHdfsPath(hiveWriter);

        // Upload the serialized rows to HDFS.
        try {
            saveToHDFS(hiveWriter, totalData, path);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can still observe the interruption.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
        long endHdfsTime = System.currentTimeMillis();
        System.out.println("Save TO HDFS consuming：" + (endHdfsTime - startHdfsTime) / 1000 + "s");

        long startHiveTime = System.currentTimeMillis();

        // Load the uploaded file into the Hive table.
        try {
            loadToHive(hiveWriter, path);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        long endHiveTime = System.currentTimeMillis();
        System.out.println("Load TO Hive consuming：" + (endHiveTime - startHiveTime) / 1000 + "s");
        System.out.println("Total consuming：" + (endHiveTime - startHdfsTime) / 1000 + "s");
    }

    /**
     * Uploads the serialized row data to HDFS, appending when the target file
     * already exists and creating it otherwise.
     *
     * @param hiveWriter   writer configuration supplying the default FS URL
     * @param totallSbData serialized row data to upload
     * @param path         absolute HDFS path of the target file
     * @throws IOException          if the HDFS URL is invalid or the write fails
     * @throws InterruptedException if obtaining the FileSystem is interrupted
     */
    public static void saveToHDFS(HiveWriter hiveWriter, StringBuilder totallSbData, String path)
            throws IOException, InterruptedException {
        String hdfsUrl = hiveWriter.getDefaultFS();

        Configuration configuration = new Configuration();
        configuration.set("dfs.replication", "1");

        URI uri;
        try {
            uri = new URI(hdfsUrl);
        } catch (URISyntaxException e) {
            // Previously swallowed, leaving uri == null and causing an NPE in
            // FileSystem.get(); fail loudly with the cause preserved instead.
            throw new IOException("Invalid HDFS URL: " + hdfsUrl, e);
        }

        FileSystem fileSystem = FileSystem.get(uri, configuration, "hadoop");
        Path outputPath = new Path(path);
        // try-with-resources guarantees the stream is closed even when the write
        // fails; the previous version could NPE on out.flush() if create()/append()
        // had thrown before assigning `out`.
        try (FSDataOutputStream out = fileSystem.exists(outputPath)
                ? fileSystem.append(outputPath)
                : fileSystem.create(outputPath)) {
            // Encode as UTF-8 explicitly rather than the platform default charset.
            out.write(totallSbData.toString().getBytes(StandardCharsets.UTF_8));
            out.flush();
        }
    }

    /**
     * Loads the uploaded HDFS file into the target Hive table via JDBC.
     * Uses {@code LOAD DATA LOCAL INPATH} when the user configured an explicit
     * local path, otherwise {@code LOAD DATA INPATH} with the generated HDFS path.
     *
     * @param hiveWriter writer configuration (JDBC URL, credentials, db/table)
     * @param path       generated HDFS path produced by {@link #generateHdfsPath}
     * @throws SQLException if the connection, statement, or load fails
     */
    private void loadToHive(HiveWriter hiveWriter, String path) throws SQLException {
        Map<String, String> conf = hiveWriter.getConf();
        String hiveJdbcUrl = conf.get("hiveJdbcUrl");
        String dataBase = hiveWriter.getDatabase();
        String table = hiveWriter.getTable();
        String user = conf.get("user");
        String pwd = conf.get("password");

        if (dataBase == null || dataBase.isEmpty()) {
            dataBase = DEFAULT_HIVE_DATABASE;
        }
        if (table == null || table.isEmpty()) {
            table = DEFAULT_HIVE_TABLE;
        }
        // TODO: support hiveWriter.getPartitions() for partitioned target tables
        // (append a PARTITION (...) clause to the LOAD statement).

        // NOTE: LOAD DATA does not accept bind parameters, so the statement is
        // built by concatenation; the path values come from trusted configuration.
        String userDefPath = conf.get("path");
        StringBuilder sql = new StringBuilder();
        if (userDefPath != null && !userDefPath.isEmpty()) {
            // The local path was previously unquoted, producing invalid HiveQL.
            sql.append("load data local inpath '").append(userDefPath).append("'");
        } else {
            sql.append("load data inpath '").append(path).append("'");
        }
        sql.append(" into table ").append(dataBase).append('.').append(table);

        //TODO 修改连接池 -> revisit the connection-pool implementation
        DataSource ds = DBUtils.me().getDataSource(hiveJdbcUrl, HIVE_DRIVER_NAME, user, pwd);
        // try-with-resources closes statement and connection in all paths; the
        // previous version called conn.rollback() on a null connection when
        // getConnection() failed, guaranteeing an NPE in the failure branch.
        try (Connection conn = ds.getConnection();
             Statement stmt = conn.createStatement()) {
            stmt.execute(sql.toString());
        }
    }

    /**
     * Builds the HDFS path the data file is uploaded to, of the form
     * {@code <base>/<yyyyMMddHHmmss>.<storeType>} where {@code <base>} defaults
     * to {@code /root/<database>/<table>/tmp/}.
     *
     * @param hiveWriter writer configuration (database, table, store type, path)
     * @return generated absolute HDFS path
     */
    String generateHdfsPath(HiveWriter hiveWriter) {
        // Path format: /root/<database>/<table>/tmp/<yyyyMMddHHmmss>.<txt|parquet>
        Map<String, String> conf = hiveWriter.getConf();

        String dataBase = conf.get("dataBase");
        // The previous check was `"".equals(conf.get(dataBase))`, i.e. it looked
        // up the VALUE as a key, so an empty configured value was never defaulted.
        if (dataBase == null || dataBase.isEmpty()) {
            dataBase = "default";
        }
        String table = conf.get("table");
        if (table == null || table.isEmpty()) {
            table = "test";
        }

        String storeType = hiveWriter.getStore();
        if (storeType == null || storeType.isEmpty()) {
            storeType = DEFAULT_HIVE_STORE_TYPE;
        }

        // Base HDFS directory; fall back to a per-database/table tmp directory.
        String path = conf.get("path");
        if (path == null || path.isEmpty()) {
            path = "/root/" + dataBase + "/" + table + "/tmp/";
        }

        // File name: current time as yyyyMMddHHmmss plus the store-type suffix.
        String formatTime = TIMESTAMP_FORMAT.format(new Date(System.currentTimeMillis()));
        if (!path.endsWith("/")) {
            path = path + "/";
        }
        return path + formatTime + "." + storeType;
    }
}