package cn.com.zetatech.maple.writer.hive;

import cn.com.zetatech.maple.entity.ColumnDef;
import cn.com.zetatech.maple.entity.HiveWriter;
import cn.com.zetatech.maple.entity.Writer;
import cn.com.zetatech.maple.writer.WriterHandler;
import cn.com.zetatech.maple.writer.common.DataSourceHolder;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import javax.sql.DataSource;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.stream.Collectors;

@SuppressWarnings("all")
public class HiveWriterHandler implements WriterHandler {

    /**
     * Writes a batch of rows into Hive: serializes the rows to delimited text,
     * uploads the text to a unique staging file on HDFS, then imports it with a
     * Hive {@code LOAD DATA} statement. The staging file is removed afterwards
     * regardless of success or failure.
     *
     * @param writer     writer configuration; must be a {@link HiveWriter}
     * @param columnDefs column definitions fixing the field order of each line
     * @param datas      rows to write, each keyed by column name
     */
    @Override
    public void write(Writer writer, List<ColumnDef> columnDefs, List<Map<String, String>> datas) {
        HiveWriter hiveWriter = (HiveWriter) writer;
        String fieldsTerminatedBy = hiveWriter.getFieldsTerminatedBy();

        // Column order used for every serialized line.
        List<String> schema = columnDefs.stream().map(ColumnDef::getName).collect(Collectors.toList());

        StringBuilder totalData = new StringBuilder();
        for (Map<String, String> row : datas) {
            for (int i = 0; i < schema.size(); i++) {
                if (i > 0) {
                    // Join with the terminator between fields. The previous
                    // append-then-deleteCharAt approach removed only ONE trailing
                    // character (broken for multi-character terminators) and threw
                    // StringIndexOutOfBoundsException for an empty schema.
                    totalData.append(fieldsTerminatedBy);
                }
                String fieldVal = row.get(schema.get(i));
                if (fieldVal != null) {
                    // null values become empty fields rather than the text "null".
                    totalData.append(fieldVal);
                }
            }
            totalData.append('\n');
        }

        // Unique per-batch staging location on HDFS.
        String path = generateHdfsPath(hiveWriter);

        try {
            saveToHDFS(hiveWriter, totalData, path);
            loadToHive(hiveWriter, path);
        } catch (Exception e) {
            // TODO(review): the WriterHandler contract apparently has no checked
            // failure channel; replace with proper logging/rethrow when possible.
            e.printStackTrace();
        } finally {
            try {
                // Best-effort cleanup: the staging file must go whether or not
                // the load succeeded.
                deleteHdfsFile(hiveWriter, path);
            } catch (Exception ignored) {
                // Cleanup failure is non-fatal; the path is unique per batch.
            }
        }
    }

    /**
     * Uploads the serialized batch to HDFS, creating the target file or
     * appending to it when it already exists.
     *
     * @param hiveWriter   configuration supplying defaultFS and extra Hadoop properties
     * @param totallSbData serialized row data
     * @param path         fully qualified HDFS target path
     * @throws IOException          on a malformed defaultFS URI or any HDFS failure
     * @throws InterruptedException if obtaining the filesystem as user "hdfs" is interrupted
     */
    public void saveToHDFS(HiveWriter hiveWriter, StringBuilder totallSbData, String path) throws IOException, InterruptedException {
        String hdfsUrl = hiveWriter.getDefaultFS();
        Configuration configuration = new Configuration();
        // Apply user-supplied Hadoop properties on top of the defaults.
        hiveWriter.getConf().forEach(configuration::set);

        URI uri;
        try {
            uri = new URI(hdfsUrl);
        } catch (URISyntaxException e) {
            // Fail loudly with the cause attached; the original swallowed this
            // and then NPE'd inside FileSystem.get on the null URI.
            throw new IOException("Invalid defaultFS URI: " + hdfsUrl, e);
        }

        // NOTE(review): FileSystem.get may return a cached, shared instance, so
        // it is deliberately not closed here (matches the original behavior).
        FileSystem fileSystem = FileSystem.get(uri, configuration, "hdfs");
        Path target = new Path(path);
        // try-with-resources closes the stream on every path, including failures.
        try (FSDataOutputStream out = fileSystem.exists(target)
                ? fileSystem.append(target)
                : fileSystem.create(target)) {
            // Explicit UTF-8: a bare getBytes() uses the platform-default charset,
            // which corrupts non-ASCII data on mis-configured hosts.
            out.write(totallSbData.toString().getBytes(StandardCharsets.UTF_8));
            out.flush();
        }
    }

    /**
     * Imports the uploaded file into the target table with a Hive
     * {@code LOAD DATA [LOCAL] INPATH ... INTO TABLE ... [PARTITION (...)]}
     * statement.
     *
     * @param hiveWriter configuration supplying JDBC coordinates, target table and partitions
     * @param path       staging path previously written by {@link #saveToHDFS}
     * @throws SQLException if the connection or the LOAD DATA statement fails
     */
    private void loadToHive(HiveWriter hiveWriter, String path) throws SQLException {
        String database = hiveWriter.getDatabase();
        String table = hiveWriter.getTable();
        List<HiveWriter.Partition> partitions = hiveWriter.getPartitions();

        // Hive offers no parameter binding for LOAD DATA, so the statement is
        // assembled by concatenation. NOTE(review): partition names/values come
        // from trusted configuration and are not escaped here — confirm they can
        // never carry user input.
        StringBuilder sql = new StringBuilder();
        sql.append(String.format("load data%s inpath '%s' into table %s.%s ",
                path.contains("local") ? " local" : "", path, database, table));
        if (CollectionUtils.isNotEmpty(partitions)) {
            sql.append("partition (");
            for (int i = 0; i < partitions.size(); i++) {
                if (i > 0) {
                    sql.append(",");
                }
                HiveWriter.Partition partition = partitions.get(i);
                sql.append(String.format("%s='%s'", partition.getName(), partition.getValue()));
            }
            sql.append(")");
        }
        System.out.println(sql.toString());

        DataSource ds = DataSourceHolder.me().getDataSource(
                hiveWriter.getUrl(), hiveWriter.getDriver(), hiveWriter.getUsername(), hiveWriter.getPassword());
        // try-with-resources closes statement and connection in the right order
        // and is null-safe; the original finally block NPE'd when getConnection
        // failed, and SQL errors were swallowed despite the declared throws.
        try (Connection conn = ds.getConnection();
             Statement stmt = conn.createStatement()) {
            stmt.execute(sql.toString());
        }
    }

    /**
     * Builds a unique HDFS staging path for this batch, e.g.
     * {@code hdfs://10.10.10.205:8020/tmp/hive/bdp_test_mytable/20190601_<uuid>.txt}
     * — i.e. {@code <defaultFS>/<base>/<database>_<table>/<yyyyMMdd>_<uuid>.txt}.
     *
     * @param hiveWriter configuration supplying defaultFS, database, table and base path
     * @return fully qualified, collision-free HDFS file path
     */
    private String generateHdfsPath(HiveWriter hiveWriter) {
        String defaultFS = hiveWriter.getDefaultFS();
        String storeBasePath = hiveWriter.getStoreBasePath(); // eg: /tmp/hive/
        String today = DateFormatUtils.format(new Date(), "yyyyMMdd");
        String uuid = UUID.randomUUID().toString().replace("-", "");

        // Normalize both sides so the join below yields exactly one '/' between
        // them; previously an absolute base path such as "/tmp/hive" produced
        // "host:8020//tmp/hive", contradicting the documented example.
        if (defaultFS.endsWith("/")) {
            defaultFS = defaultFS.substring(0, defaultFS.length() - 1);
        }
        if (storeBasePath.endsWith("/")) {
            storeBasePath = storeBasePath.substring(0, storeBasePath.length() - 1);
        }
        if (storeBasePath.startsWith("/")) {
            storeBasePath = storeBasePath.substring(1);
        }
        return String.format("%s/%s/%s_%s/%s_%s.txt",
                defaultFS, storeBasePath, hiveWriter.getDatabase(), hiveWriter.getTable(), today, uuid);
    }

    /**
     * Best-effort removal of the staging file. All failures are swallowed on
     * purpose: cleanup must never mask the outcome of the load itself.
     *
     * @param hiveWriter configuration supplying defaultFS and extra Hadoop properties
     * @param path       HDFS file to delete
     */
    private void deleteHdfsFile(HiveWriter hiveWriter, String path) throws IOException, InterruptedException {
        Configuration configuration = new Configuration();
        hiveWriter.getConf().forEach(configuration::set);
        try {
            URI uri = new URI(hiveWriter.getDefaultFS());
            FileSystem fileSystem = FileSystem.get(uri, configuration, "hdfs");
            // Non-recursive delete(path, false) replaces the deprecated
            // delete(path): the target is always a single file.
            fileSystem.delete(new Path(path), false);
        } catch (Exception ignored) {
            // Deliberate best-effort: a leftover staging file is harmless.
        }
    }

}