package com.sg.java.apps;

import cn.hutool.core.util.NumberUtil;
import com.sg.java.TimeSharing;
import com.sg.java.YHVolt;
import com.sg.java.entity.CMS_VOLT_CURVE;
import com.sg.java.entity.TimeShare;
import com.sg.java.entity.VoltU1_96;
import com.sg.java.entity.VoltageQualifiedVo;
import com.sg.java.util.CsvUtils;
import com.sg.java.util.ReadUtils;
import com.sg.java.util.SqlUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.rdd.RDD;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.PosixFilePermissions;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static com.sg.java.YHVolt4.sparkReadProperties;
import static com.sg.java.apps.GetDataFromHBase2.*;
import static com.sg.java.util.HdfsUtils.fs;
import static com.sg.java.util.HdfsUtils.hdfsPathPrefix;

public class ReadHdfsFile implements Serializable {

    /** Class-level logger; per-directory workers create their own named loggers inside file()/spark(). */
    public static final Logger log = LoggerFactory.getLogger(ReadHdfsFile.class);

    // Number of rows buffered before each batch insert into PG.
    public static final int insertBatchSize = 50000;

    // Numeric regex: matches only digits and a decimal point (integers and decimals, no sign/exponent).
    public static final String double_regex = "^(([0])|([1-9]+[0-9]*.{1}[0-9]+)|([0].{1}[1-9]+[0-9]*)|([1-9][0-9]*)|([0][.][0-9]+[1-9]+))$";

    // Column headers of the transformer-relation CSV loaded via CsvUtils.CsvToPbCd2.
    public static String[] pbHeaders = new String[]{"cdId", "pbId", "cons_id", "dydj", "tran_name"};
    // Transformer-relation lookup loaded from CSV; populated lazily in file()/spark() for taskType "1".
    public static HashMap<String, Triple<String, String, String>> pbgxMap;

    // Target PG table names; may be re-pointed by initStaticVariables(type).
    public static String pg_volt_curve_pb = "pms3_qkjdy.gddy_amr_volt_curve_pb";
    public static String pg_cur_curve = "pms3_qkjdy.gddy_amr_cur_curve";
    public static String pg_power_curve = "pms3_qkjdy.gddy_amr_power_curve";

    // PG connection properties; default is the "cq" environment, swapped by initStaticVariables(type).
    public static Properties pgProp = SqlUtils.cqPgProp;

//    public static String pbFilePath = System.getProperty("user.dir") + File.separator + "conf" + File.separator;

    // Directory holding the relation CSV files; overridden for the "file" run mode.
    public static String pbFilePath = "/qkjdy/datax/bin/conf/";

    // Output directory for optional txt result files (writeType "txt").
    public static String writeFilePath = "/home/textData";

    static {
        // Print the default NIO file system at class-load time (startup diagnostic).
        System.out.println("默认文件系统:" + FileSystems.getDefault());
    }

    /**
     * Re-points the static table names, relation-file path and PG connection
     * properties for the given run mode.
     *
     * @param type run mode: "file" uses the local Jenkins paths and home PG
     *             properties; "spark"/"hdfs" use the cluster ("cq") PG
     *             properties; any other value leaves the defaults untouched
     *             (a warning is logged instead of silently ignoring it)
     */
    public static void initStaticVariables(String type) {
        switch (type) {
            case "file":
                pg_volt_curve_pb = "pms3_qkjdy.gddy_amr_volt_curve_pb";
                pg_cur_curve = "pms3_qkjdy.gddy_amr_cur_curve";
                pg_power_curve = "pms3_qkjdy.gddy_amr_power_curve";
                pbFilePath = "/home/jenkins_home/jenkins_home/datax_cq/datax/bin/conf/";
                pgProp = SqlUtils.homePgProp;
//                writeFilePath = "";//todo
                break;
            case "spark":
            case "hdfs":
                pg_volt_curve_pb = "pms3_qkjdy.gddy_amr_volt_curve_pb";
                pg_cur_curve = "pms3_qkjdy.gddy_amr_cur_curve";
                pg_power_curve = "pms3_qkjdy.gddy_amr_power_curve";
                pgProp = SqlUtils.cqPgProp;
//                writeFilePath = "";//todo
                break;
            default:
                // Previously a silent no-op; an unexpected type almost certainly
                // indicates a misconfigured launch argument, so make it visible.
                log.warn("initStaticVariables: unknown type '{}', keeping default configuration", type);
                break;
        }
    }

//    public static void file(String taskType, String day, String writeType) throws Exception {
//        File dataDirPath = new File("/home/hiveData");
//        File[] allDirsOrFiles = dataDirPath.listFiles();
//        if (allDirsOrFiles == null || allDirsOrFiles.length == 0) {
//            log.info("目录下为空");
//            return;
//        }
//        //过滤目录下需要的数据文件目录
//        List<File> dataDirs = Arrays.stream(allDirsOrFiles)
//                .filter(f -> (f.getName().contains("cur") || f.getName()
//                        .contains("vol") || f.getName()
//                        .contains("power")) && (day.equals("ALL") || f.getName()
//                        .contains(day)))
//                .collect(Collectors.toList());
//        for (File dataDir : dataDirs) {
//            log.info("已获取数据目录:{}", dataDir.getName());
//        }
//        //遍历数据目录进行相关操作，预计90份文件左右
//        Stream<File> fileStatusStream;
//        switch (taskType) {
//            case "1":
//                //加载配变关系
//                pbgxMap = CsvUtils.CsvToPbCd2(pbFilePath, pbHeaders, day + YHVolt.pbgx);
//                //数据多开多线程并行流
//                fileStatusStream = dataDirs.parallelStream();
//                log.info("任务类型:{}", "入电流电压功率基础数据入PG");
//                break;
//            case "2":
//                //不能多线程目前，因为readMap加载关联关系是用的唯一变量
//                fileStatusStream = dataDirs.stream().filter(fs -> fs.getName().contains("vol"));
//                log.info("任务类型:{}", "电压基础数据进行计算数据入PG，只取获取数据目录的vol电压目录");
//                break;
//            default:
//                throw new RuntimeException("未知任务类型");
//        }
//        fileStatusStream.forEach(dir -> {
//            assert dir.isDirectory();
//            //写入数据用
//            Connection pgConn = null;
//            //读文件数据用
//            FileInputStream in = null;
//            //封装缓冲读
//            BufferedReader reader = null;
//            //目录名称
//            final String dirName = dir.getName();
//            //目录数据日期，目录名截取后8位为日期 格式:20220801
//            final String ds = dirName.substring(dirName.length() - 8);
//            //单线程sdf,为确保多线程不出问题，每个线程new一个
//            final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
//            //目录数据日期date对象
//            final Date date;
//            //数据类型 U-电压 I-电流 P-功率
//            final String type;
//            if (dirName.contains("vol")) {
//                type = "U";
//            } else if (dirName.contains("cur")) {
//                type = "I";
//            } else if (dirName.contains("power")) {
//                type = "P";
//            } else {
//                throw new RuntimeException("文件未知数据类型");
//            }
//            //打印日志用，log名称格式(目录名-数据类型-任务类型):vol_2022081-U-1
//            final Logger log = LoggerFactory.getLogger(dirName + "-" + type + "-" + taskType);
//
//            String dataLine;
//            //此目录总读取
//            long totalDirReadCount = 0;
//            //此目录总写入`
//            long totalDirWriteCount = 0;
//            java.nio.file.Path path = null;
//            try {
//                //初始化conn
//                pgConn = SqlUtils.newPgConn(pgProp);
//                //ds转date
//                date = sdf.parse(ds);
//                //获取数据目录下的数据文件
//                File[] dataFiles = dir.listFiles();
//                if (dataFiles == null || dataFiles.length == 0) {
//                    log.info(dir.getName() + "下无数据文件");
//                    return;
//                }
//                if (taskType.equals("2")) {
//                    String tableName = "\"pms3_qkjdy\".\"essential_data_" + ds + "\"";
//                    log.info("表名:{}", tableName);
//                    InputStream is = ClassLoader.getSystemResourceAsStream("essential_data_ds.sql");
//                    assert is != null;
//                    final byte[] bytes = ReadUtils.readStream(is);
//                    String createTableSql = new String(bytes, StandardCharsets.UTF_8);
//                    createTableSql = String.format(createTableSql, ds, ds, ds);
//                    Statement statement = pgConn.createStatement();
//                    for (String s : createTableSql.split(";")) {
//                        statement.addBatch(s);
//                    }
//                    int[] result = statement.executeBatch();
//                    log.info("createTableSql:{}\nok:{}", createTableSql, Arrays.toString(result));
//                    String truncateSql = "TRUNCATE TABLE " + tableName + ";";
//                    boolean execute = pgConn.prepareStatement(truncateSql).execute();
//                    if (execute) {
//                        log.info("表:{}数据已清空", tableName);
//                    }
//                    if (writeType.equals("txt")) {
//                        path = Paths.get(URI.create("file://" + writeFilePath + "/" + ds + ".txt"));
//                        log.info("写入txt文件:{}", path);
//                        java.nio.file.Path rwxrwxrwx = Files.createFile(path, PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx")));
//                        log.info("创建文件:{}", rwxrwxrwx);
//                    }
//                } else if (taskType.equals("1")) {
//                    String month = ds.substring(0, 6);
//                    String delete_volt = pg_volt_curve_pb + "_" + month;
//                    String delete_cur = pg_cur_curve + "_" + month;
//                    String delete_power = pg_power_curve + "_" + month;
//                    String sqlTemplate = "delete from %s where to_char(data_date,'YYYYMMDD') = '%s'";
//                    PreparedStatement p1 = pgConn.prepareStatement(String.format(sqlTemplate, delete_volt, ds));
//                    p1.executeUpdate();
//                    PreparedStatement p2 = pgConn.prepareStatement(String.format(sqlTemplate, delete_cur, ds));
//                    p2.executeUpdate();
//                    PreparedStatement p3 = pgConn.prepareStatement(String.format(sqlTemplate, delete_power, ds));
//                    p3.executeUpdate();
//                }
//                //遍历单个文件
//                long dir_s = System.currentTimeMillis();
//                Map<String, Void> uniqueIds = new HashMap<>();
//                for (File dataFile : dataFiles) {
//                    long file_s = System.currentTimeMillis();
//                    long totalFileReadCount = 0;
//                    //此文件总写入
//                    long totalFileWriteCount = 0;
//                    //每批次读取
//                    int thisReadCount;
//                    //每批次写入
//                    int thisWriteCount;
//                    String fileName = dataFile.getName();
//                    List<String[]> dataList = new ArrayList<>(insertBatchSize);
//                    log.info("正在读取数据分区文件:{}", fileName);
//                    in = new FileInputStream(dataFile);
//                    reader = new BufferedReader(new InputStreamReader(in));
//                    //此文件总读取
//                    while ((dataLine = reader.readLine()) != null) {
//                        //每读一行，当前文件总读取++
//                        totalFileReadCount++;
//                        //dataCol数据格式 0-95为96个点的数据，96为id，97为data_date，98为org_no，99为phase_flag或data_type 一共100个数据
//                        String[] dataCol = dataLine.split("\t");
//                        dataList.add(dataCol);
//                        //满批次进行一次插入
//                        if (dataList.size() == insertBatchSize) {
//                            long s = System.currentTimeMillis();
//                            thisReadCount = dataList.size();
//                            switch (taskType) {
//                                case "1":
//                                    thisWriteCount = writeBaseData(dataList, type, pgConn);
//                                    break;
//                                case "2":
//                                    thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
//                                    break;
//                                default:
//                                    throw new RuntimeException();
//                            }
//                            totalFileWriteCount += thisWriteCount;
//                            if (dataList.size() != 0) {
//                                dataList.clear();
//                            }
//                            long e = System.currentTimeMillis() - s;
//                            log.info("taskType:{}\t文件名:{}\t此次已读取:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t此次耗时:{}", taskType, dataFile.getName(), thisReadCount, thisWriteCount, totalFileReadCount, totalFileWriteCount, e);
//
//                        }
//                    }
//                    //剩余数据都入
//                    switch (taskType) {
//                        case "1":
//                            thisWriteCount = writeBaseData(dataList, type, pgConn);
//                            break;
//                        case "2":
//                            thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
//                            break;
//                        default:
//                            throw new RuntimeException();
//                    }
//                    totalFileWriteCount += thisWriteCount;
//                    log.info("当前文件最后一次剩余数量插入:taskType:{}\t文件名:{}\t剩余数量:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t", taskType, dataFile.getName(), dataList.size(), thisWriteCount, totalFileReadCount, totalFileWriteCount);
//                    dataList.clear();
//                    totalDirReadCount += totalFileReadCount;
//                    totalDirWriteCount += totalFileWriteCount;
//                    log.info("-------------------------此文件数据已全部结束，当前文件总已读取:{}\t当前文件总已插入:{}\t此文件读取总耗时:{}-------------------------", totalFileReadCount, totalFileWriteCount, System.currentTimeMillis() - file_s);
//                }
//                log.info("-------------------------此目录数据已全部结束，当前目录总已读取:{}\t当前目录总已插入:{}\t此目录读取总耗时:{}-------------------------", totalDirReadCount, totalDirWriteCount, System.currentTimeMillis() - dir_s);
//            } catch (Exception e) {
//                //捕获异常但不抛出，只打印堆栈，保证程序无论并行或串行都能正常执行完毕，后续再排查报错日志针对性修复
//                log.error("ERROR");
//                e.printStackTrace();
//            } finally {
//                IOUtils.closeStream(reader);
//                IOUtils.closeStream(in);
//                try {
//                    if (pgConn != null) {
//                        pgConn.close();
//                    }
//                } catch (SQLException e) {
//                    e.printStackTrace();
//                }
//            }
//        });
//    }

    /**
     * Loads the local data directories under /home/hiveData into PostgreSQL.
     *
     * <p>Directory names encode the measurement kind ("cur"/"vol"/"power") and end
     * with the data date in yyyyMMdd form (e.g. vol_20220801). Task type "1" batch
     * inserts raw voltage/current/power curves (directories processed in parallel,
     * target day is deleted first); task type "2" computes result data from the
     * voltage directories only and must stay single-threaded because the relation
     * lookup uses shared static state. Errors in one directory are logged and do
     * not stop the others.
     *
     * @param taskType  "1" = raw data load, "2" = computed results; anything else throws
     * @param day       date filter (yyyyMMdd) or "ALL" to process every directory
     * @param writeType "txt" additionally writes task-"2" results to a local text file
     * @throws Exception if the relation CSV cannot be loaded or the task type is unknown
     */
    public static void file(String taskType, String day, String writeType) throws Exception {
        File dataDirPath = new File("/home/hiveData");
        File[] allDirsOrFiles = dataDirPath.listFiles();
        if (allDirsOrFiles == null || allDirsOrFiles.length == 0) {
            log.info("目录下为空");
            return;
        }
        // Keep only cur/vol/power directories, optionally restricted to the requested day.
        List<File> dataDirs = Arrays.stream(allDirsOrFiles)
                .filter(f -> (f.getName().contains("cur") || f.getName().contains("vol") || f.getName().contains("power"))
                        && (day.equals("ALL") || f.getName().contains(day)))
                .collect(Collectors.toList());
        for (File dataDir : dataDirs) {
            log.info("已获取数据目录:{}", dataDir.getName());
        }
        // Roughly 90 directories expected; choose serial vs parallel per task type.
        Stream<File> fileStatusStream;
        switch (taskType) {
            case "1":
                // Load the transformer relation map, then fan out across directories.
                pbgxMap = CsvUtils.CsvToPbCd2(pbFilePath, pbHeaders, day + YHVolt.pbgx);
                fileStatusStream = dataDirs.parallelStream();
                log.info("任务类型:{}", "入电流电压功率基础数据入PG");
                break;
            case "2":
                // Must stay single-threaded: the relation lookup uses a shared static variable.
                fileStatusStream = dataDirs.stream().filter(fs -> fs.getName().contains("vol"));
                log.info("任务类型:{}", "电压基础数据进行计算数据入PG，只取获取数据目录的vol电压目录");
                break;
            default:
                throw new RuntimeException("未知任务类型");
        }
        fileStatusStream.forEach(dir -> {
            assert dir.isDirectory();
            // One PG connection per directory worker.
            Connection pgConn = null;
            final String dirName = dir.getName();
            // Data date: last 8 characters of the directory name, format 20220801.
            final String ds = dirName.substring(dirName.length() - 8);
            // SimpleDateFormat is not thread-safe; each worker gets its own instance.
            final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
            final Date date;
            // Measurement kind: U = voltage, I = current, P = power.
            final String type;
            if (dirName.contains("vol")) {
                type = "U";
            } else if (dirName.contains("cur")) {
                type = "I";
            } else if (dirName.contains("power")) {
                type = "P";
            } else {
                throw new RuntimeException("文件未知数据类型");
            }
            // Per-directory logger named "<dir>-<type>-<task>", e.g. vol_2022081-U-1.
            final Logger log = LoggerFactory.getLogger(dirName + "-" + type + "-" + taskType);

            long totalDirReadCount = 0;
            long totalDirWriteCount = 0;
            // Optional txt output path for task "2" with writeType "txt".
            java.nio.file.Path path = null;
            try {
                pgConn = SqlUtils.newPgConn(pgProp);
                date = sdf.parse(ds);
                File[] dataFiles = dir.listFiles();
                if (dataFiles == null || dataFiles.length == 0) {
                    log.info(dir.getName() + "下无数据文件");
                    return;
                }
                if (taskType.equals("2")) {
                    // Recreate and truncate the per-day result table from the bundled SQL template.
                    String tableName = "\"pms3_qkjdy\".\"essential_data_" + ds + "\"";
                    log.info("表名:{}", tableName);
                    InputStream is = ClassLoader.getSystemResourceAsStream("essential_data_ds.sql");
                    assert is != null;
                    final byte[] bytes = ReadUtils.readStream(is);
                    String createTableSql = new String(bytes, StandardCharsets.UTF_8);
                    createTableSql = String.format(createTableSql, ds, ds, ds);
                    try (Statement statement = pgConn.createStatement()) {
                        for (String s : createTableSql.split(";")) {
                            statement.addBatch(s);
                        }
                        int[] result = statement.executeBatch();
                        log.info("createTableSql:{}\nok:{}", createTableSql, Arrays.toString(result));
                    }
                    String truncateSql = "TRUNCATE TABLE " + tableName + ";";
                    try (PreparedStatement truncate = pgConn.prepareStatement(truncateSql)) {
                        if (truncate.execute()) {
                            log.info("表:{}数据已清空", tableName);
                        }
                    }
                    if (writeType.equals("txt")) {
                        path = Paths.get(URI.create("file://" + writeFilePath + "/" + ds + ".txt"));
                        log.info("写入txt文件:{}", path);
                        java.nio.file.Path rwxrwxrwx = Files.createFile(path, PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx")));
                        log.info("创建文件:{}", rwxrwxrwx);
                    }
                } else if (taskType.equals("1")) {
                    // Delete any previous load of this day from the three monthly partitions.
                    String month = ds.substring(0, 6);
                    String sqlTemplate = "delete from %s where to_char(data_date,'YYYYMMDD') = '%s'";
                    String[] tables = {
                            pg_volt_curve_pb + "_" + month,
                            pg_cur_curve + "_" + month,
                            pg_power_curve + "_" + month
                    };
                    for (String table : tables) {
                        try (PreparedStatement ps = pgConn.prepareStatement(String.format(sqlTemplate, table, ds))) {
                            ps.executeUpdate();
                        }
                    }
                }
                long dir_s = System.currentTimeMillis();
                // Ids already written, shared across files of this directory for de-duplication.
                Map<String, Void> uniqueIds = new HashMap<>();
                for (File dataFile : dataFiles) {
                    long file_s = System.currentTimeMillis();
                    long totalFileReadCount = 0;
                    long totalFileWriteCount = 0;
                    int thisReadCount;
                    int thisWriteCount;
                    String fileName = dataFile.getName();
                    List<String[]> dataList = new ArrayList<>(insertBatchSize);
                    log.info("正在读取数据分区文件:{}", fileName);
                    // try-with-resources per file: the previous version reassigned the
                    // streams each iteration and closed them only once at the end,
                    // leaking one FileInputStream/BufferedReader per additional file.
                    try (FileInputStream in = new FileInputStream(dataFile);
                         BufferedReader reader = new BufferedReader(new InputStreamReader(in))) {
                        String dataLine;
                        while ((dataLine = reader.readLine()) != null) {
                            totalFileReadCount++;
                            // Columns 0-95: the 96 curve points; 96: id; 97: data_date;
                            // 98: org_no; 99: phase_flag or data_type — 100 columns total.
                            String[] dataCol = dataLine.split("\t");
                            dataList.add(dataCol);
                            // Flush a full batch.
                            if (dataList.size() == insertBatchSize) {
                                long s = System.currentTimeMillis();
                                thisReadCount = dataList.size();
                                switch (taskType) {
                                    case "1":
                                        thisWriteCount = writeBaseData(dataList, type, pgConn);
                                        break;
                                    case "2":
                                        thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
                                        break;
                                    default:
                                        throw new RuntimeException();
                                }
                                totalFileWriteCount += thisWriteCount;
                                dataList.clear();
                                long e = System.currentTimeMillis() - s;
                                log.info("taskType:{}\t文件名:{}\t此次已读取:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t此次耗时:{}", taskType, dataFile.getName(), thisReadCount, thisWriteCount, totalFileReadCount, totalFileWriteCount, e);
                            }
                        }
                        // Flush the final, partially filled batch.
                        switch (taskType) {
                            case "1":
                                thisWriteCount = writeBaseData(dataList, type, pgConn);
                                break;
                            case "2":
                                thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
                                break;
                            default:
                                throw new RuntimeException();
                        }
                        totalFileWriteCount += thisWriteCount;
                        log.info("当前文件最后一次剩余数量插入:taskType:{}\t文件名:{}\t剩余数量:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t", taskType, dataFile.getName(), dataList.size(), thisWriteCount, totalFileReadCount, totalFileWriteCount);
                        dataList.clear();
                    }
                    totalDirReadCount += totalFileReadCount;
                    totalDirWriteCount += totalFileWriteCount;
                    log.info("-------------------------此文件数据已全部结束，当前文件总已读取:{}\t当前文件总已插入:{}\t此文件读取总耗时:{}-------------------------", totalFileReadCount, totalFileWriteCount, System.currentTimeMillis() - file_s);
                }
                log.info("-------------------------此目录数据已全部结束，当前目录总已读取:{}\t当前目录总已插入:{}\t此目录读取总耗时:{}-------------------------", totalDirReadCount, totalDirWriteCount, System.currentTimeMillis() - dir_s);
            } catch (Exception e) {
                // Log but do not rethrow: one bad directory must not abort the
                // parallel/serial run; failures are triaged from the logs afterwards.
                log.error("ERROR", e);
            } finally {
                try {
                    if (pgConn != null) {
                        pgConn.close();
                    }
                } catch (SQLException e) {
                    log.error("close PG connection failed", e);
                }
            }
        });
    }


    public static void spark(String taskType, String day, String writeType) throws Exception {
        SparkConf sparkConf = new SparkConf();
        sparkConf.set("spark.yarn.user.classpath.first", "true");
        sparkConf.set("spark.driver.userClassPathFirst", "true");
        sparkConf.set("spark.executor.userClassPathFirst", "true");
        SparkContext sc = new SparkContext(sparkConf);
        Path dataDirPath = new Path(hdfsPathPrefix);
        FileStatus[] allDirsOrFiles = fs.listStatus(dataDirPath);
        assert allDirsOrFiles != null && allDirsOrFiles.length != 0;
        //过滤目录下需要的数据文件目录
        List<FileStatus> dataDirs = Arrays.stream(allDirsOrFiles)
                .filter(f -> (f.getPath()
                        .getName()
                        .contains("cur") || f.getPath()
                        .getName()
                        .contains("vol") || f.getPath()
                        .getName()
                        .contains("power")) && (day.equals("ALL") || f.getPath()
                        .getName()
                        .contains(day)))
                .collect(Collectors.toList());
        for (FileStatus dataDir : dataDirs) {
            log.info("已获取数据目录:{}", dataDir.getPath().getName());
        }
        Stream<FileStatus> fileStatusStream;
        switch (taskType) {
            case "1":
                //加载配变关系
                pbgxMap = CsvUtils.CsvToPbCd2(pbFilePath, pbHeaders, day + YHVolt.pbgx);
                //数据多开多线程并行流
                fileStatusStream = dataDirs.parallelStream();
                log.info("任务类型:{}", "入电流电压功率基础数据入PG");
                break;
            case "2":
                //不能多线程目前，因为readMap加载关联关系是用的唯一变量
                fileStatusStream = dataDirs.stream().filter(fs -> fs.getPath().getName().contains("vol"));
                log.info("任务类型:{}", "电压基础数据进行计算数据入PG，只取获取数据目录的vol电压目录");
                break;
            default:
                throw new RuntimeException("未知任务类型");
        }
        fileStatusStream.forEach(dir -> {
            assert dir.isDirectory();
            Connection pgConn;
            //目录名称
            final String dirName = dir.getPath().getName();
            //目录数据日期，目录名截取后8位为日期 格式:20220801
            final String ds = dirName.substring(dirName.length() - 8);
            //单线程sdf,为确保多线程不出问题，每个线程new一个
            final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
            //目录数据日期date对象
            final Date date;
            //数据类型 U-电压 I-电流 P-功率
            final String type;
            if (dirName.contains("vol")) {
                type = "U";
            } else if (dirName.contains("cur")) {
                type = "I";
            } else if (dirName.contains("power")) {
                type = "P";
            } else {
                throw new RuntimeException("文件未知数据类型");
            }
            //打印日志用，log名称格式(目录名-数据类型-任务类型):vol_2022081-U-1
            final Logger log = LoggerFactory.getLogger(dirName + "-" + type + "-" + taskType);
            try {
                //初始化conn
                pgConn = SqlUtils.newPgConn(pgProp);
                //ds转date
                date = sdf.parse(ds);
                //获取数据目录下的数据文件
                FileStatus[] dataFiles = fs.listStatus(dir.getPath());
                //遍历单个文件
                long dir_s = System.currentTimeMillis();
                if (taskType.equals("2")) {
                    String tableName = "pms3_qkjdy.essential_data_" + ds;
                    InputStream is = ClassLoader.getSystemResourceAsStream("essential_data_ds.sql");
                    assert is != null;
                    final byte[] bytes = ReadUtils.readStream(is);
                    String createTableSql = new String(bytes, StandardCharsets.UTF_8);

                    createTableSql = String.format(createTableSql, ds, ds);
                    Statement statement = pgConn.createStatement();
                    for (String s : createTableSql.split(";")) {
                        statement.addBatch(s);
                    }
                    int[] result = statement.executeBatch();
                    log.info("createTableSql:{}\nok:{}", createTableSql, Arrays.toString(result));
                    String truncateSql = "TRUNCATE TABLE " + tableName + ";";
                    boolean execute = pgConn.prepareStatement(truncateSql).execute();
                    if (execute) {
                        log.info("表:{}数据已清空", tableName);
                    }

                    //初始化配置
                    String pbgx = "20220915pbgx111";
                    String yhgx = "20220915yhgx111";
                    String yhpb = "20220915yhTopb111";
                    try {
                        sparkReadProperties(pbgx, yhgx, yhpb);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }

                }
                pgConn.close();


                //开始多服务器计算入库

                final Map<String, Void> uniqueIds = new ConcurrentHashMap<>();
                for (FileStatus dataFile : dataFiles) {
                    String fileName = dataFile.getPath().getName();
                    log.info("正在读取数据分区文件:路径:{}\t名称:{}", dataFile.getPath().toString(), fileName);

                    final RDD<String> rdd = sc.textFile(dataFile.getPath().toString(), 10);
                    final JavaRDD<String> javaRDD = rdd.toJavaRDD();
                    javaRDD.foreachPartition(new VoidFunction<Iterator<String>>() {

                        final List<String[]> dataList = new ArrayList<>(insertBatchSize);
                        //每批次读取
                        int thisReadCount;
                        //每批次写入
                        int thisWriteCount;

                        @Override
                        public void call(Iterator<String> lines) throws Exception {
                            final Connection pgConn = SqlUtils.newPgConn(pgProp);
                            String line;
                            while (lines.hasNext()) {
                                line = lines.next();
                                final String[] dataCol = line.split("\t");
                                dataList.add(dataCol);
                                if (dataList.size() == insertBatchSize) {
                                    thisReadCount = dataList.size();
                                    switch (taskType) {
//                                        case "1":
//                                            thisWriteCount = writeBaseData(dataList, type, pgConn);
//                                            break;
                                        case "2":
                                            thisWriteCount = sparkDoGetResult(dataList, pgConn, ds, date, null, uniqueIds);
                                            break;
                                        default:
                                            throw new RuntimeException();
                                    }
                                    if (dataList.size() != 0) {
                                        dataList.clear();
                                    }
                                }
                            }
                            switch (taskType) {
//                                case "1":
//                                    thisWriteCount = writeBaseData(dataList, type, pgConn);
//                                    break;
                                case "2":
                                    thisWriteCount = sparkDoGetResult(dataList, pgConn, ds, date, null, uniqueIds);
                                    break;
                                default:
                                    throw new RuntimeException();
                            }
                            dataList.clear();
                            try {
                                pgConn.close();
                            } catch (SQLException e) {
                                throw new RuntimeException(e);
                            }
                        }
                    });
                }
//                log.info("-------------------------此目录数据已全部结束，当前目录总已读取:{}\t当前目录总已插入:{}\t此目录读取总耗时:{}-------------------------", totalDirReadCount, totalDirWriteCount, System.currentTimeMillis() - dir_s);
            } catch (Exception e) {
                //捕获异常但不抛出，只打印堆栈，保证程序无论并行或串行都能正常执行完毕，后续再排查报错日志针对性修复
                log.error("报出了某些异常");
                e.printStackTrace();
            }
        });
    }

//    public static void hdfs(String taskType, String day, String writeType) throws Exception {
//        Path dataDirPath = new Path(hdfsPathPrefix);
//        FileStatus[] allDirsOrFiles = fs.listStatus(dataDirPath);
//        assert allDirsOrFiles != null && allDirsOrFiles.length != 0;
//        //过滤目录下需要的数据文件目录
//        List<FileStatus> dataDirs = Arrays.stream(allDirsOrFiles)
//                .filter(f -> (f.getPath()
//                        .getName()
//                        .contains("cur") || f.getPath()
//                        .getName()
//                        .contains("vol") || f.getPath()
//                        .getName()
//                        .contains("power")) && (day.equals("ALL") || f.getPath()
//                        .getName()
//                        .contains(day)))
//                .collect(Collectors.toList());
//        for (FileStatus dataDir : dataDirs) {
//            log.info("已获取数据目录:{}", dataDir.getPath().getName());
//        }
//        //遍历数据目录进行相关操作，预计90份文件左右
//        Stream<FileStatus> fileStatusStream;
//        switch (taskType) {
//            case "1":
//                //加载配变关系
//                pbgxMap = CsvUtils.CsvToPbCd2(pbFilePath, pbHeaders, day + YHVolt.pbgx);
//                //数据多开多线程并行流
//                fileStatusStream = dataDirs.parallelStream();
//                log.info("任务类型:{}", "入电流电压功率基础数据入PG");
//                break;
//            case "2":
//                //不能多线程目前，因为readMap加载关联关系是用的唯一变量
//                fileStatusStream = dataDirs.stream().filter(fs -> fs.getPath().getName().contains("vol"));
//                log.info("任务类型:{}", "电压基础数据进行计算数据入PG，只取获取数据目录的vol电压目录");
//                break;
//            default:
//                throw new RuntimeException("未知任务类型");
//        }
//        fileStatusStream.forEach(dir -> {
//            assert dir.isDirectory();
//            //写入数据用
//            Connection pgConn = null;
//            //读文件数据用
//            FSDataInputStream in = null;
//            //封装缓冲读
//            BufferedReader reader = null;
//            //目录名称
//            final String dirName = dir.getPath().getName();
//            //目录数据日期，目录名截取后8位为日期 格式:20220801
//            final String ds = dirName.substring(dirName.length() - 8);
//            //单线程sdf,为确保多线程不出问题，每个线程new一个
//            final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
//            //目录数据日期date对象
//            final Date date;
//            //数据类型 U-电压 I-电流 P-功率
//            final String type;
//            if (dirName.contains("vol")) {
//                type = "U";
//            } else if (dirName.contains("cur")) {
//                type = "I";
//            } else if (dirName.contains("power")) {
//                type = "P";
//            } else {
//                throw new RuntimeException("文件未知数据类型");
//            }
//            //打印日志用，log名称格式(目录名-数据类型-任务类型):vol_2022081-U-1
//            final Logger log = LoggerFactory.getLogger(dirName + "-" + type + "-" + taskType);
//
//            String dataLine;
//            //此目录总读取
//            long totalDirReadCount = 0;
//            //此目录总写入`
//            long totalDirWriteCount = 0;
//            //写入路径
//            java.nio.file.Path path = null;
//            try {
//                //初始化conn
//                pgConn = SqlUtils.newPgConn(pgProp);
//                //ds转date
//                date = sdf.parse(ds);
//                //获取数据目录下的数据文件
//                FileStatus[] dataFiles = fs.listStatus(dir.getPath());
//                //遍历单个文件
//                long dir_s = System.currentTimeMillis();
//                if (taskType.equals("2")) {
//                    String tableName = "pms3_qkjdy.essential_data_" + ds;
//                    InputStream is = ClassLoader.getSystemResourceAsStream("essential_data_ds.sql");
//                    assert is != null;
//                    final byte[] bytes = ReadUtils.readStream(is);
//                    String createTableSql = new String(bytes, StandardCharsets.UTF_8);
//
//                    createTableSql = String.format(createTableSql, ds, ds, ds);
//                    Statement statement = pgConn.createStatement();
//                    for (String s : createTableSql.split(";")) {
//                        statement.addBatch(s);
//                    }
//                    int[] result = statement.executeBatch();
//                    log.info("createTableSql:{}\nok:{}", createTableSql, Arrays.toString(result));
//                    String truncateSql = "TRUNCATE TABLE " + tableName + ";";
//                    boolean execute = pgConn.prepareStatement(truncateSql).execute();
//                    if (execute) {
//                        log.info("表:{}数据已清空", tableName);
//                    }
//                    if (writeType.equals("txt")) {
//                        path = Paths.get(URI.create("file:/" + writeFilePath + "/" + ds + ".txt"));
//                        log.info("写入txt文件:{}", path);
//                        java.nio.file.Path rwxrwxrwx = Files.createFile(path, PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx")));
//                        log.info("创建文件:{}", rwxrwxrwx);
//                    }
//                } else if (taskType.equals("1")) {
//                    String month = ds.substring(0, 6);
//                    String delete_volt = pg_volt_curve_pb + "_" + month;
//                    String delete_cur = pg_cur_curve + "_" + month;
//                    String delete_power = pg_power_curve + "_" + month;
//                    String sqlTemplate = "delete from %s where to_char(data_date,'YYYYMMDD') = '%s'";
//                    PreparedStatement p1 = pgConn.prepareStatement(String.format(sqlTemplate, delete_volt, ds));
//                    p1.executeUpdate();
//                    PreparedStatement p2 = pgConn.prepareStatement(String.format(sqlTemplate, delete_cur, ds));
//                    p2.executeUpdate();
//                    PreparedStatement p3 = pgConn.prepareStatement(String.format(sqlTemplate, delete_power, ds));
//                    p3.executeUpdate();
//                }
//                Map<String, Void> uniqueIds = new HashMap<>();
//                for (FileStatus dataFile : dataFiles) {
//                    long file_s = System.currentTimeMillis();
//                    long totalFileReadCount = 0;
//                    //此文件总写入
//                    long totalFileWriteCount = 0;
//                    //每批次读取
//                    int thisReadCount;
//                    //每批次写入
//                    int thisWriteCount;
//                    String fileName = dataFile.getPath().getName();
//                    List<String[]> dataList = new ArrayList<>(insertBatchSize);
//                    log.info("正在读取数据分区文件:{}", fileName);
//                    in = fs.open(dataFile);
//                    reader = new BufferedReader(new InputStreamReader(in));
//                    //此文件总读取
//                    while ((dataLine = reader.readLine()) != null) {
//                        //每读一行，当前文件总读取++
//                        totalFileReadCount++;
//                        //dataCol数据格式 0-95为96个点的数据，96为id，97为data_date，98为org_no，99为phase_flag或data_type 一共100个数据
//                        String[] dataCol = dataLine.split("\t");
//                        dataList.add(dataCol);
//                        //满批次进行一次插入
//                        if (dataList.size() == insertBatchSize) {
//                            long s = System.currentTimeMillis();
//                            thisReadCount = dataList.size();
//                            switch (taskType) {
//                                case "1":
//                                    thisWriteCount = writeBaseData(dataList, type, pgConn);
//                                    break;
//                                case "2":
//                                    thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
//                                    break;
//                                default:
//                                    throw new RuntimeException();
//                            }
//                            totalFileWriteCount += thisWriteCount;
//                            if (dataList.size() != 0) {
//                                dataList.clear();
//                            }
//                            long e = System.currentTimeMillis() - s;
//                            log.info("taskType:{}\t文件名:{}\t此次已读取:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t此次耗时:{}", taskType, dataFile.getPath()
//                                    .getName(), thisReadCount, thisWriteCount, totalFileReadCount, totalFileWriteCount, e);
//
//                        }
//                    }
//                    //剩余数据都入
//                    switch (taskType) {
//                        case "1":
//                            thisWriteCount = writeBaseData(dataList, type, pgConn);
//                            break;
//                        case "2":
//                            thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
//                            break;
//                        default:
//                            throw new RuntimeException();
//                    }
//                    totalFileWriteCount += thisWriteCount;
//                    log.info("当前文件最后一次剩余数量插入:taskType:{}\t文件名:{}\t剩余数量:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t", taskType, dataFile.getPath()
//                            .getName(), dataList.size(), thisWriteCount, totalFileReadCount, totalFileWriteCount);
//                    dataList.clear();
//                    totalDirReadCount += totalFileReadCount;
//                    totalDirWriteCount += totalFileWriteCount;
//                    log.info("-------------------------此文件数据已全部结束，当前文件总已读取:{}\t当前文件总已插入:{}\t此文件读取总耗时:{}-------------------------", totalFileReadCount, totalFileWriteCount, System.currentTimeMillis() - file_s);
//                }
//                log.info("-------------------------此目录数据已全部结束，当前目录总已读取:{}\t当前目录总已插入:{}\t此目录读取总耗时:{}-------------------------", totalDirReadCount, totalDirWriteCount, System.currentTimeMillis() - dir_s);
//            } catch (Exception e) {
//                //捕获异常但不抛出，只打印堆栈，保证程序无论并行或串行都能正常执行完毕，后续再排查报错日志针对性修复
//                log.error("ERROR");
//                e.printStackTrace();
//            } finally {
//                IOUtils.closeStream(reader);
//                IOUtils.closeStream(in);
//                try {
//                    if (pgConn != null) {
//                        pgConn.close();
//                    }
//                } catch (SQLException e) {
//                    e.printStackTrace();
//                }
//            }
//        });
//    }



    /**
     * Scans the HDFS data directories under {@code hdfsPathPrefix} and loads their contents.
     * <p>
     * taskType "1": writes raw current/voltage/power curve base data into PG, processing
     * directories in parallel. taskType "2": computes voltage results from the "vol"
     * directories only — single-threaded, because the relation maps live in static fields.
     *
     * @param taskType  "1" = base-data load, "2" = voltage computation
     * @param day       yyyyMMdd day filter, or "ALL" for every directory
     * @param writeType "txt" writes taskType-2 results to a local text file instead of PG
     * @throws Exception if the HDFS directory listing fails
     */
    public static void hdfs(String taskType, String day, String writeType) throws Exception {
        Path dataDirPath = new Path(hdfsPathPrefix);
        FileStatus[] allDirsOrFiles = fs.listStatus(dataDirPath);
        assert allDirsOrFiles != null && allDirsOrFiles.length != 0;
        // Keep only the cur/vol/power data directories, optionally restricted to one day.
        List<FileStatus> dataDirs = Arrays.stream(allDirsOrFiles)
                .filter(f -> {
                    final String name = f.getPath().getName();
                    return (name.contains("cur") || name.contains("vol") || name.contains("power"))
                            && (day.equals("ALL") || name.contains(day));
                })
                .collect(Collectors.toList());
        for (FileStatus dataDir : dataDirs) {
            log.info("已获取数据目录:{}", dataDir.getPath().getName());
        }
        // Choose the traversal strategy per task type (roughly 90 directories expected).
        Stream<FileStatus> fileStatusStream;
        switch (taskType) {
            case "1":
                // Load the transformer relation map.
                pbgxMap = CsvUtils.CsvToPbCd2(pbFilePath, pbHeaders, day + YHVolt.pbgx);
                // Base-data load is independent per directory, so run directories in parallel.
                fileStatusStream = dataDirs.parallelStream();
                log.info("任务类型:{}", "入电流电压功率基础数据入PG");
                break;
            case "2":
                // Must stay single-threaded for now: readMap keeps relations in static fields.
                fileStatusStream = dataDirs.stream().filter(d -> d.getPath().getName().contains("vol"));
                log.info("任务类型:{}", "电压基础数据进行计算数据入PG，只取获取数据目录的vol电压目录");
                break;
            default:
                throw new RuntimeException("未知任务类型");
        }
        fileStatusStream.forEach(dir -> {
            assert dir.isDirectory();
            // PG connection used for all writes of this directory.
            Connection pgConn = null;
            // Directory name; its last 8 characters are the data date, e.g. 20220801.
            final String dirName = dir.getPath().getName();
            final String ds = dirName.substring(dirName.length() - 8);
            // SimpleDateFormat is not thread-safe, so each worker creates its own instance.
            final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd");
            // Data date as a Date object.
            final Date date;
            // Data type: U = voltage, I = current, P = power.
            final String type;
            if (dirName.contains("vol")) {
                type = "U";
            } else if (dirName.contains("cur")) {
                type = "I";
            } else if (dirName.contains("power")) {
                type = "P";
            } else {
                throw new RuntimeException("文件未知数据类型");
            }
            // Per-directory logger, named like: vol_2022081-U-1 (dirName-type-taskType).
            final Logger log = LoggerFactory.getLogger(dirName + "-" + type + "-" + taskType);

            // Totals across all files of this directory.
            long totalDirReadCount = 0;
            long totalDirWriteCount = 0;
            // Local output file (only used when taskType "2" and writeType "txt").
            java.nio.file.Path path = null;
            try {
                pgConn = SqlUtils.newPgConn(pgProp);
                date = sdf.parse(ds);
                // Data files inside this directory.
                FileStatus[] dataFiles = fs.listStatus(dir.getPath());
                long dir_s = System.currentTimeMillis();
                if (taskType.equals("2")) {
                    // (Re)create and truncate the per-day result table from the bundled DDL.
                    String tableName = "pms3_qkjdy.essential_data_" + ds;
                    InputStream is = ClassLoader.getSystemResourceAsStream("essential_data_ds.sql");
                    assert is != null;
                    final byte[] bytes = ReadUtils.readStream(is);
                    String createTableSql = new String(bytes, StandardCharsets.UTF_8);
                    createTableSql = String.format(createTableSql, ds, ds, ds);
                    Statement statement = pgConn.createStatement();
                    for (String s : createTableSql.split(";")) {
                        statement.addBatch(s);
                    }
                    int[] result = statement.executeBatch();
                    log.info("createTableSql:{}\nok:{}", createTableSql, Arrays.toString(result));
                    String truncateSql = "TRUNCATE TABLE " + tableName + ";";
                    boolean execute = pgConn.prepareStatement(truncateSql).execute();
                    if (execute) {
                        log.info("表:{}数据已清空", tableName);
                    }
                    if (writeType.equals("txt")) {
                        path = Paths.get(URI.create("file:/" + writeFilePath + "/" + ds + ".txt"));
                        log.info("写入txt文件:{}", path);
                        java.nio.file.Path rwxrwxrwx = Files.createFile(path, PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxrwx")));
                        log.info("创建文件:{}", rwxrwxrwx);
                    }
                } else if (taskType.equals("1")) {
                    // Idempotent reload: delete this day's rows from the three monthly curve tables.
                    String month = ds.substring(0, 6);
                    String sqlTemplate = "delete from %s where to_char(data_date,'YYYYMMDD') = '%s'";
                    String[] tables = {pg_volt_curve_pb + "_" + month, pg_cur_curve + "_" + month, pg_power_curve + "_" + month};
                    for (String table : tables) {
                        // ds is an internal yyyyMMdd string, not user input, so String.format is acceptable here.
                        PreparedStatement p = pgConn.prepareStatement(String.format(sqlTemplate, table, ds));
                        p.executeUpdate();
                    }
                }
                // Shared id de-duplication map used by the txt writer.
                Map<String, Void> uniqueIds = new HashMap<>();
                for (FileStatus dataFile : dataFiles) {
                    long file_s = System.currentTimeMillis();
                    long totalFileReadCount = 0;
                    long totalFileWriteCount = 0;
                    int thisReadCount;
                    int thisWriteCount;
                    String fileName = dataFile.getPath().getName();
                    List<String[]> dataList = new ArrayList<>(insertBatchSize);
                    log.info("正在读取数据分区文件:{}", fileName);
                    // BUGFIX: streams are now closed per file via try-with-resources. Previously
                    // `in`/`reader` were reassigned each iteration and only closed in the outer
                    // finally, leaking one HDFS input stream per file except the last.
                    // Charset is pinned to UTF-8 (consistent with the DDL read above) instead of
                    // relying on the platform default.
                    try (FSDataInputStream in = fs.open(dataFile.getPath());
                         BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
                        String dataLine;
                        while ((dataLine = reader.readLine()) != null) {
                            totalFileReadCount++;
                            // dataCol layout: [0..95] the 96 point values, [96] id, [97] data_date,
                            // [98] org_no, [99] phase_flag or data_type — 100 columns total.
                            String[] dataCol = dataLine.split("\t");
                            dataList.add(dataCol);
                            // Flush a full batch.
                            if (dataList.size() == insertBatchSize) {
                                long s = System.currentTimeMillis();
                                thisReadCount = dataList.size();
                                switch (taskType) {
                                    case "1":
                                        thisWriteCount = writeBaseData(dataList, type, pgConn);
                                        break;
                                    case "2":
                                        thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
                                        break;
                                    default:
                                        throw new RuntimeException();
                                }
                                totalFileWriteCount += thisWriteCount;
                                dataList.clear();
                                long e = System.currentTimeMillis() - s;
                                log.info("taskType:{}\t文件名:{}\t此次已读取:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t此次耗时:{}", taskType, dataFile.getPath()
                                        .getName(), thisReadCount, thisWriteCount, totalFileReadCount, totalFileWriteCount, e);
                            }
                        }
                    }
                    // Flush the final partial batch.
                    switch (taskType) {
                        case "1":
                            thisWriteCount = writeBaseData(dataList, type, pgConn);
                            break;
                        case "2":
                            thisWriteCount = doGetResult(dataList, pgConn, ds, date, writeType.equals("txt") ? path : null, uniqueIds);
                            break;
                        default:
                            throw new RuntimeException();
                    }
                    totalFileWriteCount += thisWriteCount;
                    log.info("当前文件最后一次剩余数量插入:taskType:{}\t文件名:{}\t剩余数量:{}\t此次已插入:{}\t当前文件总已读取:{}\t当前文件总已插入:{}\t", taskType, dataFile.getPath()
                            .getName(), dataList.size(), thisWriteCount, totalFileReadCount, totalFileWriteCount);
                    dataList.clear();
                    totalDirReadCount += totalFileReadCount;
                    totalDirWriteCount += totalFileWriteCount;
                    log.info("-------------------------此文件数据已全部结束，当前文件总已读取:{}\t当前文件总已插入:{}\t此文件读取总耗时:{}-------------------------", totalFileReadCount, totalFileWriteCount, System.currentTimeMillis() - file_s);
                }
                log.info("-------------------------此目录数据已全部结束，当前目录总已读取:{}\t当前目录总已插入:{}\t此目录读取总耗时:{}-------------------------", totalDirReadCount, totalDirWriteCount, System.currentTimeMillis() - dir_s);
            } catch (Exception e) {
                // Intentionally swallowed: log and keep going so other directories still finish;
                // failures are investigated from the logs afterwards.
                log.error("ERROR", e);
            } finally {
                try {
                    if (pgConn != null) {
                        pgConn.close();
                    }
                } catch (SQLException e) {
                    log.error("close pg connection failed", e);
                }
            }
        });
    }

    /**
     * Entry point.
     * <p>
     * Expected arguments:
     * <ol start="0">
     *   <li>taskType — "1" = current/voltage/power base data into PG, "2" = voltage computation into PG</li>
     *   <li>day — yyyyMMdd filter or "ALL"</li>
     *   <li>type — run mode: "file", "spark", or "hdfs" (default)</li>
     *   <li>writeType — e.g. "txt" to write results to a local file</li>
     * </ol>
     *
     * @throws IllegalArgumentException if fewer than 4 arguments are supplied
     */
    public static void main(String[] args) throws Exception {
        log.info("此次程序版本号:{}", "加了些日志,修复bug:list剩余数据未清空");
        // Fail fast with a clear message instead of a bare ArrayIndexOutOfBoundsException.
        if (args == null || args.length < 4) {
            throw new IllegalArgumentException("expected 4 args: <taskType> <day> <type> <writeType>, got "
                    + (args == null ? 0 : args.length));
        }
        // Task type: 1 = base curve data into PG, 2 = voltage computation into PG.
        String taskType = args[0];
        String day = args[1];
        String type = args[2];
        String writeType = args[3];
        initStaticVariables(type);
        switch (type) {
            case "file":
                file(taskType, day, writeType);
                break;
            case "spark":
                spark(taskType, day, writeType);
                break;
            case "hdfs":
            default:
                hdfs(taskType, day, writeType);
                break;
        }
    }

    /**
     * Converts raw tab-split voltage rows into {@code CMS_VOLT_CURVE} objects (A-phase rows
     * only), computes qualified-voltage results via {@code YHVolt.getResult}, and writes
     * them either into PG ({@code filePath == null}) or to a local text file.
     *
     * @param dataList  rows laid out as: [0..95] the 96 point values, [96] id,
     *                  [97] data_date, [98] org_no, [99] phase_flag, [100] data_point_flag
     *                  (NOTE(review): index 100 implies 101 columns, one more than the
     *                  100-column layout documented elsewhere — confirm against the producer)
     * @param pgConn    open PG connection, used only when filePath is null
     * @param ds        data date as a yyyyMMdd string (used as the table-name suffix)
     * @param date      the same date parsed to a Date
     * @param filePath  local output file, or null to insert into PG
     * @param uniqueIds shared id de-duplication map passed through to writeFile
     * @return number of rows written
     */
    public static int doGetResult(List<String[]> dataList, Connection pgConn, String ds, Date date, java.nio.file.Path filePath, Map<String, Void> uniqueIds) {
        List<CMS_VOLT_CURVE> results = new ArrayList<>(insertBatchSize);
        // Phase distribution counters, logged below for diagnostics.
        int A = 0;
        int B = 0;
        int C = 0;
        int other = 0;
        for (String[] result : dataList) {
            String id = result[96];
            String dataDate = result[97];
            String orgNo = result[98];
            String phaseFlag = result[99];
            String dataPointFlag = result[100];
            switch (phaseFlag) {
                case "1":
                    A++;
                    break;
                case "2":
                    B++;
                    break;
                case "3":
                    C++;
                    break;
                default:
                    other++;
                    break;
            }
            // Only A-phase (phase_flag == "1") rows are processed further.
            if (!phaseFlag.equals("1")) {
                continue;
            }
            String row = id + "-" + dataDate + "-" + phaseFlag;
            CMS_VOLT_CURVE cms_volt_curve = new CMS_VOLT_CURVE();
            cms_volt_curve.setRow(row);
            cms_volt_curve.setMETER_ID(id);
            cms_volt_curve.setDATA_DATE(date);
            cms_volt_curve.setPHASE_FLAG(phaseFlag);
            cms_volt_curve.setDATA_POINT_FLAG(dataPointFlag);
            cms_volt_curve.setORG_NO(orgNo);
            // Points are numbered 1..96.
            for (int i = 0; i < 96; i++) {
                cms_volt_curve.getCOL_TIME$U().add(new VoltU1_96(i + 1, result[i]));
            }
            results.add(cms_volt_curve);
        }
        // Relation lookups happen inside YHVolt.getResult via static maps, so this path
        // is not safe for concurrent use (see readMap).
        List<VoltageQualifiedVo> vos = results.stream()
                .map(vo -> YHVolt.getResult(vo, ds))
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
        if (results.size() != 0) {
            log.info("result sample:{}\t传入数据总数:{}\tA相总数:{}\tvo总数:{}\tA:{}\tB:{}\tC:{}\tother:{}", results.get(0).getCOL_TIME$U(), dataList.size(), results.size(), vos.size(), A, B, C, other);
        }
        String preSql = "insert into pms3_qkjdy.essential_data_" + ds
                + " (id,sbname,sbid,date_time,dycount,valid_date_time,monitor_time,max_value,max_value_time,min_value,min_value_time,device_type,device_level,org_no,pid,real_voltage_point,up_time,down_time,exception_code,voltage_level,avg,cons_type,grid_type,high_voltage,low_voltage,severe_high_voltage,severe_low_voltage,high_time,low_time,severe_high_time,severe_low_time,high_cs,low_cs,severe_high_cs,severe_low_cs) values ";
        if (filePath == null) {
            final String postSql = postSql(vos);
            if (postSql.length() != 0) {
                // BUGFIX: reuse the VALUES clause built above — it was previously rebuilt
                // with a second postSql(vos) call for every batch.
                String finalSql = preSql + postSql;
                PreparedStatement ps;
                try {
                    ps = pgConn.prepareStatement(finalSql);
                    return ps.executeUpdate();
                } catch (SQLException e) {
                    log.error("finalSql:{}", finalSql);
                    throw new RuntimeException(e);
                }
            } else {
                return 0;
            }
        } else {
            try {
                return writeFile(vos, uniqueIds, filePath);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }


    /**
     * Builds {@code CMS_VOLT_CURVE} objects from raw tab-split rows (A-phase rows only) and
     * feeds each one to {@code TimeSharing.getResult}, which accumulates time-sharing data
     * into {@code pbMap} as a side effect.
     * <p>
     * NOTE(review): {@code pgConn}, {@code filePath} and {@code uniqueIds} are currently
     * unused; they are kept for signature parity with {@link #doGetResult}.
     *
     * @param dataList rows laid out as: [0..95] the 96 point values, [96] id,
     *                 [97] data_date, [98] org_no, [99] phase_flag, [100] data_point_flag
     * @param ds       data date as a yyyyMMdd string
     * @param date     the same date parsed to a Date
     * @param pbMap    accumulator for time-sharing results, mutated by TimeSharing.getResult
     * @return current size of pbMap after processing this batch
     */
    public static int doGetResult2(List<String[]> dataList, Connection pgConn, String ds, Date date, java.nio.file.Path filePath, Map<String, Void> uniqueIds,HashMap<String, List<TimeShare>> pbMap) throws Exception {
        List<CMS_VOLT_CURVE> curves = new ArrayList<>(insertBatchSize);

        // Phase distribution counters (diagnostic only; not logged at the moment).
        int phaseA = 0;
        int phaseB = 0;
        int phaseC = 0;
        int phaseOther = 0;
        for (String[] cols : dataList) {
            final String meterId = cols[96];
            final String dataDate = cols[97];
            final String orgNo = cols[98];
            final String phaseFlag = cols[99];
            final String dataPointFlag = cols[100];
            if ("1".equals(phaseFlag)) {
                phaseA++;
            } else if ("2".equals(phaseFlag)) {
                phaseB++;
            } else if ("3".equals(phaseFlag)) {
                phaseC++;
            } else {
                phaseOther++;
            }
            // Only A-phase (phase_flag == "1") rows are processed further.
            if (!phaseFlag.equals("1")) {
                continue;
            }
            CMS_VOLT_CURVE curve = new CMS_VOLT_CURVE();
            curve.setRow(meterId + "-" + dataDate + "-" + phaseFlag);
            curve.setMETER_ID(meterId);
            curve.setDATA_DATE(date);
            curve.setPHASE_FLAG(phaseFlag);
            curve.setDATA_POINT_FLAG(dataPointFlag);
            curve.setORG_NO(orgNo);

            // Points are numbered 1..96, values taken from columns 0..95.
            int point = 0;
            while (point < 96) {
                curve.getCOL_TIME$U().add(new VoltU1_96(point + 1, cols[point]));
                point++;
            }
            // Side effect: accumulates this curve's time-sharing result into pbMap.
            TimeSharing.getResult(curve,ds,pbMap);
            curves.add(curve);
        }

        return pbMap.size();
    }

    /**
     * Builds A-phase voltage-curve objects from raw rows, evaluates them via
     * {@link YHVolt#getResult} and inserts the qualified results into the
     * {@code pms3_qkjdy.essential_data_<ds>} PostgreSQL table in one batch statement.
     *
     * Fixes over the previous version: guards the debug sample logging against an empty
     * result list (previously threw IndexOutOfBoundsException when no A-phase rows were
     * present), computes the VALUES clause only once, and closes the PreparedStatement
     * via try-with-resources.
     *
     * @param dataList  raw rows; layout as in doGetResult2 (96 points + id/date/org/phase)
     * @param pgConn    open PostgreSQL connection used for the batch insert
     * @param ds        date string; selects the target partition table and is passed to YHVolt
     * @param date      data date set on each curve object
     * @param filePath  unused in the current DB-insert code path
     * @param uniqueIds unused in the current DB-insert code path
     * @return number of rows inserted, or 0 when nothing qualified
     * @throws RuntimeException wrapping any SQLException from the insert
     */
    public static int sparkDoGetResult(List<String[]> dataList, Connection pgConn, String ds, Date date, java.nio.file.Path filePath, Map<String, Void> uniqueIds) {
        final List<CMS_VOLT_CURVE> results = new ArrayList<>(insertBatchSize);
        for (String[] result : dataList) {
            String id = result[96];
            String dataDate = result[97];
            String orgNo = result[98];
            String phaseFlag = result[99];
            // Only A-phase rows (flag "1") are evaluated; null-safe comparison.
            if (!"1".equals(phaseFlag)) {
                continue;
            }
            CMS_VOLT_CURVE cms_volt_curve = new CMS_VOLT_CURVE();
            cms_volt_curve.setRow(id + "-" + dataDate + "-" + phaseFlag);
            cms_volt_curve.setMETER_ID(id);
            cms_volt_curve.setDATA_DATE(date);
            cms_volt_curve.setPHASE_FLAG(phaseFlag);
            cms_volt_curve.setORG_NO(orgNo);
            // Points are 1-based (u1..u96); raw values sit at indexes 0..95.
            for (int i = 0; i < 96; i++) {
                cms_volt_curve.getCOL_TIME$U().add(new VoltU1_96(i + 1, result[i]));
            }
            results.add(cms_volt_curve);
        }

        // Sample logging; guarded so an empty batch no longer throws.
        if (!results.isEmpty()) {
            log.info("排序前");
            log.info(results.get(0).getCOL_TIME$U().get(0).toString());
        }
        List<VoltageQualifiedVo> vos = results.stream()
                .map(vo -> YHVolt.getResult(vo, ds))
                .filter(Objects::nonNull)
                .collect(Collectors.toList());
        if (!results.isEmpty()) {
            log.info("stream操作后");
            log.info(results.get(0).getCOL_TIME$U().get(0).toString());
        }

        String preSql = "insert into pms3_qkjdy.essential_data_" + ds +
                " (id,sbname,sbid,date_time,dycount,valid_date_time,monitor_time,max_value,max_value_time,min_value,min_value_time,device_type,device_level,org_no,pid,real_voltage_point,up_time,down_time,exception_code,voltage_level,avg,cons_type,grid_type,high_voltage,low_voltage,severe_high_voltage,severe_low_voltage,high_time,low_time,severe_high_time,severe_low_time,high_cs,low_cs,severe_high_cs,severe_low_cs) values ";
        // Build the VALUES clause exactly once (was computed twice before).
        final String postSql = postSql(vos);
        if (postSql.isEmpty()) {
            return 0;
        }
        String finalSql = preSql + postSql;
        // try-with-resources closes the statement even on failure (previous version leaked it).
        try (PreparedStatement ps = pgConn.prepareStatement(finalSql)) {
            return ps.executeUpdate();
        } catch (SQLException e) {
            log.error("finalSql:{}", finalSql);
            throw new RuntimeException(e);
        }
    }


    /**
     * Renders the qualified rows to text via {@code postLine} and appends the result
     * to {@code filePath}.
     *
     * @param vos       qualified voltage results to serialize
     * @param uniqueIds de-duplication map consulted by postLine
     * @param filePath  existing file to append to (must already exist: APPEND mode)
     * @return the line count reported by postLine
     * @throws IOException if the append fails
     */
    public static int writeFile(List<VoltageQualifiedVo> vos, Map<String, Void> uniqueIds, java.nio.file.Path filePath) throws
            IOException {
        final Pair<Integer, String> rendered = postLine(vos, uniqueIds);
        Files.write(filePath, rendered.getRight().getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
        return rendered.getLeft();
    }

//    public static int writeFile2(List<TimeShare> vos, Map<String, Void> uniqueIds, java.nio.file.Path filePath) throws
//            IOException {
//        Pair<Integer, String> pair = postLine(vos, uniqueIds);
//        int count = pair.getLeft();
//        String content = pair.getRight();
//        Files.write(filePath, content.getBytes(StandardCharsets.UTF_8), StandardOpenOption.APPEND);
//        return count;
//    }

    // SQL templates: %s placeholders are (table, point columns, unique column, values clause).
    public static final String insertSqlTemplate_volt_pb = "insert into %s (%s,id,data_date,org_no,%s,tg_id,volt_code,mp_name,data_point_flag) values %s";
    public static final String insertSqlTemplate = "insert into %s (%s,id,data_date,org_no,%s,data_point_flag) values %s";
    // Comma-separated column lists for the 96 quarter-hour points: "u1,...,u96" etc.
    public static final String U_values;
    public static final String I_values;
    public static final String P_values;


    static {
        List<String> uCols = new ArrayList<>(96);
        List<String> iCols = new ArrayList<>(96);
        List<String> pCols = new ArrayList<>(96);
        for (int point = 1; point <= 96; point++) {
            uCols.add("u" + point);
            iCols.add("i" + point);
            pCols.add("p" + point);
        }
        U_values = String.join(",", uCols);
        I_values = String.join(",", iCols);
        P_values = String.join(",", pCols);
    }

    //写入pg库
    /**
     * Filters out rows whose meter id has no entry in {@code pbgxMap} (data cleaning),
     * builds one multi-row INSERT via {@link #genInsertSql} and executes it.
     *
     * Fix: the PreparedStatement is now closed via try-with-resources (previously leaked).
     *
     * @param dataList raw rows (96 points + metadata columns)
     * @param type     data type selector: "U" (voltage), "I" (current) or "P" (power)
     * @param conn     open PostgreSQL connection
     * @return number of rows inserted, or 0 when nothing survived cleaning
     * @throws RuntimeException wrapping any SQLException (the failing SQL is logged first)
     */
    public static int writeBaseData(List<String[]> dataList, String type, Connection conn) {
        // Data cleaning: keep only rows whose meter id has a known relation.
        List<String[]> validDataList = dataList.stream()
                .filter(dataArray -> pbgxMap.get(dataArray[96]) != null)
                .collect(Collectors.toList());

        if (validDataList.isEmpty()) {
            return 0;
        }
        // Generate and execute the batch insert.
        String insertSql = genInsertSql(validDataList, type);
        try (PreparedStatement ps = conn.prepareStatement(insertSql)) {
            return ps.executeUpdate();
        } catch (SQLException e) {
            log.error(insertSql);
            throw new RuntimeException(e);
        }
    }


    //生成写入pg库的insert sql
    public static String genInsertSql(List<String[]> dataList, String type) {
        StringJoiner valuesSj = new StringJoiner(",");
        String ds = null;
        String dataPointFlag = null;
        for (String[] dataArray : dataList) {
            StringJoiner valueSj = new StringJoiner(",", "(", ")");
            String meterId = dataArray[96];
            ds = dataArray[97];
            dataPointFlag = dataArray[100];
            for (int i = 0; i < dataArray.length - 1; i++) {
                //一行数据的每列数据
                String data = dataArray[i];
                //如果是空串，置为null
                if (StringUtils.isBlank(data)) {
                    data = null;
                }
                if (i < 96) {
                    //96个点数字不为null，但是包含除数字和小数点外的其他字符，置为null
                    if (data != null && !NumberUtil.isNumber(data)) {
                        data = null;
                    }
                    valueSj.add(data);
                } else {
                    //字符串类型不为null的话加个单引号
                    if (data != null) {
                        valueSj.add("'" + data + "'");
                    } else {
                        valueSj.add(null);
                    }
                }
            }

            if (type.equals("U")) {
                Triple<String, String, String> otherData = pbgxMap.get(meterId);
                valueSj.add("'" + otherData.getLeft() + "'");
                valueSj.add("'" + otherData.getMiddle() + "'");
                valueSj.add("'" + otherData.getRight() + "'");
                valueSj.add("'" + dataPointFlag + "'");
            } else {
                dataPointFlag = dataArray[dataArray.length - 1];
                valueSj.add("'" + dataPointFlag + "'");
            }


            valuesSj.add(valueSj.toString());

        }
        String ds_month = ds.substring(0, 6);
        //入pg库表名
        String pgTableName;
        //96个点字段
        String valuesColumn;
        //表独特字段，电压电流和phase_flag，功率为data_type
        String uniqueColumn;
        String finalSql;
        switch (type) {
            case "U":
                pgTableName = pg_volt_curve_pb + "_" + ds_month;
                valuesColumn = U_values;
                uniqueColumn = "phase_flag";
                finalSql = String.format(insertSqlTemplate_volt_pb, pgTableName, valuesColumn, uniqueColumn, valuesSj);
                break;
            case "I":
                pgTableName = pg_cur_curve + "_" + ds_month;
                valuesColumn = I_values;
                uniqueColumn = "phase_flag";
                finalSql = String.format(insertSqlTemplate, pgTableName, valuesColumn, uniqueColumn, valuesSj);
                break;
            case "P":
                pgTableName = pg_power_curve + "_" + ds_month;
                valuesColumn = P_values;
                uniqueColumn = "data_type";
                finalSql = String.format(insertSqlTemplate, pgTableName, valuesColumn, uniqueColumn, valuesSj);
                break;
            default:
                throw new RuntimeException("文件未知数据类型");
        }
        return finalSql;
    }

}
