package avicit.bdp.dds.common.utils.parquet;


import avicit.bdp.dds.api.dto.PhmDataFormatBean;
import avicit.bdp.dds.api.dto.TableHeadBean;
import avicit.bdp.dds.api.dto.TableResultBean;
import avicit.platform6.core.exception.BusinessException;
import avicit.platform6.core.rest.msg.PageParameter;
import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.column.page.PageReadStore;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.GroupFactory;
import org.apache.parquet.example.data.simple.SimpleGroupFactory;
import org.apache.parquet.example.data.simple.convert.GroupRecordConverter;
import org.apache.parquet.format.converter.ParquetMetadataConverter;
import org.apache.parquet.hadoop.ParquetFileReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.apache.parquet.hadoop.metadata.BlockMetaData;
import org.apache.parquet.hadoop.metadata.ParquetMetadata;
import org.apache.parquet.io.ColumnIOFactory;
import org.apache.parquet.io.MessageColumnIO;
import org.apache.parquet.io.RecordReader;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.Type;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * TODO
 *
 * @author xugb
 * @date 2022/3/30 17:35
 */
public class ParquetUtils {
    private static final Logger logger = LoggerFactory.getLogger(ParquetUtils.class);

    // Default time-of-day pattern (hours:minutes:seconds.millis).
    // NOTE(review): not referenced anywhere in this class — verify external use before removing.
    private static final String TIME_DEFAULT_PATTERN = "HH:mm:ss.SSS";

    /**
     * Maps Parquet primitive type names to the Java wrapper type names exposed in
     * column metadata (e.g. "INT64" -> "Long"). Declared final so the mapping
     * reference cannot be reassigned.
     */
    private static final Map<String, String> typeMap = new HashMap<>();

    static {
        typeMap.put("INT64", "Long");
        typeMap.put("INT32", "Integer");
        typeMap.put("DOUBLE", "Double");
        typeMap.put("FLOAT", "Float");
        typeMap.put("BOOLEAN", "Boolean");
        typeMap.put("BINARY", "String");
    }

    /**
     * Ad-hoc manual test entry point used during development.
     * The hard-coded HDFS/local paths and the commented-out calls are developer
     * scratch work for exercising the read/write helpers, not production code.
     * As written, it only builds a sample {@code PhmDataFormatBean} and prints a
     * quote-stripping example.
     */
    public static void main(String[] args) throws Exception {
        // Sample input locations (HDFS and local) used while testing the readers.
        String file1 = "hdfs://10.216.38.71:8020//user/piflow/visualization/application_1650017251786_0077/曲线图-2/data";
        String file2 = "file:///D:\\xuwork\\gitworks\\SparkDemo\\namesAndAges.parquet";
//    parquetWriter("test\\parquet-out2","input.txt");
        String file3 = "file:///d://data2022-04-20.parquet";
        String file4 = "hdfs://10.216.60.112:8020/bdp/dds/data2022-04-20.parquet";
        // Sample column projection list for readParquet.
        List<String> list = new ArrayList<>();
        list.add("TIME");
        list.add("FSECU_1_A429_Out_2_L354_Slat_Skew_FSECU1_Mon");
        list.add("FSECU_1_A429_Out_2_L354_Local_FSECU_Operational_FSECU1_Mon");
        //TableResultBean bean = readParquet(file3, 1, 10, null);
        //System.out.println(bean.getPageParameter().getTotalCount());
        // parquetReader(file3, 100000,100100);

        String source = "d://data2022-04-20.txt";
        String dist = "file:///d://data2022-04-20.parquet";
        //parquetWriter(dist, source, "\t", 1);
        File file = new File(source);
        // Sample text-format description: tab-separated, one header line.
        PhmDataFormatBean format = new PhmDataFormatBean();
        format.setHeader(true);
        format.setRegex("\t");
        format.setHeaderLines(1);
        format.setReadFieldNum(0);
        format.setMarked(false);
        //parquetWriter(dist, new FileInputStream(file), format, null, null);
        // Demonstrates single-quote stripping (see handleMarkData).
        String str = "'abc'";
        System.out.println(str + "/" + str.replaceAll("\'", ""));
    }


    /**
     * Reads rows from a Parquet file with optional paging and column projection.
     *
     * @param filepath  Parquet file path (local or HDFS URI)
     * @param pageNo    1-based page number; when null (together with pageSize) all rows are returned
     * @param pageSize  rows per page; when null all rows are returned
     * @param fieldList column names to read; null means all columns
     * @return result bean holding header metadata, row data and paging information
     * @throws IOException if the file cannot be read
     */
    public static TableResultBean readParquet(String filepath, Integer pageNo, Integer pageSize, List<String> fieldList) throws IOException {
        long startLine = -1;
        long endLine = -1;
        // Translate the page request into absolute, 1-based row boundaries.
        if (pageNo != null && pageSize != null) {
            if (pageNo <= 0) {
                pageNo = 1;
            }
            // Use long arithmetic: the old int expression (pageNo - 1) * pageSize
            // overflowed before being widened for large page numbers/sizes.
            startLine = (long) (pageNo - 1) * pageSize + 1;
            endLine = (long) pageNo * pageSize;
        }

        TableResultBean table = new TableResultBean();
        long start = System.currentTimeMillis();
        // Result data: one name->value map per row.
        List<Map<String, Object>> dataList = new ArrayList<>();

        Configuration conf = new Configuration();

        ParquetMetadata readFooter = ParquetFileReader.readFooter(conf, new Path(filepath), ParquetMetadataConverter.NO_FILTER);
        List<BlockMetaData> list = readFooter.getBlocks();
        List<BlockMetaData> blockList = new ArrayList<>();
        long lastBlockCount = 0;
        long currCount = 0;
        if (startLine == -1) {
            startLine = 0;
        }
        // Total row count across all row-group blocks.
        long total = 0;
        for (BlockMetaData data : list) {
            total = total + data.getRowCount();
        }
        // Select only the row-group blocks overlapping the requested row range.
        for (BlockMetaData data : list) {
            currCount = currCount + data.getRowCount();
            // No paging: read every block.
            if (startLine == 0 && endLine == -1) {
                blockList = list;
                break;
            } else if (startLine == 0 && endLine != -1) {
                blockList.add(data);
                if (currCount > endLine) {
                    break;
                }
            } else if (startLine != 0 && endLine == -1) {
                // Track how many rows precede the first selected block.
                if (currCount >= startLine) {
                    blockList.add(data);
                } else {
                    lastBlockCount = currCount;
                }
            } else if (startLine != 0 && endLine != -1) {
                // Track how many rows precede the first selected block.
                if (currCount < startLine) {
                    lastBlockCount = currCount;
                } else if (currCount > endLine) {
                    // Block passes the end row: include it, then stop scanning.
                    blockList.add(data);
                    break;
                } else if (currCount >= startLine) {
                    blockList.add(data);
                }

            }
        }
        // From here on lastBlockCount is the 1-based index of the first row the reader delivers.
        lastBlockCount++;
        logger.info("开始数据块位置-------" + lastBlockCount + "----数据块数量：" + blockList.size());
        MessageType schema = readFooter.getFileMetaData().getSchema();

        List<ColumnDescriptor> columns = null;

        MessageType readSchema = schema;
        // Apply the optional column projection; null means all columns.
        if (fieldList == null) {
            columns = schema.getColumns();
        } else {
            columns = filterFieldList(fieldList, schema.getColumns());
            List<Type> typeList = new ArrayList<>();
            for (ColumnDescriptor desc : columns) {
                typeList.add(schema.getType(desc.getPath()));
            }
            readSchema = new MessageType("message", typeList);
        }

        ParquetFileReader r = new ParquetFileReader(conf, readFooter.getFileMetaData(), new Path(filepath), blockList, columns);
        PageReadStore pages = null;

        try {
            long count = 0;
            while (null != (pages = r.readNextRowGroup())) {
                final long rows = pages.getRowCount();
                logger.info(" 行数: " + rows);

                final MessageColumnIO columnIO = new ColumnIOFactory().getColumnIO(readSchema);
                final RecordReader<Group> recordReader = columnIO.getRecordReader(pages,
                        new GroupRecordConverter(readSchema));

                for (int i = 0; i < rows; i++) {
                    final Group g = recordReader.read();
                    if (count == 0) {
                        // Header metadata is taken from the first record's group type.
                        table.setColumns(parquetColumn(g));
                    }
                    // Skip rows before the requested window; stop once past its end.
                    if (startLine != 0 && lastBlockCount < startLine) {
                        lastBlockCount++;
                        continue;
                    } else if (endLine != -1 && lastBlockCount > endLine) {
                        break;
                    }

                    // Extract the row's values keyed by column name.
                    Map<String, Object> row = getparquetData(table.getColumns(), g);

                    lastBlockCount++;

                    dataList.add(row);

                    count++;
                }


            }
        } finally {
            r.close();
        }
        table.setData(dataList);
        PageParameter page = new PageParameter();
        page.setTotalCount(total);
        if (pageSize != null) {
            page.setRows(pageSize);
        }
        if (pageNo != null) {
            page.setPage(pageNo);
        }
        table.setPageParameter(page);
        logger.info(" 加载时间:" + (System.currentTimeMillis() - start));
        return table;

    }

    /**
     * Reads only the column metadata (name, ordinal, mapped Java type name) of a
     * Parquet file from its footer, without touching any row data.
     *
     * @param filepath Parquet file path (local or HDFS URI)
     * @return one TableHeadBean per column, in schema order
     * @throws IOException if the footer cannot be read
     */
    public static List<TableHeadBean> readParqueColumns(String filepath) throws IOException {
        long begin = System.currentTimeMillis();

        ParquetMetadata footer = ParquetFileReader.readFooter(
                new Configuration(), new Path(filepath), ParquetMetadataConverter.NO_FILTER);
        MessageType schema = footer.getFileMetaData().getSchema();

        List<TableHeadBean> result = new ArrayList<>();
        int serial = 0;
        for (ColumnDescriptor descriptor : schema.getColumns()) {
            Type fieldType = schema.getType(descriptor.getPath());
            TableHeadBean head = new TableHeadBean();
            head.setName(fieldType.getName());
            head.setSerial(serial++);
            // Translate the physical primitive type name into a Java wrapper name.
            head.setType(typeMap.get(fieldType.asPrimitiveType().getPrimitiveTypeName().name()));
            result.add(head);
        }

        logger.info(" 加载时间:" + (System.currentTimeMillis() - begin));
        return result;
    }


    /**
     * Projects the full column list down to the requested field names.
     * Matching is case-insensitive on the first path element; the result keeps
     * the order of {@code fieldList}, and unknown names are silently skipped.
     */
    private static List<ColumnDescriptor> filterFieldList(List<String> fieldList, List<ColumnDescriptor> columns) {
        List<ColumnDescriptor> selected = new ArrayList<>();
        for (String wanted : fieldList) {
            columns.stream()
                    .filter(candidate -> candidate.getPath()[0].equalsIgnoreCase(wanted))
                    .findFirst()
                    .ifPresent(selected::add);
        }
        return selected;
    }

    /**
     * Extracts one row of a Parquet record as a column-name -> value map.
     *
     * @param columns header metadata; type names as produced by {@link #parquetColumn}
     * @param line    the Parquet record to read
     * @return map of column name to value; a cell that cannot be read maps to null
     */
    public static Map<String, Object> getparquetData(List<TableHeadBean> columns, Group line) {
        Map<String, Object> map = new HashMap<>();
        for (int i = 0; i < columns.size(); i++) {
            // Reset per column: the variable previously lived outside the loop, so a
            // failed read leaked the previous column's value into this column.
            Object cellStr = null;
            try {
                switch (columns.get(i).getType()) {
                    case "DOUBLE":
                        cellStr = line.getDouble(i, 0);
                        break;
                    case "FLOAT":
                        cellStr = line.getFloat(i, 0);
                        break;
                    case "BOOLEAN":
                        cellStr = line.getBoolean(i, 0);
                        break;
                    case "INT32":
                        cellStr = line.getInteger(i, 0);
                        break;
                    case "INT64":
                        cellStr = line.getLong(i, 0);
                        break;
                    default:
                        cellStr = line.getValueToString(i, 0);
                }
            } catch (RuntimeException ignored) {
                // Missing/unreadable cell: keep null for this column instead of failing the row.
            }
            map.put(columns.get(i).getName(), cellStr);
        }
        return map;
    }

    /**
     * Extracts one row of a Parquet record as a list of values in column order.
     *
     * @param columns header metadata; type names as produced by {@link #parquetColumn}
     * @param line    the Parquet record to read
     * @return one value per column; a cell that cannot be read yields null
     */
    public static List<Object> getparquetDataList(List<TableHeadBean> columns, Group line) {
        List<Object> list = new ArrayList<>();
        for (int i = 0; i < columns.size(); i++) {
            // Reset per column: the variable previously lived outside the loop, so a
            // failed read leaked the previous column's value into this column.
            Object cellStr = null;
            try {
                switch (columns.get(i).getType()) {
                    case "DOUBLE":
                        cellStr = line.getDouble(i, 0);
                        break;
                    case "FLOAT":
                        cellStr = line.getFloat(i, 0);
                        break;
                    case "BOOLEAN":
                        cellStr = line.getBoolean(i, 0);
                        break;
                    case "INT32":
                        cellStr = line.getInteger(i, 0);
                        break;
                    case "INT64":
                        cellStr = line.getLong(i, 0);
                        break;
                    default:
                        cellStr = line.getValueToString(i, 0);
                }
            } catch (RuntimeException ignored) {
                // Missing/unreadable cell: keep null for this position instead of failing the row.
            }
            list.add(cellStr);
        }
        return list;
    }

    /**
     * Builds table-header metadata from a Parquet record's group type.
     * The reported type name is the logical (original) type when present,
     * otherwise the physical primitive type name.
     *
     * @param line a Parquet record whose group type describes the columns
     * @return one TableHeadBean per field, in schema order
     */
    public static List<TableHeadBean> parquetColumn(Group line) {
        GroupType groupType = line.getType();
        List<TableHeadBean> columns = Lists.newArrayList();

        for (int idx = 0; idx < groupType.getFieldCount(); idx++) {
            Type fieldType = groupType.getType(idx);
            OriginalType logicalType = fieldType.getOriginalType();
            String typeName = (logicalType != null)
                    ? logicalType.name()
                    : fieldType.asPrimitiveType().getPrimitiveTypeName().name();

            TableHeadBean head = new TableHeadBean();
            head.setSerial(idx);
            head.setName(fieldType.getName());
            head.setType(typeName);
            columns.add(head);
        }

        return columns;
    }

    /**
     * Quickly counts the number of lines in a stream by skipping to EOF and
     * reading the line counter.
     * <p>
     * The result is {@code getLineNumber() + 1}: the number of line terminators
     * seen plus one, so an empty stream or a single unterminated line both count
     * as 1. The stream is decoded with the platform default charset, matching
     * the original behavior.
     *
     * @param in input stream to count; closed by this method
     * @return number of lines (terminator count + 1); 1 on read failure
     */
    public static int readFileLineNumber(InputStream in) {
        int lines = 0;
        // try-with-resources replaces the manual close/finally boilerplate.
        try (LineNumberReader lineNumberReader = new LineNumberReader(new InputStreamReader(in))) {
            // BufferedReader.skip reads until EOF, letting the reader count terminators.
            lineNumberReader.skip(Long.MAX_VALUE);
            lines = lineNumberReader.getLineNumber();
        } catch (IOException e) {
            e.printStackTrace();
        }
        // The counter is zero-based relative to terminators, hence the +1.
        return lines + 1;
    }

    /**
     * Normalizes a header token: trims surrounding whitespace and replaces
     * spaces and square brackets with underscores.
     */
    private static String replaceAllSpecialChar(String str) {
        return str.trim()
                .replace(" ", "_")
                .replace("[", "_")
                .replace("]", "_");
    }

    /**
     * Validates that the line does not split into multiple tokens under any
     * known delimiter other than the declared one; throws when it does, since
     * that indicates the wrong data format was selected.
     *
     * @param line  a sample data line
     * @param regex the delimiter declared by the data format
     */
    private static void checkOtherRegex(String line, String regex) {
        final String[] candidates = {"\t", ",", " ", ";"};
        for (String candidate : candidates) {
            if (candidate.equals(regex)) {
                continue;
            }
            if (line.split(candidate).length > 1) {
                throw new BusinessException("文件分隔符不正确，请选择正确的数据标准格式");
            }
        }
    }

    /**
     * Maps a Java wrapper type name to the corresponding Parquet primitive type
     * name; anything unrecognized (including "String") maps to "BINARY".
     */
    private static String commonDataTypeConvert(String type) {
        switch (type) {
            case "Double":
                return "DOUBLE";
            case "Float":
                return "FLOAT";
            case "Boolean":
                return "BOOLEAN";
            case "Integer":
                return "INT32";
            case "Long":
                return "INT64";
            default:
                return "BINARY";
        }
    }


    /**
     * Strips single and/or double quotes from a data cell according to the
     * format's quote flags (a null flag counts as 0, i.e. "strip").
     *
     * @param data       raw cell text
     * @param dataFormat format description carrying the quote flags
     * @return the cell text with the configured quote characters removed
     */
    private static String handleMarkData(String data, PhmDataFormatBean dataFormat) {
        Integer singleFlag = dataFormat.getSingleMark();
        Integer doubleFlag = dataFormat.getDoubleMark();
        String result = data;
        if (singleFlag == null || singleFlag == 0) {
            result = result.replaceAll("\'", "");
        }
        if (doubleFlag == null || doubleFlag == 0) {
            result = result.replaceAll("\"", "");
        }
        return result;
    }

    /**
     * Appends one cell value to a Parquet group, converting the raw string to
     * the column's physical primitive type (INT96 is appended as the raw string).
     *
     * @param group target record being assembled
     * @param type  schema type of the column being written
     * @param data  raw string value parsed from the source file
     */
    private static void appendParquetData(Group group, Type type, String data) {

        try {
            switch (type.asPrimitiveType().getPrimitiveTypeName().name()) {
                case "DOUBLE":
                    group.append(type.getName(), Double.parseDouble(data));
                    break;
                case "FLOAT":
                    group.append(type.getName(), Float.parseFloat(data));
                    break;
                case "BOOLEAN":
                    group.append(type.getName(), Boolean.parseBoolean(data));
                    break;
                case "INT96":
                    group.append(type.getName(), data);
                    break;
                case "INT64":
                    group.append(type.getName(), Long.parseLong(data));
                    break;
                case "INT32":
                    group.append(type.getName(), Integer.parseInt(data));
                    break;
                default:
                    group.append(type.getName(), data);
            }

        } catch (Exception e) {
            // Fallback: append the raw string when numeric conversion fails.
            // NOTE(review): for non-BINARY columns this append may itself fail inside
            // Parquet — confirm the intended behavior for malformed numeric cells.
            group.append(type.getName(), data);
        }
    }

    /**
     * Reformats a timestamp string from the source format to the target format.
     *
     * @param data      timestamp text in the source format
     * @param df_source format used to parse {@code data}
     * @param df_target format used to render the result
     * @return the reformatted timestamp, or null when parsing fails
     */
    private static String convertPhmTime(String data, SimpleDateFormat df_source, SimpleDateFormat df_target) {
        try {
            return df_target.format(df_source.parse(data));
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Tests whether a string parses under the given date format.
     *
     * @param data candidate timestamp text
     * @param df   format to try
     * @return true when {@code data} parses, false otherwise
     */
    private static boolean isPhmTime(String data, SimpleDateFormat df) {
        try {
            df.parse(data);
            return true;
        } catch (Exception ignored) {
            // Not a timestamp in this pattern.
            return false;
        }
    }

    /**
     * Converts a delimited text file into a Parquet file.
     * <p>
     * The last header line supplies the column names. A synthetic REQUIRED INT32
     * "POINT" column carrying the 1-based row number is prepended; a column whose
     * header is "time" (case-insensitive) is written as BINARY "TIME", all other
     * columns as DOUBLE.
     *
     * @param outPath   output Parquet path (local or HDFS URI)
     * @param inPath    input text file path
     * @param regex     field delimiter (regular expression)
     * @param headLines number of header lines; the last one supplies column names
     * @throws IOException on read/write failure
     */
    public static void parquetWriter(String outPath, String inPath, String regex, int headLines) throws IOException {
        long start = System.currentTimeMillis();

        int count = 1; // next POINT value, i.e. the 1-based row number
        ParquetWriter<Group> writer = null;
        BufferedReader br = null;
        try {
            br = new BufferedReader(new FileReader(new File(inPath)));
            String line = br.readLine();
            // Advance to the last header line, which carries the column names.
            for (int i = 1; i < headLines; i++) {
                line = br.readLine();
            }
            if (line == null) {
                // Previously this fell through to a NullPointerException on split().
                throw new BusinessException("文件内容为空，无法解析表头");
            }
            // Build the schema: POINT + one column per header token.
            List<Type> list = new ArrayList<>();
            String[] schemas = line.split(regex);
            list.add(new PrimitiveType(Type.Repetition.REQUIRED, PrimitiveType.PrimitiveTypeName.INT32, "POINT"));
            for (String str : schemas) {
                if (str.toLowerCase().equals("time")) {
                    list.add(new PrimitiveType(Type.Repetition.REQUIRED, PrimitiveType.PrimitiveTypeName.BINARY, "TIME"));
                } else {
                    list.add(new PrimitiveType(Type.Repetition.REQUIRED, PrimitiveType.PrimitiveTypeName.DOUBLE, str));
                }
            }
            MessageType schema = new MessageType("message", list);

            GroupFactory factory = new SimpleGroupFactory(schema);

            Path path = new Path(outPath);
            Configuration configuration = new Configuration();
            GroupWriteSupport writeSupport = new GroupWriteSupport();
            writeSupport.setSchema(schema, configuration);
            writer = new ParquetWriter<Group>(path, configuration, writeSupport);

            // Stream the remaining data lines into the Parquet file.
            while ((line = br.readLine()) != null) {
                Group group = factory.newGroup();
                group.append("POINT", count);
                String[] strs = line.split(regex);
                for (int i = 0; i < strs.length; i++) {
                    if (schemas[i].toLowerCase().equals("time")) {
                        group.append("TIME", strs[i]);
                    } else {
                        group.append(schemas[i], Double.parseDouble(strs[i]));
                    }
                }
                writer.write(group);
                count++;
            }
        } finally {
            if (writer != null) {
                writer.close();
            }
            if (br != null) {
                br.close();
            }
        }

        long end = System.currentTimeMillis();

        // count starts at 1 and is incremented after each write, so the number of
        // rows actually written is count - 1 (the old message over-reported by one).
        System.out.println("写入完成，总共" + (count - 1) + "行,写入" + (end - start) / 1000 + "秒");
    }

    /**
     * Reads the column names of a Parquet file from its footer schema.
     *
     * @param path Parquet file path (local or HDFS URI)
     * @return column names in schema order
     * @throws BusinessException when the footer or schema cannot be read
     */
    public static List<String> getSchema(String path) {
        try {
            ParquetMetadata readFooter = ParquetFileReader.readFooter(new Configuration(), new Path(path), ParquetMetadataConverter.NO_FILTER);
            if (readFooter == null) {
                throw new BusinessException("查询Parquet格式文件schema失败,path=" + path);
            }

            MessageType schema = readFooter.getFileMetaData().getSchema();
            if (schema == null) {
                throw new BusinessException("查询Parquet格式文件schema失败,path=" + path);
            }

            List<String> columns = new ArrayList<>();
            for (Type type : schema.getFields()) {
                columns.add(type.getName());
            }

            return columns;
        } catch (BusinessException e) {
            // Re-throw our own exceptions unchanged; previously they were caught below
            // and re-wrapped, duplicating the message prefix.
            throw e;
        } catch (Exception e) {
            throw new BusinessException(String.format("查询Parquet格式文件schema失败,path=%s,errMsg=%s", path, e.getMessage()));
        }

    }


    /**
     * Reads a single column of a Parquet file, without paging.
     * <p>
     * When the column's reported type is "BINARY", every value is parsed as a
     * double; if any value fails to parse, an empty list is returned.
     * NOTE(review): {@link #parquetColumn} reports the logical type when one is
     * present, so UTF8-annotated string columns are labeled "UTF8" (not "BINARY")
     * and are returned unparsed — confirm that this is the intended behavior.
     *
     * @param filepath   Parquet file path (local or HDFS URI)
     * @param columnName column to read (matched case-insensitively)
     * @return the column values, one entry per row
     * @throws IOException on read failure
     * @author felix
     * @date 2022/7/12 9:15
     */
    public static List readParquetByColumn(String filepath, String columnName) throws IOException {

        List<Object> dataList = new ArrayList<>();

        Configuration conf = new Configuration();

        ParquetMetadata readFooter = ParquetFileReader.readFooter(conf, new Path(filepath), ParquetMetadataConverter.NO_FILTER);
        List<BlockMetaData> blocks = readFooter.getBlocks();
        MessageType schema = readFooter.getFileMetaData().getSchema();

        // Project the schema down to the single requested column.
        List<String> fieldList = new ArrayList<>();
        fieldList.add(columnName);
        List<ColumnDescriptor> columns = filterFieldList(fieldList, schema.getColumns());
        List<Type> typeList = new ArrayList<>();
        for (ColumnDescriptor desc : columns) {
            typeList.add(schema.getType(desc.getPath()));
        }
        MessageType readSchema = new MessageType("message", typeList);

        ParquetFileReader r = new ParquetFileReader(conf, readFooter.getFileMetaData(), new Path(filepath), blocks, columns);

        PageReadStore pages = null;
        // Column type, resolved once from the first record (the projected schema is constant).
        String type = null;

        try {
            while (null != (pages = r.readNextRowGroup())) {
                final long rows = pages.getRowCount();
                logger.info(" 行数: " + rows);

                final MessageColumnIO columnIO = new ColumnIOFactory().getColumnIO(readSchema);
                final RecordReader<Group> recordReader = columnIO.getRecordReader(pages,
                        new GroupRecordConverter(readSchema));
                for (int i = 0; i < rows; i++) {
                    final Group g = recordReader.read();
                    List<TableHeadBean> headList = ParquetUtils.parquetColumn(g);
                    if (type == null) {
                        type = headList.get(0).getType().toUpperCase();
                    }
                    // Extract the single projected value for this row.
                    List<Object> row = getparquetDataList(headList, g);
                    if ("BINARY".equals(type)) {
                        try {
                            dataList.add(Double.parseDouble(row.get(0).toString()));
                        } catch (Exception e) {
                            // One unparseable cell invalidates the whole series.
                            return new ArrayList();
                        }
                    } else {
                        dataList.add(row.get(0));
                    }
                }
            }
        } finally {
            r.close();
        }

        return dataList;

    }
}
