package com.bleeth.flow.step.input;

import cn.hutool.core.convert.Convert;
import cn.hutool.core.util.ArrayUtil;
import cn.hutool.poi.excel.sax.Excel03SaxReader;
import cn.hutool.poi.excel.sax.Excel07SaxReader;
import com.bleeth.flow.core.common.APlugin;
import com.bleeth.flow.core.common.PluginAnnotation;
import com.bleeth.flow.core.common.PluginTypeEnum;
import com.bleeth.flow.core.util.SparkUtil;
import com.jd.platform.async.wrapper.WorkerWrapper;
import lombok.Data;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * Excel reader plugin: parses an .xls/.xlsx file with hutool's SAX readers,
 * treats the first row as the header, and turns the remaining rows into a
 * Spark {@link Dataset} whose columns are all nullable Strings.
 *
 * @author Bleeth
 * @date 2021-08-05
 */
@Data
// NOTE(review): name "自定义聚合" and type AGG look copy-pasted from an
// aggregation plugin — this class is an Excel *input* step (see the id,
// the description and the step.input package). Confirm the intended
// PluginTypeEnum value before changing it.
@PluginAnnotation(name = "自定义聚合",
        type = PluginTypeEnum.AGG,
        description = "",
        id = "ExcelReaderPlugin")
public class ExcelReaderPlugin extends APlugin implements Serializable {

    private static final long serialVersionUID = 9151516393920337644L;

    /** Path of the Excel file to read. */
    private String path;

    /** Name of the sheet to read. */
    private String sheetName;

    /** Header cells captured from row 0 during the SAX pass. */
    private List<Object> headerList;

    /**
     * Reads the configured Excel sheet and converts it to a Dataset whose
     * schema is taken from the first row (every column typed as String).
     *
     * @param input       upstream dataset, forwarded to {@code super.action}
     * @param allWrappers wrappers of all steps in the flow
     * @return dataset holding every data row of the sheet as string cells
     * @throws IllegalStateException if the sheet yields no header row
     */
    @Override
    public Dataset<Row> action(Dataset<Row> input, Map<String, WorkerWrapper> allWrappers) {
        super.action(input, allWrappers);

        List<Row> rowLineList = new ArrayList<>();

        // Pick the SAX reader by extension; anything that is not .xlsx is
        // assumed to be the legacy .xls (BIFF) format.
        if (path.endsWith(".xlsx")) {
            Excel07SaxReader reader = new Excel07SaxReader(
                    (sheetIndex, rowIndex, rowList) -> handleRow(rowIndex, rowList, rowLineList));
            reader.read(path, sheetName);
        } else {
            Excel03SaxReader reader = new Excel03SaxReader(
                    (sheetIndex, rowIndex, rowList) -> handleRow(rowIndex, rowList, rowLineList));
            reader.read(path, sheetName);
        }

        // Fail loudly instead of NPE-ing below when the sheet was empty
        // or the header row never arrived.
        if (headerList == null || headerList.isEmpty()) {
            throw new IllegalStateException(
                    "No header row found in " + path + " (sheet: " + sheetName + ")");
        }

        // First row becomes the schema: one nullable String column per header
        // cell. Convert.toStr tolerates non-String header cells (numbers etc.)
        // where a direct (String) cast would throw ClassCastException.
        StructType schema = new StructType();
        for (Object headerObj : headerList) {
            schema = schema.add(Convert.toStr(headerObj), DataTypes.StringType, true);
        }

        return SparkUtil.getSparkInstance().createDataFrame(rowLineList, schema);
    }

    /**
     * Shared SAX row callback for both reader flavors: row 0 is kept as the
     * header, every other row is converted to a Spark {@link Row} of strings.
     */
    private void handleRow(long rowIndex, List<Object> rowList, List<Row> rowLineList) {
        if (rowIndex == 0) {
            // Defensive copy: SAX readers may reuse the row buffer between
            // callbacks — presumably unsafe to keep the reference. TODO confirm.
            headerList = new ArrayList<>(rowList);
            return;
        }
        List<String> strRowList = rowList.stream()
                .map(Convert::toStr)
                .collect(Collectors.toList());
        rowLineList.add(RowFactory.create(ArrayUtil.toArray(strRowList, String.class)));
    }
}
