package com.scala.conf;



import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.io.*;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * @Author: ZhangJin
 * @Date: 2020/8/25 11:45
 */
@Component
public class javaExcel {

    @Autowired
    private SparkSession sparkSession;

    /**
     * Reads an uploaded .xlsx stream and registers each sheet as a Spark global
     * temporary view named {@code tableName}. The first row of every sheet is
     * used as the column names; all columns are modeled as nullable Strings.
     *
     * NOTE(review): every sheet is registered under the SAME view name, so only
     * the last non-empty sheet's data survives — confirm this is intended.
     *
     * @param inputStream raw .xlsx file stream (e.g. from a multipart upload);
     *                    closed by this method when the workbook is closed
     * @param tableName   name of the global temp view to create/replace
     * @throws IOException if the stream cannot be read or parsed as XLSX, or if
     *                     registering the view with Spark fails
     */
    public void readExcel(InputStream inputStream, String tableName) throws IOException {
        // try-with-resources: closing the workbook also closes the wrapped stream,
        // fixing the resource leak in the original implementation.
        try (Workbook workbook = new XSSFWorkbook(new BufferedInputStream(inputStream))) {
            Iterator<Sheet> sheets = workbook.sheetIterator();
            while (sheets.hasNext()) {
                registerSheetAsView(sheets.next(), tableName);
            }
        }
    }

    /**
     * Builds a DataFrame from one sheet (header row = schema, remaining rows = data)
     * and registers it as a global temp view. Empty sheets are skipped.
     */
    private void registerSheetAsView(Sheet sheet, String tableName) throws IOException {
        Iterator<Row> rowIterator = sheet.iterator();
        if (!rowIterator.hasNext()) {
            return; // empty sheet: nothing to register
        }

        StructType schema = DataTypes.createStructType(buildHeaderFields(rowIterator.next()));
        int columnCount = schema.size();

        // Collect data rows. Every row is padded/filled to exactly columnCount
        // values — the original code could emit short (even empty) rows, which
        // makes Spark fail with a schema/row-length mismatch at runtime.
        List<org.apache.spark.sql.Row> data = new ArrayList<>();
        int lastRow = sheet.getLastRowNum();
        for (int rowNum = 1; rowNum <= lastRow; rowNum++) {
            Row row = sheet.getRow(rowNum);
            String[] values = new String[columnCount];
            for (int col = 0; col < columnCount; col++) {
                Cell cell = (row == null)
                        ? null
                        : row.getCell(col, Row.MissingCellPolicy.RETURN_BLANK_AS_NULL);
                values[col] = cellToString(cell);
            }
            data.add(RowFactory.create((Object[]) values));
        }

        try {
            sparkSession.createDataFrame(data, schema).createOrReplaceGlobalTempView(tableName);
        } catch (Exception e) {
            // The original code silently swallowed this (e.getMessage() with the
            // result discarded); surface the failure with its cause preserved.
            throw new IOException("Failed to register sheet '" + sheet.getSheetName()
                    + "' as global temp view '" + tableName + "'", e);
        }
    }

    /**
     * Turns the header row into Spark fields: one nullable String column per
     * STRING or NUMERIC header cell (other cell types are skipped, matching the
     * original behavior).
     */
    private static List<StructField> buildHeaderFields(Row headerRow) {
        List<StructField> fields = new ArrayList<>();
        Iterator<Cell> headerCells = headerRow.iterator();
        while (headerCells.hasNext()) {
            Cell cell = headerCells.next();
            // NOTE(review): getCellTypeEnum() is deprecated in POI 4.x in favor of
            // getCellType(); kept for compatibility with the POI version this
            // project compiles against.
            if (cell.getCellTypeEnum() == CellType.STRING) {
                fields.add(DataTypes.createStructField(cell.getStringCellValue().trim(),
                        DataTypes.StringType, true));
            } else if (cell.getCellTypeEnum() == CellType.NUMERIC) {
                fields.add(DataTypes.createStructField(String.valueOf(cell.getNumericCellValue()).trim(),
                        DataTypes.StringType, true));
            }
        }
        return fields;
    }

    /**
     * Converts one data cell to its String representation.
     * Missing/blank cells become "0" (the original zero-fill policy); other cell
     * types fall back to POI's toString so every column always gets a value.
     */
    private static String cellToString(Cell cell) {
        if (cell == null) {
            return "0"; // blank/missing cell: simple zero-fill, as before
        }
        if (cell.getCellTypeEnum() == CellType.STRING) {
            return cell.getStringCellValue().trim();
        }
        if (cell.getCellTypeEnum() == CellType.NUMERIC) {
            return String.valueOf(cell.getNumericCellValue()); // keeps the decimal point
        }
        // BOOLEAN/FORMULA/etc. previously added nothing, misaligning the row
        // against the schema; use POI's generic rendering instead.
        return cell.toString().trim();
    }
}
