package com.swsc.ai.util;

import com.swsc.ai.config.SparkSqlConf;
import com.swsc.ai.constant.EnvConstant;
import com.swsc.ai.enums.TFEnum;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructType;

import java.util.Collections;

/**
 * @describe: spark相关工具类
 * @author: DuanCXin
 * @created: 2023-10-23 15:25
 */
public class SparkUtil {

    /** Utility class — all members are static; prevent instantiation. */
    private SparkUtil() {
    }

    /**
     * Builds a {@link SparkSession} configured for the target environment.
     * <p>
     * Populates {@code SparkSqlConf.envMap} with the dev connection settings when
     * {@code isTest} parses to {@code TFEnum.TRUE}, otherwise with the prod settings,
     * then delegates session creation to {@link SparkSqlConf#getSession(String)}.
     *
     * @param isTest    flag string; any value that {@code TFEnum.fromTypeName} maps to
     *                  {@code TRUE} selects the dev environment, everything else selects prod
     * @param className application name passed through to the session factory
     * @return a SparkSession wired to the selected environment
     */
    public static SparkSession getSession(String isTest, String className) {
        if (TFEnum.TRUE.equals(TFEnum.fromTypeName(isTest))) {
            putEnv(EnvConstant.URL_DEV, EnvConstant.DRIVER_DEV, EnvConstant.USER_DEV,
                    EnvConstant.PASSWORD_DEV, EnvConstant.SPARK_ENV_DEV);
        } else {
            putEnv(EnvConstant.URL_PROD, EnvConstant.DRIVER_PROD, EnvConstant.USER_PROD,
                    EnvConstant.PASSWORD_PROD, EnvConstant.SPARK_ENV_PROD);
        }
        return SparkSqlConf.getSession(className);
    }

    /** Writes one environment's connection settings into the shared {@code SparkSqlConf.envMap}. */
    private static void putEnv(String url, String driver, String user, String password, String spark) {
        SparkSqlConf.envMap.put("URL", url);
        SparkSqlConf.envMap.put("DRIVER", driver);
        SparkSqlConf.envMap.put("USER", user);
        SparkSqlConf.envMap.put("PASSWORD", password);
        SparkSqlConf.envMap.put("SPARK", spark);
    }

    /**
     * Loads the previous day's computed history data as a Dataset.
     * <p>
     * Resolves the HDFS output path for the day before {@code inputParams[1]} (test paths
     * when {@code inputParams[5]} parses to {@code TFEnum.TRUE}), appends the history-data
     * sub-path plus a {@code /*} glob, and reads it as CSV. If the read fails — e.g. on the
     * first run, when no prior output exists — an empty dataset with the given schema is
     * returned instead, so downstream processing can proceed.
     *
     * @param session     active SparkSession
     * @param schema      row schema used for the empty fallback dataset
     * @param structType  schema descriptor forwarded to {@code SparkSqlConf.getDataByCVS}
     * @param inputParams positional task arguments; indices used here:
     *                    [0] test-output root, [1] task date (yyyyMMdd-style, per DateUtil),
     *                    [2] project name, [3] task name, [4] sub-task name, [5] is-test flag
     * @return the prior day's history data, or an empty dataset if it cannot be read
     * @throws IllegalArgumentException if fewer than 6 input params are supplied
     */
    public static Dataset<Row> generateHistoryData(SparkSession session, StructType schema, String structType, String... inputParams) {
        if (inputParams == null || inputParams.length < 6) {
            throw new IllegalArgumentException(
                    "generateHistoryData requires at least 6 inputParams, got "
                            + (inputParams == null ? 0 : inputParams.length));
        }
        // Date of the day before the task's execution date.
        String taskDateBefore = DateUtil.getBeforeNDay(inputParams[1], 1);
        String projectName = inputParams[2];
        String taskName = inputParams[3];
        String subTaskName = inputParams[4];
        String isTest = inputParams[5];
        String beforeDataPath = HDFSUtil.getOutHDFSPath(projectName, taskName, subTaskName, taskDateBefore);
        if (TFEnum.TRUE.equals(TFEnum.fromTypeName(isTest))) {
            beforeDataPath = HDFSUtil.getTestOutHDFSPath(inputParams[0], subTaskName, taskDateBefore);
        }
        String beforeDataCompPath = beforeDataPath + EnvConstant.HISTORY_DATA_PATH + "/*";
        Dataset<Row> historyData;
        try {
            historyData = SparkSqlConf.getDataByCVS(session, structType, beforeDataCompPath);
        } catch (Exception e) {
            // Best-effort read: missing history (e.g. first run) degrades to an empty dataset.
            // Log the exception itself, not just getMessage(), which may be null.
            System.out.println("读取历史数据异常：" + e);
            historyData = session.createDataset(Collections.emptyList(), Encoders.row(schema));
        }
        return historyData;
    }
}
