package com.ctbri.manage.quality;
import com.ctbri.manage.bydeequ.example.TestExampleWithRealData;
import com.ctbri.manage.quality.config.YamlReader;
import com.ctbri.manage.quality.original.*;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.yaml.snakeyaml.Yaml;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Entry point for the Deequ performance test.
 *
 * <p>Reads a main YAML config (path given as {@code args[0]}) that points to a
 * sub config, then dispatches to an excel- or hive-backed {@link DeequTest}
 * run depending on the configured {@code dataSource}.</p>
 */
public class DeequPerformanceTest {
    public static void main(String[] args) throws FileNotFoundException {
        long startTime = System.currentTimeMillis();   // start timestamp for the whole run

        if (args.length < 1) {
            System.err.println("Usage: DeequPerformanceTest <main-config-path>");
            return;
        }

        // Load the main configuration file (hold the reader once instead of
        // re-resolving the singleton for every key).
        final String filePathOfMainConfig = args[0];
        final YamlReader mainConfig = YamlReader.getInstance(filePathOfMainConfig);
        Map<String, Object> confMain = mainConfig.getConf();
        System.out.println("file path of main config:");
        System.out.println(confMain);
        final String checkName = mainConfig.getString("checkName");
        final String configPath = mainConfig.getString("configPath");
        final String dataSource1 = mainConfig.getString("dataSource");

        System.out.println("checkName: " + checkName);
        System.out.println("configPath: " + configPath);
        System.out.println("dataSource1: " + dataSource1);

        // Load the sub configuration file referenced by the main config.
        final YamlReader subConfig = YamlReader.getInstance(configPath);
        Map<String, Object> conf = subConfig.getConf();
        System.out.println("file path of sub config:");
        System.out.println(conf);
        final String appName = subConfig.getString("appName");
        final String masterSet = subConfig.getString("masterSet");
        final String logLevel = subConfig.getString("logLevel");
        final String dataSavePath = subConfig.getString("dataSavePath");
        final String dataSaveStyle = subConfig.getString("dataSaveStyle");
        final String dataSource = subConfig.getString("dataSource");

        if (dataSource1.equals("excel")) {
            final String dataExportPath = subConfig.getString("dataExportPath");
            final Boolean needStructType = subConfig.getBoolean("needStructType", false);
            final List<String> colName = subConfig.getList("colName");
            DeequTest deequTest = new DeequTest(appName, masterSet, logLevel, dataExportPath, dataSavePath, dataSaveStyle);
            StructType structType = null;
            if (needStructType) {
                // Build an all-String schema from the configured column names.
                List<StructField> structFields = new ArrayList<>();
                for (String i : colName) {
                    structFields.add(DataTypes.createStructField(i, DataTypes.StringType, true));
                }
                structType = DataTypes.createStructType(structFields);
            }
            deequTest.call(structType, dataSource);
        } else if (dataSource1.equals("hive")) {
            final String databaseName = subConfig.getString("databaseName");
            final String tableName = subConfig.getString("tableName");
            // BUGFIX: the last constructor argument is dataSaveStyle, not
            // dataSource (the original passed dataSource into the
            // dataSaveStyle slot and never used the configured dataSaveStyle).
            DeequTest deequTest = new DeequTest(appName, masterSet, logLevel, databaseName, tableName, dataSavePath, dataSaveStyle);
            deequTest.call(null, dataSource);
        } else {
            // Previously an unknown source was silently ignored; surface it.
            System.err.println("Unsupported dataSource in main config: " + dataSource1);
        }

        long endTime = System.currentTimeMillis(); // end timestamp
        System.out.println("程序运行时间: " + (endTime - startTime) / 1000. + "s");
    }
}

/**
 * Deequ performance check runner.
 *
 * <p>Thin wrapper around {@code CheckBasic} that builds a Spark session,
 * loads a DataFrame from the configured source (excel or hive), and hands
 * it to {@code TestExampleWithRealData.calc} for the actual checks.</p>
 */
class DeequTest extends CheckBasic {
    /**
     * Configuration for reading data from an excel file.
     *
     * @param appName        Spark application name
     * @param masterSet      Spark master URL
     * @param logLevel       Spark log level
     * @param dataExportPath path of the excel file to read
     * @param dataSavePath   path where results are saved
     * @param dataSaveStyle  format/style of the saved results
     */
    public DeequTest(String appName, String masterSet, String logLevel, String dataExportPath,
              String dataSavePath, String dataSaveStyle) {
        super(appName, masterSet, logLevel, dataExportPath, dataSavePath, dataSaveStyle);
    }

    /**
     * Configuration for reading data from a hive table.
     *
     * @param appName       Spark application name
     * @param masterSet     Spark master URL
     * @param logLevel      Spark log level
     * @param databaseName  hive database to read from
     * @param tableName     hive table to read from
     * @param dataSavePath  path where results are saved
     * @param dataSaveStyle format/style of the saved results
     */
    public DeequTest(String appName, String masterSet, String logLevel, String databaseName, String tableName,
                     String dataSavePath, String dataSaveStyle) {
        super(appName, masterSet, logLevel, databaseName, tableName, dataSavePath, dataSaveStyle);
    }

    /**
     * Loads the DataFrame from the requested source and runs the checks.
     *
     * @param structType optional explicit schema (excel only); {@code null} to infer
     * @param dataSource source kind: {@code "excel"} or {@code "hive"}
     */
    public void call(StructType structType, final String dataSource) {
        SparkSession spark = null;
        Dataset<Row> dataframe = null;
        System.out.println("dataSource: " + dataSource);
        try {
            if (dataSource.equals("excel")) {
                spark = this.envSet(this.appname, this.masterSet, this.logLevel);
                dataframe = structType != null
                        ? this.getDataFromExcel(this.dataExportPath, spark, structType)
                        : this.getDataFromExcel(this.dataExportPath, spark);
            }
//            else if (dataSource.equals("jdbc")){
//                spark = this.envSet(this.appname, this.masterSet, this.logLevel);
//                dataframe = this.getDataFromJdbc(spark, this.url, this.driver, this.user, this.password, this.dbtable);
//            }
            else if (dataSource.equals("hive")) {
                spark = this.envSetHive(this.appname, this.masterSet, this.logLevel);
                dataframe = this.getDataFromHive(spark, this.databaseName, this.tableName);
            }
            //TODO: support further input formats (csv, txt, json)
            else {
                // BUGFIX: was a bare `new Exception()` with no diagnostic.
                throw new IllegalArgumentException("Unsupported dataSource: " + dataSource);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // BUGFIX: only run the checks when loading actually produced a
            // DataFrame; the original passed a null dataframe downstream
            // after any load failure.
            if (dataframe != null) {
                TestExampleWithRealData.calc(spark, dataframe, true, 20, null, null);
            } else {
                System.err.println("No data loaded; skipping checks for dataSource: " + dataSource);
            }
        }
    }
}
