package com.ctbri.manage.quality.single;
import com.ctbri.manage.quality.original.CheckBasic;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.api.java.UDF1;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.sql.types.*;
import org.apache.spark.sql.*;
import lombok.Getter;
import lombok.Setter;
import com.ctbri.manage.bydeequ.calculate.ForJavaCalcResultGeneration;

import java.math.BigDecimal;


public class CheckColumnStringLength extends CheckBasic {
    @Getter @Setter protected int len;
    /*
    excel,csv,txt,json
    **/
    public CheckColumnStringLength(String appName, String masterSet, String logLevel, String dataExportPath,
                            String dataSavePath, String dataSaveStyle, int len){
        super(appName, masterSet, logLevel, dataExportPath, dataSavePath, dataSaveStyle);
        this.len = len;
    }
    /*
    jdbc
    **/
    public CheckColumnStringLength(String appName, String masterSet, String logLevel, String url, String driver, String user,
                                   String password, String dbtable, int len, String dataSavePath, String dataSaveStyle){
        super(appName, masterSet, logLevel, url, driver, user, password, dbtable, dataSavePath, dataSaveStyle);
        this.len = len;
    }
    /*
    hive
    **/
    public CheckColumnStringLength(String appName, String masterSet, String logLevel, String databaseName,
                                   String tableName, int len, String dataSavePath, String dataSaveStyle){
        super(appName, masterSet, logLevel, databaseName, tableName, dataSavePath, dataSaveStyle);
        this.len = len;
    }

    public void call(StructType structType, final String dataSource, final String needCheckColName, final Boolean meetRequirement) throws Exception{
        /*
        @dataSource: csv, txt, excel, json, jdbc, hive
        */
        SparkSession spark = this.envSet(this.appname, this.masterSet, this.logLevel);;
        Dataset<Row> dataframe = this.getDataFromJdbc(spark, this.url, this.driver, this.user, this.password, this.dbtable);;
        System.out.println("dataSource: " + dataSource);
        try{
            if (dataSource.equals("excel")){
                spark = this.envSet(this.appname, this.masterSet, this.logLevel);
                dataframe = structType != null?
                        this.getDataFromExcel(this.dataExportPath, spark, structType): this.getDataFromExcel(this.dataExportPath, spark);
            }
            else if (dataSource.equals("jdbc")){
                spark = this.envSet(this.appname, this.masterSet, this.logLevel);
                dataframe = this.getDataFromJdbc(spark, this.url, this.driver, this.user, this.password, this.dbtable);
            }
            else if (dataSource.equals("hive")){
                spark = this.envSetHive(this.appname, this.masterSet, this.logLevel);
                dataframe = this.getDataFromHive(spark, this.databaseName, this.tableName);
            }
            //TODO: 实现可以选择从哪个数据库读入的功能(csv,txt,json)
            else{
                throw new Exception();
            }
        } catch (Exception e){
            e.printStackTrace();
        } finally {
            if(null !=spark && null !=dataframe) {
                //数据分析
                //todo: 实现检查长度满足大于，大于等于，小于，小于等于的情况
                spark.udf().register("checklength", new CLUDF(this.len), DataTypes.BooleanType);
                Dataset<Row> result = null;
                result = dataframe.filter(
                        functions.callUDF("checklength", functions.col(needCheckColName)).equalTo(true));
                long satisfiedNum = result.count();
                if (!meetRequirement) {
                    result = dataframe.filter(
                            functions.callUDF("checklength", functions.col(needCheckColName)).equalTo(false));
                }
                result.show(false);
                System.out.println("satisfied number: " + satisfiedNum);
                //结果输出
                String message = "checkColumnStringLength(" + needCheckColName + ")";
                long dataframeCount = dataframe.count();
                if (satisfiedNum == dataframeCount) {
                    message += " successed: Value meet the constraint requirement";
                    ForJavaCalcResultGeneration.resultGenerationForCustomFunc("Column", spark, "1.0", needCheckColName, "x=1.0", message, this.dataSavePath);
                } else {
                    double rate = new BigDecimal(satisfiedNum).doubleValue() / dataframeCount;
                    message += " failed: Value: " + String.valueOf(rate) + "  does not meet the constraint requirement!";
                    ForJavaCalcResultGeneration.resultGenerationForCustomFunc("Column", spark, String.valueOf(rate), needCheckColName, "x=1.0", message, this.dataSavePath);
                }
                this.resultSave(result, this.dataSavePath + "/dataList", this.dataSaveStyle);
                spark.stop();
            }
        }
    }
}

/**
 * Spark UDF returning true when the input string's length equals the
 * configured target length.
 */
class CLUDF implements UDF1<String, Boolean> {
    private final Integer len;

    public CLUDF(Integer len){
        this.len = len;
    }

    @Override
    public Boolean call(String col) throws Exception{
        // Fixed: a null column value previously threw a NullPointerException
        // inside the Spark task; a null value simply fails the constraint.
        return col != null && col.length() == this.len;
    }
}