package com.kingsoft.dc.khaos.module.spark.preprocess.specific.quality


import com.alibaba.fastjson.JSON
import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.constants.SchedulerConstants
import com.kingsoft.dc.khaos.module.spark.metadata.source.{HdfsCheckConfig, NewTechCheckConfig}
import com.kingsoft.dc.khaos.module.spark.model.TechCheck._
import com.kingsoft.dc.khaos.module.spark.preprocess.transform.TransformStrategy
import com.kingsoft.dc.khaos.module.spark.util.TechCheckUtils
import com.kingsoft.dc.khaos.util.Logging
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.functions.{col, udf}
import org.apache.spark.sql.{DataFrame, SaveMode}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.parse

class NewTechCheck extends TransformStrategy with Logging with Serializable {

  private var newTechCheckConfig: NewTechCheckConfig = null
  private var newTechCheckInfo: NewTechCheckInfo = null
  private var hdfsCheckConfig: HdfsCheckConfig = null

  /**
   * Runs the technical check over the upstream single-column ("row") DataFrame:
   *  1. checks every row and attaches a `checked` result column,
   *  2. writes rejected rows and the check log to HDFS,
   *  3. aborts when the reject ratio exceeds the configured maximum (after the
   *     log has been persisted, so the log survives the failure),
   *  4. registers the accepted rows as the module output.
   */
  override def exec(kc: KhaosContext,
                    module_id: String,
                    config: String,
                    dependences: Seq[Dependency],
                    targets: Seq[Dependency]): Seq[(String, DataFrame)] = {

    init(kc, config)
    // Fetch the upstream data (schema part of the tuple is not needed here).
    val (_, dataFrame) = kc.structData[DataFrame](dependences.head.getEdge())
    val checkedDF = dataFrameChecked(dataFrame, newTechCheckInfo)
    checkedDF.show(100)
    // `val` — the UDF handle is never reassigned (was `var`).
    val udf_getNormal = udf { (row: String, checked: Seq[String]) => getNormal(row, checked) }

    // Rows that passed (no check message) or only triggered warnings; warn rows
    // may have been rewritten with their default-value replacement by the UDF.
    val normalDF = checkedDF.select(udf_getNormal(col("row"), col("checked")) as "row").filter("checked is null or checked[0]='warn'  ")
    // Rows rejected by the check.
    val rejectDF = checkedDF.select("row").filter("checked is not null and checked[0]='reject' ")
    //todo czc confirm the log format with the requirements
    val logDF = checkedDF.select("row", "checked").filter("checked is not null")

    // Persist rejected rows (conversion of the encoding happens in the util).
    val rejectfilePath = TechCheckUtils.getHdfsPath(kc.conf.getString(SchedulerConstants.CLUSTER_NAMESPACE), kc.conf.getString(SchedulerConstants.PROXY_USER), hdfsCheckConfig.output_path, hdfsCheckConfig.input_dat_name + ".rj")
    TechCheckUtils.saveAsFileAbsPath(rejectDF, rejectfilePath, newTechCheckInfo.getSpliter, SaveMode.Overwrite)

    // Evaluate the reject threshold exactly once (the condition used to be
    // duplicated before and after the log save). `/ 100.0` forces floating-point
    // division: with integer division any reject_percent below 100 collapses to
    // 0 and every run with at least one rejected row would be aborted.
    val sumCount = checkedDF.count()
    val rejectCount = rejectDF.count()
    val thresholdExceeded = sumCount == 0 || rejectCount.toDouble / sumCount > newTechCheckConfig.reject_percent / 100.0
    if (thresholdExceeded) {
      val log = KsyunErrorCode.REJECT_ERROR.toString + "！最大错误记录比例：" + newTechCheckConfig.reject_percent + "！实际错误比例：" + rejectCount + "/" + sumCount
      newTechCheckInfo.addLog(log)
    }
    // Persist the check log before possibly failing the job below.
    val logfilePath = TechCheckUtils.getHdfsPath(kc.conf.getString(SchedulerConstants.CLUSTER_NAMESPACE), kc.conf.getString(SchedulerConstants.PROXY_USER), hdfsCheckConfig.output_path, hdfsCheckConfig.input_dat_name + ".log")
    TechCheckUtils.saveLogs(logDF, newTechCheckInfo.getLogList, logfilePath, newTechCheckInfo.getSpliter, kc.sparkSession, SaveMode.Overwrite, newTechCheckConfig.dest_encoding)
    // Too many rejected records (or no records at all): abort the job.
    if (thresholdExceeded) {
      throw new KsyunTechCheckException(KsyunErrorCode.REJECT_ERROR, KsyunErrorCode.REJECT_ERROR.name + "最大错误记录比例：" + newTechCheckConfig.reject_percent + "%！实际错误比例：" + rejectCount + "/" + sumCount)
    }
    addResult(targets.filter(_.targetSeq.equals("0")).head, normalDF)
  }

  /** Output schema of this module: a single string column named "row". */
  override def schema(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependencies: Seq[Dependency]): Schema = {
    new Schema(List(KhaosStructField("row", "String")))
  }

  /**
   * Initializes the three configuration members from the module config JSON,
   * the cluster defaults and the `techCheckInfo` entry of the job conf.
   */
  def init(kc: KhaosContext, config: String): Unit = {
    implicit val formats = DefaultFormats
    hdfsCheckConfig = TechCheckUtils.getHdfsCheckConfigByKC(kc)
    newTechCheckConfig = parse(config, true).extract[NewTechCheckConfig]
    newTechCheckInfo = JSON.parseObject(kc.conf.getString("techCheckInfo"), classOf[NewTechCheckInfo])
    // An explicitly configured splitter overrides the one carried by techCheckInfo.
    if (StringUtils.isNotEmpty(newTechCheckConfig.spliter_code))
      newTechCheckInfo.setSpliter(newTechCheckConfig.spliter_code)
  }

  /** Attaches the per-row check result as a new `checked` array column. */
  def dataFrameChecked(data: DataFrame, newTechCheckInfo: NewTechCheckInfo): DataFrame = {
    // (dead locals `schemaFields` and the unused RowEncoder were removed)
    val rowCheck = udf { (row: String) => check(row, newTechCheckInfo) }
    data.withColumn("checked", rowCheck(col("row")))
  }

  /**
   * Post-check row mapper (wrapped into a UDF): when the check produced a
   * default-value replacement — `checked(3)` holds the rebuilt full row — emit
   * the replaced row; otherwise emit the original row unchanged.
   * (Previously this returned "" for replaced rows, silently blanking them,
   * which contradicted the replacement row built in `check`.)
   */
  def getNormal(row: String, checked: Seq[String]): String =
    if (checked != null && checked(3) != null) checked(3)
    else row

  /**
   * Core technical check for one raw row (used as a UDF).
   *
   * Returns `null` when the row is clean, otherwise a 4-slot array:
   *   0 = error level (ErrorCode.lever), 1 = accumulated error code(s),
   *   2 = accumulated error message(s),
   *   3 = replacement row, or null when no default-value substitution happened.
   */
  def check(row: String, newTechCheckInfo: NewTechCheckInfo): Array[String] = {
    var checkMsg: Array[String] = null
    try {
      // '|' is a regex metacharacter, so it must be escaped before splitting.
      val cols = row.split(newTechCheckInfo.getSpliter.replaceAll("\\|", "\\\\|"))
      // Column-count check.
      if (newTechCheckInfo.getFields.size != cols.size)
        throw new KsyunTechCheckException(KsyunErrorCode.COLUMN_CHECK, "预期字段数是" + newTechCheckInfo.getFields.size + "实际是：" + cols.size + "(" + row + ")")
      val fields = newTechCheckInfo.getFields
      // Primary-key non-null check.
      KsyunCheckRule.keyCheck(newTechCheckInfo.getKeys, cols)
      for (i <- 0 until fields.size()) {
        val field = fields.get(i)
        // Tuple4: 1 original value, 2 replacement value, 3 triggered rule, 4 warning message.
        val tmpret = field.check(cols(i))
        if (tmpret != null) { // this field produced a warning
          if (checkMsg == null) {
            checkMsg = Array(tmpret.three.lever, tmpret.three.code, tmpret.four, null)
          } else {
            // Accumulate codes and messages from subsequent fields.
            checkMsg(1) = checkMsg(1) + tmpret.three.code
            checkMsg(2) = checkMsg(2) + tmpret.four
          }
          // Default-value substitution (dates, line breaks, ...): patch the
          // column and rebuild the full replacement row into slot 3.
          if (tmpret.second != null) {
            cols(i) = tmpret.second
            checkMsg(3) = cols.mkString(newTechCheckInfo.getSpliter)
          }
        }
      }
    } catch {
      // Rejected rows are counted via the reject DataFrame, no accumulator needed.
      case ex: KsyunTechCheckException =>
        checkMsg = Array(ex.getErrorCode.lever, ex.getErrorCode.code, ex.getMessage(), null)
        // mkString: Array.toString only prints the JVM identity, not the content.
        // Previously this branch was never logged at all.
        log.warn(checkMsg.mkString(","))
      case e: Exception =>
        // Unknown failure: keep the whole stack trace for troubleshooting.
        checkMsg = Array(KsyunErrorCode.DEFAULT_ERROR.lever, KsyunErrorCode.DEFAULT_ERROR.code, KsyunTechCheckException.getExceptionToString(e), null)
        log.warn(checkMsg.mkString(","))
    }
    //todo czc confirm the log format with the requirements
    checkMsg
  }
}