package com.kingsoft.dc.khaos.module.spark.preprocess.specific.quality

import java.util

import com.alibaba.fastjson.serializer.SerializerFeature
import com.alibaba.fastjson.JSON
import com.kingsoft.dc.khaos.KhaosContext
import com.kingsoft.dc.khaos.extender.meta.model.check.{DmCheck, MQCheck}
import com.kingsoft.dc.khaos.innertype.Schema
import com.kingsoft.dc.khaos.metadata.{Dependency, KhaosStructField}
import com.kingsoft.dc.khaos.module.spark.constants.SchedulerConstants
import com.kingsoft.dc.khaos.module.spark.metadata.preprocess.specific.quality._
import com.kingsoft.dc.khaos.module.spark.preprocess.transform.TransformStrategy
import com.kingsoft.dc.khaos.module.spark.util.{CheckUtils, RocketMQProducer}
import com.kingsoft.dc.khaos.util.Logging
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.functions.{col, concat_ws}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Column, DataFrame, Dataset, Row}
import org.json4s.DefaultFormats
import org.json4s.jackson.JsonMethods.{compact, parse, render}

import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, ListBuffer}

class BusinessCheck extends TransformStrategy with Logging with Serializable {

  private var operatorConfig: BussinessCheckInfo = null
  private var checkParams: String = null
  private var businessCheckRule: util.List[DmCheck] = null
  private var dataFrame: DataFrame = null
  private var lowerDataFrame: DataFrame = null
  private var mqCheckInfo: MQCheck = null
  private var testRun = "false"
  private var subTable = false
  private var limit = 10
  private var suffix = ""
  var runEnv = "test"
  var isReport = false

  /**
    * Entry point of the business-quality-check operator.
    *
    * For every configured extract field whose business check is switched on, runs the
    * configured rule types (uniqueness, not-empty, value range, value constraint, data
    * format, ...) against the upstream DataFrame, accumulates per-rule error rows, then
    * reports a summary to RocketMQ and the error detail to the ES target.
    *
    * Side effects: mutates the class fields `runEnv`, `dataFrame`, `lowerDataFrame`
    * (lazily loaded on first use) and sets `stop_check` in the KhaosContext conf.
    *
    * @param kc          runtime context (Spark session, scheduler conf, data channels)
    * @param module_id   id of this module instance (unused here)
    * @param config      operator JSON config, parsed by init()
    * @param dependences upstream dependencies (unused here; data is pulled via kc.structData)
    * @param targets     downstream targets; the ES result is attached to targets.head
    * @return result of addResult on the ES target
    * @throws ArithmeticException when a STRONG rule reaches its error threshold
    */
  override def exec(kc: KhaosContext,
                    module_id: String,
                    config: String,
                    dependences: Seq[Dependency],
                    targets: Seq[Dependency]): Seq[(String, DataFrame)] = {
    log.info(s"business check start ......")
    log.info(s"business check config json is: ${config}")
    // Initialize runEnv first: init() below reads it to decide which rule set to load.
    runEnv = kc.conf.getString(SchedulerConstants.RUN_ENV)
    init(kc, config)
    val techCheck = new TechCheck
    // Task-level parameters pulled from the scheduler configuration.
    val taskFlowNum = s"${kc.conf.getString(SchedulerConstants.JOB_INSTANCE_ID)}-${kc.conf.getString(SchedulerConstants.EXEC_TRIED_TIMES)}"
    val taskId = kc.conf.getString(SchedulerConstants.JOB_ID)
    val taskName = kc.conf.getString(SchedulerConstants.JOB_NAME)
    var taskType = "DATA_INTEGRATION"
    if (testRun.equals("true")) {
      taskType = "TEST_RUN"
    }
    val businessTime = s"${techCheck.string2Date(kc.conf.getString(SchedulerConstants.BIZ_DATE))} ${kc.conf.getString(SchedulerConstants.BIZ_TIME)}"
    val taskSubmitTime = techCheck.timeStamp2Date(kc.conf.getString(SchedulerConstants.SUBMIT_TIME))
    val batchNum = kc.conf.getString(SchedulerConstants.BATCH_NO)
    var taskProjectId = kc.conf.getString(SchedulerConstants.PROJECT_ID)
    val taskProjectName = kc.conf.getString(SchedulerConstants.PROJECT_NAME)
    val dataSourceName = JSON.parseObject(checkParams).get("ds_name").toString
    val tenantId = kc.conf.getString(SchedulerConstants.TENANT_ID)

    // Error/summary rows collected per rule, later turned into DataFrames.
    var mqRows = new util.ArrayList[Row]()
    val allEsSchema = StructType(Seq(StructField("taskFlowNum", StringType, true), StructField("ruleId", StringType, true),
      StructField("taskSubmitTime", StringType, true), StructField("tenantId", StringType, true),
      StructField("dataDetaile", StringType, true), StructField("errorDescribe", ArrayType(StringType), true)))
    val emptyRDD = kc.sparkSession.sparkContext.emptyRDD[Row]
    var emptyEsDF: DataFrame = kc.sparkSession.createDataFrame(emptyRDD, allEsSchema)
    val extract_fields: List[ColOption] = operatorConfig.extract_fields
    val db_name = operatorConfig.db_name
    val table_name = operatorConfig.table_name
    extract_fields.foreach(f => {
      // column name (lower-cased to match the renamed DataFrame columns)
      val field = f.field.toLowerCase
      // data type
      val data_type = f.data_type
      // length
      val length = f.length
      val bCheck = f.option.business_check
      // whether this field's business check is enabled
      val on_off = bCheck.on_off
      val rule_options = bCheck.rule_options
      // only validate when the check is switched on
      if (on_off) {
        rule_options.map(ruleOption => {
          var errorNum = 0
          var errDataFrame: DataFrame = null
          var resultDescribe: String = null
          val rule_id = ruleOption.rule_id // rule id
          val rule_name = ruleOption.rule_name // rule name
          val rule_type = ruleOption.rule_type // rule type (kind of check)
          val dmCheck = getRuleParams(rule_id)
          if (dmCheck != null) {
            var operatorFilter: OperatorFilter = null
            if (dmCheck.getOperatorFilter != null && !dmCheck.getOperatorFilter.equals("")) {
              implicit val formats = DefaultFormats
              operatorFilter = parse(dmCheck.getOperatorFilter, true).extract[OperatorFilter]
            }
            rule_type match {
              case "ONLY" => {
                // Test runs may hit sharded tables, so use the table name from the JSON config.
                var str = db_name + "." + table_name
                if (dataFrame == null) {
                  // Lazily fetch the upstream frame once and cache it in the class fields.
                  var (schema, newDataFrame) = kc.structData[DataFrame](str)
                  if (testRun.equals("true")) {
                    val limitDataFrame = newDataFrame.limit(limit)
                    newDataFrame = limitDataFrame
                  }
                  lowerDataFrame = renameDF(newDataFrame)
                  dataFrame = timestamp2String(lowerDataFrame)
                }
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行唯一性检核")
                val check = new SingleCheck
                errDataFrame = check.analysisData(dataFrame, field)
                // NOTE(review): count() is a Long; .toInt would wrap beyond Int.MaxValue rows.
                errorNum = errDataFrame.count().toInt
                resultDescribe = "不符合唯一性检查规则"
              }
              case "NOT_EMPTY" => {
                var str = db_name + "." + table_name
                if (dataFrame == null) {
                  var (schema, newDataFrame) = kc.structData[DataFrame](str)
                  if (testRun.equals("true")) {
                    val limitDataFrame = newDataFrame.limit(limit)
                    newDataFrame = limitDataFrame
                  }
                  lowerDataFrame = renameDF(newDataFrame)
                  dataFrame = timestamp2String(lowerDataFrame)
                }
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行非空检核")
                val check = new NotEmptyCheck
                errDataFrame = check.analysisData(dataFrame, field)
                errorNum = errDataFrame.count().toInt
                resultDescribe = "不符合非空检查规则"
              }
              case "VALUE_RANGE" => {
                var str = db_name + "." + table_name
                if (dataFrame == null) {
                  var (schema, newDataFrame) = kc.structData[DataFrame](str)
                  if (testRun.equals("true")) {
                    val limitDataFrame = newDataFrame.limit(limit)
                    newDataFrame = limitDataFrame
                  }
                  log.info(s"库名表名++++$str")
                  log.info(s"上游获取经过limit后的df++++${newDataFrame.show(false)}")
                  lowerDataFrame = renameDF(newDataFrame)
                  dataFrame = timestamp2String(lowerDataFrame)
                }
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行取值范围检核")
                val check = new RangeValueCheck
                errDataFrame = check.analysisData(dataFrame, operatorFilter, field)
                errorNum = errDataFrame.count().toInt
                resultDescribe = "不符合取值范围检查规则"
              }
              case "VALUE_CONSTRAINT" => {
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行取值约束检核")
                var list: ListBuffer[Row] = new ListBuffer[Row]()
                val check = new ConstraintsValueCheck
                operatorFilter.expressions.foreach(ex => {
                  // Each expression is "<left cond>-<>-<right cond>"; the first token of the
                  // left side names the related table.
                  val str = ex.split("-<>-")
                  val strings1 = str(0).split(" ")
                  val strings2 = str(1).split(" ")
                  var relationTable2 = strings1(0)
                  // Decide whether a fresh dataFrame must be fetched from upstream.
                  var tableStr = ""
                  if (dataFrame == null) {
                    tableStr = db_name + "." + table_name
                    var (schema, newDataFrame) = kc.structData[DataFrame](tableStr)
                    if (testRun.equals("true")) {
                      val limitDataFrame = newDataFrame.limit(limit)
                      newDataFrame = limitDataFrame
                    }
                    lowerDataFrame = renameDF(newDataFrame)
                    dataFrame = timestamp2String(lowerDataFrame)
                  }
                  var df1: DataFrame = null
                  // A non-empty relation_table means this check uses an auxiliary table.
                  if (operatorConfig.relation_table != None) {
                    relationTable2 = operatorConfig.relation_table.get
                  }
                  // NOTE(review): tableStr is only set when dataFrame was null above, so on
                  // later iterations this compares against "" — confirm this is intended.
                  if (relationTable2.equals(tableStr)) {
                    df1 = dataFrame
                  }
                  else {
                    var (schema, df) = kc.structData[DataFrame](relationTable2)
                    if (testRun.equals("true")) {
                      val limitDataFrame = df.limit(limit)
                      df = limitDataFrame
                    }
                    val lowerDataFrame1: DataFrame = renameDF(df)
                    df1 = timestamp2String(lowerDataFrame1)
                  }
                  val r = check.analysisData(df1, dataFrame, str(0), str(1), list) // error rows accumulate across iterations
                  list = r
                })
                // Filter the accumulated error rows back out of the full table.
                errDataFrame = dataFrame.flatMap(row => {
                  var errorRows: ListBuffer[Row] = new ListBuffer[Row]()
                  if (list.contains(row)) {
                    errorRows += row
                  }
                  errorRows
                })(RowEncoder(dataFrame.schema))
                errorNum = errDataFrame.count().toInt
                resultDescribe = "不符合取值约束检查规则"
              }
              case "DATA_FORMAT" => {
                var str = db_name + "." + table_name
                if (dataFrame == null) {
                  var (schema, newDataFrame) = kc.structData[DataFrame](str)
                  if (testRun.equals("true")) {
                    val limitDataFrame = newDataFrame.limit(limit)
                    newDataFrame = limitDataFrame
                  }
                  lowerDataFrame = renameDF(newDataFrame)
                  dataFrame = timestamp2String(lowerDataFrame)
                }
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行数据格式检核")
                val check = new DataFormatCheck
                errDataFrame = check.analysisData(dataFrame, operatorFilter, field)
                errorNum = errDataFrame.count().toInt
                resultDescribe = "不符合数据格式检查规则"
              }
              // The three rule types below are logged but not implemented yet.
              case "FLUCTUATIONS_IN" => {
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行波动检核")
              }
              case "OPERATIONAL_BALANCE" => {
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行运算平衡检核")
              }
              case "TOTAL_SCORE" => {
                log.info(s"开始对${operatorConfig.db_name}库${operatorConfig.table_name}表${field}字段进行总分检核")
              }
            }
            // Build the MQ payload.
            var isStop = "false"
            // A STRONG rule blocks the task once the error count reaches its threshold.
            if ("STRONG".equals(dmCheck.getRuleStrength) && errorNum >= dmCheck.getAlarmMax) {
              throw new ArithmeticException(s"强规则${dmCheck.getRuleName}错误数量达到阈值上限导致任务终止")
              // NOTE(review): unreachable — the throw above exits first, so isStop is never
              // set to "true" and no MQ row is emitted for a blocking rule. Confirm whether
              // the assignment was meant to happen BEFORE the throw.
              isStop = "true"
            }

            // del start by lianghuaxin #10620 20210621
            //            var countNum = dataFrame.count()
            //            val mqRow: Row = Row(taskFlowNum, taskId, taskName, taskType, businessTime, taskSubmitTime, batchNum, rule_id.toString,
            //              taskProjectId, dmCheck.getDataSourceId.toString, dmCheck.getDbName, dmCheck.getTableName, dmCheck.getColumnName, errorNum.toString, countNum.toString, isStop, testRun, taskProjectName, dataSourceName)
            //            mqRows.add(mqRow)
            // del end by lianghuaxin #10620 20210621

            // Cast the string-ified timestamp columns back to timestamp.
            errDataFrame = transformDfSchema(lowerDataFrame, errDataFrame)
            // TODO: needs verification; data fetched from the API should be folded in later.
            var errDataFrameLimit: DataFrame = errDataFrame.limit(500)
            val newEsDF = errDataFrameLimit.rdd.map(line => {
              val resultList = ArrayBuffer[String]()
              resultList += s"$db_name|@|$table_name|@|$taskSubmitTime|@|$field|@|${line.getAs(s"$field")}|@|$resultDescribe"
              var esRow: Row = Row(taskFlowNum, rule_id.toString, taskSubmitTime, tenantId, line.mkString("|"), resultList)
              esRow
            })

            // add start by lianghuaxin #10620 20210621
            val countNum = dataFrame.count()
            val errorNumByEs = newEsDF.count()
            val mqRow: Row = Row(taskFlowNum, taskId, taskName, taskType, businessTime, taskSubmitTime, batchNum, rule_id.toString,
              taskProjectId, dmCheck.getDataSourceId.toString, dmCheck.getDbName, dmCheck.getTableName, dmCheck.getColumnName, errorNumByEs.toString, countNum.toString, isStop, testRun, taskProjectName, dataSourceName)
            mqRows.add(mqRow)
            // add end by lianghuaxin #10620 20210621

            val frame = kc.sparkSession.createDataFrame(newEsDF, allEsSchema)
            emptyEsDF = emptyEsDF.union(frame)
          }
        }
        )
      }
    })
    // Aggregate the MQ summary rows.
    if (!mqRows.isEmpty) {
      val allMqSchema = StructType(Seq(StructField("taskFlowNum", StringType, true), StructField("taskId", StringType, true), StructField("taskName", StringType, true), StructField("taskType", StringType, true),
        StructField("businessTime", StringType, true), StructField("taskSubmitTime", StringType, true), StructField("batchNum", StringType, true), StructField("ruleId", StringType, true), StructField("projectId", StringType, true),
        StructField("dataSourceId", StringType, true), StructField("dbName", StringType, true), StructField("tableName", StringType, true), StructField("columnName", StringType, true),
        StructField("errNum", StringType, true), StructField("countNum", StringType, true), StructField("isStop", StringType, true), StructField("testRun", StringType, true), StructField("projectName", StringType, true), StructField("dataSourceName", StringType, true)))
      val allMqDataFrame: DataFrame = kc.sparkSession.createDataFrame(mqRows, allMqSchema)
      // Collapse every summary row into a single-column JSON dataframe.
      val countSchema = StructType(Seq(StructField("count_info_mq", StringType, true)))
      val countDF = allMqDataFrame.map(line => {
        val json = s"""{"taskFlowNum":"${line.getAs("taskFlowNum")}","taskId":"${line.getAs("taskId")}","taskName":"${line.getAs("taskName")}","taskType":"${line.getAs("taskType")}","businessTime":"${line.getAs("businessTime")}","taskSubmitTime":"${line.getAs("taskSubmitTime")}","batchNum":"${line.getAs("batchNum")}","ruleId":"${line.getAs("ruleId")}","projectId":"${line.getAs("projectId")}","dataSourceId":"${line.getAs("dataSourceId")}","dbName":"${line.getAs("dbName")}","tableName":"${line.getAs("tableName")}","columnName":"${line.getAs("columnName")}","errNum":"${line.getAs("errNum")}","countNum":"${line.getAs("countNum")}","isStop":"${line.getAs("isStop")}","testRun":"${line.getAs("testRun")}","projectName":"${line.getAs("projectName")}","dataSourceName":"${line.getAs("dataSourceName")}"}""".stripMargin
        Row(json)
      })(RowEncoder(countSchema))
      log.info(s"MQ数据=====================>  ${countDF.show()}")
      // TODO(jx): MQ reporting logic for the check results.
      if ((!countDF.rdd.isEmpty() && runEnv.equalsIgnoreCase("online")) || (!countDF.rdd.isEmpty() && isReport) || testRun.equals("true")) {
        // Write to MQ.
        checkSinkMQ(kc, countDF)
      }
    }

    // Aggregate the ES output.
    kc.conf.set("stop_check", "false")
    // TODO(jx): ES reporting logic for the check results.
    if (runEnv.equalsIgnoreCase("online") || isReport || testRun.equals("true")) {
      addResult(targets.head, emptyEsDF)
    }
    else {
      addResult(targets.head, createEmptyESDf(kc))
    }
  }

  /**
    * Parses the operator config and loads the check rules and MQ reporting config.
    *
    * Mutates the class fields: operatorConfig, checkParams, suffix/subTable,
    * businessCheckRule, testRun, limit, isReport and mqCheckInfo.
    *
    * @param kc     runtime context (scheduler conf, rule lookup channel)
    * @param config operator JSON config for this module instance
    */
  def init(kc: KhaosContext, config: String): Unit = {
    // In a single environment runEnv is always "test", so use the is_online flag to
    // decide whether check results are reported; online (job already published) reports.
    val jobIsOnline = kc.conf.getBoolean("is_online", false)
    // Deploy environment: standalone is "single"; the integrated platform is "unify".
    val deployEnv = kc.conf.getString("common.deploy.env", null)
    // In a single environment, whether the job is online decides ES/MQ reporting.
    // In a unify environment, draft jobs may also be scheduled periodically (not "online").
    if (deployEnv == "single" && jobIsOnline) isReport = true
    logInfo(s"deployEnv=$deployEnv, jobIsOnline=$jobIsOnline")
    // Parse the operator configuration.
    implicit val formats = DefaultFormats
    operatorConfig = parse(config, true).extract[BussinessCheckInfo]
    checkParams = compact(render(operatorConfig.extender.check.params))
    val sub_table = operatorConfig.sub_table
    if (sub_table != None) {
      if (sub_table.get.on_off.equals("true")) {
        suffix = sub_table.get.suffix
        subTable = true
      }
    }
    // Load the check-rule configuration.
    val test_run = operatorConfig.test_run
    if (test_run != None && test_run.get) {
      // Test run: use the design-time (draft) rules and cap the scanned rows.
      businessCheckRule = CheckUtils.getDesignTableCheckRule(kc, operatorConfig.extender.check.clazz, checkParams)
      testRun = "true"
      limit = operatorConfig.limit.get
    }
    else if (runEnv.equalsIgnoreCase("test")) {
      /**
        * Test workflow run:
        * first use the current rule ids to fetch all production rules, take the
        * design-time ids (designId) from the result, then combine published and
        * unpublished design ids to fetch the design-time rules.
        */
      businessCheckRule = CheckUtils.getDesignTableCheckRule(kc, operatorConfig.extender.check.clazz, checkParams)
      //businessCheckRule = queryRuleRunTestJob(kc, operatorConfig.extender.check.clazz, checkParams)
    }
    else {
      // Production workflow run: use the published rules.
      businessCheckRule = CheckUtils.getCheckRule(kc, operatorConfig.extender.check.clazz, checkParams)
    }

    // Detect stale rule configuration (some configured ids no longer resolve).
    val ruleIds = JSON.parseObject(checkParams).getJSONArray("rule_ids")
    if (ruleIds.size != businessCheckRule.size) {
      log.info(s"检核规则配置已失效, 请重新配置规则!")
      //throw new Exception(s"检核规则配置已失效, 请重新配置规则!")
    }
    // Load the MQ reporting configuration for the checks.
    mqCheckInfo = CheckUtils.getMQCheckInfo(kc, operatorConfig.extender.check.clazz)
  }

  /**
    * Builds an empty DataFrame with the ES report schema (five string columns plus the
    * errorDescribe string-array column), used when reporting is disabled.
    *
    * @param kc runtime context providing the Spark session
    * @return an empty DataFrame with the ES schema
    */
  def createEmptyESDf(kc: KhaosContext): DataFrame = {
    val stringFields = Seq("taskFlowNum", "ruleId", "taskSubmitTime", "tenantId", "dataDetaile")
      .map(name => StructField(name, StringType, true))
    val esSchema = StructType(stringFields :+ StructField("errorDescribe", ArrayType(StringType), true))
    kc.sparkSession.createDataFrame(kc.sparkSession.sparkContext.emptyRDD[Row], esSchema)
  }

  /**
    * Finds the loaded check rule whose id matches the given rule id.
    *
    * @param ruleId rule id as a decimal string (throws NumberFormatException if the
    *               list is non-empty and the id is not numeric, as before)
    * @return the matching DmCheck, or null when none matches — callers null-check
    */
  def getRuleParams(ruleId: String): DmCheck = {
    // JavaConversions exposes the java.util.List as a Scala collection; find is lazy,
    // so ruleId is only parsed while elements are being compared (same as the original
    // loop). Keep the null return (not Option) so existing callers still work.
    businessCheckRule.find(cr => ruleId.toLong == cr.getId).orNull
  }

  //21202F2938212B3E22272626252E434D
  /**
    * Derives the output schema of this operator.
    *
    * When no extract fields are configured, the upstream dependency's schema is
    * forwarded unchanged; otherwise the schema is built from the configured fields.
    *
    * @param kc           runtime context providing the schema channel
    * @param module_id    id of this module instance (unused)
    * @param config       operator JSON config (unused; operatorConfig is used instead)
    * @param dependencies upstream dependencies; head supplies the fallback schema
    * @return the resolved output Schema
    */
  override def schema(kc: KhaosContext,
                      module_id: String,
                      config: String,
                      dependencies: Seq[Dependency]): Schema = {

    val configuredFields = operatorConfig.extract_fields
    if (configuredFields.isEmpty) {
      // Nothing configured: inherit the upstream schema as-is.
      kc.schemaChannel.getSchema(dependencies.head.getSource())
    }
    else {
      // Build the schema straight from the configured extract fields.
      new Schema(configuredFields.map(info => KhaosStructField(info.field, info.data_type)))
    }
  }

  /**
    * Publishes the check summary DataFrame to RocketMQ.
    *
    * The frame is coalesced to one partition, all columns are joined with commas into a
    * single "mq_sink" column, and each row is sent as one message.
    *
    * @param kc   runtime context (unused here; kept for interface symmetry)
    * @param data summary rows to publish
    */
  def checkSinkMQ(kc: KhaosContext, data: DataFrame): Unit = {
    val nameServer = mqCheckInfo.getNamesrvAddr
    val topic = mqCheckInfo.getTopic
    val groupName = "diProducer"
    val tags = "dataIntegration"

    // One partition so a single producer call carries the whole batch.
    val singlePartition = data.repartition(1)
    val payload = singlePartition
      .select(concat_ws(",", singlePartition.columns.map(name => col(name)): _*) as "mq_sink")

    payload.foreachPartition(rows => {
      val sinkList = new util.ArrayList[String]
      rows.foreach(row => sinkList.add(row.getAs[String]("mq_sink")))
      RocketMQProducer.syncSend(nameServer, groupName, topic, tags, sinkList)
    })
  }

  /**
    * Returns a copy of the DataFrame with every column renamed to lower case
    * (column data is untouched).
    *
    * @param data input DataFrame
    * @return the same rows with lower-cased column names
    */
  def renameDF(data: DataFrame): DataFrame = {
    val loweredCols = data.schema.fields.map(f => data.col(f.name) as f.name.toLowerCase)
    data.select(loweredCols: _*)
  }

  /**
    * Casts every timestamp column of the DataFrame to string; other columns pass
    * through unchanged. Needed because getAs on a timestamp yields values like
    * "2019-05-08 15:24:09.0", which breaks value comparisons.
    *
    * @param data input DataFrame
    * @return the frame with timestamp columns cast to StringType
    */
  def timestamp2String(data: DataFrame): DataFrame = {
    val projected = data.schema.fields.map { f =>
      val column = data.col(f.name)
      if (f.dataType == TimestampType) column.cast(StringType) else column
    }
    data.select(projected: _*)
  }

  /**
    * Casts back to TimestampType any column of sourceDf that was a timestamp in
    * targetDf (the original frame) before timestamp2String stringified it.
    *
    * @param targetDf the original frame whose schema says which columns were timestamps
    * @param sourceDf the frame whose matching columns get cast back
    * @return sourceDf with the matching columns cast to TimestampType
    */
  def transformDfSchema(targetDf: DataFrame, sourceDf: DataFrame): DataFrame = {
    // Collect the timestamp column names once instead of the original O(n*m)
    // nested schema scan; the resulting frame is identical.
    val timestampCols = targetDf.schema.fields
      .filter(_.dataType == TimestampType)
      .map(_.name)
      .toSet
    sourceDf.schema.fields.foldLeft(sourceDf) { (df, field) =>
      if (timestampCols.contains(field.name))
        df.withColumn(field.name, df.col(field.name).cast(TimestampType))
      else
        df
    }
  }

  /**
    * Resolves the rule set for a workflow running in test state (added 2020-05-06).
    *
    * First queries the production rules by the current ids; when some exist, swaps each
    * production id for its design-time id, re-queries the design-time rules, then maps
    * the ids back so later lookups by the caller-supplied ids still match.
    *
    * @param kc              runtime context for the rule lookups
    * @param className       check class name (unused; operatorConfig's clazz is used)
    * @param checkParamsJson parameters carrying the rule ids (logged only)
    * @return the resolved design-time rules, ids rewritten to the production ids
    */
  def queryRuleRunTestJob(kc: KhaosContext, className: String, checkParamsJson: String): util.List[DmCheck] = {
    // First look up the published production rules.
    log.info(s"规则id相关参数为=======> $checkParamsJson")
    businessCheckRule = CheckUtils.getCheckRule(kc, operatorConfig.extender.check.clazz, checkParams)
    if (businessCheckRule.isEmpty) {
      CheckUtils.getDesignTableCheckRule(kc, operatorConfig.extender.check.clazz, checkParams)
    }
    else {
      val map = JSON.parseObject(checkParams, classOf[util.HashMap[Any, Any]])
      val ruleIds: util.List[Long] = map.get("rule_ids").asInstanceOf[util.List[Long]]
      log.info(s"规则id集合为=======> ${ruleIds.toString}")
      val idMap: util.HashMap[Long, Long] = new util.HashMap[Long, Long]()
      // Drop each production id and put its design-time id into the query list.
      for (a <- businessCheckRule) {
        ruleIds.remove(a.getId)
        ruleIds.add(a.getDesignId)
        // BUGFIX: the original wrote `map + (a.getDesignId -> a.getId)`, a no-op whose
        // result was discarded, leaving idMap empty; record the mapping properly so the
        // id restoration below can actually fire.
        idMap.put(a.getDesignId, a.getId)
      }
      log.info(s"两个id映射为=======> ${idMap.toString}")
      // BUGFIX: overwrite rule_ids in the params in place — the original
      // `map + ("rule_ids" -> ruleIds)` also discarded its result.
      map.put("rule_ids", ruleIds)
      val str: String = JSON.toJSONString(map, SerializerFeature.PrettyFormat)
      log.info(s"规则id集合为=======> ${str}")
      val checks = CheckUtils.getDesignTableCheckRule(kc, operatorConfig.extender.check.clazz, str)
      // Restore the caller-supplied ids: later code looks rules up by the ids the
      // front end passed in.
      for (a <- checks) {
        if (idMap.containsKey(a.getId)) {
          a.setId(idMap.get(a.getId))
        }
      }
      checks
    }
  }
}

