package generator

import generator.entity.Row
import generator.util.{ GenerateFromHSSFExcel, captureName, dataBaseConnect}
import org.apache.poi.hssf.usermodel.HSSFSheet
import org.apache.poi.ss.usermodel.CellType

import java.io.{File, PrintWriter}
import scala.collection.mutable.ListBuffer

object Generator {
//  val FileName: String = "C:\\Users\\16624\\Documents\\WeChat Files\\wxid_u2jm0ieu550n22\\FileStorage\\File\\2023-09\\1.0与2.0表关系0813(1).xls"
  // Relative path of the 1.0<->2.0 table-mapping workbook that getDataSource parses.
  val FileName: String = "datas/1.0与2.0表关系.xls"
  // Output path for the generated Flink RichSinkFunction Java sources (written by generateStreamSql).
  val sinkFuncFile = "datas/StreamSinkFunc.java"
  // Output path for the generated Flink job main-class Java sources (written by generateStreamMain).
  val mainFile = "datas/StreamMain.java"

  /**
   * Entry point: loads sheet index 1 of [[FileName]] through [[getDataSource]] and
   * runs exactly one code generator over the parsed rows. The commented-out lines
   * are manual switches for the other generators; uncomment the one you need.
   */
  def main(args: Array[String]): Unit = {
    val generator = GenerateFromHSSFExcel[List[Row]](FileName, 1, getDataSource)
//    generator.generate(generateTablesSql)
//        generator.generate(generateTransformSql)
//    generator.generate(generateMergeSql)
//    generator.generate(generateStreamSql)
    generator.generate(generateStreamMain)
//    dataBaseConnect.findDataType(new Row(null, "G_SUBS", "POWEROFF_FLAG", null, null, null, null, null, null, null))

  }

  /**
   * Parses the mapping worksheet into one [[Row]] per data line (header row 0 is skipped).
   *
   * Cell layout read here (0-based columns): 2 = table comment, 3 = column info,
   * 5/6 = 1.0 table/field, 7 = field remark, 9/11 = 2.0 table/field,
   * 12/13 = fallback type detail/type, 14/15 = primary type detail/type,
   * 17 = reviewer remark (may be absent).
   *
   * @param sheet the HSSF worksheet holding the 1.0<->2.0 mapping
   * @return parsed rows in sheet order
   */
  def getDataSource(sheet: HSSFSheet): List[Row] = {
    val lastNum = sheet.getLastRowNum
    val list = ListBuffer[Row]()
    for (a <- 1 to lastNum) {
      val row = sheet.getRow(a)
      // Force the detail cell to STRING so numeric contents read back as text.
      val field_type_detail_cell = row.getCell(14)
      field_type_detail_cell.setCellType(CellType.STRING)
      var field_type_detail = field_type_detail_cell.getStringCellValue
      var field_type = row.getCell(15).getStringCellValue
      val cj1_tableName = row.getCell(5).getStringCellValue
      val cj1_field = row.getCell(6).getStringCellValue
      val info = row.getCell(3).getStringCellValue
      val comment = row.getCell(7).getStringCellValue
      val commentRowCell = row.getCell(17)
      var commentRow = ""
      if (commentRowCell != null) commentRow = commentRowCell.getStringCellValue
      val commentTable = row.getCell(2).getStringCellValue

      val back_field_type_cell = row.getCell(13)
      val back_field_type_detail_cell = row.getCell(12)

      var back_field_type = ""
      if (back_field_type_cell != null) back_field_type = back_field_type_cell.getStringCellValue

      // The previous code special-cased ERROR-typed cells, but both branches ended up
      // calling toString on the cell, so a single null-guarded read is equivalent.
      var back_field_type_detail = ""
      if (back_field_type_detail_cell != null) back_field_type_detail = back_field_type_detail_cell.toString

      // Normalise "12.0"-style numeric text to "12"; anything unparsable becomes "".
      // Try replaces the former bare `case _ =>` catch, which also swallowed fatal errors.
      back_field_type_detail =
        scala.util.Try(back_field_type_detail.toDouble.toInt.toString).getOrElse("")

      if (!isValid(field_type)) field_type = back_field_type
      // BUGFIX: was `field_type != "DATE" || field_type != "date"`, which is always true
      // and made the DATE guard a no-op. DATE columns must not pick up a length detail.
      if (!isValid(field_type_detail) && (field_type != "DATE" && field_type != "date")) field_type_detail = back_field_type_detail
      val cj2_field = row.getCell(11).getStringCellValue
      val cj2_tableName = row.getCell(9).getStringCellValue
      list += Row(
        info,
        cj1_tableName.toUpperCase,
        cj1_field.toUpperCase,
        field_type,
        field_type_detail,
        toSafeString(comment),
        toSafeString(commentTable),
        toSafeString(commentRow),
        cj2_tableName.toUpperCase,
        cj2_field,
        isValidDev(toSafeString(commentRow)),
        captureName.captureName(cj1_field, true),
        captureName.captureName(cj2_field, true)
      )
    }
    list.toList
  }

  /**
   * Writes `data` to the file at `path`, creating or truncating it.
   *
   * The writer is now closed in a `finally` block so the file handle is not leaked
   * if `write` throws (the original leaked the PrintWriter on failure).
   *
   * @param path destination file path
   * @param data full file contents to write
   */
  def makeFile(path: String, data: String): Unit = {
    val writer = new PrintWriter(new File(path))
    try writer.write(data)
    finally writer.close()
  }


  /**
   * Prints an Oracle DDL script (DROP + CREATE TABLE + table/column comments) to
   * stdout for every distinct 1.0 table found in `list`.
   *
   * Columns whose type info fails [[isValid]] are still emitted but prefixed with
   * "-- " so they appear commented out. A row whose remark equals "主键" (primary
   * key) becomes NOT NULL and names the table's primary-key constraint. If no
   * column of a table is valid, the entire script for that table is commented out.
   */
  def generateTablesSql(list: List[Row]): Unit = {
    list.groupBy(_.cj1_tableName).foreach {
      case (tableName, unsortlist) => {
        var flag = false // becomes true once at least one column is valid
        // Boolean sort puts invalid (false) rows first, grouping commented lines at the top.
        val list = unsortlist.sortBy(row => isValid(row))
        var primaryKey: String = null
        val str = new StringBuilder()
        val commentStr = new StringBuilder(s"comment on table $tableName is\n '${toSafeString(list(0).commentTable)}';\n")
        str.append(s"drop table $tableName cascade constraints;\n")
        str.append("create table " + tableName + "( \n")
        for (i <- 0 until list.length) {
          val row = list(i)
          if (!isValid(row)) str.append("-- ")
          else flag = true
          str.append(s" ${row.cj1_field} ${getValidField(row.field_type,row.field_type_detail)}")
//          if (isValid(row.field_type_detail)) str.append(s"(${row.field_type_detail})")
          if (row.comment == "主键") {
            primaryKey = row.cj1_field
            str.append(" not null")
          }
          // Trailing comma: needed on the last column too when a PK constraint line follows.
          if (isValid(primaryKey) || i != list.length - 1) str.append(",")
          if (!isValid(row) || isValid(row.commentRow) ) str.append("-- " + toSafeString(row.commentRow))
          str.append("\n")


          // Mirror the validity prefix on both lines of the COMMENT ON statement.
          if (!isValid(row)) commentStr.append("-- ")
          commentStr.append("comment on column " + tableName + "." + row.cj1_field + " is\n")
          if (!isValid(row)) commentStr.append("-- ")
          commentStr.append("'" + row.info.replace("\n", " ").replace("\r", " ") + "';\n")
        }
        if (isValid(primaryKey)) str.append(s"constraint PK_$tableName primary key ($primaryKey)\n")
        str.append(");\n")
        var tar: String = null
        //if (tableName == "C_IT_RUN" || tableName == "D_MEAS_BOX" || tableName == "S_IR_READ") flag = false
        // No valid column at all: comment out every line of the emitted script.
        if (flag) tar = str.toString() + "\n" + commentStr.toString()
        else tar = "-- " + str.toString().replace("\n", "\n-- ") + "\n-- " + commentStr.toString().replace("\n", "\n-- ")
        println(tar)
        //        println(commentStr.toString())

      }
    }
  }

  /**
   * For every 1.0 table, prints the skeleton of a Scala `SingleToSingleConvert`
   * object that selects the mapped 2.0 columns and feeds them to `convert`.
   *
   * The full object is only printed when the table has at least one processable
   * field AND maps to exactly one 2.0 source table; otherwise a diagnostic banner
   * is printed instead. Output goes to stdout (the makeFile call is disabled).
   */
  def generateTransformSql(list: List[Row]): Unit = {
    list.groupBy(_.cj1_tableName).foreach {
      case (tableName, unsortlist) => {
        var flag = false // at least one processable field?
        var Singleflag = true // does this table map to a single 2.0 table?
        val list = unsortlist.sortBy(row => isValid(row))
        var cj2_tableName: String = list.last.cj2_tableName
        val str = new StringBuilder()
        var node = ","
        str.append("sb.append(\"(\") \n\t\tsb.append(\" select \")\n\n\t\t")
        for (i <- 0 until list.length) {
          val row = list(i)
          if (isValidDev(row) ) {
            flag = true

            // Last emitted column drops the trailing comma.
            if (i == list.length - 1) node = " "
            if (!cj2_tableName.contains(row.cj2_tableName)) {
              Singleflag = false
              cj2_tableName = cj2_tableName + "&&" + row.cj2_tableName
            }
          } else str.append("//")
          str.append("sb.append(\" " + row.cj2_field.padTo(30, ' ') + "       " + row.cj1_field.padTo(30, ' ') + node + "\")\n\t\t")
        }

        println(s"***********************************************\n****        ${cj2_tableName}_to_$tableName   *************\n***********************************************\n")
        if (flag && Singleflag) {
          val headStr = s"package com.dareway.transform.convert.single\n\nimport com.dareway.transform.common.SingleToSingleConvert\n\nobject ${cj2_tableName}_to_$tableName extends SingleToSingleConvert {\n\toverride def convert(): Unit = {\n\t\tval sb = new StringBuilder()\n\t\t"
          print(headStr)
          str.append("\n\t\tsb.append(\" from cj2." + cj2_tableName + " \")\n\t\tsb.append(\")\")\n\t\tconvert(sb.toString(), \"" + cj2_tableName + "\", \"" + tableName + "\")\n\t")
          str.append("}\n")
          str.append("\tdef main(args: Array[String]): Unit = {\n\t\tconvert()\n\t}\n}")
          println(str.toString())
          //makeFile(s"E:\\program\\scala\\ElecGuarantee\\Transform\\transform\\src\\main\\scala\\com\\dareway\\transform\\convert\\single\\${cj2_tableName}_to_$tableName.scala",headStr+str.toString())

        } else if (!flag) {
          println("----       没有有效字段               ----*\n\n")
        } else if (!Singleflag) {
          println("----       不是单对单                 ----*\n\n")
        }

      }
    }
  }

  /**
   * For every 1.0 table, prints an Oracle MERGE (upsert) statement that updates
   * matched rows and inserts unmatched ones from the mapped 2.0 table.
   *
   * The statement is printed only when the table has valid fields, maps to a
   * single 2.0 table, and has an identifiable match key (remark "主键"/"必须字段");
   * otherwise a diagnostic banner is printed (the statement is still shown when
   * only the key is missing, so it can be completed by hand).
   */
  def generateMergeSql(list:List[Row]):Unit = {
    list.groupBy(_.cj1_tableName).foreach {
      case (tableName, unsortlist) => {
        var flag = false // at least one valid field?
        var Singleflag = true // does this table map to a single 2.0 table?
        var primaryflag = false // was a primary/mandatory key column found?
        val list = unsortlist.sortBy(row => isValid(row))
        var cj2_tableName: String = list.last.cj2_tableName

        val str = new StringBuilder()
        val setStr  = new StringBuilder()
        val insertStrValues = new StringBuilder()
        val insertStr = new StringBuilder()
        val primaryStr = new StringBuilder()
        var node = ","

        str.append(s"MERGE into $tableName t USING (SELECT\n")
        for (i <- 0 until list.length) {
          val row = list(i)
          if (isValid(row)) {
            flag = true
            // Last emitted column drops the trailing comma.
            if (i == list.length - 1) node = " "

            if (!cj2_tableName.contains(row.cj2_tableName)) {
              Singleflag = false
              cj2_tableName = cj2_tableName + "&&" + row.cj2_tableName
            }

            // "主键" = primary key, "必须字段" = mandatory field: both act as the merge key.
            if (row.comment == "主键" || row.comment == "必须字段") {
              primaryflag = true
              primaryStr.append(s"\ton (t.${row.cj1_field} = s.${row.cj2_field})\n")
            } else {
              setStr.append(s"\t\tt.${row.cj1_field} = s.${row.cj2_field}" + node +"\n")
            }
            str.append("\t\t\t\t\t ? as " + row.cj2_field + node + "\n")

            insertStr.append(" " + row.cj1_field+node+" ")
            insertStrValues.append(s" s.${row.cj2_field}"+node+" ")
          }

        }
        str.append(s"\t\t\tFrom ${cj2_tableName}) s \n")
        if (primaryflag) str.append(primaryStr)
        else str.append(s"\ton ()\n")
        str.append("\tWHEN MATCHED THEN UPDATE SET\n")
        str.append(setStr)
        str.append("\tWHEN NOT MATCHED THEN INSERT\n")
        str.append("\t\t( ")
        str.append(insertStr)
        str.append(" )\n")
        str.append("\t\tVALUES ( ")
        str.append(insertStrValues)
        str.append(" )")
        println(s"----------------------------------------------*\n----        ${cj2_tableName} to $tableName   ------------*\n----------------------------------------------*\n")
        if (flag && Singleflag && primaryflag) {
          println(str.toString())
        } else if (!flag) {
          println("----       没有有效字段               ----*\n\n")
        } else if (!Singleflag) {
          println("----       不是单对单                 ----*\n\n")
        } else if (!primaryflag) {
          println("----       主键未知                 ----*\n\n")
          println(str.toString())
        }

      }
    }
  }

  /**
   * For every 1.0 table, generates the Java source of a Flink `RichSinkFunction`
   * that batches bean lists into Oracle via two prepared statements: a MERGE
   * (upsert, sql1) and a DELETE by key (sql2). All generated classes are printed
   * and concatenated into [[sinkFuncFile]].
   *
   * Rows rejected by [[isValidDev]] are emitted as "//"-commented Java lines so
   * they remain visible in the generated code. The class is considered complete
   * only when the table has valid fields, maps to a single 2.0 table, and has a
   * key column; otherwise a diagnostic banner precedes the (partial) output.
   */
  def generateStreamSql(list:List[Row]):Unit={
    var fileStr = ""
    list.groupBy(_.cj1_tableName).foreach {
      case (tableName, unsortlist) => {
        var flag = false // at least one processable field?
        var Singleflag = true // does this table map to a single 2.0 table?
        var primaryflag = false // was a primary/mandatory key column found?
        var primaryKey:Row = null
        val list = unsortlist.sortBy(row => isValid(row))
        var cj2_tableName: String = list.last.cj2_tableName
        val head = new StringBuilder()
        var str = new StringBuilder()
        val setStr = new StringBuilder()
        val insertStrValues = new StringBuilder()
        val insertStr = new StringBuilder()
        val primaryStr = new StringBuilder()
        val setStaStr = new StringBuilder()
        var node = ","
        var id = 0 // running 1-based index of valid fields, used for pstmt parameter slots

        str.append("\" MERGE into " + tableName + " t \"\n\t\t\t\t+\" USING (\"\n\t\t\t\t\t+ \"SELECT \"\n")
        for (i <- 0 until list.length) {
          val row = list(i)
          if (isValidDev(row)) {
            flag = true
            // Last emitted column drops the trailing comma.
            if (i == list.length - 1) node = " "

            if (!cj2_tableName.contains(row.cj2_tableName)) {
              Singleflag = false
              cj2_tableName = cj2_tableName + "&&" + row.cj2_tableName
            }
            id += 1

          } else {
            // Rejected rows: prefix every target buffer with the remark plus "//".
            str.append("\t\t\t\t\t\t//"+row.commentRow+"\n//")
            setStr.append("\t\t\t\t\t\t\t//"+row.commentRow+"\n//")
            insertStr.append("\t\t\t\t\t\t\t//"+row.commentRow+"\n//")
            insertStrValues.append("\t\t\t\t\t\t\t//"+row.commentRow+"\n//")
            setStaStr.append("\t\t\t\t//"+row.commentRow+"\n//")
          }

          if (row.comment == "主键" || row.comment == "必须字段") {
            primaryflag = true
            primaryKey = row
            primaryStr.append(s"on (t.${row.cj1_field} = s.${row.cj2_field})")
          } else {
            setStr.append("\t\t\t\t\t\t\t" + toRef(s"t.${row.cj1_field} = s.${row.cj2_field}" + node + " "))
          }
          str.append("\t\t\t\t\t\t+ \"? as " + row.cj2_field + node + " \"\n")
          // Date parameters need an explicit (Date) cast in the generated setter call.
          var trans = ""
          if (dataTypeDev(row) == "Date") trans = "(Date)"
          setStaStr.append(s"\t\t\t\tpstmt1.set${dataTypeDev(row)}(${id}, $trans v.get${captureName.captureName(row.cj1_field, true)}());\n")
          insertStr.append("\t\t\t\t\t\t\t" + toRef(" " + row.cj1_field + node + " "))
          insertStrValues.append("\t\t\t\t\t\t\t" + toRef(s" s.${row.cj2_field}" + node + " "))


        }

        head.append(s"package com.dareway.${captureName.captureName(tableName,true).toLowerCase};/*\n * @author: gjh \n* @date: 2023/9/12 9:25 \n*/\n\nimport com.config.MyConfig;\nimport com.entity.yx1.${captureName.captureName(tableName, true)};\nimport org.apache.flink.configuration.Configuration;\nimport org.apache.flink.streaming.api.functions.sink.RichSinkFunction;\nimport java.sql.Date;\nimport java.sql.Connection;\nimport java.sql.DriverManager;\nimport java.sql.PreparedStatement;\nimport java.util.List;\n\n")

        head.append(s"public class ${cj2_tableName.toUpperCase}_To_${captureName.captureName(tableName, true)}SinkFunc extends RichSinkFunction<List<${captureName.captureName(tableName,true)}>> {\n\tConnection conn; \n\tPreparedStatement pstmt1; \n\tPreparedStatement pstmt2;\n\t@Override\n\tpublic void open(Configuration parameters) throws Exception {\n\t\t")
        head.append(s"conn = DriverManager.getConnection(MyConfig.ORACLE_URL, MyConfig.ORACLE_USER, MyConfig.ORACLE_PASS);\n\t\tString sql1 = ")

        // Prepend the class header by appending the SQL body onto it, then continue on `str`.
        str = head.append(str)
        str.append("\t\t\t\t\t\t+ \" From DUAL \" \n\t\t\t\t\t\t+ \" ) s \"\n")
        if (primaryflag) str.append("\t\t\t\t\t"+toRef(primaryStr.toString()))
        else str.append("\t\t\t\t\t"+toRef(s"ON () "))
        str.append("\t\t\t\t\t"+ toRef("WHEN MATCHED     THEN ") + "\t\t\t\t\t" + toRef("\tUPDATE SET "))
        str.append(setStr)
        str.append("\t\t\t\t\t"+ toRef("WHEN NOT MATCHED THEN ") + "\t\t\t\t\t" + toRef("\tINSERT ( "))
        str.append(insertStr)
        str.append("\t\t\t\t\t\t\t" + toRef(") "))
        str.append("\t\t\t\t\t" + toRef("     VALUES ( "))
        str.append(insertStrValues)
        str.append("\t\t\t\t\t\t\t" + toRef(") ")+"\t\t\t\t\t\t\t;\n")
        // Placeholders keep the generated class compilable even when no key was found.
        var primaryKeyField = "primaryKeyField"
        var primaryKeyFieldType = "String"
        if(primaryKey!=null) {
          primaryKeyField=primaryKey.cj1_field
          primaryKeyFieldType=dataTypeDev(primaryKey)
        }
        str.append("\t\tString sql2 = \"delete from "+tableName+" where "+primaryKeyField+"  = ? \";\n\n\t\tpstmt1 = conn.prepareStatement(sql1);\n\t\tpstmt2 = conn.prepareStatement(sql2);\n\n\t}\n\n")
        str.append(s"\t@Override\n\tpublic void invoke(List<${captureName.captureName(tableName,true)}> value, Context context) throws Exception {\n\t\tfor (${captureName.captureName(tableName,true)} v : value) {\n")
        str.append("\t\t\tif(v.getType().equals(\"delete\")){\n\t\t\t\tpstmt2.set"+primaryKeyFieldType+s"(1, v.get${captureName.captureName(primaryKeyField,true)}());\n\t\t\t\tpstmt2.addBatch();\n\t\t\t}else{\n")
        str.append(setStaStr+"\t\t\t\tpstmt1.addBatch();\n\t\t\t}\n\t\t}\n\n\t\tpstmt2.executeBatch();\n\t\tpstmt1.executeBatch();\n\n\t}\n\n\t@Override\n\tpublic void close() throws Exception {\n\t\tif (pstmt1 != null) {\n\t\t\tpstmt1.close();\n\t\t}\n\t\tif (pstmt2 != null) {\n\t\t\tpstmt2.close();\n\t\t}\n\t\tif (conn != null){\n\t\t\tconn.close();\n\t\t}\n\t}\n}\n")
        var tarStr = s"----------------------------------------------*\n----        ${cj2_tableName} to $tableName   ------------*\n----------------------------------------------*\n"
        if (flag && Singleflag && primaryflag) {
          tarStr += str.toString()
        } else if (!flag) {
          tarStr += "----       没有有效字段               ----*\n\n" + str.toString()
        } else if (!Singleflag) {
          tarStr += "----       不是单对单                 ----*\n\n" + str.toString()
        } else if (!primaryflag) {
          tarStr += "----       主键未知                 ----*\n\n" + str.toString()
        }
        println(tarStr)
        fileStr += tarStr
//        appendFile(sinkFuncFile,tarStr)
      }
    }
    makeFile(sinkFuncFile,fileStr)
  }

  /**
   * For every 1.0 table, generates the Java source of a Flink CDC job main class:
   * Oracle incremental source -> JSON-to-bean mapping (before/after images) ->
   * keyed tumbling window batching -> the matching SinkFunc (as produced by
   * [[generateStreamSql]]). All classes are printed and written to [[mainFile]].
   *
   * Rows rejected by [[isValidDev]] are emitted as "//"-commented setter lines.
   * The diagnostic banner logic mirrors generateStreamSql.
   */
  def generateStreamMain(list:List[Row]):Unit={
    var fileStr = ""
    list.groupBy(_.cj1_tableName).foreach {
      case (tableName, unsortlist) => {
        var flag = false // at least one processable field?
        var Singleflag = true // does this table map to a single 2.0 table?
        var primaryflag = false // was a primary/mandatory key column found?
        var primaryKey: Row = null
        val list = unsortlist.sortBy(row => isValid(row))
        var cj2_tableName: String = list.last.cj2_tableName

        // Bean setter lines taken from the CDC "before" image (deletes) and "after" image (upserts).
        val setBeforeStr = new StringBuilder()
        val setAfterStr = new StringBuilder()
        var str = new StringBuilder()
        var node = ","

        for (i <- 0 until list.length) {
          val row = list(i)
          if (isValidDev(row)) {
            flag = true
            if (i == list.length - 1) node = " "

            if (!cj2_tableName.contains(row.cj2_tableName)) {
              Singleflag = false
              cj2_tableName = cj2_tableName + "&&" + row.cj2_tableName
            }

          } else {
            // Rejected rows: keep the remark visible and comment out the setter line.
            setBeforeStr.append("\t\t\t\t\t\t//"+row.commentRow+"\n//")
            setAfterStr.append("\t\t\t\t\t\t//"+row.commentRow+"\n//")
          }
          if (row.comment == "主键" || row.comment == "必须字段") {
            primaryflag = true
            primaryKey = row
          } else {
            // non-key fields need no extra handling here
          }
          setBeforeStr.append("\t\t\t\t\t\td_bean.set" + captureName.captureName(row.cj1_field, true) + "(j_bean.getBefore().get" + dataTypeDev(row) + "(\"" + captureName.captureName(row.cj2_field, false) + "\".toUpperCase()));\n")
          setAfterStr.append("\t\t\t\t\t\td_bean.set" + captureName.captureName(row.cj1_field, true) + "(j_bean.getAfter().get" + dataTypeDev(row) + "(\"" + captureName.captureName(row.cj2_field, false) + "\".toUpperCase()));\n")

        }

        // Placeholders keep the generated class compilable even when no key was found.
        var primaryKeyField = "primaryKeyField"
        var primaryKeyFieldType = "String"
        if (primaryKey != null) {
          primaryKeyField = primaryKey.cj1_field
          primaryKeyFieldType = dataTypeDev(primaryKey)
        }
        // Java boilerplate: package declaration, imports, env/checkpoint setup, CDC source.
        str.append("package com.dareway." + captureName.captureName(tableName, true).toLowerCase + ";/* \n* @author: gjh \n* @date: 2023/9/12 9: 50 \n*/\nimport com.alibaba.fastjson.JSON;\nimport com.alibaba.fastjson.JSONObject;\nimport com.config.MyConfig;\nimport com.entity.JObject;\nimport com.entity.yx1." + captureName.captureName(tableName, true) + ";\nimport com.utils.UserDeserialization;\nimport com.ververica.cdc.connectors.base.options.StartupOptions;\nimport com.ververica.cdc.connectors.base.source.jdbc.JdbcIncrementalSource;\nimport com.ververica.cdc.connectors.oracle.source.OracleSourceBuilder;\nimport org.apache.flink.api.common.eventtime.WatermarkStrategy;\nimport org.apache.flink.api.common.restartstrategy.RestartStrategies;\nimport org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend;\nimport org.apache.flink.streaming.api.CheckpointingMode;\nimport org.apache.flink.streaming.api.datastream.DataStreamSource;\nimport org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;\nimport org.apache.flink.streaming.api.environment.CheckpointConfig;\nimport org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;\nimport org.apache.flink.streaming.api.functions.ProcessFunction;\nimport org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;\nimport org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;\nimport org.apache.flink.streaming.api.windowing.time.Time;\nimport org.apache.flink.streaming.api.windowing.windows.TimeWindow;\nimport org.apache.flink.table.api.bridge.java.StreamTableEnvironment;\nimport org.apache.flink.util.Collector;\nimport java.time.Duration;\nimport java.util.ArrayList;\nimport java.util.List;\nimport java.util.Properties;\nimport java.util.concurrent.TimeUnit;")
        str.append("\npublic class " + captureName.captureName(tableName, true) + "_Main { \n\tpublic static void main (String[] args) throws Exception { \n\t\t//Configuration conf = new Configuration(); \n\t\t// conf.setString(\"execution.savepoint.path\",\"hdfs://10.1.50 \n\t\t// .211:8020/flinkCK/5dbfd3a4150dc40d5bd79869c4029689/chk-125\"); \n\t\tStreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); \n\t\tenv.setParallelism(1); \n\t\tStreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); \n\t\t//2.1 开启Checkpoint,每隔5秒钟做一次CK  ,并指定CK的一致性语义 \n\t\tenv.enableCheckpointing(10000L, CheckpointingMode.EXACTLY_ONCE); \n\t\t//        env.enableCheckpointing(5 1000L, CheckpointingMode.EXACTLY_ONCE); \n\t\tenv.getCheckpointConfig().setAlignedCheckpointTimeout(Duration.ofMillis(5 * 1000L)); \n\t\tenv.getCheckpointConfig().setTolerableCheckpointFailureNumber(5); \n\t\t// 2.2 设置任务关闭的时候保留最后一次 CK 数据 \n\t\tenv.getCheckpointConfig().setExternalizedCheckpointCleanup( \n\t\t\tCheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION); \n\t\tenv.getCheckpointConfig().setMaxConcurrentCheckpoints(1); \n\t\t// 2.3 设置超时时间为 1 分钟 \n\t\t// env.getCheckpointConfig().setCheckpointTimeout(600 * 1000L); \n\t\t//        env.getCheckpointConfig().setCheckpointInterval(60000L); \n\t\tenv.getCheckpointConfig().setMinPauseBetweenCheckpoints(10000L); \n\t\t//  2.6 设置状态后端 \n\t\tenv.setStateBackend(new EmbeddedRocksDBStateBackend(true)); \n\t\tenv.getCheckpointConfig().setCheckpointStorage( \n\t\t\"hdfs://10.1.50.211:8020/flinkCKtest\" \n\t\t); \n\t\tenv.setRestartStrategy(RestartStrategies.fixedDelayRestart( \n\t\t\t3, // 尝试重启的次数 \n\t\t\torg.apache.flink.api.common.time.Time.of(1, TimeUnit.MINUTES)) // 间隔 \n\t\t);\n\n \n\t\tProperties debeziumProperties = new Properties(); \n\t\tdebeziumProperties.setProperty(\"log.mining.strategy\", \"online_catalog\"); \n\t\tdebeziumProperties.setProperty(\"log.mining.continuous.mine\", \"true\"); 
\n\t\tdebeziumProperties.setProperty(\"bigint.unsigned.handling.mode\", \"long\"); \n\t\tdebeziumProperties.setProperty(\"decimal.handling.mode\", \"double\"); \n\t\tdebeziumProperties.setProperty(\"database.tablename.case.insensitive\", \"false\");\n\n \n\t\tJdbcIncrementalSource < String > oracleChangeEventSource = \n\t\t\tnew OracleSourceBuilder() \n\t\t\t\t.hostname(\"10.1.52.115\") \n\t\t\t\t.port(1521) \n\t\t\t\t.databaseList(\"XE\") \n\t\t\t\t.schemaList(\"FLINK\") \n\t\t\t\t.tableList(\"FLINK." + cj2_tableName.toUpperCase + "\") \n\t\t\t\t.username(\"flink\") \n\t\t\t\t.password(\"flink\") \n\t\t\t\t.deserializer(new UserDeserialization()) \n\t\t\t\t.includeSchemaChanges(true) // output the schema changes as well \n\t\t\t\t.startupOptions(StartupOptions.initial()) \n\t\t\t\t.debeziumProperties(debeziumProperties) \n\t\t\t\t.splitSize(8) \n\t\t\t\t.build();\n\n \n\t\tDataStreamSource < String > oracleParallelSource = env.fromSource( \n\t\t\toracleChangeEventSource, \n\t\t\tWatermarkStrategy.noWatermarks(), \n\t\t\t\"OracleParallelSource\");\n\n \n\t\t" + captureName.captureName(tableName, true) + " d_bean = new " + captureName.captureName(tableName, true) + "();\n \n\t\tJObject j_bean = new JObject();\n\n \n\t\tSingleOutputStreamOperator < " + captureName.captureName(tableName, true) + " > process = oracleParallelSource.process(new ProcessFunction < String, \n\t\t\t" + captureName.captureName(tableName, true) + " > () { \n\t\t\t\t@Override \n\t\t\t\tpublic void processElement(String value, Context ctx, Collector < " + captureName.captureName(tableName, true) + " > out) throws Exception { \n\t\t\t\t\tJSONObject jsonObject = JSON.parseObject(value); \n\t\t\t\t\td_bean.setEmpty(); \n\t\t\t\t\tj_bean.setBefore(JSON.parseObject(jsonObject.getString(\"before\"))); \n\t\t\t\t\tj_bean.setAfter(JSON.parseObject(jsonObject.getString(\"after\"))); \n\t\t\t\t\tj_bean.setType(jsonObject.getString(\"type\")); 
\n\t\t\t\t\tj_bean.setTableName(jsonObject.getString(\"tableName\")); \n")
        str.append(" \n\t\t\t\tif (j_bean.getTableName().equals(\"" + cj2_tableName + "\".toUpperCase())) {\n\t\t\t\t\td_bean.setEmpty();\n\t\t\t\t\tif (j_bean.getType().equals(\"delete\")) {\n" + setBeforeStr.toString() + "\t\t\t\t\t\td_bean.setType(j_bean.getType());\n\t\t\t\t\t} else {\n" + setAfterStr.toString() + "\t\t\t\t\t\td_bean.setType(j_bean.getType());\n\t\t\t\t\t}\n\t\t\t\t\tout.collect(d_bean);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n")
        str.append("\t\t//输出主流 \n\t\tprocess.keyBy(v -> Math.abs(v.get" + captureName.captureName(primaryKeyField,true) + "().hashCode()) % MyConfig.w_num) \n\t\t\t\t.window(TumblingProcessingTimeWindows.of(Time.seconds(MyConfig.w_time))) \n\t\t\t\t.process(new ProcessWindowFunction < " + captureName.captureName(tableName, true) + ", List < " + captureName.captureName(tableName, true) + " >, Integer, TimeWindow > () { \n\t\t\t\t\t@Override \n\t\t\t\t\tpublic void process(Integer integer, Context context, Iterable < " + captureName.captureName(tableName, true) + " > elements, \n\t\t\t\t\t\tCollector < List < " + captureName.captureName(tableName, true) + " >> out) throws Exception { \n\t\t\t\t\t\tArrayList < " + captureName.captureName(tableName, true) + " > arr = new ArrayList < " + captureName.captureName(tableName, true) + " > (); \n\t\t\t\t\t\tfor (" + captureName.captureName(tableName, true) + " c : elements ) { \n\t\t\t\t\t\t\tarr.add(c); \n\t\t\t\t\t\t}; \n\t\t\t\t\t\tout.collect(arr); \n\t\t\t\t\t} \n\t\t\t\t}) \n\t\t\t\t.addSink(new "+cj2_tableName.toUpperCase+"_To_"+captureName.captureName(tableName, true)+"SinkFunc"+"());\n \n\t\tenv.execute(\"flink cdc\");\n\t}\n}\n")

        var tarStr = s"----------------------------------------------*\n----        ${cj2_tableName} to $tableName   ------------*\n----------------------------------------------*\n"
        if (flag && Singleflag && primaryflag) {
          tarStr += str.toString()
        } else if (!flag) {
          tarStr +=  "----       没有有效字段               ----*\n\n" + str.toString()
        } else if (!Singleflag) {
          tarStr += "----       不是单对单                 ----*\n\n" + str.toString()
        } else if (!primaryflag) {
          tarStr += "----       主键未知                 ----*\n\n" + str.toString()
        }
        println(tarStr)
        fileStr += tarStr
//        appendFile(mainFile,tarStr)
      }
    }
    makeFile(mainFile,fileStr)
  }

  /** True when `str` is neither null nor the empty string. */
  def isValid(str: String): Boolean =
    str != null && str.nonEmpty

  /**
   * True when the raw type description is usable for DDL generation: either a
   * NUMBER/VARCHAR type with an integer length detail, any DATE-ish type, or a
   * type string that already carries its own size, e.g. "VARCHAR2(30)".
   */
  def isValidField(str: String, details: String): Boolean = {
    // `def` keeps the original short-circuit: `details` is only touched for NUMBER/VARCHAR.
    def sizedByDetail = (str.contains("NUMBER") || str.contains("VARCHAR")) && details.matches("-?\\d+")
    def isDateType    = str.contains("DATE") || str.contains("date")
    def selfSized     = str.matches("(?:VARCHAR2\\(\\d+\\)|NUMBER\\(\\d+\\)|NUMBER\\(\\d+,\\d+\\))")
    sizedByDetail || isDateType || selfSized
  }
  /**
   * Renders a column type for DDL: appends "(details)" to NUMBER/VARCHAR types
   * when the detail is an integer, collapses any DATE-ish type to plain "DATE",
   * and otherwise returns the type string unchanged.
   */
  def getValidField(str: String, details: String): String = {
    val sizedByDetail = (str.contains("NUMBER") || str.contains("VARCHAR")) && details.matches("-?\\d+")
    if (sizedByDetail) str + "(" + details + ")"
    else if (str.contains("DATE") || str.contains("date")) "DATE"
    else str
  }
  /** Flattens a cell value to a single line by mapping CR and LF to spaces. */
  def toSafeString(str: String): String =
    str.map {
      case '\n' | '\r' => ' '
      case other       => other
    }
  /**
   * A row describes a usable 1.0 column when it has a concrete type, its remark
   * does not flag it as "不处理" (do not process), and the type/detail pair is
   * well-formed per [[isValidField]].
   */
  def isValid(row: Row): Boolean =
    row.field_type match {
      case null | "" => false
      case fieldType =>
        // De Morgan of the original: skip only when a remark exists AND says "不处理".
        val skipped = isValid(row.commentRow) && row.commentRow.contains("不处理")
        !skipped && isValidField(fieldType, row.field_type_detail)
      //(row.cj1_tableName != "C_IT_RUN" && row.cj1_tableName != "D_MEAS_BOX" && row.cj1_tableName != "S_IR_READ")
    }

  /**
   * Row-level variant of [[isValidDev]]: the original duplicated the whole
   * skip-marker blacklist of the String overload verbatim, so the two could
   * silently drift apart. It now simply defers to the String overload on the
   * row's remark column — behavior is unchanged.
   */
  def isValidDev(row: Row): Boolean = isValidDev(row.commentRow)

  /** Wraps `str` as one line of a generated Java string-concatenation: `+ "<str>"` plus newline. */
  def toRef(str: String): String =
    s"+ \"$str\"\n"

  /**
   * Maps a raw Oracle type description to the Java type name used in the
   * generated setter calls (setLong / setString / setDate). The sentinel
   * "DataTypeNotFound" is passed through; anything else is treated as a date.
   */
  def dataType(str: String): String = str match {
    case t if t.contains("NUMBER")   => "Long"
    case t if t.contains("VARCHAR2") => "String"
    case "DataTypeNotFound"          => "DataTypeNotFound"
    case _                           => "Date"
  }

  /**
   * Looks up the actual database column type for `row` via [[dataBaseConnect]]
   * and maps it to a Java type name with [[dataType]].
   *
   * NOTE(review): the schema/credential pair "cqcqdb"/"cqcqdb" is hard-coded —
   * consider externalising it into configuration.
   */
  def dataTypeDev(row: Row): String = {
    val rawDbType = dataBaseConnect.findDataType(row, "cqcqdb", "cqcqdb")
    dataType(rawDbType)
  }

  /**
   * True when the reviewer remark does not carry one of the "skip this field"
   * markers used in the mapping spreadsheet. A null or empty remark counts as
   * processable.
   */
  def isValidDev(commentRow: String): Boolean = {
    // Spreadsheet markers meaning "do not migrate this field".
    val skipMarkers = List("不处理", "业务暂未涉及", "2.0无此业务", "无此字段", "用采数据库无此字段", "业务暂不使用")
    commentRow == null || commentRow.isEmpty || !skipMarkers.exists(m => commentRow.contains(m))
  }
}
