package generator

import generator.common.FreeMarker
import generator.entity.{RowInfo, RowInfoPlus}
import generator.util.captureName

import java.io.{File, PrintWriter}
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.io.Source
import scala.util.matching.Regex

/**
 * One-shot code-generation / migration tool.
 *
 * Rewrites generated Flink job sources (entity, main and sink Java classes) for a
 * fixed list of tables by applying regex transformations to files under a
 * hard-coded project root on the local disk. Each `transferXxx` method reads one
 * generated Java file, rewrites part of it, and (where the writer is not
 * commented out) writes the result to a sibling package.
 *
 * NOTE(review): all paths are absolute Windows paths; this tool is only runnable
 * on the original author's machine as-is.
 */
object GeneratorFileFromFile {
  /** Source topic/table name extracted from the last processed `_Main.java` (shared mutable state, set by [[getFromMain]] and [[collect]]). */
  var fromTableName = ""

  /** Root of the generated project; every file path below derives from it. */
  private val ProjectRoot = "E:\\program\\scala\\StreamTransform"

  /** Package directory segment for a table, e.g. "C_CONS_PRC" -> "cconsprc". */
  private def packageDir(tableName: String): String =
    tableName.replaceAll("_", "").toLowerCase()

  /** Entity/class name for a table via the project-local capitalisation helper. */
  private def entityOf(tableName: String): String =
    captureName.captureName(tableName, true)

  /** Reads a whole file as a string, closing the underlying handle (the original code leaked the `Source`). */
  private def readFile(path: String): String = {
    val source = Source.fromFile(path)
    try source.mkString finally source.close()
  }

  /** Writes `content` to `path`, always closing the writer. */
  private def writeFile(path: String, content: String): Unit = {
    val writer = new PrintWriter(path)
    try writer.write(content) finally writer.close()
  }

  /**
   * Converts a Java setter name to an underscore-delimited field name,
   * e.g. "setUserName" -> "User_Name".
   *
   * Assumes `str` starts with "set" followed by an upper-case letter.
   */
  def getFieldFromSetFunc(str: String): String = {
    val funcName = str.substring(3) // drop the "set" prefix
    val regex = new Regex("([A-Z])")
    // Prefix every capital with '_', then drop the leading '_' added before the first capital.
    regex.replaceAllIn(funcName, matcher => "_" + matcher.group(1)).substring(1)
  }

  /**
   * Scans a generated `_Main.java` and maps each sink-entity setter field to the
   * source column it is populated from. Also records the source topic into
   * [[fromTableName]] as a side effect.
   *
   * @throws NoSuchElementException if the file has no `//topic:` marker.
   */
  def getFromMain(tableName: String): mutable.HashMap[String, String] = {
    val map: mutable.HashMap[String, String] = mutable.HashMap()
    val mainPath = ProjectRoot + "\\src\\main\\java\\com\\kafka\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "_Main.java"
    val regex = new Regex("d_bean.set([0-9a-zA-Z_]+)\\(j_bean.getAfter\\(\\).get[a-zA-Z]+\\(\"([a-zA-Z_]+)\".toUpperCase\\(\\)\\)\\);")
    val content = readFile(mainPath).replace("\r", "")
    val tableNameRegex = new Regex("//topic:([a-zA-Z_]+)")
    fromTableName = tableNameRegex.findFirstMatchIn(content).get.group(1)
    regex.findAllMatchIn(content).foreach(matcher => map.put(matcher.group(1), matcher.group(2)))
    map
  }

  /**
   * Rewrites a generated `_Main.java`: replaces its `ProcessWindowFunction` with a
   * deduplicating, `MapState`-backed implementation keyed by the field used in the
   * job's `keyBy(...)` expression.
   *
   * NOTE(review): the rewritten content is currently discarded (see the
   * reassignment near the end) because the file writer is commented out — as it
   * stands this method only prints each table's source topic.
   */
  def transferMain(tableName: String): Unit = {
    val fileName = ProjectRoot + "\\data\\kafka_back\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "_Main.java"
    val entityName = entityOf(tableName)
    var content = readFile(fileName)

    // Two historical shapes of the keyBy(...) expression exist; try the plain
    // hashCode form first, then the toString-based form.
    var regexFile = new Regex("keyBy\\(v -> Math.abs\\(v.get([a-zA-Z_0-9]+)\\(\\).hashCode\\(\\)\\) % Integer.valueOf\\(argsMap.get\\(\"w_num\"\\)\\)\\)")
    if (regexFile.findFirstMatchIn(content).isEmpty) {
      regexFile = new Regex("keyBy\\(v -> Math.abs\\(\\(v.get([a-zA-Z_0-9]+)\\(\\).toString\\(\\)\\).hashCode\\(\\)\\) % Integer.valueOf\\(argsMap.get\\(\"w_num\"\\)\\)\\)")
    }
    if (regexFile.findFirstMatchIn(content).isEmpty) {
      // Was a bare println() — made the failure visible instead of printing a blank line.
      println("WARN: no keyBy expression matched in " + fileName)
    }

    // We only need the captured key-field name; the last match wins (the original
    // used replaceAllIn and discarded its result for the same effect).
    var str = ""
    regexFile.findAllMatchIn(content).foreach(matcher => str = matcher.group(1))

    // Temporarily mark the window-function span with '$'...'#' so it can be
    // replaced wholesale, then restore the markers afterwards.
    content = content.replace(".process(new ProcessWindowFunction", "$").replace(".setParallelism(Integer.valueOf(argsMap.get(\"ParallelismNum\")));", "#")

    // Generated Java: dedupe elements per key via MapState, keeping only records
    // whose CurrentTs is not older than the last seen one. (The template below is
    // emitted verbatim into the Java file — do not reformat it.)
    content = "\\$[^#]+#".r.replaceFirstIn(content, ".process(new ProcessWindowFunction<" + entityName + ", List<" + entityName + ">, Integer, TimeWindow>() {\n                                MapState<String, String> mstate;\n                                @Override\n                                public void process(Integer integer, Context context, Iterable<" + entityName + "> elements,\n                                                    Collector<List<" + entityName + ">> out) throws Exception {\n                                    ArrayList<" + entityName + "> arr = new ArrayList<" + entityName + ">();\n                                    TreeMap<String, " + entityName + "> treemap = new TreeMap<String, " + entityName + ">();\n\n                                    mstate = getRuntimeContext().getMapState(new MapStateDescriptor<String, String>(\n                                            \"window-pv\", String.class,\n                                            String.class));\n\n\n                                    for (" + entityName + " c : elements) {\n                                        String pretime = mstate.get(c.get" + str + "() + \"\");\n                                        if (pretime == null) {\n                                            pretime = \"0\";\n                                        }\n                                        if (c.getCurrentTs().compareTo(pretime) >= 0) {\n                                            //arr.add(c);\n                                            //更新当前时间\n                                            treemap.put(c.get" + str + "().toString(), c);\n\n                                            mstate.put(c.get" + str + "() + \"\", c.getCurrentTs());\n\n                                        }\n                                    }\n                                    ;\n\n                                    Set<String> strings = treemap.keySet();\n                                    for (String i : strings) {\n                                        arr.add(treemap.get(i));\n                                    }\n                                    ;\n                                    out.collect(arr);\n                                }\n                            }).setParallelism(Integer.valueOf(argsMap.get(\"ParallelismNum\")));")

    // Restore the markers that survived outside the replaced span.
    content = content.replace("$", ".process(new ProcessWindowFunction").replace("#", ".setParallelism(Integer.valueOf(argsMap.get(\"ParallelismNum\")));")

    // Inject the extra imports the new window function needs.
    content = content.replace("import org.apache.flink.api.common.eventtime.WatermarkStrategy;", "import org.apache.flink.api.common.eventtime.WatermarkStrategy;\nimport org.apache.flink.api.common.state.MapState;\nimport org.apache.flink.api.common.state.MapStateDescriptor;\nimport java.util.TreeMap;import java.util.Set;")

    //    content = content.replace("WatermarkStrategy.forBoundedOutOfOrderness(Duration.ofSeconds(5))","WatermarkStrategy.noWatermarks()")
//    content = content.replace("TumblingEventTimeWindows.of(Time.seconds(MyConfig.w_time))","TumblingProcessingTimeWindows.of(Time.seconds(MyConfig.w_time))")
//    content = content.replace("import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;","import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;\nimport org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows;")

    val tarfileName = ProjectRoot + "\\src\\main\\java\\com\\kafka\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "_Main.java"

    // NOTE(review): this reassignment discards every transformation computed
    // above — the writer below is commented out, so only the topic diagnostic
    // is produced. Confirm whether the rewrite pipeline should be re-enabled
    // before deleting the code above.
    content = readFile(tarfileName)

    println(tableName.toUpperCase() + " " + "getKafkaSource.getSource\\(topics,[ ]*argsMap,[ ]*\"([a-zA-Z0-9_]+)\"\\);".r.findFirstMatchIn(content).get.group(1))
//    content = content.replace(".setParallelism(6)","").replaceFirst(".setParallelism\\(Integer.valueOf\\(argsMap.get\\(\"ParallelismNum\"\\)\\)\\)","")
//
//    val writer = new PrintWriter(tarfileName)
//    writer.write(content)
//    writer.close()
  }

  /**
   * Copies an entity class from package `yx3` to `yx1`, regenerating its
   * `setEmpty()` so that every declared private field is reset to null.
   */
  def transferEntity(tableName: String): Unit = {
    val entryName = ProjectRoot + "\\src\\main\\java\\com\\entity\\yx3\\" + entityOf(tableName) + ".java"
    val tarentryName = ProjectRoot + "\\src\\main\\java\\com\\entity\\yx1\\" + entityOf(tableName) + ".java"

    var content = readFile(entryName)
    content = content.replace("yx3", "yx1") // retarget package references to the destination package
    // Rebuild setEmpty() from the entity's private field declarations.
    var tarcontent = "public void setEmpty() {"
    "private[ ]+[a-zA-Z_]+[ ]+([a-zA-Z_]+);".r.findAllMatchIn(content).foreach(matcher => tarcontent = tarcontent + "\n\t\tthis." + matcher.group(1) + " = null;")
    tarcontent += "\n\t}"
    content = "public void setEmpty\\(\\)\\s*\\{[^\\}]+\\}".r.replaceFirstIn(content, tarcontent)
    writeFile(tarentryName, content)
  }

  /**
   * Copies a generated `SinkFunc.java` from `kafka_back` to `kafka`, dropping the
   * `.thenComparing(Entity::getOpts)` comparator step.
   */
  def transferSink(tableName: String): Unit = {
    val sinkName = ProjectRoot + "\\src\\main\\java\\com\\kafka_back\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "SinkFunc.java"
    val tarSinkName = ProjectRoot + "\\src\\main\\java\\com\\kafka\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "SinkFunc.java"
    val entityName = entityOf(tableName)
    var content = readFile(sinkName)
    content = content.replace("kafka_back", "kafka")//.replace("LinkedHashMap","#")
//    var key = ""
//    "entitys.put\\(v.([a-zA-Z]+)\\(\\)[^;]+;".r.replaceAllIn(content,matcher=>{
//      key = matcher.group(1)
//      captureName.captureName(tableName, true)+" entity = entitys.get(v."+key+"());if (entity == null || (entity.getOpts().compareTo(v.getOpts())<0 || (entity.getOpts().compareTo(v.getOpts())==0&&entity.getCurrentTs().compareTo(v.getCurrentTs())<0))) {\n\t\t\t\tentitys.put(v."+key+"(),v);\n\t\t\t}"
//    })
//    content = content.replace("import java.util.Comparator;","import java.util.*;")
//    content = "value.sort\\([^#]+#".r.replaceFirstIn(content,s"ArrayList<$entityName> al = new ArrayList<$entityName>();\n        al.addAll(value);\n        Collections.sort(al, Comparator.comparingLong($entityName::$key)\n                .thenComparing($entityName::getOpts)\n                .thenComparing($entityName::getCurrentTs)\n        );\n\t\t#")
//    content = content.replace("#","LinkedHashMap")
//
//    content = ("for \\("+entityName+" v : value\\) \\{[^\\}]+\\}[^\\}]+\\}").r.replaceFirstIn(content,s"for (${entityName} v : value) {\n\t\t\tentitys.put(v.${key}(),v);\n\t\t}")
//    content = new Regex("String,[ ]*"+entityName).replaceAllIn(content,"Long,"+entityName)

    content = content.replace(".thenComparing(" + entityName + "::getOpts)", "")
    writeFile(tarSinkName, content)
  }

  /**
   * Copies a generated `SinkFunc2.java` from the data backup into the live `kafka`
   * package, parameterising the hard-coded `0 as flag` in its merge SQL.
   */
  def transferSink2(tableName: String): Unit = {
    // Retained for the commented-out experiments below (re-enable needs them).
    val entityName = entityOf(tableName)
    var key = ""

    val sinkName2 = ProjectRoot + "\\data\\kafka_back\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "SinkFunc2.java"
    val tarSinkName2 = ProjectRoot + "\\src\\main\\java\\com\\kafka\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "SinkFunc2.java"
    var content = readFile(sinkName2)
//    content = content.replace("LinkedHashMap","#")
//    content = content.replace("kafka_back", "kafka")
//    "entitys.put\\(v.([a-zA-Z]+)\\(\\)[^;]+;".r.replaceAllIn(content, matcher => {
//      key = matcher.group(1)
//      captureName.captureName(tableName, true) + " entity = entitys.get(v." + key + "());if (entity == null || (entity.getOpts().compareTo(v.getOpts())<0 || (entity.getOpts().compareTo(v.getOpts())==0&&entity.getCurrentTs().compareTo(v.getCurrentTs())<0))) {\n\t\t\t\tentitys.put(v." + key + "(),v);\n\t\t\t}"
//    })
//    content = content.replace("import java.util.Comparator;","import java.util.*;")
//    content = "value.sort\\([^#]+#".r.replaceFirstIn(content, s"ArrayList<$entityName> al = new ArrayList<$entityName>();\n        al.addAll(value);\n        Collections.sort(al, Comparator.comparingLong($entityName::$key)\n                .thenComparing($entityName::getOpts)\n                .thenComparing($entityName::getCurrentTs)\n        );\n\t\t#")
//    content = content.replace("#", "LinkedHashMap")
//    content = ("for \\("+entityName+" v : value\\) \\{[^\\}]+\\}[^\\}]+\\}").r.replaceFirstIn(content,s"for (${entityName} v : value) {\n\t\t\tentitys.put(v.${key}(),v);\n\t\t}")
//    content = new Regex("String,[ ]*"+entityName).replaceAllIn(content,"Long,"+entityName)
//    content = content.replace("From DUAL",",0 as flag From DUAL").replace("WHEN NOT MATCHED THEN",",t.DELETE_FLAG = s.flag WHEN NOT MATCHED THEN")
//    content = content.replace("INSERT (","INSERT ( DELETE_FLAG,").replace("VALUES (","VALUES ( s.flag,")
//    content = content.replace("String sql2","//String sql2").replace("pstmt2 = conn.prepareStatement(sql2);","//pstmt2 = conn.prepareStatement(sql2);")
//    content = content.replace("pstmt2.executeBatch();","//pstmt2.executeBatch();").replace("pstmt2.clearBatch();","//pstmt2.clearBatch();").replace("pstmt2.clearParameters();","//pstmt2.clearParameters();")
//    content = "delete[ ]*from[ ]*([a-zA-Z_]+)[ ]*where[ ]*([a-zA-Z_]+)[ ]*=[ ]*\\?".r.replaceAllIn(content,matcher=>{
//      s"update ${matcher.group(1)} set DELETE_FLAG = 1 where ${matcher.group(2)} = ?"
//    })
//    content = content.replace(".thenComparing("+entityName+"::getOpts)","")
//    content = "for \\(([a-zA-Z]+) v : al\\) \\{[\\s]+entitys.put\\(v.get([a-zA-Z_0-9]+)\\(\\),[ ]*v\\);[\\s]+\\}".r.replaceAllIn(content,matcher=>{
//      "java.util.Date now = new java.util.Date();\n\t\tfor ("+matcher.group(1)+" v : al) {\n\t\t\tentitys.put(v.get"+matcher.group(2)+"(), v); \n\t\t}"
//    })
//    content = content.replace("import com.entity.yx1","import com.config.MyProperty;\nimport com.entity.yx1")

    // Turn the constant delete-flag into a bind parameter in the generated SQL.
    content = content.replace("0 as flag", "? as flag")
    writeFile(tarSinkName2, content)
  }

  /** Entry point: runs the currently-enabled transformation over the table list. */
  def main(args: Array[String]): Unit = {
    //    dataBaseConnect.connect("c_cons_prc")
    // Kept although unused: FreeMarker's constructor may perform template-engine
    // setup (side effects unknown) — TODO confirm and remove if inert.
    val marker = new FreeMarker()
    val tableList = List[String]("c_bill_rela","C_CONS","D_METER","c_meter","FC_CONS_GC_RELA","R_CP_MP_RELA","c_elec_addr","C_CONS_CONTACT_RELA","D_LC_EQUIP","FC_GC","c_meter_read","c_meter_mp_rela","FC_GC_TYPE_CODE","g_TG","R_CP_CONS_RELA","C_MP","R_TMNL_RUN","R_CP","C_CONS_PRC","G_LINE","G_LINE_RELA","G_LINE_TG_RELA","g_subs","g_subs_line_rela","c_mp_it_rela","C_CONTACT","C_CUST","c_it_run","c_ps","g_tran","C_ELEC_ADDR_DET")

    for (tableName <- tableList) {
      // Pipeline stages; only transferMain is currently enabled.
//      collect(tableName)
//      transferEntity(tableName)
//      transferSink(tableName)
//      transferSink2(tableName)
      transferMain(tableName)
    }
  }

  /**
   * Builds the field lineage for one table: pairs every sink field (from the
   * `d_bean.setXxx(...)` assignments in `_Main.java`) with the SQL expression
   * bound at the matching `pstmt1.setXxx(...)` position in `SinkFunc.java`.
   * Also sets [[fromTableName]] as a side effect.
   *
   * @return one [[RowInfoPlus]] per sink field.
   * @throws NoSuchElementException if a sink field has no matching `d_bean.set` entry.
   */
  def collect(tableName: String): List[RowInfoPlus] = {
    println(tableName + " start")
    val list: mutable.HashMap[String, RowInfoPlus] = mutable.HashMap[String, RowInfoPlus]()
    val mainPath = ProjectRoot + "\\data\\kafka_back\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "_Main.java"
    // Optional "//?" prefix marks a field as commented-out/questionable in the generator.
    val regex = new Regex("//(\\?)?d_bean.set([0-9a-zA-Z_]+)\\(j_bean.getAfter\\(\\).get[a-zA-Z]+\\(\"([a-zA-Z_]+)\".toUpperCase\\(\\)\\)")
    var content = readFile(mainPath)
    val tableNameRegex = new Regex("//topic:([a-zA-Z_]+)")
    fromTableName = tableNameRegex.findFirstMatchIn(content).get.group(1)
    regex.findAllMatchIn(content).foreach(matcher => {
      val rowinfo = RowInfoPlus(tableName, fromTableName, null, matcher.group(3), matcher.group(2), matcher.group(1) != null, 0)
      list.put(rowinfo.sinkFieldName, rowinfo)
    })
    // Constant-valued fields ("99999999" sentinel) assigned from numeric literals.
    "//(\\?)?d_bean.set([0-9a-zA-Z_]+)\\(\"[0-9]+\"\\)".r.findAllMatchIn(content).foreach(matcher => {
      val rowinfo = RowInfoPlus(tableName, fromTableName, null, "99999999", matcher.group(2), matcher.group(1) != null, 0)
      list.put(rowinfo.sinkFieldName, rowinfo)
    })

    val sinkName = ProjectRoot + "\\data\\kafka_back\\" + packageDir(tableName) + "\\" + entityOf(tableName) + "SinkFunc.java"
    content = readFile(sinkName)
    // Mark every " as " alias terminator with '$' so select expressions can be split positionally.
    val sql1 = "String[ ]+sql1[ ]+=([^;]+);".r.findFirstMatchIn(content).get.group(1).replace(" as ", "$")

    // 1-based position -> select expression text.
    val map: mutable.HashMap[Int, String] = mutable.HashMap[Int, String]()
    var index = 1
    // (Original used replaceAllIn and discarded the result; iteration order is identical.)
    "([0-9a-zA-Z_\\?' \\(\\)]+)\\$".r.findAllMatchIn(sql1).foreach { matcher =>
      map.put(index, matcher.group(1))
      index += 1
    }
//    var state = "@([^#]+)#".r.findFirstMatchIn(sql1.replace("SELECT","@").replace("From","#"))
//      .get.group(1).split(",")
    val empty = "[ ]*"
    var fieldIndex = 0
    var lastField = ""
    // Walk the pstmt1.setXxx(...) bindings in order; consecutive bindings of the
    // same entity getter share one SQL position (hence the lastField check).
    s"pstmt1.set([a-zA-Z]+)\\(([0-9]+),${empty}(\\(Date\\))?${empty}([^;]+);".r.findAllMatchIn(content).foreach { matcher =>
      val value = matcher.group(4)
      val maybeMatch = "v.get([a-zA-Z_]+)".r.findFirstMatchIn(value)
      if (maybeMatch.nonEmpty) {
        val fieldName = maybeMatch.get.group(1)
        if (!lastField.equals(fieldName)) {
          fieldIndex += 1
        }
        // Throws NoSuchElementException if the position or field is unknown —
        // intentional: it flags a generator/SQL mismatch loudly.
        val fieldExp = map(fieldIndex).trim
        lastField = fieldName
        println(fieldIndex + "   " + fieldName)
        list.put(fieldName, list(fieldName).updateExp(fieldExp, fieldIndex))
      } else {
        // Binding not fed from the entity (constant / other source): consumes a position.
        fieldIndex += 1
        println(fieldIndex + "   !!!!!!fromOther")
      }
    }

    println(tableName + " complete")
    list.values.toList
  }
}
