package com.fulu.game.bigdata.realtime.source.table

import java.util.Properties

import com.alibaba.fastjson.{JSON, JSONObject}
import com.fulu.game.bigdata.realtime.config.Config
import com.fulu.game.bigdata.realtime.entity.{CurrentDate, Earliest, KafkaStrategy, Latest, Source}
import com.fulu.game.bigdata.realtime.utils.TimeUtils
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.api.scala.typeutils.Types
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.table.api.Table
import org.apache.flink.table.api.scala.{StreamTableEnvironment, _}
import org.apache.flink.table.expressions.ExpressionParser
import org.apache.flink.types.Row

import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer


object TableUtils {

  /**
   * Registers a temporary view in `tableEnv` for the given source definition.
   *
   * Dispatches on `source.getTableType`: "kafka" builds a streaming table from a
   * Kafka topic, "hive" builds a projection over an existing Hive table. Any other
   * type is silently ignored (deliberate no-op, matching caller expectations).
   */
  def createTable(env: StreamExecutionEnvironment, tableEnv: StreamTableEnvironment, source: Source): Unit = {
    source.getTableType match {
      case "kafka" => createKafkaTable(env, tableEnv, source)
      case "hive"  => createHiveTable(tableEnv, source)
      case _       => // unknown table types: intentionally skipped
    }
  }

  /**
   * Registers a temporary view that projects the configured fields out of a Hive
   * table (`hive.<hiveTableName>`). The view is named `source.getTableName`.
   */
  def createHiveTable(tableEnv: StreamTableEnvironment, source: Source): Unit = {
    val projection = source.getFields.asScala.map(_.getName).mkString(",")
    val table = tableEnv.sqlQuery(s"select $projection from hive.${source.getHiveTableName}")
    tableEnv.createTemporaryView(source.getTableName, table)
  }

  /**
   * Builds a FlinkKafkaConsumer for the source's topic/group, applies the start
   * position derived from the source's Kafka strategy, and registers either a
   * Canal-changelog table or a plain JSON table depending on `source.isCanalTable`.
   */
  def createKafkaTable(env: StreamExecutionEnvironment, tableEnv: StreamTableEnvironment, source: Source): Unit = {
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", Config.getKafkaServer())
    properties.setProperty("group.id", source.getKafkaGroup)
    val consumer = new FlinkKafkaConsumer[String](source.getKafkaTopic, new SimpleStringSchema(), properties)

    val kafkaStrategy = getKafkaStartStrategy(source)
    println("kafkaStrategy : " + kafkaStrategy.getClass.getName)

    // Map the declarative strategy onto the consumer's start position.
    // Fallback: consume from the beginning of the current day.
    kafkaStrategy match {
      case _: Latest                => consumer.setStartFromLatest()
      case _: Earliest              => consumer.setStartFromEarliest()
      case CurrentDate(millisecond) => consumer.setStartFromTimestamp(millisecond)
      case _                        => consumer.setStartFromTimestamp(TimeUtils.millsOfToday())
    }

    if (source.isCanalTable) {
      createCanalTable(env, tableEnv, consumer, source)
    } else {
      createPlainTable(env, tableEnv, consumer, source)
    }
  }

  /**
   * Strips a leading "table." qualifier from a configured field name, yielding the
   * key actually present in the JSON payload.
   */
  private def jsonKey(fieldName: String): String =
    if (fieldName.contains(".")) fieldName.substring(fieldName.indexOf(".") + 1) else fieldName

  /**
   * Reads one field out of a JSON object using the typed fastjson accessor that
   * matches the configured field type; unknown types fall back to String.
   */
  private def extractFieldValue(json: JSONObject, fieldType: String, fieldName: String): Any = {
    val key = jsonKey(fieldName)
    fieldType match {
      case "boolean"   => json.getBoolean(key)
      case "byte"      => json.getByte(key)
      case "double"    => json.getDouble(key)
      case "float"     => json.getFloat(key)
      case "int"       => json.getInteger(key)
      case "long"      => json.getLong(key)
      case "timestamp" => json.getTimestamp(key)
      case _           => json.getString(key)
    }
  }

  /** Packs an ordered sequence of values into a Flink Row, position by position. */
  private def buildRow(values: Seq[Any]): Row = {
    val row = new Row(values.size)
    for (i <- values.indices) {
      row.setField(i, values(i))
    }
    row
  }

  /**
   * Registers a table over a Kafka topic whose records are flat JSON objects.
   * Each record is parsed and projected onto the configured fields, in order.
   * Returns the table; it is also registered as view `source.getTableName`.
   */
  private def createPlainTable(env: StreamExecutionEnvironment, tableEnv: StreamTableEnvironment, consumer: FlinkKafkaConsumer[String], source: Source): Table = {

    val fieldTypes = source.fields.asScala.map(f => parseRowTypeInfo(f.getType))
    val fieldNames = source.fields.asScala.map(_.getName)

    // Row type info drives the implicit TypeInformation used by addSource/map.
    implicit val tpe: TypeInformation[Row] = new RowTypeInfo(fieldTypes.toArray, fieldNames.toArray)

    val table = env.addSource(consumer).setParallelism(3)
      .map(item => JSON.parseObject(item))
      .map(json => buildRow(source.fields.asScala.map(f => extractFieldValue(json, f.`type`, f.getName))))
      .toTable(tableEnv, ExpressionParser.parseExpressionList(fieldNames.mkString(",")).asScala: _*)

    tableEnv.createTemporaryView(source.getTableName, table)
    table
  }

  /**
   * Registers a table over a Canal changelog topic. Each Kafka record is a Canal
   * envelope: DDL events are dropped, and every element of the "data" array becomes
   * one row consisting of the configured fields plus two synthetic columns:
   * `db_action` (the Canal "type", e.g. INSERT/UPDATE/DELETE) and
   * `action_timestamp` (the Canal "ts" value).
   *
   * TODO(review): deduplication by primary key (row_number over partition by PK,
   * keep latest action_timestamp) was prototyped here but is currently disabled;
   * the raw changelog table is registered as-is.
   */
  private def createCanalTable(env: StreamExecutionEnvironment, tableEnv: StreamTableEnvironment, consumer: FlinkKafkaConsumer[String], source: Source): Table = {

    val fieldTypes = source.fields.asScala.map(f => parseRowTypeInfo(f.getType)) += Types.STRING += Types.LONG
    val fieldNames = source.fields.asScala.map(_.getName) += "db_action" += "action_timestamp"

    implicit val tpe: TypeInformation[Row] = new RowTypeInfo(fieldTypes.toArray, fieldNames.toArray)

    val table = env.addSource(consumer).setParallelism(3)
      .map(item => JSON.parseObject(item))
      .filter(json => !json.getBooleanValue("isDdl"))
      .flatMap(json => {
        // One Canal envelope can carry several changed rows in its "data" array.
        json.getJSONArray("data").toArray(new Array[JSONObject](1)).map { jo =>
          val values = source.fields.asScala.map(f => extractFieldValue(jo, f.`type`, f.getName)) ++
            Seq(json.getString("type"), json.getLong("ts"))
          buildRow(values)
        }
      })
      .toTable(tableEnv, ExpressionParser.parseExpressionList(fieldNames.mkString(",")).asScala: _*)

    tableEnv.createTemporaryView(source.getTableName, table)
    table
  }

  /**
   * Maps a configured field-type string onto Flink TypeInformation; unknown types
   * default to STRING (mirrors the String fallback in field extraction).
   */
  private def parseRowTypeInfo(fieldType: String): TypeInformation[_] = {
    fieldType match {
      case "boolean"   => Types.BOOLEAN
      case "byte"      => Types.BYTE
      case "double"    => Types.DOUBLE
      case "float"     => Types.FLOAT
      case "int"       => Types.INT
      case "long"      => Types.LONG
      case "short"     => Types.SHORT
      case "timestamp" => Types.SQL_TIMESTAMP
      case _           => Types.STRING
    }
  }

  /**
   * Resolves the Kafka start strategy for a source. Falls back to the legacy
   * boolean `isFromBeginning` flag when no explicit strategy is configured.
   */
  private def getKafkaStartStrategy(source: Source): KafkaStrategy = {
    val strategy = source.getKafkaStrategy
    if (strategy == null) KafkaStrategy(source.isFromBeginning) else KafkaStrategy(strategy)
  }
}
