package com.shujia.dw

import com.shujia.udf.DateUDF
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.catalog.hive.HiveCatalog
import org.apache.flink.types.Row

/**
 * Streaming ETL job: reads raw weibo comments from the ODS Kafka-backed table
 * (`ods.ods_kafka_comment`), deduplicates them, normalizes the comment time via
 * the `formatDate` UDF, masks `user_id` with MD5, and writes the result into
 * the DWD layer table `dwd.dwd_kafka_comment_msk`.
 *
 * Tables are resolved through a HiveCatalog ("myHive", database "sent") whose
 * hive-site config is read from `SentimentCompute/src/main/resources`.
 */
object OdsETLToDwdComment {
  def main(args: Array[String]): Unit = {
    // Streaming environment + Blink planner table environment.
    val bsEnv = StreamExecutionEnvironment.getExecutionEnvironment
    val bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
    val bsTableEnv = StreamTableEnvironment.create(bsEnv, bsSettings)

    // Register the Hive catalog so ods.* / dwd.* tables resolve by name.
    val hiveCatalog = new HiveCatalog("myHive", "sent", "SentimentCompute/src/main/resources")
    bsTableEnv.registerCatalog("myHive", hiveCatalog)
    bsTableEnv.useCatalog("myHive")

    // UDF used below to normalize comment_time into a uniform format.
    bsTableEnv.createTemporarySystemFunction("formatDate", classOf[DateUDF])

    // Deduplicate and cleanse: DISTINCT drops duplicate Kafka records,
    // md5(user_id) masks personally identifying data before it reaches DWD.
    val commentTable = bsTableEnv.sqlQuery(
      """
        |select distinct
        |  comment_id
        |  ,formatDate(comment_time) as comment_time
        |  ,like_count
        |  ,comment_text
        |  ,md5(cast(user_id as String)) as user_id
        |  ,weibo_id
        |from ods.ods_kafka_comment
        |""".stripMargin)

    // DISTINCT produces an updating table, so the conversion must go through a
    // retract stream of (isAccumulate, row) pairs. Keep only accumulate
    // messages (flag == true): without this filter, any retraction emitted
    // upstream would be re-inserted into the sink as if it were a new row.
    val commentDS = commentTable.toRetractStream[Row]
    val insertOnlyDS = commentDS.filter(_._1)
    val table = insertOnlyDS.toTable(bsTableEnv, $"flag", $"row").select($"row")
    bsTableEnv.createTemporaryView("tmp", table)

    // Submit the INSERT; executeSql on an INSERT statement submits a job
    // immediately and runs asynchronously.
    bsTableEnv.executeSql(
      """
        |insert into dwd.dwd_kafka_comment_msk
        |select
        |  comment_id
        |  ,comment_time
        |  ,like_count
        |  ,comment_text
        |  ,user_id
        |  ,weibo_id
        |from tmp
        |""".stripMargin)

    // NOTE(review): executeSql(INSERT) already submits its own job; this extra
    // execute() covers the DataStream transformations registered by
    // toRetractStream but may submit a second (possibly empty) job depending on
    // the Flink version — confirm against the deployment and consider
    // consolidating into a single StatementSet/pipeline.
    bsEnv.execute()
  }
}
