package com.shujia.dw

import com.shujia.udf.DateUDF
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.catalog.hive.HiveCatalog
import org.apache.flink.types.Row

/**
 * Streaming ETL job: ODS -> DWD for weibo posts.
 *
 * Reads raw records from `ods.ods_kafka_weibo`, de-duplicates and cleans them
 * (date normalisation via the `formatDate` UDF, user_id anonymised with md5),
 * and appends the result into `dwd.dwd_kafka_weibo_msk`. Both tables are
 * resolved through a Hive catalog.
 */
object OdsETLToDwdWeibo {
  def main(args: Array[String]): Unit = {
    val bsEnv = StreamExecutionEnvironment.getExecutionEnvironment
    val bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()

    val bsTableEnv = StreamTableEnvironment.create(bsEnv, bsSettings)

    // Hive catalog: (catalog name, default database, dir containing hive-site.xml).
    // NOTE(review): the config path is relative to the working directory — confirm
    // it resolves correctly when the job is submitted to a cluster.
    val hiveCatalog = new HiveCatalog("myHive", "sent", "SentimentCompute/src/main/resources")

    bsTableEnv.registerCatalog("myHive", hiveCatalog)
    bsTableEnv.useCatalog("myHive")

    // UDF that normalises the raw weibo created_at value into a uniform format.
    bsTableEnv.createTemporarySystemFunction("formatDate", classOf[DateUDF])

    // `select distinct` on an unbounded stream produces an updating (retract) result.
    val weiboTable = bsTableEnv.sqlQuery(
      """
        |select distinct
        |  id
        |  ,comments_count
        |  ,formatDate(created_at) as created_at
        |  ,source
        |  ,reposts_count
        |  ,attitudes_count
        |  ,text
        |  ,md5(cast(user_id as string)) as user_id
        |from ods.ods_kafka_weibo
        |where id <> 'null'
        |""".stripMargin)

    // Retract stream: (true, row) = accumulate message, (false, row) = retraction.
    val weiboDS = weiboTable.toRetractStream[Row]

    // Convert Retract -> Append. BUG FIX: keep only accumulate messages
    // (flag == true) before dropping the flag column. Previously the flag was
    // merely projected away, so retraction rows were re-emitted as inserts and
    // duplicated data in the DWD table.
    val table = weiboDS
      .filter(_._1)
      .toTable(bsTableEnv, $"flag", $"row")
      .select($"row")

    bsTableEnv.createTemporaryView("tmp", table)

    // Asynchronously submits the INSERT pipeline (including the DataStream
    // conversion above) as a Flink job.
    bsTableEnv.executeSql(
      """
        |insert into dwd.dwd_kafka_weibo_msk
        |select
        |  id
        |  ,comments_count
        |  ,created_at
        |  ,source
        |  ,reposts_count
        |  ,attitudes_count
        |  ,text
        |  ,user_id
        |from tmp
        |""".stripMargin)

    // NOTE(review): executeSql already submits its own job; this execute() only
    // covers operators registered directly on bsEnv — verify it does not throw
    // "no operators defined" on this planner version.
    bsEnv.execute()

  }

}
