package com.yanggu.bigdata.realtime.app.dws

import com.yanggu.bigdata.realtime.app.function.KeywordUDTF
import com.yanggu.bigdata.realtime.bean.KeywordStats
import com.yanggu.bigdata.realtime.common.GmallConfig._
import com.yanggu.bigdata.realtime.common.GmallConstant._
import com.yanggu.bigdata.realtime.utils.{ClickHouseUtil, KafkaUtil}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._

/**
 * DWS层关键字的Flink的app。使用FlinkSQL来实现
 *
 */
//数据流: 前端埋点数据 -> nginx -> 日志服务器 -> Kafka(ods_base_log) -> Flink(ods->dwd) -> Kafka(dwd) -> Flink(dwd->dws)
//程 序:  mock-server-log -> nginx -> logger.sh -> Kafka(zk) -> Flink(BaseLogApp) -> Kafka(dwd_page_log) -> Flink(KeywordStatsApp)
object KeywordStatsApp {

  def main(args: Array[String]): Unit = {
    // 1. Create the stream execution environment and the table environment.
    val environment = StreamExecutionEnvironment.getExecutionEnvironment
    environment.setParallelism(1)
    val settings = EnvironmentSettings
      .newInstance()
      .inStreamingMode()
      .build()
    val tableEnvironment = StreamTableEnvironment.create(environment, settings)

    // 2. Declare the source table and register the user-defined function.
    // 2.1 Register the IK-analyzer UDTF used for keyword tokenization.
    tableEnvironment.createTemporarySystemFunction("ik_analyze", classOf[KeywordUDTF])

    // 2.2 Define the Kafka-backed source table via SQL DDL.
    //     The JSON payload's nested objects are read as MAP<STRING, STRING>;
    //     see https://nightlies.apache.org/flink/flink-docs-release-1.14/docs/dev/table/types/#map
    //     row_time is derived from the epoch-millis `ts` field and drives a
    //     2-second bounded-out-of-orderness watermark.
    tableEnvironment.executeSql(
      s"""
         |CREATE TABLE page_view (
         |  common MAP<STRING, STRING>,
         |  page MAP<STRING, STRING>,
         |  ts BIGINT,
         |  row_time AS TO_TIMESTAMP(FROM_UNIXTIME(ts / 1000, 'yyyy-MM-dd HH:mm:ss')),
         |  WATERMARK FOR row_time AS row_time - INTERVAL '2' SECOND
         |) ${KafkaUtil.getKafkaDDL(KAFKA_BROKER_LIST, "dwd_page_log", "keyword_stats_app")}
         |""".stripMargin)

    // 3. Keep only search events from the goods-list page that carry a search term.
    val fullWordView = tableEnvironment.sqlQuery(
      """
        |SELECT
        | page['item'] full_word,
        | row_time
        |FROM
        | page_view
        |WHERE
        | page['page_id'] = 'good_list'
        |AND page['item'] IS NOT NULL
        |""".stripMargin)

    // 4. Split each full search phrase into keywords with the UDTF.
    //    Interpolating a Table into the SQL string registers it under a
    //    generated name, so it can be referenced directly in the query.
    val keywordView = tableEnvironment.sqlQuery(
      s"""
         |SELECT
         | keyword,
         | row_time
         |FROM
         | $fullWordView,
         | LATERAL TABLE(ik_analyze(full_word)) as T(keyword)
         |""".stripMargin)

    // 5. Count occurrences per keyword in 10-second tumbling event-time windows.
    val keywordStatsView = tableEnvironment.sqlQuery(
      s"""
         |SELECT
         |  keyword,
         |  COUNT(*) AS ct,
         |  '$KEYWORD_SEARCH' AS source,
         |  DATE_FORMAT(TUMBLE_START(row_time, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') AS stt,
         |  DATE_FORMAT(TUMBLE_END(row_time, INTERVAL '10' SECOND), 'yyyy-MM-dd HH:mm:ss') AS edt,
         |  UNIX_TIMESTAMP() * 1000 ts
         |FROM
         |  $keywordView
         |GROUP BY
         |  keyword,
         |  TUMBLE(row_time, INTERVAL '10' SECOND)
         |""".stripMargin)

    // 6. Convert the result table to a DataStream and sink it into ClickHouse.
    //    An append-only stream is sufficient: tumbling-window aggregates emit
    //    each window exactly once and never retract/update earlier rows.
    val resultStream = tableEnvironment.toAppendStream[KeywordStats](keywordStatsView)

    resultStream.print("resultStream>>>>>>")
    // FIX: removed the trailing comma before ')' — "VALUES(?, ?, ?, ?, ?, ?, )"
    // is invalid SQL and would make the prepared statement fail on ClickHouse.
    resultStream.addSink(ClickHouseUtil.getSinkFunction[KeywordStats](
      "INSERT INTO keyword_stats(stt, edt, keyword, source, ct, ts) VALUES(?, ?, ?, ?, ?, ?)"))

    // 7. Launch the job.
    environment.execute()
  }

}
