package com.yjy.scala

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.{StreamTableEnvironment, tableConversions}
import org.apache.flink.types.Row

object FlinkKafkaDDL {

  /**
   * Flink SQL DDL demo job.
   *
   * Reads user-behavior events from a Kafka topic via a DDL-defined source
   * table, runs a tumbling-window "buy" count that is printed to the console,
   * and sinks the result to ClickHouse. Several alternative sinks
   * (MySQL/JDBC, upsert-Kafka, Elasticsearch) and two further example
   * pipelines (cumulative UV, top category) are kept as commented-out
   * reference snippets.
   *
   * Requires running Kafka brokers and a reachable ClickHouse instance;
   * this is example code, not a unit-testable pure function.
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Blink planner in streaming mode (Flink <= 1.13 style API).
    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
    val tEnv = StreamTableEnvironment.create(env, settings)

    // Create the Kafka source table. `proctime` is a computed processing-time
    // column; the watermark on `ts` makes it the event-time column.
    val createTable =
      """
        |CREATE TABLE user_behavior (
        |    userId BIGINT,
        |    itemId BIGINT,
        |    categoryId BIGINT,
        |    behavior STRING,
        |    ts TIMESTAMP(3) METADATA FROM 'timestamp',
        |    proctime as PROCTIME(),                       -- 通过计算列产生一个处理时间列
        |    WATERMARK FOR ts as ts - INTERVAL '5' SECOND  -- 在ts上定义watermark，ts成为事件时间列
        |) WITH (
        |   'connector' = 'kafka',
        |   'topic' = 'user_behavior',
        |   'properties.bootstrap.servers' = 'bigserver1:9092,bigserver2:9092,bigserver3:9092',
        |   'properties.group.id' = 'testGroup',
        |   'scan.startup.mode' = 'earliest-offset',
        |   'format' = 'json'
        |)
      """.stripMargin

    tEnv.executeSql(createTable)

    /**
     * Console output — requires env.execute() at the end of main.
     */
    // Query the Kafka source table: per-user buy count over 10-second
    // tumbling event-time windows.
    val query =
    """
      |SELECT userId, HOUR(TUMBLE_START(ts, INTERVAL '10' SECOND)) as hour_of_day, COUNT(*) as buy_cnt
      |FROM user_behavior
      |WHERE behavior = 'buy'
      |GROUP BY userId,TUMBLE(ts, INTERVAL '10' SECOND)
      """.stripMargin

    val result = tEnv.sqlQuery(query)
    // Retract stream: emits (true, row) on add and (false, row) on retraction.
    result.toRetractStream[Row].print()

    /**
     * Hourly buy-count statistics.
     */
    // MySQL result table DDL (sink example, currently disabled below).
    val result_mysql =
    """
      |CREATE TABLE buy_cnt_per_hour (
      |    id BIGINT,
      |    hour_of_day BIGINT,
      |    buy_cnt BIGINT,
      |    PRIMARY KEY (id) NOT ENFORCED
      |) WITH (
      |   'connector' = 'jdbc',
      |   'url' = 'jdbc:mysql://localhost:3306/flink-test?useSSL=false',
      |   'table-name' = 'pvuv_sink_01',
      |   'username' = 'root',
      |   'password' = 'root',
      |   'driver' = 'com.mysql.jdbc.Driver'
      |)
       """.stripMargin

    //    tEnv.executeSql(result_mysql)
    //
    //    tEnv.executeSql(
    //      """
    //        |INSERT INTO buy_cnt_per_hour
    //        |SELECT userId as id, HOUR(TUMBLE_START(ts, INTERVAL '10' SECOND)) as hour_of_day, COUNT(*) as buy_cnt
    //        |FROM user_behavior
    //        |WHERE behavior = 'buy'
    //        |GROUP BY userId, TUMBLE(ts, INTERVAL '10' SECOND)
    //       """.stripMargin)

    // Kafka result table DDL (upsert-kafka sink example, currently disabled below).
    val result_kafka =
      """
        |CREATE TABLE user_behavior_kafka (
        |    id BIGINT,
        |    hour_of_day BIGINT,
        |    buy_cnt BIGINT,
        |    PRIMARY KEY (id) NOT ENFORCED
        |)
        |WITH (
        |    'connector' = 'upsert-kafka',
        |    'topic' = 'buy_cnt_per_hour',
        |    'properties.bootstrap.servers' = 'bigserver1:9092,bigserver2:9092,bigserver3:9092',
        |    'key.format' = 'json',
        |    'value.format' = 'json'
        |)
      """.stripMargin

    //    tEnv.executeSql(result_kafka)
    //
    //    tEnv.executeSql(
    //      """
    //        |INSERT INTO user_behavior_kafka
    //        |SELECT userId as id, HOUR(TUMBLE_START(ts, INTERVAL '10' SECOND)), COUNT(*)
    //        |FROM user_behavior
    //        |WHERE behavior = 'buy'
    //        |GROUP BY userId, TUMBLE(ts, INTERVAL '10' SECOND)
    //      """.stripMargin)

    // Elasticsearch result table DDL (sink example, currently disabled below).
    val result_es =
      """
        |CREATE TABLE buy_cnt_per_hour_es (
        |    id BIGINT,
        |    hour_of_day BIGINT,
        |    buy_cnt BIGINT
        |) WITH (
        |    'connector' = 'elasticsearch-7',
        |    'hosts' = 'http://localhost:9200',
        |    'index' = 'buy_cnt_per_hour',
        |    'sink.bulk-flush.max-actions' = '1',
        |    'format' = 'json'
        |)
        |""".stripMargin

    //    tEnv.executeSql(result_es)
    //
    //    tEnv.executeSql(
    //      """
    //        |INSERT INTO buy_cnt_per_hour_es
    //        |SELECT userId as id, HOUR(TUMBLE_START(ts, INTERVAL '10' SECOND)), COUNT(*)
    //        |FROM user_behavior
    //        |WHERE behavior = 'buy'
    //        |GROUP BY userId, TUMBLE(ts, INTERVAL '10' SECOND)
    //      """.stripMargin)

    /**
     * Example: daily unique-visitor count at 10-minute granularity.
     */
    /*val cumulative_uv_es =
      """
        |CREATE TABLE cumulative_uv (
        |    time_str STRING,
        |    uv BIGINT
        |) WITH (
        |    'connector' = 'elasticsearch-7',
        |    'hosts' = 'http://localhost:9200',
        |    'index' = 'cumulative_uv',
        |    'format' = 'json'
        |)
        |""".stripMargin

    tEnv.executeSql(cumulative_uv_es)

    tEnv.executeSql(
      """
        |CREATE VIEW uv_per_10min AS
        |SELECT
        | userId as time_str,categoryId as uv
        |FROM user_behavior
      """.stripMargin)

    tEnv.executeSql(
      """
        |INSERT INTO cumulative_uv
        |SELECT time_str, MAX(uv)
        |FROM uv_per_10min
        |GROUP BY time_str
        |""".stripMargin)*/

    /**
     * Example: ranking by category via a temporal dimension-table join.
     */
    /*// MySQL category dimension table
    val category_dim =
    """
      |CREATE TABLE category_dim (
      |    sub_category_id BIGINT,  -- 子类目
      |    parent_category_id BIGINT -- 顶级类目
      |) WITH (
      |    'connector' = 'jdbc',
      |    'url' = 'jdbc:mysql://10.121.18.12:3306/test',
      |    'table-name' = 'category',
      |    'driver' = 'com.mysql.jdbc.Driver',
      |    'username' = 'root',
      |    'password' = 'root'
      |)
      |""".stripMargin

    tEnv.executeSql(category_dim)

    // Elasticsearch category-statistics result table
    val top_category =
      """
        |CREATE TABLE top_category (
        |    category_name STRING,  -- 类目名称
        |    buy_cnt BIGINT  -- 销量
        |) WITH (
        |    'connector' = 'elasticsearch-7',
        |    'hosts' = 'http://localhost:9200',
        |    'index' = 'top_category',
        |    'format' = 'json'
        |)
        |""".stripMargin

    tEnv.executeSql(top_category)

    tEnv.executeSql(
      """
        |CREATE VIEW rich_user_behavior AS
        |SELECT U.userId, U.itemId, U.behavior,
        |  CASE C.parent_category_id
        |    WHEN 1 THEN '服饰鞋包'
        |    WHEN 2 THEN '家装家饰'
        |    WHEN 3 THEN '家电'
        |    WHEN 4 THEN '美妆'
        |    WHEN 5 THEN '母婴'
        |    WHEN 6 THEN '3C数码'
        |    WHEN 7 THEN '运动户外'
        |    WHEN 8 THEN '食品'
        |    ELSE '其他'
        |  END AS category_name
        |FROM user_behavior AS U LEFT JOIN category_dim FOR SYSTEM_TIME AS OF U.proctime AS C
        |ON U.categoryId = C.sub_category_id
        |""".stripMargin)

    tEnv.executeSql(
      """
        |INSERT INTO top_category
        |SELECT category_name, COUNT(*) buy_cnt
        |FROM rich_user_behavior
        |WHERE behavior = 'buy'
        |GROUP BY category_name
        |""".stripMargin)*/

    // ClickHouse result table (active sink).
    val cktable =
    """
      |CREATE TABLE sink_ck_table (
      |    name VARCHAR,
      |    grade BIGINT,
      |    rate FLOAT,
      |    more FLOAT,
      |    PRIMARY KEY (name, grade) NOT ENFORCED /* 如果指定 pk，进入 upsert 模式 */
      |) WITH (
      |    'connector' = 'clickhouse',
      |    'url' = 'clickhouse://<host>:<port>',
      |    'username' = '<username>',
      |    'password' = '<password>',
      |    'database-name' = 'default',        /* ClickHouse 数据库名，默认为 default */
      |    'table-name' = 'd_sink_table',      /* ClickHouse 数据表名 */
      |    'sink.batch-size' = '1000',         /* batch 大小 */
      |    'sink.flush-interval' = '1000',     /* flush 时间间隔 */
      |    'sink.max-retries' = '3',           /* 最大重试次数 */
      |    'sink.partition-strategy' = 'hash', /* hash | random | balanced */
      |    'sink.partition-key' = 'name',      /* hash 策略下的分区键 */
      |    'sink.ignore-delete' = 'true'       /* 忽略 DELETE 并视 UPDATE 为 INSERT */
      |)
      |""".stripMargin

    tEnv.executeSql(cktable)

    // Fix: sink_ck_table declares 4 columns (name, grade, rate, more) but only
    // 3 are selected; a partial insert requires an explicit column list, so
    // name the target columns here. The unnamed column `more` is left to the
    // sink's default handling.
    // NOTE(review): the table `source` is not defined anywhere in this job —
    // confirm it is registered elsewhere, otherwise this statement fails at
    // submission time.
    tEnv.executeSql(
      """
        |INSERT INTO sink_ck_table (name, grade, rate)
        |SELECT name, grade, rate FROM source
        |""".stripMargin)

    // Required for the DataStream part of the pipeline (toRetractStream.print);
    // the executeSql(INSERT ...) statements submit their own jobs.
    env.execute("Flink SQL DDL")

  }

}
