package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
object Demo8ProcTime {
  def main(args: Array[String]): Unit = {
    // Flink streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Table/SQL settings: Blink planner in streaming mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // Blink planner parses and optimizes the SQL
      .inStreamingMode()
      .build()

    // Bridge between the DataStream API and the Table/SQL API.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    /**
      * Source table backed by Kafka.
      * PROCTIME() : built-in function returning the processing time.
      * AS         : declares a computed column derived from that function.
      */
    val sourceDdl =
      """
        |
        |CREATE TABLE user_actions (
        |  user_name STRING,
        |  user_action_time AS PROCTIME()
        |) WITH (
        |  'connector' = 'kafka',
        | 'topic' = 'users',
        | 'properties.bootstrap.servers' = 'master:9092',
        | 'properties.group.id' = 'asdsadasd',
        | 'format' = 'csv',
        | 'scan.startup.mode' = 'earliest-offset',
        | 'csv.ignore-parse-errors' = 'true'
        |)
        |
        |
      """.stripMargin

    // Sink table that simply prints every result row to stdout.
    val sinkDdl =
      """
        |CREATE TABLE print_table(
        |user_name STRING,
        |start_time TIMESTAMP(3),
        |end_time TIMESTAMP(3),
        |c BIGINT
        |) WITH ('connector' = 'print')
        |
    """.stripMargin

    /**
      * Count occurrences per user over tumbling 5-second
      * processing-time windows and write them to the sink.
      */
    val insertQuery =
      """
        |insert into print_table
        |select
        |user_name,
        |TUMBLE_START(user_action_time, INTERVAL '5' SECOND) as start_time,
        |TUMBLE_END(user_action_time, INTERVAL '5' SECOND) as end_time,
        |count(1) as c
        |from
        |user_actions
        |group by user_name, TUMBLE(user_action_time, INTERVAL '5' SECOND)
        |
  """.stripMargin

    // Register the source and sink, then submit the streaming insert job.
    tableEnv.executeSql(sourceDdl)
    tableEnv.executeSql(sinkDdl)
    tableEnv.executeSql(insertQuery)

  }
}
