package com.shujia.flink.table

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

/**
 * Flink SQL demo: session-window aggregation over a Kafka click stream.
 *
 * Reads CSV click events from Kafka, groups them per user into session
 * windows with a 10-second gap, and writes the per-session counts to a
 * `print` sink table.
 */
object Demo10SessionWindow {

  def main(args: Array[String]): Unit = {

    // Streaming environment; parallelism 1 keeps the print output ordered.
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    streamEnv.setParallelism(1)

    // Table environment backed by the Blink planner in streaming mode.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(
      streamEnv,
      EnvironmentSettings
        .newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build()
    )

    // Source table: CSV-encoded click events from the `clicks1` Kafka topic.
    // `ts` is the event-time attribute; the watermark lags it by 5 seconds
    // to tolerate out-of-order events.
    tableEnv.executeSql(
      """
        |CREATE TABLE clicks (
        |  u STRING,
        |  ts TIMESTAMP(3),
        |  url STRING,
        |  -- 声明 ts 是事件时间属性，并且用 延迟 5 秒的策略来生成 watermark
        |  WATERMARK FOR ts AS ts - INTERVAL '5' SECOND
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'clicks1',
        | 'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        | 'properties.group.id' = 'testGroup',
        | 'format' = 'csv',
        | 'scan.startup.mode' = 'earliest-offset'
        |)
        |
        |
      """.stripMargin)

    // Sink table: prints each result row to stdout.
    tableEnv.executeSql(
      """
        |CREATE TABLE print_table (
        |  u STRING,
        |  win_start TIMESTAMP(3),
        |  win_end TIMESTAMP(3),
        |  c BIGINT
        |)
        |WITH ('connector' = 'print')
        |
      """.stripMargin)

    // Continuous query: per-user click counts within 10-second-gap sessions.
    // SESSION_START/SESSION_END expose the window bounds of each session.
    tableEnv.executeSql(
      """
        |insert into print_table
        |select
        | u,
        | SESSION_START(ts, INTERVAL '10' SECOND) as win_start,
        | SESSION_END(ts, INTERVAL '10' SECOND) as win_end,
        | count(1) as c
        | from
        |clicks
        |group by u,
        |SESSION(ts, INTERVAL '10' SECOND)
        |
        |
      """.stripMargin)
  }

}
