package com.shujia.sql

import org.apache.flink.table.api.{EnvironmentSettings, TableEnvironment}

/**
  * Demo: continuously aggregate a Kafka CSV stream and publish the changelog
  * (per-class student counts) to an upsert-kafka sink.
  *
  * Pipeline:
  *   student_kafka (kafka source, csv)  --group by clazz-->  clazz_num (upsert-kafka sink, avro)
  *
  * The upsert-kafka connector requires a PRIMARY KEY; updated counts for the
  * same class are written as upserts keyed by `clazz`.
  */
object Demo11Updatekafka {
  def main(args: Array[String]): Unit = {
    // Build a streaming-mode environment; the aggregation below produces an
    // updating (changelog) result, which only streaming mode supports.
    val setting: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .inStreamingMode()
      .build()

    val tableEnv: TableEnvironment = TableEnvironment.create(setting)

    /**
      * Source table: reads student rows (CSV) from the `student` Kafka topic,
      * starting from the latest offsets (only new data is processed).
      */
    tableEnv.executeSql(
      """
        |CREATE TABLE student_kafka (
        |    id STRING,
        |    name STRING,
        |    age INT,
        |    gender STRING,
        |    clazz STRING
        |) WITH (
        |  'connector' = 'kafka',
        |  'topic' = 'student',
        |  'properties.bootstrap.servers' = 'master:9092',
        |  'properties.group.id' = 'testGroup',
        |  'scan.startup.mode' = 'latest-offset',
        |  'format' = 'csv'
        |)
        |
      """.stripMargin)

    /**
      * Sink table: upsert-kafka keyed by `clazz` so that each new count for a
      * class overwrites the previous one in the `clazz_num1` topic.
      * NOT ENFORCED: Flink does not validate key uniqueness on incoming data.
      */
    tableEnv.executeSql(
      """
        |CREATE TABLE clazz_num (
        |    clazz STRING,
        |    num BIGINT,
        |    PRIMARY KEY (clazz) NOT ENFORCED
        |) WITH (
        |  'connector' = 'upsert-kafka',
        |  'topic' = 'clazz_num1',
        |  'properties.bootstrap.servers' = 'master:9092',
        |  'key.format' = 'avro',
        |  'value.format' = 'avro'
        |)
        |
      """.stripMargin)

    /**
      * Count students per class and stream the updating result into the sink.
      *
      * executeSql submits the INSERT job asynchronously and returns at once;
      * without .await() the JVM would exit immediately in local/IDE runs and
      * tear down the mini-cluster before the streaming job produces anything.
      */
    tableEnv.executeSql(
      """
        |insert into clazz_num
        |select clazz,count(1) as num from student_kafka
        |group by clazz
        |
      """.stripMargin)
      .await()

  }

}
