package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

/**
  * Demo: consuming MySQL binlog change events (captured by Canal, published to
  * Kafka in canal-json format) with Flink SQL, and maintaining a per-class
  * student count in a MySQL table via the JDBC upsert sink.
  */
object Demo8FlinkOnCanal {

  def main(args: Array[String]): Unit = {
    // Streaming execution environment for this job.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Blink planner, unbounded streaming mode.
    val settings: EnvironmentSettings =
      EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()

    // Flink SQL execution environment bridged onto the DataStream environment.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // Source table: canal-json change events from Kafka. Because the format is
    // canal-json, `student` is a continuously updating (changelog) table that
    // mirrors inserts/updates/deletes from the upstream MySQL table.
    val studentSourceDdl =
      """
        |CREATE TABLE student (
        |id STRING,
        |name STRING ,
        |age BIGINT,
        |gender STRING ,
        |clazz STRING
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'student.student',
        | 'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        | 'properties.group.id' = 'testGroup',
        | 'format' = 'canal-json',
        | 'scan.startup.mode' = 'earliest-offset',
        | 'canal-json.ignore-parse-errors' = 'true'
        |)
        |
      """.stripMargin

    // Sink table: MySQL via JDBC. Declaring a PRIMARY KEY makes the JDBC
    // connector write in upsert mode, which the retracting aggregate requires.
    val clazzCountSinkDdl =
      """
        |CREATE TABLE clazz_count (
        |  clazz STRING,
        |  c BIGINT,
        |  PRIMARY KEY (clazz) NOT ENFORCED
        |) WITH (
        | 'connector' = 'jdbc',
        | 'url' = 'jdbc:mysql://master:3306/bigdata?useUnicode=true&characterEncoding=utf-8',
        | 'table-name' = 'clazz_count',
        | 'username' = 'root',
        | 'password' ='123456'
        |)
        |
      """.stripMargin

    // Continuous query: count students per class and upsert into MySQL.
    val insertDml =
      """
        |insert into clazz_count
        |select clazz,count(1) as c from student group by clazz
        |
      """.stripMargin

    tableEnv.executeSql(studentSourceDdl)
    tableEnv.executeSql(clazzCountSinkDdl)
    tableEnv.executeSql(insertDml)
  }

}
