package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._

/**
  * Demo: consume a MySQL binlog published to Kafka in canal-json format and
  * maintain a per-class student count in a MySQL table.
  *
  * The canal-json format decodes each Kafka record into an INSERT / UPDATE /
  * DELETE changelog row, so the downstream `group by` count stays correct as
  * rows are added, changed, or removed upstream.
  */
object Demo7CanalJson {
  def main(args: Array[String]): Unit = {

    // Streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Table API settings: Blink planner (SQL parser/optimizer) in streaming mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    // Table environment layered on top of the stream environment.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // Source: Kafka topic carrying canal-json changelog records.
    // 'canal-json.ignore-parse-errors' skips malformed records instead of failing the job.
    val kafkaSourceDdl =
      """
        |CREATE TABLE student_canal (
        |  id STRING,
        |  name STRING,
        |  age INT,
        |  gender STRING,
        |  clazz STRING
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'student.student',
        | 'properties.bootstrap.servers' = 'master:9092',
        | 'properties.group.id' = 'asdasd',
        | 'format' = 'canal-json' ,
        | 'scan.startup.mode' = 'earliest-offset',
        | 'canal-json.ignore-parse-errors' = 'true'
        |)
        |
      """.stripMargin

    // Sink: JDBC table keyed on clazz so counts are upserted, not appended.
    val mysqlSinkDdl =
      """
        |CREATE TABLE mysql_sink (
        |  clazz STRING,
        |  c BIGINT,
        |  PRIMARY KEY (clazz) NOT ENFORCED
        |) WITH (
        |   'connector' = 'jdbc',
        |   'url' = 'jdbc:mysql://master:3306/test?useUnicode=true&characterEncoding=utf-8',
        |   'table-name' = 'clazz_num',
        |   'username'='root',
        |   'password'='123456'
        |)
        |
        """.stripMargin

    // Continuous query: count students per class and upsert into MySQL.
    val countInsertDml =
      """
        |insert into mysql_sink
        |select clazz,count(1) as c from student_canal group by clazz
        |
        """.stripMargin

    tableEnv.executeSql(kafkaSourceDdl)
    tableEnv.executeSql(mysqlSinkDdl)
    tableEnv.executeSql(countInsertDml)
  }

}
