package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._

/**
  * Flink SQL demo: read JSON student records from a Kafka topic and write an
  * aggregated (updating) count-per-class stream back to Kafka using the
  * third-party `changelog-json` format.
  */
object Demo7OnKafka {

  def main(args: Array[String]): Unit = {

    // Streaming execution environment for this Flink job.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // SQL environment settings: Blink planner (parses/optimises the SQL),
    // running in streaming mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    // Table environment bound to the streaming environment above.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // ------------------------------------------------------------------
    // Source: Kafka topic `student`, JSON-encoded rows, read from the
    // earliest offset; unparsable records are skipped.
    // ------------------------------------------------------------------
    tableEnv.executeSql(
      """
        |CREATE TABLE kafka_source (
        |  id STRING,
        |  name STRING,
        |  age INT,
        |  gender STRING,
        |  clazz STRING
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'student',
        | 'properties.bootstrap.servers' = 'master:9092',
        | 'properties.group.id' = 'asdasdasd',
        | 'format' = 'json',
        | 'json.ignore-parse-errors' = 'true',
        | 'scan.startup.mode' = 'earliest-offset'
        |)
        |
      """.stripMargin)

    // ------------------------------------------------------------------
    // Sink (append-only variant, kept for reference):
    // a plain CSV Kafka sink works only for append-only query results.
    // ------------------------------------------------------------------

//    tableEnv.executeSql(
//      """
//        |CREATE TABLE kafka_sink(
//        |  id STRING,
//        |  name STRING,
//        |  age INT,
//        |  gender STRING,
//        |  clazz STRING
//        |) WITH (
//        | 'connector' = 'kafka',
//        | 'topic' = 'student_sink',
//        | 'properties.bootstrap.servers' = 'master:9092',
//        | 'properties.group.id' = 'asdasd',
//        | 'format' = 'csv'
//        |)
//        |
//        """.stripMargin)

    // An append-only query (a simple SELECT with no aggregation) could be
    // written to the CSV sink above:

//    tableEnv.executeSql(
//      """
//        |
//        |insert into kafka_sink
//        |select * from kafka_source
//        |
//      """.stripMargin)

    // ------------------------------------------------------------------
    // The csv/json Kafka formats do not support updating (retract) streams.
    // `canal-json` can only be used on the source side.
    // `flink-format-changelog-json` is a third-party format that can encode
    // changelog (update) streams into Kafka — used here because the
    // GROUP BY aggregation below produces an updating result.
    // ------------------------------------------------------------------
    tableEnv.executeSql(
      """
        |CREATE TABLE kafka_sink(
        |  clazz STRING,
        |  c BIGINT
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'student_sink',
        | 'properties.bootstrap.servers' = 'master:9092',
        | 'format' = 'changelog-json'
        |)
        |
      """.stripMargin)

    // Continuously count students per class and emit the updating result
    // into the changelog-json sink. executeSql submits the job directly.
    tableEnv.executeSql(
      """
        |
        |insert into kafka_sink
        |select clazz,count(1) as c from kafka_source group by clazz
        |
      """.stripMargin)

  }
}
