package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.Row

object Demo2SqlWC {

  /**
    * Streaming word count written entirely in Flink SQL.
    *
    * Reads single-column CSV records (one word each) from a Kafka topic,
    * maintains a running count per word, and upserts the counts into a
    * MySQL table through the JDBC connector (PRIMARY KEY → upsert mode).
    */
  def main(args: Array[String]): Unit = {

    val bsEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Planner settings: Blink planner, unbounded streaming mode.
    val bsSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // use the Blink planner
      .inStreamingMode() // streaming (not batch) mode
      .build()

    // Build the Flink table environment on top of the streaming environment.
    val bsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(bsEnv, bsSettings)

    /**
      * executeSql: for DDL statements and INSERT INTO.
      * sqlQuery:   for SELECT queries.
      */

    /**
      * Source table: Kafka topic `words`, read from the earliest offset.
      * CSV format with ignore-parse-errors so malformed records are
      * skipped instead of failing the job.
      */
    bsTableEnv.executeSql(
      """
        |CREATE TABLE words_source (
        |  word STRING
        |) WITH (
        |  'connector' = 'kafka',
        |  'topic' = 'words',
        |  'properties.bootstrap.servers' = 'master:9092,node1:9092,node2:9092',
        |  'properties.group.id' = 'asdasd',
        |  'scan.startup.mode' = 'earliest-offset',
        |  'format' = 'csv',
        |   'csv.ignore-parse-errors' = 'true'
        |)
        |
      """.stripMargin)

    /**
      * Sink table: MySQL via JDBC. The PRIMARY KEY makes the connector
      * write in upsert mode, so each word's row holds the latest count.
      */
    bsTableEnv.executeSql(
      """
        |CREATE TABLE count_sink (
        |  word STRING,
        |  c BIGINT,
        |  PRIMARY KEY (word) NOT ENFORCED
        |) WITH (
        |  'connector' = 'jdbc',
        |  'url' = 'jdbc:mysql://master:3306/student',
        |  'table-name' = 'count',
        |  'username' = 'root',
        |  'password' = '123456'
        |)
        |
      """.stripMargin)

    /**
      * Group-by aggregation; Flink keeps each word's running count as
      * managed state that is persisted in checkpoints.
      *
      * executeSql submits the INSERT job asynchronously and returns a
      * TableResult. Without await(), main would return immediately and —
      * when running locally on a MiniCluster — the JVM would exit and
      * tear the job down before it processes anything. await() blocks
      * until the (unbounded) job terminates.
      */
    bsTableEnv
      .executeSql(
        """
          |insert into count_sink
          |
          |select word,count(1) as c from words_source group by word
          |
        """.stripMargin)
      .await()

  }
}
