package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._

/**
  * Flink SQL example: continuously counts words arriving on a Kafka topic and
  * upserts the running counts into a MySQL table via the JDBC connector.
  *
  * Pipeline: Kafka source table `words` -> GROUP BY word count -> JDBC sink `mysql_sink`.
  */
object Demo3ToMysql {
  def main(args: Array[String]): Unit = {
    // Streaming execution environment for the job.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Table environment settings: Blink planner, unbounded streaming mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    // SQL/Table API entry point bound to the streaming environment.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // Source table: a dynamic table backed by the Kafka topic `words`,
    // read as CSV starting from the latest offsets; unparseable rows are skipped.
    tableEnv.executeSql(
      """
        |CREATE TABLE words (
        | word STRING
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'words',
        | 'properties.bootstrap.servers' = 'master:9092',
        | 'properties.group.id' = 'asdsa',
        | 'format' = 'csv',
        | 'scan.startup.mode' = 'latest-offset',
        | 'csv.ignore-parse-errors'='true'
        |)
        |
      """.stripMargin)

    // Sink table: JDBC connector writing to the MySQL table `word_count`.
    // The physical table must already exist in MySQL; the declared primary key
    // (NOT ENFORCED) lets the connector upsert rows instead of appending.
    tableEnv.executeSql(
      """
        |
        |CREATE TABLE mysql_sink (
        |  word STRING,
        |  c BIGINT,
        |  PRIMARY KEY (word) NOT ENFORCED
        |) WITH (
        |   'connector' = 'jdbc',
        |   'url' = 'jdbc:mysql://master:3306/bigdata',
        |   'table-name' = 'word_count',
        |   'username'='root',
        |   'password'='123456'
        |)
      """.stripMargin)

    // Continuous query: per-word counts, written to the sink. executeSql on an
    // INSERT submits the Flink job, so no explicit env.execute() is needed.
    tableEnv.executeSql(
      """
        |insert into mysql_sink
        |select word, count(1) as c from words group by word
        |
      """.stripMargin)
  }
}
