package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

object Demo5FileSystem {

  /**
    * Demo: read a bounded JSON file with the filesystem connector and copy all
    * rows into a Kafka topic, using Flink SQL DDL + INSERT only.
    */
  def main(args: Array[String]): Unit = {

    // Create the Flink streaming execution environment.
    val bsEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    val bsSettings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner() // use the Blink planner
      .inStreamingMode() // streaming mode
      .build()

    /**
      * Create the Flink SQL (table) environment on top of the streaming env.
      */
    val bsTableEnv: StreamTableEnvironment = StreamTableEnvironment.create(bsEnv, bsSettings)

    /**
      * Source table: read a JSON file -- a bounded stream.
      * Malformed JSON rows are skipped ('json.ignore-parse-errors' = 'true').
      * NOTE(review): the path is relative to the process working directory --
      * confirm the job is launched from the project root.
      */
    bsTableEnv.executeSql(
      """
        |
        |CREATE TABLE student (
        | id STRING,
        | name STRING,
        | age BIGINT,
        | gender STRING,
        | clazz STRING
        |)  WITH (
        |  'connector' = 'filesystem',
        |  'path' = 'flink/data/students.json',
        |  'format' = 'json',
        |  'json.ignore-parse-errors' = 'true'
        |)
        |
      """.stripMargin)

    // Console sink with the same schema as `student` (LIKE ... EXCLUDING ALL
    // copies only the columns, no connector options).
    // NOTE(review): this table is created but never written to below; kept for
    // ad-hoc debugging (e.g. `insert into print_table select * from student`).
    bsTableEnv.executeSql(
      """
        |CREATE TABLE print_table WITH ('connector' = 'print')
        |LIKE student (EXCLUDING ALL)
        |
      """.stripMargin)

    /**
      * Sink table: Kafka topic `student2`, JSON-encoded records.
      */
    bsTableEnv.executeSql(
      """
        |CREATE TABLE student_sink (
        | id STRING,
        | name STRING,
        | age BIGINT,
        | gender STRING,
        | clazz STRING
        |) WITH (
        | 'connector' = 'kafka',
        | 'topic' = 'student2',
        | 'properties.bootstrap.servers' = 'master:9092',
        | 'format' = 'json'
        |)
        |
        |
      """.stripMargin)

    // executeSql submits an INSERT job asynchronously and returns immediately.
    // The source is bounded (a file), so block until the job finishes;
    // otherwise main() may exit before all rows reach Kafka.
    bsTableEnv.executeSql(
      """
        |
        |insert into student_sink
        |select * from student
        |
      """.stripMargin)
      .await()

  }

}
