package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._

/**
  * Demo: read a CSV file as a dynamic table with Flink SQL and print every row.
  *
  * Pipeline: filesystem CSV source table -> `CREATE TABLE ... LIKE` derived
  * print sink -> `INSERT INTO` streaming query.
  */
object Demo5ReadFile {
  def main(args: Array[String]): Unit = {
    // Flink streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Table/SQL settings: the blink planner parses and optimizes the SQL,
    // running in streaming (unbounded) mode.
    val settings: EnvironmentSettings = EnvironmentSettings
      .newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    // Bridge between the DataStream API environment and the Table/SQL API.
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env, settings)

    // Source table: comma-delimited CSV file read through the filesystem connector.
    val createSourceTable: String =
      """
        |CREATE TABLE student_file (
        |  id STRING,
        |  name STRING,
        |  age INT,
        |  gender STRING,
        |  clazz STRING
        |)  WITH (
        |  'connector' = 'filesystem',
        |  'path' = 'data/students.txt',
        |  'format' = 'csv',
        |  'csv.field-delimiter'=','
        |)
        |
      """.stripMargin
    tableEnv.executeSql(createSourceTable)

    /**
      * Sink table derived from the source schema via `LIKE`.
      *
      * `EXCLUDING ALL` drops the four connector options inherited from
      * `student_file`:
      *   'connector' = 'filesystem',
      *   'path' = 'data/students.txt',
      *   'format' = 'csv',
      *   'csv.field-delimiter'=','
      * leaving only the option declared here:
      *   'connector' = 'print'
      */
    val createPrintSink: String =
      """
        |CREATE TABLE print_table WITH ('connector' = 'print')
        |LIKE student_file (EXCLUDING ALL)
        |
      """.stripMargin
    tableEnv.executeSql(createPrintSink)

    // Streaming insert: copy every source row into the print sink,
    // which writes each record to stdout.
    val insertQuery: String =
      """
        |insert into print_table
        |select * from student_file
      """.stripMargin
    tableEnv.executeSql(insertQuery)
  }
}
