package com.shujia.flink.sql

import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.EnvironmentSettings
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.types.Row


/**
  * Demo: Flink SQL over the filesystem connector.
  *
  * Defines a CSV-backed `student` source table, a `print` sink for inspection,
  * and a continuous aggregation (count per class) wired between them.
  * The final `executeSql` of the INSERT statement submits the streaming job,
  * so no explicit call to `execute()` on the environment is required.
  */
object Demo12FLinkOnFileSystem {

  def main(args: Array[String]): Unit = {

    // Underlying DataStream environment that backs the table environment.
    val streamEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Blink planner in streaming mode (unbounded table processing).
    val tableSettings: EnvironmentSettings =
      EnvironmentSettings
        .newInstance()
        .useBlinkPlanner()
        .inStreamingMode()
        .build()

    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(streamEnv, tableSettings)

    // Source table: CSV files under a (relative) filesystem path, partitioned
    // by gender. To read from HDFS instead, use a fully qualified path such as
    // hdfs://master:9000/data/student.
    tableEnv.executeSql(
      """
        |CREATE TABLE student (
        |  id string,
        |  name string,
        |  age int,
        |  clazz string,
        |  gender string
        |) PARTITIONED BY (gender) WITH (
        |  'connector' = 'filesystem',
        |  'path' = 'flink/data/student',
        |  'format' = 'csv'
        |)
        |
      """.stripMargin)

    // Sink table for testing: the 'print' connector writes each row (and its
    // changelog kind) to stdout.
    tableEnv.executeSql(
      """
        |
        |CREATE TABLE print_table (
        |clazz string,
        |num bigint
        |) WITH (
        | 'connector' = 'print'
        |)
        |
        |
      """.stripMargin)

    // Continuous query: count students per class and emit updates to the sink.
    // This call submits the streaming job.
    tableEnv.executeSql(
      """
        |
        |insert into print_table
        |select clazz,count(1) as num from student group by clazz
        |
        |
      """.stripMargin)

  }

}
