package com.shujia.sql

import org.apache.flink.streaming.api.scala._

import org.apache.flink.table.api._

import org.apache.flink.table.api.Table
import org.apache.flink.table.api.bridge.scala.StreamTableEnvironment

object Demo3DynamicTable {

  /**
    * Demo of Flink dynamic tables: read comma-separated student records from a
    * socket, define a dynamic table over the stream, run a continuous group-by
    * count per class, and print the continuously updated result.
    *
    * Expected input line format: id,name,age,gender,clazz
    */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    val table: StreamTableEnvironment = StreamTableEnvironment.create(env)

    /**
      * 1. Build a stream from a socket source.
      */
    val linesDS: DataStream[String] = env.socketTextStream("master", 8888)

    val studentDS: DataStream[(String, String, Int, String, String)] = linesDS
      // Drop malformed lines so a bad record cannot kill the job with an
      // ArrayIndexOutOfBoundsException / NumberFormatException.
      .filter(line => line.split(",").length >= 5)
      .map(line => {
        val split: Array[String] = line.split(",")
        (split(0), split(1), split(2).toInt, split(3), split(4))
      })

    /**
      * 2. Define a table on the stream.
      * Table: similar to a DataFrame in Spark SQL.
      */
    val studentTable: Table = table.fromDataStream(studentDS, $"id", $"name", $"age", $"gender", $"clazz")

    /**
      * 3. Run a continuous query on the dynamic table, producing a new dynamic table.
      */
    // Table API (DSL) syntax.
    // NOTE: `count` is a parameterless expression method in the Scala DSL —
    // `$"clazz".count()` does not compile.
    val clazzNumTable: Table = studentTable
      .groupBy($"clazz")
      .select($"clazz", $"clazz".count)

    /**
      * 4. Print the result table.
      * TableResult.print() submits and runs the job itself and blocks while
      * consuming results, so a separate env.execute() is not needed here —
      * calling it would fail with "No operators defined in streaming topology"
      * because no DataStream sink was registered on `env`.
      */
    clazzNumTable.execute().print()
  }

}
