package com.example.tableSql

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api.{EnvironmentSettings, Table, TableSchema}
import org.apache.flink.table.api.bridge.scala.{BatchTableEnvironment, StreamTableEnvironment}


object Demo2 {

  /** Row type for one supplier CSV record: name, part, part number and cost (all raw strings). */
  case class Supp(name: String, part: String, number: String, cost: String)

  def main(args: Array[String]): Unit = {

    // Single-parallelism streaming environment so printed output arrives in file order.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tEnv = StreamTableEnvironment.create(env)

    // Read the raw CSV lines.
    // NOTE(review): hard-coded Windows path — consider taking it from args(0).
    val dsStr: DataStream[String] = env.readTextFile("E:\\note\\flink-project\\temp\\supplier_data.csv")

    // Parse each CSV line into a Supp record.
    // FIX: dropped the redundant `.toString` on arrs(2) — split already yields Strings.
    val sDs: DataStream[Supp] = dsStr.map(data => {
      val arrs: Array[String] = data.split(",")
      Supp(arrs(0), arrs(1), arrs(2), arrs(3))
    })

    // Example 1: register the stream as a view and query it with SQL.
    // tEnv.createTemporaryView("aa", sDs)
    // tEnv.executeSql("select * from aa").print()

    // Example 2: Table API with string expressions.
    val table: Table = tEnv.fromDataStream(sDs)
    // FIX: equality in Flink's expression-string syntax is `===`;
    // `==` is not part of the expression grammar and fails to parse at runtime.
    val table1 = table.select("name,number").filter("name === 'Supplier X'")

    // Submits and runs the table pipeline; prints the filtered rows.
    table1.execute().print()

    // FIX: removed the trailing env.execute() — the active pipeline is already
    // submitted by table1.execute() above; the leftover call would submit a
    // second job containing only the source + map with no sink attached.
  }
}
