package com.wudl.flink.stream.tablesql

import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings, Table, TableEnvironment}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.descriptors
import org.apache.flink.table.descriptors.{Csv, FileSystem, OldCsv, Schema}



object TableSqlApi {

  /**
   * Reads sensor records (id, timestamp, temperature) from a CSV file via the
   * Table API descriptor connector, registers them as the temporary table
   * "inputTable", and prints every row as an append stream.
   *
   * @param args optional: args(0) overrides the input file path, so the job is
   *             portable beyond the hard-coded development machine path
   */
  def main(args: Array[String]): Unit = {
    // 1. Streaming environment with parallelism 1 so printed output preserves
    //    the file's record order (and uses a single output channel).
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tableEnv = StreamTableEnvironment.create(env)

    // 2. Resolve the input file; fall back to the original development path
    //    when no argument is supplied (backward compatible).
    val defaultPath = "F:\\ideaWorkSpace2020\\demo\\Flink-wudl\\src\\main\\resources\\sensor.txt"
    val filePath = args.headOption.getOrElse(defaultPath)

    // 3. Register the CSV file as a temporary table.
    //    NOTE(review): the connect()/descriptor API is deprecated since
    //    Flink 1.11 in favour of executeSql("CREATE TABLE ... WITH (...)");
    //    consider migrating when upgrading Flink.
    tableEnv.connect(new FileSystem().path(filePath))
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temperature", DataTypes.DOUBLE())
      )
      .createTemporaryTable("inputTable")

    // 4. Scan the registered table and print each row as a
    //    (String, Long, Double) tuple via an append-only stream.
    val inputTable: Table = tableEnv.from("inputTable")
    inputTable.toAppendStream[(String, Long, Double)].print()

    // Nothing above runs until the job is submitted here.
    env.execute("table api test")
  }

}
