package tableapi

import bean.SensorReading
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.table.api.scala.{StreamTableEnvironment, _}
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings, Table}
import org.apache.flink.table.descriptors._
import org.apache.flink.types.Row

/**
  * Demonstrates three ways to declare a processing-time attribute in the Flink Table API:
  * via `fromDataStream(..., 'pt.proctime)`, via a connector `Schema().proctime()`,
  * and via a DDL computed column `pt AS PROCTIME()`.
  *
  * @author xiao kun tai
  * @since 2021/11/27
  */
object Table8_ProcessTime {

  /**
    * Entry point. Reads `sensor.txt` (lines of the form `id,timestamp,temperature`,
    * e.g. `sensor_1,1547718199,35.8`) and registers the data as tables with a
    * processing-time attribute using three equivalent mechanisms:
    *   1. Table API field expressions (`'pt.proctime`) when converting a DataStream,
    *   2. the connector descriptor API (`Schema().proctime()`),
    *   3. SQL DDL with a computed column (`pt AS PROCTIME()`).
    */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    env.setStreamTimeCharacteristic(TimeCharacteristic.ProcessingTime)

    // File source: each line is "id,timestamp,temperature".
    val inputPath: String = "src/main/resources/sensor.txt"
    val fileStream: DataStream[String] = env.readTextFile(inputPath)

    // Parse raw CSV lines into the typed SensorReading(id, timestamp, temperature).
    val dataStream: DataStream[SensorReading] = fileStream.map(data => {
      val arr = data.split(",")
      SensorReading(arr(0), arr(1).toLong, arr(2).toDouble)
    })

    val settings: EnvironmentSettings = EnvironmentSettings.newInstance()
      .useBlinkPlanner()
      .inStreamingMode()
      .build()

    // Create the table execution environment (Blink planner, streaming mode).
    val tableEnv = StreamTableEnvironment.create(env, settings)

    // 1) Table API: 'id/'temperature/'timestamp map to the case-class fields by name;
    //    'pt.proctime appends a processing-time attribute as an extra column.
    val sensorTable: Table = tableEnv.fromDataStream(dataStream, 'id, 'temperature, 'timestamp, 'pt.proctime)

    sensorTable.printSchema()
    sensorTable.toAppendStream[Row].print("table")

    // 2) Connector descriptor: declare the physical columns in FILE order
    //    (id, timestamp BIGINT, temperature DOUBLE) — the CSV format maps columns
    //    positionally — then append the processing-time attribute `pt`.
    tableEnv.connect(new FileSystem()
      .path(inputPath)
    )
      .withFormat(new Csv())
      .withSchema(new Schema()
        .field("id", DataTypes.STRING())
        .field("timestamp", DataTypes.BIGINT())
        .field("temperature", DataTypes.DOUBLE())
        .field("pt", DataTypes.TIMESTAMP(3))
        .proctime()
      )
      .createTemporaryTable("processTimeTable")

    // 3) DDL: declare the processing-time attribute as a computed column.
    //    The connector path reuses inputPath so the example is machine-independent.
    val sinkDDL: String =
      s"""
        |create table dataTable (
        | id varchar(20) not null,
        | ts bigint,
        | temperature double,
        | pt AS PROCTIME()
        |) with (
        | 'connector.type' = 'filesystem',
        | 'connector.path' = '$inputPath',
        | 'format.type' = 'csv'
        |)
      """.stripMargin
    tableEnv.sqlUpdate(sinkDDL) // execute the DDL

    env.execute("table api test")
  }
}
