package com.kingjw.flinkAPI

import java.sql.Timestamp
import java.time.LocalDateTime

import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.{DataTypes, EnvironmentSettings, Table}
import org.apache.flink.table.api.scala._
import org.apache.flink.table.descriptors.{Csv, FileSystem, Kafka, Rowtime, Schema}

/**
 *
 * @Package: com.kingjw.flinkAPI
 * @ClassName: StreamToTable
 * @Author: 王坚伟
 * @CreateTime: 2022/1/22 15:53
 * @Description:
 */
object StreamToTable {

  /**
   * Reads CSV-encoded sensor records (id, timestamp, temperature) from the
   * "sensor" Kafka topic, registers them as table "inputTable" with both an
   * event-time (rowtime) and a processing-time (proctime) attribute, projects
   * (id, ts, pt) and prints the result as an append stream.
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    // The schema below declares a rowtime attribute (.rowtime on "timestamp"),
    // which requires event-time semantics — ProcessingTime here would make the
    // job fail when the event-time attribute is translated. Use EventTime.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val settings = EnvironmentSettings.newInstance()
      .useBlinkPlanner()
      .inStreamingMode().build()
    val tableEnv = StreamTableEnvironment.create(env, settings)
//    val inputStream = env.readTextFile("input/sensor.txt")
//    val inputStream = env.socketTextStream("10.0.3.207", 7777)

    // Legacy descriptor API: Kafka source + CSV format + schema with
    // event-time (bounded out-of-orderness 1s) and processing-time columns.
    tableEnv.connect(new Kafka()
      .version("0.11")
      .topic("sensor")
      .property("zookeeper.connect", "hadoop117:2181,hadoop118:2181,hadoop119:2181")
      .property("bootstrap.servers", "hadoop117:9092")
    ).withFormat(new Csv())
      .withSchema(new Schema().field("id", DataTypes.STRING())
            .field("timestamp", DataTypes.BIGINT())
            .field("temperature", DataTypes.DOUBLE())
        .rowtime(
          new Rowtime()
            // Derive the event-time attribute from the "timestamp" column;
            // allow events up to 1000 ms late (periodic bounded watermarks).
            .timestampsFromField("timestamp")
            .watermarksPeriodicBounded(1000)
        )
        // Processing-time attribute appended as an extra column "pt".
        .field("pt", DataTypes.TIMESTAMP(3)).proctime()
      ).createTemporaryTable("inputTable")

    val table: Table = tableEnv.from("inputTable")
    // Project id, the raw epoch timestamp (renamed ts) and the proctime column.
    val sensorTable2: Table = table.select('id, 'timestamp as 'ts, 'pt)


//    val dataStream: DataStream[SensorReading] = inputStream
//      .map(data => {
//        val dataArray = data.split(",")
//        SensorReading(dataArray(0), dataArray(1).toLong, dataArray(2).toDouble)
//      })
//    val sensorTable2: Table = tableEnv.fromDataStream(dataStream, 'id, 'timestamp as 'ts,'pt.proctime)

//    tableEnv.connect(new Kafka()
//      .version("0.11")
//      .topic("sensor")
//      .property("zookeeper.connect", "hadoop117:2181,hadoop118:2181,hadoop119:2181")
//      .property("bootstrap.servers", "hadoop117:9092")
//    )
//      .withFormat(new Csv())
//      .withSchema(new Schema().field("id",DataTypes.STRING())
//        .field("temp",DataTypes.BIGINT())
//      ).createTemporaryTable("outputTable")

//    val sinkDDL:String =
//      """
//        |create table jdbcOutputTable (
//        |id varchar(20) not null,
//        |ts bigint not null
//        |) with (
//        |'connector.type' = 'jdbc',
//        |'connector.url' = 'jdbc:mysql://localhost:3306/mybatis?useSSL=false&useUnicode=true&characterEncoding=utf-8&serverTimezone=GMT%2B8',
//        |'connector.table' = 'sensor',
//        |'connector.driver' = 'com.mysql.jdbc.Driver',
//        |'connector.username' = 'root',
//        |'connector.password' = '123456'
//        |)
//        |""".stripMargin
//
//    tableEnv.sqlUpdate(sinkDDL)
//
//
//    sensorTable2.insertInto("jdbcOutputTable")
    // Append-only projection → safe to convert with toAppendStream.
    sensorTable2.toAppendStream[(String, Long, Timestamp)].print("sql")
    env.execute("ni hao ya")
  }

}
