package com.xinqing.bigdata.test.tableAPI

import java.util.Properties

import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{DataTypes, Table}
import org.apache.flink.table.descriptors._


/**
  * @Author CHQ
  * @Date 2020/6/22 15:41
  * @Description Reads sensor records from a Kafka topic via the Flink Table API
  *              and prints them as an append stream.
  */
/**
  * Demo job: registers a Kafka topic as a Flink table using the descriptor
  * connector, then converts the table back to a typed DataStream and prints it.
  */
object KafkaSource {
  def main(args: Array[String]): Unit = {
    // Single-task parallelism keeps the printed output ordered and readable.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    val tableEnv: StreamTableEnvironment = StreamTableEnvironment.create(env)

    // Kafka consumer configuration for the source connector.
    val kafkaProps = new Properties()
    kafkaProps.setProperty("bootstrap.servers", "10.201.7.175:9092")
    kafkaProps.setProperty("zookeeper.connect", "10.201.7.175:2181")
    kafkaProps.setProperty("group.id", "consumer-group")
    kafkaProps.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    kafkaProps.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    kafkaProps.setProperty("auto.offset.reset", "latest")

    // Describe the source: Kafka 0.11 connector reading topic "sensor".
    val kafkaDescriptor = new Kafka()
      .version("0.11")
      .properties(kafkaProps)
      .topic("sensor")

    // Three CSV columns per record: sensor id, event time (millis), reading.
    val sensorSchema = new Schema()
      .field("id", DataTypes.STRING())
      .field("timestamps", DataTypes.BIGINT())
      .field("temperature", DataTypes.DOUBLE())

    tableEnv
      .connect(kafkaDescriptor)
      .withSchema(sensorSchema)
      .withFormat(new Csv)
      .createTemporaryTable("kafkaTable")

    // Pull the registered table back out and convert it to a typed stream.
    val sensorTable: Table = tableEnv.from("kafkaTable")
    val sensorStream: DataStream[(String, Long, Double)] =
      sensorTable.toAppendStream[(String, Long, Double)]

    sensorStream.print()

    env.execute("flink sql")
  }
}
