package com.shujia.flink

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer08
import org.apache.flink.table.api.Types
import org.apache.flink.table.api.scala.StreamTableEnvironment
import org.apache.flink.table.descriptors._
import org.apache.flink.types.Row

/**
  * Reads Weibo comment JSON messages from a Kafka topic, registers the
  * stream as an append-only table named "comment", and prints every row.
  */
object Demo10tableONKafka {
  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Use event time as the stream time characteristic.
    // NOTE(review): no rowtime attribute / watermark is declared below, so this
    // setting currently has no effect on the registered table — confirm intent.
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)
    val tEnv = StreamTableEnvironment.create(env)

    // Kafka 0.8 source: topic, starting offsets and connection properties.
    val kafkaSource = new Kafka()
      .version("0.8")
      .topic("WeiBoCommentTopic")
      .startFromEarliest()
      .property("zookeeper.connect", "node1:2181,node2:2181,node3:2181")
      .property("bootstrap.servers", "node1:9092,node2:9092,node3:9092")
      .property("group.id", "asdasdadf")
      .property("auto.offset.reset", "earliest")

    /**
      * Table schema matching the JSON payload, e.g.:
      * {"article_id": "4395725619871130",
      *  "sentiment_id": 1,
      *  "comment_id": "4395726031285336",
      *  "created_at": "2019-07-19 11:16:57",
      *  "user_name": "Peak瑞金",
      *  "user_id": 6064231088,
      *  "total_number": 0,
      *  "like_count": 1,
      *  "text": "安排"}
      */
    val commentSchema = new Schema()
      .field("article_id", Types.STRING)
      .field("sentiment_id", Types.LONG)
      .field("comment_id", Types.STRING)
      .field("created_at", Types.STRING)
      .field("user_name", Types.STRING)
      .field("user_id", Types.LONG)
      .field("total_number", Types.LONG)
      .field("like_count", Types.LONG)
      .field("text", Types.STRING)

    // Register the Kafka stream as an append-only table named "comment",
    // deriving the JSON format from the declared schema.
    tEnv
      .connect(kafkaSource)
      .withFormat(new Json().deriveSchema())
      .withSchema(commentSchema)
      .inAppendMode()
      .registerTableSource("comment")

    val commentTable = tEnv.scan("comment")

    // Convert the table back to a DataStream of Rows and print to stdout.
    tEnv.toAppendStream[Row](commentTable).print()

    env.execute()

  }
}
