package com.zhao.demo.unbound.demo03_source.sample03_kafka

import java.util.Properties

import org.apache.flink.api.common.serialization.{DeserializationSchema, SimpleStringSchema}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.api.scala._

/**
 * Description: 从kafka分布式集群汇总实时采集数据案例展示<br/>
 * Copyright (c) ，2020 ， 赵 <br/>
 * This program is protected by copyright laws. <br/>
 * Date： 2020/11/26 16:05
 *
 * @author 柒柒
 * @version : 1.0
 */

object ReadDataFromKafkaDemo {

  /**
   * Entry point: builds a Flink streaming job that consumes string records
   * from the Kafka topic "raytek" and prints each record to stdout with the
   * prefix "kafka->". Kafka connection settings are loaded from the
   * classpath resource `consumer.properties`.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    // 1. Obtain the streaming execution environment.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // 2. Configure the Kafka source: topic name and value deserializer.
    val topic = "raytek"
    val valueDeserializer: DeserializationSchema[String] = new SimpleStringSchema
    val props: Properties = new Properties

    // Load Kafka connection parameters from the classpath resource.
    // getResourceAsStream returns null when the resource is missing, and
    // Properties.load does NOT close its input stream — guard and close explicitly.
    val configStream = this.getClass.getClassLoader.getResourceAsStream("consumer.properties")
    require(configStream != null, "consumer.properties not found on the classpath")
    try {
      props.load(configStream)
    } finally {
      configStream.close()
    }

    // Attach the Kafka consumer as a source and print every record.
    env.addSource(new FlinkKafkaConsumer[String](topic, valueDeserializer, props)).print("kafka->")

    // 3. Trigger job execution (nothing runs until execute is called).
    env.execute(this.getClass.getSimpleName)
  }
}














