package com.haimalab.demo.kafka

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010

/**
  * ETL（抽取、转换、加载）在实施过程中也可能采用 ELT 的方式。
  *
  * 任务方式：
  * EL: 从外部系统中抽取数据，直接加载到内部系统中。数据湖的推荐方式。
  * ETL: 从外部或内部系统中抽取数据，进行转换处理，然后加载到内部系统中。
  */
object KafkaSinkJob {

  /**
   * Entry point: builds a minimal Flink streaming job that emits a single
   * string element and sinks it to the Kafka topic "ngy-test".
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Producer-side Kafka configuration only.
    // NOTE(fix): the original code set consumer-only properties here
    // ("group.id", "key.deserializer", "value.deserializer"). A Kafka
    // *producer* ignores those (it merely logs "unknown config" warnings),
    // and record serialization is already handled by SimpleStringSchema,
    // so no serializer keys are required either.
    val properties = new Properties()
    properties.setProperty("bootstrap.servers", "172.16.208.113:6667")

    // FlinkKafkaProducer010(topic, serialization schema, producer config).
    val producer = new FlinkKafkaProducer010("ngy-test", new SimpleStringSchema(), properties)

    // Single-element demo payload. (Kept byte-identical; "hello word" looks
    // like a typo for "hello world" but is runtime data — confirm before changing.)
    env.fromElements("hello word")
      .addSink(producer).name("kafka-sink").setParallelism(3)

    // Name the job so it is identifiable in the Flink dashboard/logs.
    env.execute("kafka-sink-job")
  }
}
