package com.lkh.compute

import java.util.Properties

import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.flink.api.scala._

object Test {

  /**
    * Entry point: consumes '|'-delimited user records from the Kafka topic
    * "user" and prints every record that has exactly 6 fields.
    *
    * NOTE(review): the original comment mentioned persisting users to HBase
    * (table: create 'user','info'), but no HBase sink is implemented here —
    * records are only printed. Confirm whether the sink is still planned.
    */
  def main(args: Array[String]): Unit = {

    // Obtain the Flink streaming execution environment.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    val kafkaProps = new Properties()
    kafkaProps.setProperty("bootstrap.servers", "master:9092,node1:9092,node2:9092")
    kafkaProps.setProperty("group.id", "1")
    // "zookeeper.connect" was removed: the universal FlinkKafkaConsumer is
    // built on Kafka's new consumer API and ignores that property (it was
    // only required by the legacy 0.8 connector).

    // Build the Kafka source for topic "user" with a plain-string deserializer.
    val kafkaSource = new FlinkKafkaConsumer[String]("user", new SimpleStringSchema, kafkaProps)

    // Resume from the committed consumer-group offsets (the default, made explicit).
    kafkaSource.setStartFromGroupOffsets()

    val userDS = env.addSource(kafkaSource)

    /**
      * Keep only well-formed records: exactly 6 '|'-separated fields.
      *
      * split takes a regex, so the pipe must be escaped. The limit of -1 is
      * required because Java's String.split drops trailing empty strings by
      * default — without it, a 6-field record whose last field is empty
      * (e.g. "a|b|c|d|e|") would be miscounted as 5 fields and discarded.
      */
    val filterDS = userDS.filter(_.split("\\|", -1).length == 6)

    filterDS.print()

    // Named job so it is easy to identify in the Flink dashboard.
    env.execute("kafka-user-filter")
  }
}
