package com.persagy.energy.app

import com.persagy.energy.sink.HbaseSink
import com.persagy.energy.utils.KafkaUtil
import org.apache.flink.streaming.api.scala._
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.TimeCharacteristic
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer

object File2Hbase {

  /**
   * Flink streaming job: consumes tab-separated records from a Kafka topic
   * (configured in [[KafkaUtil]]), splits each record into a (key, value)
   * pair, and writes the pairs to HBase via [[HbaseSink]].
   *
   * @param args command-line arguments (currently unused)
   */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    /* Run with a single parallel task. */
    env.setParallelism(1)

    /* Use event-time semantics.
       NOTE(review): no timestamp/watermark assigner is attached anywhere in
       this pipeline, so event-time operators would stall if added — confirm
       EventTime is actually needed, or attach an assigner at the source. */
    env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime)

    /* Source: raw string records from the configured Kafka topic. */
    val inputStream: DataStream[String] =
      env.addSource( new FlinkKafkaConsumer[String](KafkaUtil.topic, new SimpleStringSchema(), KafkaUtil.kafkaConsumerProperties()) )

    /* Parse "key<TAB>value" records. The previous map accessed strings(1)
       unconditionally, so a single record without a tab crashed the whole
       job with ArrayIndexOutOfBoundsException; malformed records are now
       skipped instead. Extra fields beyond the first two are ignored,
       matching the original behavior for well-formed records. */
    val value: DataStream[(String, String)] = inputStream.flatMap { data =>
      data.split("\t") match {
        case Array(key, v, _*) => Seq((key, v))
        case _                 => Seq.empty[(String, String)]
      }
    }

//    value.print("energy_data: ")
    value.addSink(new HbaseSink)

    env.execute("energy : ")
  }
}
