package com.shujia.flink.sink

import org.apache.flink.streaming.api.scala._
import org.apache.hadoop.hbase.client.Put

object Demo4SinkHbase {

  /**
   * Streaming word count that sinks to HBase.
   *
   * Reads comma-separated words from a socket on node1:8888, maintains a
   * running count per word, and writes every updated count to the HBase
   * table "wc" (row key = word, column family "info", qualifier "count").
   * Updates are also printed to stdout for debugging.
   */
  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Running word count keyed by word; each state update is emitted
    // downstream as a tab-separated "word\tcount" line.
    val countDS = env.socketTextStream("node1", 8888)
      .flatMap(_.split(","))
      .map(word => (word, 1))
      .keyBy(_._1)
      .sum(1)
      .map(kv => kv._1 + "\t" + kv._2)

    // Converts one "word\tcount" line into an HBase Put (row key = word).
    val mapper = new HbaseMapper[String] {
      override def mapper(value: String): Put = {
        // Split once and reuse — the original called split("\t") per field.
        val fields = value.split("\t")
        val word = fields(0)
        val count = fields(1)

        val put = new Put(word.getBytes())
        // NOTE(review): Put.add is deprecated since HBase 1.0 and removed in
        // 2.x — switch to put.addColumn(...) if the cluster's client jar
        // supports it. Kept as-is here since the HBase version is unknown.
        put.add("info".getBytes(), "count".getBytes(), count.getBytes())
        put
      }
    }

    // Persist the running counts to HBase via the custom sink.
    countDS.addSink(new HbaseSink[String]("wc", mapper, "node1:2181,node2:2181,node3:2181"))

    countDS.print()

    env.execute("wc")

  }

}
