package com.shujia.sink

import java.sql.{Connection, DriverManager}

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.api.scala._
import org.apache.hadoop.hbase.client.{HConnection, HConnectionManager, HTableInterface, Put}

object Demo4SinkHbase {

  /**
   * Streaming word count sunk to HBase.
   *
   * Reads comma-separated words from a socket on node1:8888, keeps a running
   * count per word, and writes each updated (word, count) pair into the
   * HBase table "word" (rowkey = word, column info:count = count).
   */
  def main(args: Array[String]): Unit = {

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    // Single task slot: keeps the demo's output ordering easy to follow.
    env.setParallelism(1)

    val lines = env.socketTextStream("node1", 8888)

    // Split each line on commas, pair every word with 1, and keep a
    // running sum per word via a keyed rolling reduce.
    val wordCounts = lines
      .flatMap(line => line.split(","))
      .map(word => (word, 1))
      .keyBy(pair => pair._1)
      .reduce((acc, next) => (acc._1, acc._2 + next._2))

    // Translates one (word, count) pair into the HBase Put written by the sink:
    // rowkey = word, column family "info", qualifier "count", value = count as text.
    val toPut = new HbaseMapper[(String, Int)] {
      override def getPut(kv: (String, Int)): Put = {
        val (word, count) = kv
        val put = new Put(word.getBytes())
        put.add("info".getBytes(), "count".getBytes(), String.valueOf(count).getBytes())
        put
      }
    }

    wordCounts.addSink(new HbaseSink[(String, Int)]("word", toPut))

    env.execute("Demo4SinkHbase")
  }

}


