package kafka2Hbase
import org.apache.flink.api.scala._
import java.util.Collections
import com.streamxhub.streamx.flink.core.scala.FlinkStreaming
import com.streamxhub.streamx.flink.core.scala.sink.HBaseSink
import kafka2Hbase.UserDefinedSource.TestSource
import kafka2Hbase.Utils.MD5Util
import kafka2Hbase.bean.MyResult01
import org.apache.flink.streaming.api.scala.DataStream
import org.apache.hadoop.hbase.client.{Mutation, Put}
import org.apache.hadoop.hbase.util.Bytes

/**
 * Flink streaming job that reads [[MyResult01]] records from the user-defined
 * [[TestSource]] and writes them into the HBase table "test" via StreamX's
 * [[HBaseSink]].
 *
 * Row key: MD5 of the record's name. Both fields are stored under the
 * column family "info" (qualifiers "name" and "age").
 *
 * @author HuBiWei
 * @create 2022/4/28
 */
object SinkDataToHbase extends FlinkStreaming {

  override def handle(): Unit = {

    // Stream of test records produced by the user-defined source.
    val records: DataStream[MyResult01] = context.addSource(new TestSource)

    // Define the conversion rule: turn one record into the HBase mutations
    // to apply. HBaseSink picks this implicit up when sinking the stream.
    implicit def entry2Put(entity: MyResult01): java.lang.Iterable[Mutation] = {
      val ts = System.currentTimeMillis()
      // Put.addColumn returns `this`, so the calls can be chained.
      val put = new Put(Bytes.toBytes(MD5Util.code(entity.name)), ts)
        .addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(entity.name))
        .addColumn(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes(entity.age.toString))
      Collections.singleton(put)
    }

    // Write the stream into the "test" table using the conversion above.
    HBaseSink().sink[MyResult01](records, "test")
  }
}
