package com.shujia.rec.compute

import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.api.scala._
import com.shujia.rec.common.Constants
import com.shujia.rec.entry.CaseClass.LogEntry
import com.shujia.rec.mapper.HbaseMapper
import com.shujia.rec.sink.HbaseSink
import com.shujia.rec.util.{KafkaUtil, LogUtil}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer010
import org.apache.hadoop.hbase.client.Put

object LogToHbase {

  /**
   * Flink streaming job: consume raw log lines from Kafka, drop malformed
   * records, parse them into [[LogEntry]], and persist each entry to HBase.
   */
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Kafka consumer for the log topic (group id "LogToHbase").
    val consumer: FlinkKafkaConsumer010[String] =
      KafkaUtil.getKafaSoure("LogToHbase", Constants.KAFKA_TOPIC)

    // Start from the latest offsets: only records arriving after job start
    // are processed (earlier data in the topic is skipped).
    consumer.setStartFromLatest()

    // Ingest raw lines, discard records that fail validation, then parse
    // each surviving line into a structured LogEntry.
    val entries: DataStream[LogEntry] = env
      .addSource(consumer)
      .filter(LogUtil.verifyLog _)
      .map(LogUtil.toEntry _)

    // Translates one LogEntry into an HBase Put.
    val mapper: HbaseMapper[LogEntry] = new HbaseMapper[LogEntry] {
      override def getPut(kv: LogEntry): Put = {
        // Row key is "<userId>_<proId>": one row per user/product pair.
        val key: String = kv.userId + "_" + kv.proId
        val put: Put = new Put(key.getBytes)
        // Write column info:action with the event timestamp as the cell
        // version, so repeated actions on the same row are all retained
        // (the table keeps up to 100 versions — see DDL below).
        put.add("info".getBytes, "action".getBytes, kv.ts, String.valueOf(kv.action).getBytes)
        put
      }
    }

    // HBase DDL for the target table:
    //   create 'log',{NAME => 'info', VERSIONS => 100}

    // Sink the parsed entries into HBase.
    entries.addSink(new HbaseSink[LogEntry](Constants.LOGTOHBASE_HBASE_TABLE, mapper))

    env.execute("LogToHbase")

  }
}
