package com.software.process.hbase

import org.apache.spark.SparkConf
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.{TableInputFormat, TableOutputFormat}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapreduce.Job
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.{Row, SparkSession}


object HBaseTest {
  // Silence noisy Spark/Hadoop INFO logging so the application's own output is readable.
  Logger.getLogger("org").setLevel(Level.ERROR)

  /** Entry point: loads a CSV file (header row expected, with "value" and
    * "province" columns) into a DataFrame and bulk-writes each row into an
    * HBase table through the new Hadoop MapReduce output API.
    */
  def main(args: Array[String]): Unit = {
    // 1. Spark configuration; local master and the fixed testing memory are for local runs only.
    val conf = new SparkConf()
      .setAppName("SparkSqlCSVExample")
      .setMaster("local")
      .set("spark.testing.memory", "2147480000")

    // 2. Build the SparkSession.
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

    try {
      // 3. Read the CSV file into a DataFrame (first line treated as the header).
      val filePath = "D:\\csv\\date.csv"
      val df = spark.read.format("csv").option("header", "true").load(filePath)

      // 4. Configure the Hadoop job used by TableOutputFormat to write into HBase.
      val tableName = "example_table2"
      val hbaseConf = getHbaseConf(tableName)
      val job = Job.getInstance(hbaseConf)
      job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
      job.getConfiguration.set(TableOutputFormat.OUTPUT_TABLE, tableName)

      val columnFamily = Bytes.toBytes("cf")
      val rdd = df.rdd.map(toPut(columnFamily))

      rdd.saveAsNewAPIHadoopDataset(job.getConfiguration)
    } finally {
      // Always release the SparkContext, even if the read or the HBase write fails.
      spark.stop()
    }
  }

  /** Converts one DataFrame row into the key/Put pair required by
    * TableOutputFormat. Curried so it can be partially applied with the
    * column family and then mapped over an RDD[Row].
    *
    * @param columnFamily HBase column-family bytes (e.g. "cf")
    * @param row          row carrying non-null "value" and "province" string fields
    * @return (row key, Put) pair; the "province" field is used as the row key
    */
  def toPut(columnFamily: Array[Byte])(row: Row): (ImmutableBytesWritable, Put) = {
    val value = row.getAs[String]("value")
    val province = row.getAs[String]("province")
    // Fail fast with a clear message instead of an opaque NPE inside Bytes.toBytes.
    require(province != null, "CSV row is missing the 'province' (row key) column")
    require(value != null, "CSV row is missing the 'value' column")

    // Serialize the row key once and reuse it for both the Put and the writable key.
    val rowKey = Bytes.toBytes(province)
    val put = new Put(rowKey)
    // NOTE(review): qualifier "row1" looks like a misnamed column qualifier
    // (every row writes the same cf:row1 cell name) — confirm the intended name.
    put.addColumn(columnFamily, Bytes.toBytes("row1"), Bytes.toBytes(value))

    (new ImmutableBytesWritable(rowKey), put)
  }

  /** Builds the HBase client configuration targeting the given output table.
    *
    * @param tableName HBase table the job writes into
    * @return a Configuration with ZooKeeper quorum, client port, and output table set
    */
  def getHbaseConf(tableName: String): Configuration = {
    val hbaseConf = HBaseConfiguration.create()
    // hbase.zookeeper.quorum should list hosts only; the port is supplied via
    // hbase.zookeeper.property.clientPort (the original embedded ":2181" in the
    // quorum while also setting clientPort, which is redundant and error-prone).
    hbaseConf.set("hbase.zookeeper.quorum", "192.168.202.143")
    hbaseConf.set("hbase.zookeeper.property.clientPort", "2181")
    hbaseConf.set(TableOutputFormat.OUTPUT_TABLE, tableName)
    hbaseConf
  }
}