package cn.lecosa.spark.hbase

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.mapred.JobConf
import org.apache.spark._
import org.apache.spark.SparkConf
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.hive.HiveContext
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.client.ConnectionFactory
import org.apache.hadoop.hbase.TableName

object HiveToHbase {

  /**
   * Copies every row of Hive table `lecosa.emp` into HBase table `emp1`.
   *
   * The target table is dropped and recreated (family `cf1`) on every run,
   * then each Hive row (expected: three string columns) is written as one
   * HBase row keyed by `col0_col1`, with qualifiers `c1`, `c2`, `c3`.
   *
   * Fixes vs. the previous version:
   *  - The driver-side `var map` that was mutated inside the RDD closure has
   *    been removed: Spark serializes closures to executors, so those
   *    mutations never reached the driver and the final `map.size` printout
   *    was always the initial size. The counter was a no-op at best and
   *    nondeterministic per-partition at worst.
   *  - All three cell values were written to the SAME qualifier
   *    (`"c" + mvalue`), so the last `add` silently overwrote the other two.
   *    Each value now goes to its own qualifier.
   *  - The HBase `Connection` and `Admin` are now closed (they leaked).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName(this.getClass.getName)
      .setMaster("local[2]")
    val sc = new SparkContext(conf)
    val hiveContext = new HiveContext(sc)
    // Small local job; keep shuffle parallelism low.
    hiveContext.setConf("spark.sql.shuffle.partitions", "3")

    val df = hiveContext.sql("select * from lecosa.emp")

    // Hadoop job configuration that routes saveAsHadoopDataset to HBase.
    val hbaseConf = HBaseConfiguration.create(sc.hadoopConfiguration)
    val jobConf = new JobConf(hbaseConf)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, "emp1")

    // Recreate the target table from scratch; close admin/connection even on failure.
    val connection = ConnectionFactory.createConnection(hbaseConf)
    try {
      val admin = connection.getAdmin
      try {
        val tabName = TableName.valueOf("emp1")
        if (admin.tableExists(tabName)) {
          admin.disableTable(tabName)
          admin.deleteTable(tabName)
        }
        HbaseUtil.createTable(admin, "emp1", "cf1")
      } finally {
        admin.close()
      }
    } finally {
      connection.close()
    }

    // One Put per Hive row; row key is "col0_col1".
    // NOTE(review): assumes the first three columns are non-null strings —
    // confirm against the lecosa.emp schema.
    df.rdd.map { row =>
      val c1 = row(0).asInstanceOf[String]
      val c2 = row(1).asInstanceOf[String]
      val c3 = row(2).asInstanceOf[String]
      val rowKey = c1 + "_" + c2
      val put = new Put(Bytes.toBytes(rowKey))
      // Each value gets its own qualifier so none is overwritten.
      put.add(Bytes.toBytes("cf1"), Bytes.toBytes("c1"), Bytes.toBytes(c1))
      put.add(Bytes.toBytes("cf1"), Bytes.toBytes("c2"), Bytes.toBytes(c2))
      put.add(Bytes.toBytes("cf1"), Bytes.toBytes("c3"), Bytes.toBytes(c3))
      (new ImmutableBytesWritable, put)
    }.saveAsHadoopDataset(jobConf)

    df.show()
    sc.stop()
  }

}