package to_hbase_xiemingyang

import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{Cell, HBaseConfiguration, HColumnDescriptor, TableName}
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.{SparkConf, SparkContext}

/**
 * One-shot batch job that reads JSON source descriptors from HDFS and writes
 * row-key / status / web-info columns into the HBase tables "BaseMapInfos"
 * and "DetailMapInfos" via Spark's saveAsNewAPIHadoopDataset.
 */
object makeBaseMap01_step01 {
  // ZooKeeper quorum backing the HBase cluster, and its client port.
  val hbase_zq = "10.120.65.181,10.120.67.164,10.120.64.243,10.120.64.144"
  val hbase_zqPort = "2181"
  // "/hbase" is already the default znode parent; set explicitly for clarity.
  val hBase_zParent = "/hbase"

  val configuration = HBaseConfiguration.create()
  configuration.set("hbase.zookeeper.quorum", hbase_zq)
  configuration.set("hbase.zookeeper.property.clientPort", hbase_zqPort)
  configuration.set("zookeeper.znode.parent", hBase_zParent)

  // NOTE(review): connection/admin are opened eagerly at object initialization
  // and never closed — acceptable for a run-once batch job, but a resource leak
  // if this object is ever loaded inside a long-lived process.
  val connection = ConnectionFactory.createConnection(configuration)
  val admin = connection.getAdmin()

  /**
   * Returns a [[Table]] handle for `tablename`.
   * The caller is responsible for closing the returned handle.
   */
  def curTable(tablename: String): Table =
    connection.getTable(TableName.valueOf(tablename))

  /**
   * Returns true when a row with key `rowkey` exists in `tablename`.
   * The table handle is always closed, even if the Get fails.
   */
  def a_rowkey_isexits(tablename: String, rowkey: String): Boolean = {
    val table = curTable(tablename)
    try {
      // A Result with no cells means the row is absent.
      table.get(new Get(Bytes.toBytes(rowkey))).rawCells().nonEmpty
    } finally {
      table.close() // was leaked on exception in the original
    }
  }

  /**
   * Adds the column family `columnFamilys` to an existing table using the
   * disable → modify → enable cycle required by HBase schema changes.
   * The table is re-enabled even if the modification fails.
   */
  def addFamilys(tableName: String, columnFamilys: String): Unit = {
    val tablename = TableName.valueOf(tableName)
    admin.disableTable(tablename)
    try {
      val descriptor = admin.getTableDescriptor(tablename)
      descriptor.addFamily(new HColumnDescriptor(columnFamilys))
      admin.modifyTable(tablename, descriptor)
    } finally {
      // Without this, a failed modifyTable left the table disabled.
      admin.enableTable(tablename)
    }
  }

  /**
   * Parses one JSON line into a flat String->String map.
   * Returns None for lines that fail to parse or are not JSON objects,
   * instead of throwing a MatchError as the original non-exhaustive match did.
   */
  private def parseLine(line: String): Option[Map[String, String]] =
    scala.util.parsing.json.JSON.parseFull(line) match {
      case Some(m: Map[_, _]) => Some(m.map { case (k, v) => k.toString -> v.toString })
      case _                  => None
    }

  /**
   * Configures the SparkContext's Hadoop configuration for writing to the
   * HBase table `table` and returns a Job carrying that output format.
   */
  private def newOutputJob(sc: SparkContext, table: String): Job = {
    sc.hadoopConfiguration.set("hbase.zookeeper.quorum", hbase_zq)
    sc.hadoopConfiguration.set("hbase.zookeeper.property.clientPort", hbase_zqPort)
    sc.hadoopConfiguration.set(TableOutputFormat.OUTPUT_TABLE, table)
    val job = Job.getInstance(sc.hadoopConfiguration)
    job.setOutputKeyClass(classOf[ImmutableBytesWritable])
    // The job writes Puts; the original declared classOf[Result], which is a
    // read-side type — TableOutputFormat expects Mutation subclasses.
    job.setOutputValueClass(classOf[Put])
    job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
    job
  }

  /**
   * Reads /tmp/worldData/Sources.json and, for every parsed record, writes a
   * Status:isdetal cell into "BaseMapInfos" under row key WebCode_SourceCode:
   * "over" when the row already exists, "begin" otherwise.
   * (Original author's note: this method is probably unused.)
   */
  def analysis_source_table_add_rowkey_for_all_webcode_sourcecode_BaseMapInfos(): Unit = {
    val sparkConf = new SparkConf().setAppName("HBaseMapsInfo_step01").setMaster("local")
    val sc = new SparkContext(sparkConf)
    val table = "BaseMapInfos"
    val job = newOutputJob(sc, table)

    val inputFile = "hdfs://slave1:8020//tmp/worldData/Sources.json"
    // flatMap(parseLine) silently skips unparseable lines instead of crashing.
    val rdds = sc.textFile(inputFile).flatMap(parseLine).map { fields =>
      // Row key is WebCode_SourceCode; missing keys still throw, as before.
      val key = fields("WebCode") + "_" + fields("SourceCode")
      val put = new Put(Bytes.toBytes(key))
      val status = if (a_rowkey_isexits(table, key)) "over" else "begin"
      put.addColumn(Bytes.toBytes("Status"), Bytes.toBytes("isdetal"), Bytes.toBytes(status))
      (new ImmutableBytesWritable, put)
    }
    rdds.saveAsNewAPIHadoopDataset(job.getConfiguration)
    sc.stop()
  }

  /**
   * Reads source.txt from HDFS and writes WebInfo-family columns
   * (WebName, WebCode, SourceCode, SourceName — each only when present)
   * into "DetailMapInfos" under row key WebCode_SourceCode.
   */
  def analysis_source_table_add_rowkey_for_all_webcode_sourcecode_DetailMapInfos(): Unit = {
    val sparkConf = new SparkConf().setAppName("HBaseMapsInfo_step02").setMaster("local[*]")
    val sc = new SparkContext(sparkConf)
    val table = "DetailMapInfos"
    val job = newOutputJob(sc, table)

    val inputFile = "hdfs://node01:8020//tmp/InternationalData_source/source.txt"
    // Columns to copy from the parsed record into the WebInfo family.
    val columns = Seq("WebName", "WebCode", "SourceCode", "SourceName")
    val rdds = sc.textFile(inputFile).flatMap(parseLine).map { fields =>
      val key = fields("WebCode") + "_" + fields("SourceCode")
      val put = new Put(Bytes.toBytes(key))
      // Replaces four duplicated if-contains blocks; each column is written
      // only when the record actually carries it, exactly as before.
      for (col <- columns; value <- fields.get(col)) {
        put.addColumn(Bytes.toBytes("WebInfo"), Bytes.toBytes(col), Bytes.toBytes(value))
      }
      (new ImmutableBytesWritable, put)
    }
    rdds.saveAsNewAPIHadoopDataset(job.getConfiguration)
    sc.stop()
  }

  def main(args: Array[String]): Unit = {
    // Run-once maintenance steps, kept for reference:
    //println(a_rowkey_isexits(tablename = "BaseMapInfos", rowkey = "WTO_WTO"))
    //analysis_source_table_add_rowkey_for_all_webcode_sourcecode_BaseMapInfos()
    //addFamilys("BaseMapInfos", "Status")
    analysis_source_table_add_rowkey_for_all_webcode_sourcecode_DetailMapInfos()
  }

}

