package to_hbase_xiemingyang

import org.apache.hadoop.hbase.client.{ConnectionFactory, Get, Put, Table}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase._
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.util.parsing.json.JSON

object makeConceptDic_step02 {
  // ZooKeeper quorum / client port / znode parent used to locate the HBase cluster.
  val hbase_zq = "10.120.65.181,10.120.67.164,10.120.64.243,10.120.64.144"
  val hbase_zqPort = "2181"
  val hBase_zParent = "/hbase"
  // NOTE(review): these initializers run when the object is first touched, so merely
  // loading this object opens a live HBase connection. `connection` and `admin` are
  // never closed anywhere in this file. The Spark closures below capture this object,
  // so on a non-local master each executor JVM would re-run this initializer on
  // deserialization — TODO confirm that one-connection-per-JVM is intended.
  val configuration = HBaseConfiguration.create()
  this.configuration.set("hbase.zookeeper.quorum", this.hbase_zq)
  this.configuration.set("hbase.zookeeper.property.clientPort", this.hbase_zqPort)
  this.configuration.set("zookeeper.znode.parent", this.hBase_zParent) //some condition you should add;
  val connection = ConnectionFactory.createConnection(this.configuration)
  val admin = connection.getAdmin()

  /** Returns a `Table` handle for `tablename`, first creating the table with the
    * given column families if it does not already exist.
    *
    * NOTE(review): "inintTable" / "cloumnFamilys" are misspelled, but they form the
    * public interface of this object, so the names are left unchanged.
    * NOTE(review): `HTableDescriptor`/`HColumnDescriptor` are the pre-2.x HBase
    * admin API (deprecated in HBase 2.x in favor of TableDescriptorBuilder).
    */
  def inintTable(tablename: String, cloumnFamilys: Array[String]): Table = {
    val tName = TableName.valueOf(tablename)
    if (admin.tableExists(tName)) {
      connection.getTable(tName)
    } else {
      val tDescrip: HTableDescriptor = new HTableDescriptor(tName)
      for (cf <- cloumnFamilys) {
        val cl: HColumnDescriptor = new HColumnDescriptor(cf)
        tDescrip.addFamily(cl)
      }
      admin.createTable(tDescrip)
      connection.getTable(tName)
    }
  }

  /** Reads one row (`rowkey`) of `tableName`, restricted to `columnFamily`, and
    * returns a map of column qualifier -> cell value (both decoded as strings).
    * Returns an empty map when the row or family has no cells.
    *
    * NOTE(review): if `table.get(...)` throws, the table handle leaks — a
    * try/finally around the read would be safer.
    */
  def getFamily_column_value(tableName: String, rowkey: String, columnFamily: String): Map[String, String] = {
    val tName = TableName.valueOf(tableName)
    val table = this.connection.getTable(tName)
    val get: Get = new Get(Bytes.toBytes(rowkey))
    get.addFamily(Bytes.toBytes(columnFamily))
    val result: Array[Cell] = table.get(get).rawCells()
    var values: HashMap[String, String] = new HashMap[String, String]()
    if (result != null) {
      // Later cells with the same qualifier overwrite earlier ones in the map.
      for (cell <- result) {
        val column_name: String = Bytes.toString(CellUtil.cloneQualifier(cell))
        val value: String = Bytes.toString(CellUtil.cloneValue(cell))
        values += (column_name -> value)
      }
    }
    table.close()
    values.toMap
  }

  /** Adds column family `columnFamilys` to `tableName` if it is not already present.
    * The table is disabled for the schema change and re-enabled afterwards.
    *
    * NOTE(review): the disable/enable round-trip happens even when the family already
    * exists, taking the table offline needlessly on every call; and if
    * `modifyTable` throws, the table is left disabled (no try/finally).
    */
  def addFamilys(tableName: String, columnFamilys: String): Unit = {

    val tablename = TableName.valueOf(tableName)
    this.admin.disableTable(tablename)
    val hDescriptor = this.admin.getTableDescriptor(tablename)
    val hColumnDescriptor = new HColumnDescriptor(columnFamilys)
    // getFamily returns null when the family does not exist yet.
    val descriptor = hDescriptor.getFamily(Bytes.toBytes(columnFamilys))
    if (descriptor == null) {
      hDescriptor.addFamily(hColumnDescriptor)
      this.admin.modifyTable(tablename, hDescriptor)
    }
    this.admin.enableTable(tablename)


  }

  /** Returns a `Table` handle for an existing table; caller is responsible for
    * closing it.
    */
  def curTable(tablename: String): Table = {
    val tName = TableName.valueOf(tablename)
    this.connection.getTable(tName)
  }

  /** Writes each tab-separated record "rowkey \t family \t qualifier \t value" as
    * one Put. Exceptions are printed and swallowed (best-effort); the table handle
    * is always closed via finally.
    */
  def putDatas(tableNameStr: String, datas: Array[String]) = {
    val table: Table = curTable(tableNameStr)
    try {
      for (data <- datas) {
        val cell: Array[String] = data.split("\t")
        val put: Put = new Put(Bytes.toBytes(cell(0))) //rowkey
        // family/column/value
        put.addColumn(Bytes.toBytes(cell(1)), Bytes.toBytes(cell(2)), Bytes.toBytes(cell(3)))
        table.put(put)
      }
    } catch {
      case e: Exception => e.printStackTrace()
    } finally {
      table.close()
    }
  }

  /** Writes a single record given as Array(rowkey, family, qualifier, value).
    *
    * NOTE(review): unlike `putDatas`, there is no try/finally here — if `put`
    * throws, the table handle leaks. Also assumes `datas` has >= 4 elements
    * (ArrayIndexOutOfBoundsException otherwise).
    */
  def putData(tableNameStr: String, datas: Array[String]) = {
    val table: Table = curTable(tableNameStr)

    val cell: Array[String] = datas

    val put: Put = new Put(Bytes.toBytes(cell(0)))
    put.addColumn(Bytes.toBytes(cell(1)), Bytes.toBytes(cell(2)), Bytes.toBytes(cell(3)))
    table.put(put)
    table.close()

  }

  /** Reads ConceptDic.json from HDFS, keeps only rows with WebCode "WHO", and writes
    * each concept into HBase table DetailMapInfos
    * (rowkey = WebCode_SourceCode, family = ConceptId with "/" -> "_",
    * qualifier = "ConceptName", value = ConceptName).
    *
    * NOTE(review): the match on the parse result is non-exhaustive — a line that
    * `JSON.parseFull` cannot parse yields `None` and throws a MatchError when the
    * job runs. The `case Some(maps: Map[String, Any])` pattern is also unchecked
    * due to type erasure. `conceptDic` is bound to `Unit` (saveAsTextFile returns
    * Unit) and never used. The HBase writes are side effects inside `map`, executed
    * only when saveAsTextFile triggers the job.
    */
  def makeConceptName(): Unit = {
    val conf = new SparkConf().setAppName("JSONApp").setMaster("local");
    // Create the SparkContext from the conf
    val sc = new SparkContext(conf);
    // Maps the JSON keys of interest to their slot in the 4-element record array.
    val cloumns = Map(
      "WebCode" -> 0,
      "SourceCode" -> 1,
      "ConceptId" -> 2,
      "ConceptName" -> 3)

    val inputFile = "hdfs://slave1:8020//InternationalData/ConceptDic.json" // read the JSON file
    val jsonStr = sc.textFile(inputFile);
    val webcodes = List("WHO")
    val result = jsonStr.map(s => JSON.parseFull(s))
    var conceptDic = result.map {
      r =>
        r match {
          case Some(maps: Map[String, Any]) => {
            // tl holds [WebCode, SourceCode, ConceptId, ConceptName]; unknown keys
            // are routed to index 4 and dropped.
            var tl: Array[String] = Array("", "", "", "")
            var concept_code = ArrayBuffer[String]()
            var detail_code = ArrayBuffer[String]()
            maps.foreach(t => {
              val keys = t._1
              val valus = t._2
              var index = -1
              val contams = cloumns.contains(keys)
              if (contams) {
                index = cloumns(keys)
              } else {
                index = 4
              }
              if (index < 4) {
                tl(index) = valus.toString
              }
            })
            if (webcodes.contains(tl(0))) {
              //add familys
              if (tl(2) != "") {
                println(tl(2))
                // Ensure a column family named after ConceptId ("/" replaced by "_")
                // exists before writing. NOTE(review): this disables/enables the
                // table once per record — very expensive inside an RDD job.
                addFamilys("DetailMapInfos", tl(2).replace("/", "_"))
              }
              // add a column for familys
              val value: Array[String] = Array(tl(0) + "_" + tl(1), tl(2).replace("/", "_"), "ConceptName", tl(3))
              putData("DetailMapInfos", value)
            }
            "successed"
          }

        }
    }.saveAsTextFile("hdfs://slave1:8020//tmp/worldData_table/conceptdic_who_02")
  }

  // The code below did not take effect.
  // Processes rows whose WebCode is "UNIDO".
  // Reads the local (IDE classpath) resource "ConceptNameList/UNIDO_ConceptName.txt"
  // ??? is there a corresponding HDFS file? Not found..
  // Fields handled: WebCode, SourceCode, ConceptId, ConceptName — these fields exist
  // in the concept table; they were probably extracted from it.
  // Writes to HBase: table DetailMapInfos, rowkey = WebCode + SourceCode, column
  // family = ConceptId, qualifier = "ConceptName", value = ConceptName.
  // get 'DetailMapInfos',"Area:ConceptName"   returns nothing
  // get 'DetailMapInfos',"Series:ConceptName" returns nothing
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("JSONApp").setMaster("local");
    val sc = new SparkContext(conf);
    // Is this a file on HDFS? (No — it is resolved from the classpath below.)
    // NOTE(review): getResource returns null if the resource is missing, which
    // would make .getPath throw an NPE here.
    val thepath = makeConceptDic_step02.getClass.getClassLoader().getResource("ConceptNameList/UNIDO_ConceptName.txt").getPath
    //val inputFile = "/data/splitDb/IMF_ConceptName.txt"// read the JSON file
    println(thepath)
    // WebCode SourceCode ConceptId ConceptName — these fields exist in the concept table
    //UNIDO	      CIP	      Area	    Area
    val jsonStr: RDD[Array[String]] = sc.textFile(thepath).map(_.split("\t"))
    val webcodes = List("UNIDO")
    // NOTE(review): this foreach runs on executors and captures this object (its
    // HBase connection/admin fields); with a non-local master the executors would
    // each need to re-initialize the object — presumably only works with
    // setMaster("local"). TODO confirm.
    jsonStr.foreach(data => {
      val tl = data
      if (webcodes.contains(tl(0))) {
        //add familys
        if (tl(2) != "") {
          println(tl(2))
          // If absent, add a column family named tl(2).replace("/", "_")
          // (the ConceptId value) to the table.
          addFamilys("DetailMapInfos", tl(2).replace("/", "_"))
        }
        // WebCode,SourceCode,ConceptId,ConceptName
        var value: Array[String] = Array()
        if (tl(1) == "") {
          value = Array(tl(0), tl(2).replace("/", "_"), "ConceptName", tl(3))
        } else {
          if (tl.length < 4) {
            //                    rowkey                    family                             qualifier     value
            value = Array(tl(0) + "_" + tl(1), tl(2).replace("/", "_"), "ConceptName", " ")
          } else {
            value = Array(tl(0) + "_" + tl(1), tl(2).replace("/", "_"), "ConceptName", tl(3))
          }
        }
        // rowkey is WebCode + "_" + SourceCode
        // table DetailMapInfos, family = ConceptId, qualifier = "ConceptName", value = ConceptName
        putData("DetailMapInfos", value)
      }
    })

  }
}

