package com.sys.tdhclient.startapp

import java.util.Properties

import com.sys.tdhclient.startapp.SparkWtHbasePut.{port, quorum, tableName}
import com.sys.tdhclient.utils.{HBaseUtils, SparkSc}
import org.apache.hadoop.hbase.{HColumnDescriptor, HTableDescriptor, TableName}
import org.apache.hadoop.hbase.client.{Admin, HBaseAdmin, HTable, Put}
import org.apache.hadoop.hbase.mapreduce.{TableInputFormat, TableOutputFormat}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

object SparkCrudHbase {
  // Runtime configuration loaded from the project's properties file.
  private val properties: Properties = SparkSc.getProperties()
  private val tableName: String = properties.getProperty("tableName")
//  private val quorum: String = properties.getProperty("quorum")
//  private val port: String = properties.getProperty("port")
  val conf = HBaseUtils.getHBaseConfiguration()
  conf.set(TableInputFormat.INPUT_TABLE, tableName)
  // NOTE(review): this Admin (and the Connection behind it) is never closed.
  // That is tolerable for a short-lived batch job, but confirm that
  // HBaseUtils owns the connection lifecycle before reusing this object
  // in a long-running process.
  private val hadmin: Admin = HBaseUtils.getConnection.getAdmin
//  val hadmin = HBaseUtils.getHBaseAdmin(conf,tableName)

  /**
   * Creates the HBase table `table_Name` with the given column families
   * when it does not already exist; a no-op when the table is present.
   *
   * @param table_Name    name of the HBase table to create
   * @param columnFamilys column family names to add to the table schema
   */
  def createTable(table_Name: String, columnFamilys: Array[String]): Unit = {
    // Table handle for the target table.
    val tName = TableName.valueOf(table_Name)
    // Only create the table when it does not already exist.
    if (!hadmin.tableExists(tName)) {
      // HTableDescriptor / HColumnDescriptor are deprecated since HBase 2.0;
      // consider migrating to TableDescriptorBuilder / ColumnFamilyDescriptorBuilder
      // (would require additional imports, so left as-is here).
      val descriptor = new HTableDescriptor(tName)
      // Register every requested column family on the table schema.
      for (columnFamily <- columnFamilys) {
        descriptor.addFamily(new HColumnDescriptor(columnFamily))
      }
      hadmin.createTable(descriptor)
      println("create successful!!")
    }
  }

  /**
   * Entry point: ensures the configured table exists with a single
   * column family "info1".
   */
  def main(args: Array[String]): Unit = {
    // createTable returns Unit, so its result is not bound
    // (the original bound it to an unused `val aaaa`).
    createTable(tableName, Array("info1"))
  }

}
