package org.example

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants, TableName}
import org.apache.hadoop.hbase.client.{ConnectionFactory, Result}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import java.util.Properties

object ThinBulkLoad2 {

  // NOTE(review): hard-coded absolute paths and plaintext credentials below are
  // developer-machine specific; externalize to configuration before deploying.
  System.setProperty("java.security.krb5.conf", "/Users/td/plant_code/write2Hbase/spark2hbase/src/main/resources/krb5.conf")

  // Spark configuration (local test run on 2 cores).
  val sparkConf = new SparkConf()
    .setAppName("mysql2hbase")
    .setMaster("local[2]")
    .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    // Register the HBase types Spark will serialize so Kryo knows about them.
    .registerKryoClasses(Array(classOf[ImmutableBytesWritable], classOf[Result]))

  val spark = SparkSession.builder.config(sparkConf).getOrCreate()


  // HBase connection info.
  val hbaseZookeeperQuorum = "hdp73,hdp74,hdp75"
  val hbaseZookeeperPort = "2181"

  // Hadoop configuration, including HDFS Kerberos authentication.
  val hadoopConf = new Configuration()
  //    hadoopConf.set("fs.defaultFS", "hdfs://10.10.13.134:9000") // HDFS address
  hadoopConf.set("hadoop.security.authentication", "kerberos") // authentication mode (Kerberos)
  UserGroupInformation.setConfiguration(hadoopConf)
  UserGroupInformation.loginUserFromKeytab("hdfs-cluster1@HDP.COM", "/Users/td/plant_code/write2Hbase/spark2hbase/src/main/resources/hdfs.headless.keytab")


  // MySQL (JDBC) connection info.
  val jdbcUrl = s"jdbc:mysql://10.57.30.217:3306/test_cdc"
  val jdbcProperties = new Properties()
  jdbcProperties.setProperty("user", "root")
  jdbcProperties.setProperty("password", "123456")
  // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class name;
  // Connector/J 8.x uses "com.mysql.cj.jdbc.Driver" — confirm which jar is on the classpath.
  Class.forName("com.mysql.jdbc.Driver")

  val mysqlDF = spark.read.jdbc(jdbcUrl, "test_a", jdbcProperties)

  // Non-key column names, sorted; presumably used as HBase column qualifiers downstream.
  val fields = mysqlDF.columns.filterNot(_ == "id").sorted


  // HBase configuration.
  val hbaseConf = HBaseConfiguration.create()
  hbaseConf.set(HConstants.ZOOKEEPER_QUORUM, hbaseZookeeperQuorum)
  hbaseConf.set(HConstants.ZOOKEEPER_CLIENT_PORT, hbaseZookeeperPort)

  // Kerberos-related settings for HBase.
  hbaseConf.set(TableOutputFormat.OUTPUT_TABLE, "person")
  hbaseConf.set("hadoop.security.authentication", "kerberos")
  UserGroupInformation.setConfiguration(hbaseConf)
  // NOTE(review): this second loginUserFromKeytab REPLACES the process-wide UGI login
  // performed for HDFS above — there is only one current login per JVM. Verify both
  // HDFS and HBase accept the hbase service principal, or use UGI proxy/doAs instead.
  UserGroupInformation.loginUserFromKeytab("hbase/td-cloudstack02@HDP.COM", "/Users/td/plant_code/write2Hbase/spark2hbase/src/main/resources/hbase.service.keytab")

  val connection = ConnectionFactory.createConnection(hbaseConf)
  val tableName = TableName.valueOf("person2")

  /**
   * Entry point. All the setup above lives in this object's initializer and
   * runs the first time the object is touched; without a `main` the object
   * did nothing when submitted as a job, and the HBase `connection` and the
   * `spark` session were never released.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    try {
      // Referencing the fields forces object initialization (Kerberos logins,
      // MySQL read, HBase connection) and gives a minimal progress signal.
      println(s"Loaded ${tableName.getNameAsString}; non-key fields: ${fields.mkString(", ")}")
    } finally {
      // Always release external resources, even if initialization work fails later.
      connection.close()
      spark.stop()
    }
  }

}
