package com.sugon.kerberos

import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.security.UserGroupInformation
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.functions.lit


object KerBerosCS {

  /**
    * Demo: read a text file from a Kerberos-secured HDFS cluster and write it
    * back to another HDFS path, authenticating from a keytab.
    *
    * Example submission:
    * {{{
    * spark-submit --master yarn --num-executors 3 \
    *   SparkDemo-jar-with-dependencies.jar /admin/12/ysk/douhj0827/20* /cs 20200109023 4
    * }}}
    */
  def main(args: Array[String]): Unit = {
    // Point the JVM at the Kerberos client configuration BEFORE the
    // SparkSession (and thus Hadoop's security machinery) is initialised.
    // NOTE: "java.security.krb5.conf" is a JVM system property; putting it
    // into the Hadoop Configuration (as the original code also did) has no
    // effect and was removed.
    System.setProperty("java.security.krb5.conf", "/etc/krb5.conf")

    val spark = SparkSession.builder()
      .appName("kerberos cs ")
      .master("local[*]")
      // In cluster mode, keytab/principal can instead be supplied via
      // spark.yarn.keytab / spark.yarn.principal (or --keytab / --principal).
      //      .config("spark.yarn.keytab", "/home/jerry/keytab/dp-admin.keytab")
      //      .config("spark.yarn.principal", "dp/admin@GAI.COM")
      .getOrCreate()

    val hadoopConf = spark.sparkContext.hadoopConfiguration
    // Enable Kerberos authentication; mirrors core-site.xml / hdfs-site.xml.
    hadoopConf.set("hadoop.security.authentication", "kerberos")
    // Accept any NameNode principal — convenient for a demo, tighten in production.
    hadoopConf.set("dfs.namenode.kerberos.principal.pattern", "*")
    // Alternatively load the cluster's own config files:
    //    hadoopConf.addResource("modules/LogProcess/src/data/core-site.xml")
    //    hadoopConf.addResource("modules/LogProcess/src/data/hdfs-site.xml")

    // Log in from the keytab; must happen before any HDFS access.
    UserGroupInformation.setConfiguration(hadoopConf)
    UserGroupInformation.loginUserFromKeytab("dp/admin", "/home/jerry/keytab/dp-admin.keytab")

    val rdd = spark.sparkContext.textFile(
      "hdfs://10.111.32.184:8020/user/dp/file/data/liyiwen/17/97/6ad813e9-415d-4bdb-a4e4-6a84196c36f9/add_feature_column")
    // NOTE(review): in local[*] mode this prints on the driver, but on a real
    // cluster foreach(println) runs on the executors; use rdd.take(n) to
    // inspect data from the driver.
    rdd.foreach(x => println(x))

    rdd.saveAsTextFile("hdfs://10.111.32.184:8020/user/dp/file/data/jerry2")

    spark.stop()
  }

}
