package tdhclient

import java.io.File

import com.sys.tdhclient.client.InitClient
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}

object SparkRreadHdfs {
  /**
   * Entry point: authenticates against the Kerberos-secured cluster,
   * reads a text file from HDFS (path taken from the client properties
   * under key "hdfs_input_data_url") and prints every line.
   */
  def main(args: Array[String]): Unit = {
    // Load client configuration and perform cluster Kerberos authentication.
    val properties = new InitClient().init()

    // Build the SparkSession; local[*] master for standalone/dev runs.
    val conf: SparkConf = new SparkConf().setAppName("SparkRreadHdfs").setMaster("local[*]")
    val sparkSession: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    val sc: SparkContext = sparkSession.sparkContext

    // Point the Hadoop configuration at the secured (Kerberos) cluster.
    sc.hadoopConfiguration.set("hadoop.security.authentication", "kerberos")
    sc.hadoopConfiguration.addResource("core-site.xml")
    sc.hadoopConfiguration.addResource("hdfs-site.xml")
    sc.hadoopConfiguration.addResource("yarn-site.xml")

    try {
      // Read the input file from HDFS.
      val dataPath = properties.getProperty("hdfs_input_data_url")
      val lines = sc.textFile(dataPath)
      // NOTE(review): with local[*] this prints to the driver console; on a
      // real cluster foreach(println) runs on executors — prefer
      // lines.take(n).foreach(println) there.
      lines.foreach(println)
    } finally {
      // Always release Spark resources, even if the read fails.
      sparkSession.stop()
    }
  }
}
