package com.atguigu.member.controller

import com.atguigu.util.HiveUtil
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

import scala.io.Source

object tt1 {

  /** Debug driver: reads an ODS log file from HDFS and prints every line
    * to the driver's stdout.
    *
    * @param args optional — args(0) overrides the input path; defaults to
    *             the baseadlog.log sample on hadoop112.
    */
  def main(args: Array[String]): Unit = {
    // Impersonate "root" so HDFS permission checks pass on the dev cluster.
    System.setProperty("HADOOP_USER_NAME", "root")

    val sparkConf = new SparkConf()
      .setAppName("dwd_member_import")
      .setMaster("local[1]")
    val sparkSession = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()
    val sc = sparkSession.sparkContext

    // Input path is parameterized; the hard-coded default keeps the old
    // zero-argument invocation working unchanged.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "hdfs://hadoop112:8020/user/atguigu/ods/baseadlog.log"

    try {
      // collect() first: rdd.foreach(print) runs on the executors, so in
      // cluster mode nothing would appear on the driver's console. Safe
      // here only because this is a small debug/sample file — do not use
      // this pattern on large datasets.
      sc.textFile(inputPath).collect().foreach(println)
    } finally {
      // Always release the SparkContext / Hive resources, even on failure.
      sparkSession.stop()
    }
  }
}
