package spark

import scala.io.Source

/**
  * Created by zhangbn on 2018/09/10.
  */
object SparkTest {

  /**
    * Reads a local text file and prints its first 5 lines.
    *
    * The file path defaults to the original hard-coded location; an optional
    * first command-line argument overrides it (backward-compatible — running
    * with no args behaves as before, modulo the fixes below).
    *
    * Fixes over the original:
    *  - decodes the file as UTF-8 instead of the platform default charset,
    *    which garbled Chinese text (the "中文乱码" noted in the old comment);
    *  - takes the first 5 LINES via `getLines()` — the old `take(5)` on the
    *    raw `BufferedSource` took the first 5 characters;
    *  - materializes the lines to a List before traversing: the old code
    *    walked a one-shot iterator three times, so `size` printed 0 and the
    *    final loop printed nothing;
    *  - closes the source in `finally` even if reading throws.
    */
  def main(args: Array[String]): Unit = {
//    System.setProperty("hadoop.home.dir", "E:\\hadoop-common-2.7.3-bin-master")
//    System.setProperty("HADOOP_USER_NAME", "admin")

//    val sparkConf = new SparkConf()
//      .setAppName("SparkTest")
//      .setMaster("local[*]")
//    sparkConf.set("spark.sql.crossJoin.enabled", "true")
//    val spark = SparkSession
//      .builder()
//      .config(sparkConf)
//      .enableHiveSupport()
//      .getOrCreate()
    val path =
      if (args.nonEmpty) args(0)
      else "C:/Users/zhangbn/Desktop/part-m-00000"

    println("读取本地文件：")
    // Explicit UTF-8 decoding avoids mojibake on non-ASCII content.
    val localfile = Source.fromFile(path, "UTF-8")
    try {
      // toList materializes the one-shot iterator so it can be reused below.
      val top5 = localfile.getLines().take(5).toList
      top5.foreach(println(_))
      println(top5.size)
      for (line <- top5) {
        println(line)
      }
    } finally {
      localfile.close()
    }

//    spark.close()
  }

}
