package chapter03
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Reads an Apache access log, extracts the client IP (first whitespace-separated
 * field of each line), previews the first five IPs, and writes the full result
 * both to the local filesystem and to HDFS.
 *
 * NOTE(review): `saveAsTextFile` fails with FileAlreadyExistsException if the
 * target directory already exists — clean up "input/ip" and the HDFS path
 * between runs, or make the output path unique.
 */
object Test06_Map1 {
  def main(args: Array[String]): Unit = {
    // Run HDFS writes as "root" so the save to hdfs://... is not rejected by permissions.
    System.setProperty("HADOOP_USER_NAME", "root")
    val conf = new SparkConf().setMaster("local[*]").setAppName("map1")
    val sc = new SparkContext(conf)
    // Read the log file; each element is one raw log line.
    val logLines = sc.textFile("input/apache.log")
    // Extract the IP address: the first space-separated token of each line.
    // Cache the result: three actions follow (take + two saves), and without
    // caching Spark would re-read and re-split the file for each of them.
    val ipAddresses = logLines.map(line => line.split(" ").head).cache()
    // Preview: pull the first five IPs to the driver as a List and print them.
    println(ipAddresses.take(5).toList)
    // Persist the full result: once locally, once to HDFS.
    ipAddresses.saveAsTextFile("input/ip")
    ipAddresses.saveAsTextFile("hdfs://192.168.100.131:9000/ip")
    // Release cluster resources.
    sc.stop()
  }
}
