package chapter03
import org.apache.spark.{SparkConf, SparkContext}
object Test06_getIp {
  def main(args: Array[String]): Unit = {
    // Run as the root user so HDFS writes are not rejected for permissions
    System.setProperty("HADOOP_USER_NAME", "root")
    val conf = new SparkConf().setMaster("local[*]").setAppName("getIp")
    val sc = new SparkContext(conf)
    // Read the log file as an RDD of lines
    val logLines = sc.textFile("input/apache.log")
    // Print the first 5 log lines
    println(logLines.take(5).mkString("Array(", ", ", ")"))
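    // Assumed record shape (Apache access-log style, client IP as the first
    // space-separated field), e.g.:
    //   83.149.9.216 - - 17/05/2015:10:05:03 GET /presentations/kibana.png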
    // Split each line on spaces and keep the first field: the client IP
    val ips = logLines.map(line => line.split(" ").head)
    println(ips.collect().mkString("Array(", ", ", ")"))
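    // Note: collect() pulls the entire RDD back to the driver; for a large
    // log, prefer take(n) or sample() when only inspecting the result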
    // Save the extracted IPs to a local directory
    ips.saveAsTextFile("input/ip")
    // Save the extracted IPs to HDFS as well
    ips.saveAsTextFile("hdfs://192.168.100.131:9000/ip")
  }
}
