package chapter03

import org.apache.spark.{SparkConf, SparkContext}

import java.time.LocalDate
import java.time.format.DateTimeFormatter
import java.util.Date

object Test13_Filter {
  /**
   * Demonstrates `RDD.filter`:
   *  1. keep only the even numbers of a small RDD;
   *  2. extract the request paths of 17/05/2015 from the server log `input/apache.log`;
   *  3. parse that date with `java.time` (replaces the deprecated, broken
   *     `Date.parse("17/05/2015")`, which reads the string as month 17 and throws,
   *     and `Date.getYear`, which returns year - 1900).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("Filter")
    val sc = new SparkContext(conf)
    try {
      // Basic filter: elements satisfying the predicate are kept.
      val numbers = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8, 9))
      println(numbers.filter(_ % 2 == 0).collect().mkString("Array(", ", ", ")"))

      // Log fields are space-separated: index 3 holds the timestamp, index 6 the path.
      // Guard on length so a short or blank log line cannot crash the job with
      // ArrayIndexOutOfBoundsException.
      val logLines = sc.textFile("input/apache.log")
      val paths = logLines
        .map(_.split(" "))
        .filter(fields => fields.length > 6 && fields(3).contains("17/05/2015"))
        .map(fields => fields(6))
      println(paths.collect().mkString("Array(", ", ", ")"))

      // Modern, non-deprecated date parsing; getYear returns the real year (2015).
      val date = LocalDate.parse("17/05/2015", DateTimeFormatter.ofPattern("dd/MM/yyyy"))
      println(date.getYear)
    } finally {
      sc.stop() // release the SparkContext even if the job fails
    }
  }
}
