import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}

object Pretreatment {

  /** Pre-processes the raw user CSV: drops the header row, keeps columns
    * 0, 1, 2, 4 and 5, and normalizes the timestamp's space/hyphen
    * separators to commas, writing the result as a single text file.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    // Silence Spark's verbose INFO logging so only warnings show.
    Logger.getLogger("org.apache.spark").setLevel(Level.WARN)
    Logger.getLogger("org.eclipse.jetty.server").setLevel(Level.OFF)

    val conf = new SparkConf().setAppName("ptmt").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val csvRdd = sc.textFile("/Users/liyuanqing/scala_code/data/raw_user.csv")

      // Drop the header line (index 0) without collecting data to the driver.
      val dataRdd = csvRdd.zipWithIndex().filter(_._2 >= 1).keys

      // Split each line ONCE (the original called split(",") six times per
      // record). The -1 limit preserves trailing empty fields, so a row whose
      // last column is empty no longer throws ArrayIndexOutOfBoundsException.
      // Column 3 is deliberately skipped; the trailing replaces turn the
      // "yyyy-MM-dd HH" timestamp separators into commas — presumably so every
      // value becomes its own CSV field (TODO confirm against downstream use).
      val data2Rdd = dataRdd.map { line =>
        val fields = line.split(",", -1)
        Seq(fields(0), fields(1), fields(2), fields(4), fields(5))
          .mkString(",")
          .replace(" ", ",")
          .replace("-", ",")
      }

      println(data2Rdd.count())
      data2Rdd.take(20).foreach(println)

      // coalesce(1) narrows to one partition without the full shuffle that
      // repartition(1) would trigger.
      data2Rdd.coalesce(1).saveAsTextFile("/Users/liyuanqing/scala_code/data/raw2")
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
