package cn.lecosa.spark
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.HashPartitioner


object Mac {

  /**
   * Entry point: reads raw MAC-capture text files, drops blank lines,
   * strips all double-quote characters, and prints the cleaned lines.
   *
   * Runs in local mode (`local[2]`) — no cluster required.
   */
  def main(args: Array[String]): Unit = {
    // Build the Spark configuration: app name shows up in the monitoring UI.
    val conf = new SparkConf()
      .setAppName("wow,My First Spark App!")
      .setMaster("local[2]") // local mode with 2 threads; no Spark cluster needed

    val sc = new SparkContext(conf)
    try {
      // Read every file under the input directory as UTF-8 text, 3 partitions.
      // NOTE(review): path is machine-specific — consider taking it from args.
      val lines = sc.textFile("F:/spark/workspace1/bigdata/data/mac/", 3)

      // Keep only non-blank lines.
      val nonEmpty = lines.filter(_.trim.nonEmpty)

      // Strip all double quotes. The original re-encoded each line via
      // `new String(p.getBytes, "UTF-8")`, which encodes with the platform
      // default charset and decodes as UTF-8 — a corruption bug on non-UTF-8
      // JVMs, and redundant since textFile already decodes UTF-8.
      val cleaned = nonEmpty.map(_.replaceAll("\"", ""))

      // collect() brings results to the driver so println output is visible
      // here; a bare rdd.foreach(println) prints on the executors instead.
      cleaned.collect().foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}
