package study.wsn

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

/**
 * Batch job: reads cleaned product records (tab-separated), groups them by the
 * field in column 2, aggregates the column-1 values per key with reduceByKey,
 * writes the result as "key<TAB>value" lines, then prints the saved output.
 *
 * Local-filesystem paths are active; the HDFS equivalents are kept commented
 * out so the job can be switched back to the cluster easily.
 */
object Portrait {
  def main(args: Array[String]): Unit = {
    // Instantiate the Spark context (local mode for development).
    val conf = new SparkConf().setAppName("Portrait").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Read the cleaned input file.
//    sc.textFile("hdfs://namenode:8020/output2/products_clear/part-00000")
      sc.textFile("file:///A:/output2/products_clear/part-00000")
      .map(s => s.split("\t"))
      .map(arr => (arr(2), arr(1)))
      // NOTE(review): values are Strings here, so _+_ CONCATENATES them per key.
      // If a numeric sum was intended, map arr(1) to a numeric type first — verify
      // against the input schema.
      .reduceByKey(_ + _)
      .map(t => t._1 + "\t" + t._2)
//    .saveAsTextFile("hdfs://namenode:8020/output2/portrait_salex")
      .saveAsTextFile("file:///A:/output2/portrait_salex")

      // Read back the partition just written and print it for inspection.
//    val rdd = sc.textFile("hdfs://namenode:8020/output2/portrait_salex/part-00000")
      val rdd = sc.textFile("file:///A:/output2/portrait_salex/part-00000")
      rdd.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails — the original
      // code leaked it.
      sc.stop()
    }
  }
}