package study.wsn

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

/**
 * Builds a simple "user portrait": for each user, the de-duplicated,
 * semicolon-joined list of product names they ordered.
 *
 * Input (tab-separated, produced by an earlier cleaning step):
 *   - order_clear:    col0 = userId,    col1 = productId
 *   - products_clear: col0 = productId, col1 = productName
 * Output: one line per user, "userId\tname1;name2;..."
 */
object UserPortraitRDD {
  def main(args: Array[String]): Unit = {
    // Local master for development; switch to the cluster master when deployed.
    val conf = new SparkConf().setAppName("UserPortraitRDD").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Orders keyed by productId so they can be joined with the product table.
      // arr(1) = productId, arr(0) = userId.
      val orders = sc.textFile("file:///A:/output2/order_clear/part-00000") // switch to an HDFS path on the cluster
        .map(_.split("\t"))
        .map(arr => (arr(1), arr(0)))

      // Products keyed by productId; arr(1) = product name.
      val products = sc.textFile("file:///A:/output2/products_clear/part-00000")
        .map(_.split("\t"))
        .map(arr => (arr(0), arr(1)))

      // join yields (productId, (userId, productName)); drop the key, then
      // aggregate all product names per user into one ';'-separated string.
      val userPortrait = orders.join(products)
        .map(_._2)                      // (userId, productName)
        .reduceByKey(_ + ";" + _)
        .mapValues { joined =>
          // Remove empty entries and duplicates. `distinct` keeps first-occurrence
          // order and replaces the original O(n^2) contains-loop (which also
          // reversed the order as a side effect of list prepending).
          joined.split(";").filter(_.nonEmpty).distinct.mkString(";")
        }
        .map { case (userId, names) => userId + "\t" + names } // output format

      // Cache: the RDD is materialized twice (console dump + save); without
      // caching the whole join pipeline would be recomputed.
      userPortrait.cache()
      userPortrait.foreach(println)
      userPortrait.saveAsTextFile("file:///A:/output2/UserPortrait") // switch to an HDFS path on the cluster
    } finally {
      // Always release Spark resources, even if the job fails.
      sc.stop()
    }
  }
}