package study.wsn

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._

object OrderNumRDD {
  /** Counts orders per product.
    *
    * Reads two tab-separated inputs — cleaned orders and cleaned products —
    * joins them on product id, counts how many orders reference each product
    * name, and writes "name\tcount" lines to the output directory.
    */
  def main(args: Array[String]): Unit = {
    // Set up a local Spark context (change master and paths for cluster runs).
    val conf = new SparkConf().setAppName("OrderNumRDD").setMaster("local")
    val sc = new SparkContext(conf)

    try {
      // Orders: field 1 is the product id, field 0 the order id -> (productId, orderId).
      // NOTE(review): field layout inferred from the original indices — confirm against the cleaned data.
      val orderPairs = sc.textFile("file:///A:/output2/order_clear/part-00000") // use an HDFS path when running on a cluster
        .map(_.split("\t"))
        .map(arr => (arr(1), arr(0)))

      // Products: field 0 is the product id, field 2 the product name -> (productId, name).
      val productPairs = sc.textFile("file:///A:/output2/products_clear/part-00000")
        .map(_.split("\t"))
        .map(arr => (arr(0), arr(2)))

      // Join on product id, then count orders per product name.
      val orderCounts = orderPairs.join(productPairs)
        .map { case (_, (_, productName)) => (productName, 1) } // keep only the product name
        .reduceByKey(_ + _)
        .map { case (name, count) => s"$name\t$count" } // output format: name<TAB>count
        .cache() // consumed by two actions below; without this the join is recomputed twice

      // Output: print for inspection, then persist.
      orderCounts.foreach(println)
      orderCounts.saveAsTextFile("file:///A:/output2/OrderProductJoin") // use an HDFS path when running on a cluster
    } finally {
      sc.stop() // release Spark resources even if the job fails
    }
  }
}