package com.inspur

import org.apache.spark.{SparkConf, SparkContext}

object ShopTotalOrder { // Compute the total order count per shop (saler)

  /**
   * Entry point: joins the products table with the order_goods table on
   * product id, sums the number of joined rows per saler, and writes the
   * result to HDFS as "salerId<TAB>count" text lines.
   *
   * Input layout (tab-separated, from the clear_data step):
   *   products:    col 0 = product id, col 2 = saler (shop) id
   *   order_goods: col 0 = order id,   col 1 = product id
   *
   * NOTE(review): each joined row is one order *line*; an order containing
   * several products from the same shop is counted once per line. Confirm
   * this matches the intended "total orders per shop" metric — counting
   * distinct orders would require a distinct() on (saler, order) pairs.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      //      .setMaster("spark://192.168.66.88:7077")
      .setMaster("local")
      .setAppName("ShopTotalOrder")
    val sc = new SparkContext(conf)

    try {
      // (productId, salerId)
      val products = sc.textFile("hdfs://192.168.66.88:8020/0616/clear_data/products/part-00000")
        .map(_.split("\t"))
        .map(arr => (arr(0), arr(2)))
      // (productId, orderId)
      val order_goods = sc.textFile("hdfs://192.168.66.88:8020/0616/clear_data/order_goods/part-00000")
        .map(_.split("\t"))
        .map(arr => (arr(1), arr(0)))

      // join => (productId, (salerId, orderId)); emit one count per joined row.
      // The original version inserted an extra map/reduceByKey pass keyed on the
      // whole join record before re-keying by saler; since the per-record counts
      // are summed per saler anyway, a single reduceByKey on salerId yields the
      // identical result with one less shuffle stage.
      products
        .join(order_goods)
        .map(x => (x._2._1, 1))
        .reduceByKey(_ + _)
        .map { case (saler, cnt) => saler + "\t" + cnt }
        .saveAsTextFile("hdfs://192.168.66.88:8020/0616/saler_ordercnt")
    } finally {
      sc.stop() // release the SparkContext even if the job fails
    }
  }

}

//402	5
//572	2
//504	5
//116	5
//999	2
//428	5
//531	5
//560	2
//886	4
//721	5
//619	5
//407	5
//71	3
//79	9
//309	4
//86	4
//175	2
//304	7
//914	2
//305	4
//875	2
//425	2
//641	4
//877	4
//383	6
//357	5
//496	4
//601	2
//896	4
//720	4
//982	5
//316	4
//576	4
//353	5
//298	4
//348	6
//771	2
//912	2