package com.inspur

import org.apache.spark.sql.SparkSession

/** One order-goods row: which commodity was ordered and by which user. Kept `final` per case-class convention. */
final case class Order(commodityId: String, userId: String)
/** One product row: a commodity and the shop that sells it. Kept `final` per case-class convention. */
final case class Product02(commodityId: String, shopId: String)

/**
 * Computes the total number of orders per shop using Spark SQL.
 *
 * Reads pre-cleaned, tab-separated product and order-goods files from HDFS,
 * joins them on commodityId, groups by shopId, and writes the per-shop order
 * counts as a single CSV file back to HDFS.
 */
object ShopTotalOrder2 {

  def main(args: Array[String]): Unit = {
    val sess = SparkSession.builder()
      .appName("ShopTotalOrder2")
      .master("local") // NOTE(review): hard-coded local master; override via spark-submit in production
      .getOrCreate()

    // Ensure the session is always stopped, even if the job fails mid-way
    // (the original leaked the SparkSession on any exception).
    try {
      import sess.implicits._

      // products file layout (tab-separated): column 0 = commodityId, column 2 = shopId
      val products = sess.read.textFile("hdfs://192.168.66.88:8020/0616/clear_data/products/part-00000")
        .map(_.split("\t"))
        .map(arr => Product02(arr(0), arr(2)))

      // order_goods file layout (tab-separated): column 0 = userId, column 1 = commodityId
      val order_goods = sess.read.textFile("hdfs://192.168.66.88:8020/0616/clear_data/order_goods/part-00000")
        .map(_.split("\t"))
        .map(arr => Order(arr(1), arr(0)))

      products.createOrReplaceTempView("products")
      order_goods.createOrReplaceTempView("order_goods")

      // Count orders per shop. The aggregate is aliased so the output column
      // has a stable name instead of an auto-generated one like `count(commodityId)`.
      sess.sql(
        "select shopId, count(t1.commodityId) as order_cnt from products t1 join order_goods t2 " +
          "on (t1.commodityId = t2.commodityId) group by shopId")
        // coalesce(1) merges partitions without a full shuffle; the original
        // repartition(1) shuffled every row just to get one output file.
        .coalesce(1)
        .write.csv("hdfs://192.168.66.88:8020/0616/saler_ordercnt_sparkSQL")
    } finally {
      sess.stop() // release Spark resources
    }
  }

}
