package com.itzmn.tamll.report

import org.apache.spark.sql.{SaveMode, SparkSession}

/*
 * @Author: 张梦楠
 * @Date: 2019/9/26 11:38
 * 简书：https://www.jianshu.com/u/d611be10d1a6
 * 码云：https://gitee.com/zhangqiye
 * @Description: Report statistics (T+1 batch)
 *  Computes previous day's daily active users per province, plus the overall total
 *  Computes previous day's sales count per province, plus the overall total
 */
object ReportStatistics {

  /**
   * T+1 reporting job. For the date partition given in args(0), computes:
   *   - daily active users (distinct userid) per province, plus an overall total
   *     labelled "all"
   *   - sales count (action = 0) per province, plus an overall total labelled "all"
   * Each result is coalesced to a single CSV file under /user/data/reporting/tmall.
   *
   * Usage: ReportStatistics &lt;date&gt;  — selects the input partition dt=&lt;date&gt;.
   */
  def main(args: Array[String]): Unit = {

    require(args.nonEmpty, "usage: ReportStatistics <date>")
    val date = args(0)

    val spark = SparkSession.builder()
      .appName("report_statistics")
      .enableHiveSupport()
      .getOrCreate()

    import spark.implicits._

    // 1. Load the previous day's raw events; keep only well-formed 7-field CSV rows.
    //    Split once per line (the original split twice: once in filter, once in map).
    //    NOTE: "provice" is a typo for "province", kept for continuity with existing
    //    downstream consumers of this job's output.
    val df = spark.sparkContext.textFile(s"/user/custom/tmall/dt=$date")
      .map(_.split(","))
      .filter(_.length == 7)
      .map(f => (f(0), f(1), f(2), f(3), f(6)))
      .toDF("requestid", "ts", "userid", "provice", "action")

    // Cached because both aggregations below scan the same dataset.
    df.cache().createOrReplaceTempView("t1")

    // Daily active users per province, plus an overall total labelled "all".
    // BUG FIX: the "all" branch was missing its FROM clause, making `userid`
    // unresolvable — the query failed at analysis time.
    spark.sql(
      s"""
         |select count(distinct(userid)) as users, provice from t1 group by provice
         |union all
         |select count(distinct(userid)) as users, "all" as provice from t1
       """.stripMargin).repartition(1).write
      .mode(SaveMode.Overwrite).csv(s"/user/data/reporting/tmall/dt=$date/users")

    // Sales count per province, plus an overall total labelled "all".
    // BUG FIX: the "all" branch selected one column while the first branch selected
    // two — UNION ALL requires matching column counts, so the query failed.
    // NOTE(review): action=0 relies on Spark casting the string column to int;
    // presumably 0 marks a purchase event — confirm against the upstream producer.
    spark.sql(
      s"""
         |select count(requestid) as cnt, provice from t1 where action=0 group by provice
         |union all
         |select count(requestid) as cnt, "all" as provice from t1 where action=0
       """.stripMargin).repartition(1).write
      .mode(SaveMode.Overwrite).csv(s"/user/data/reporting/tmall/dt=$date/sales")

    // Release cluster resources now that both outputs are written.
    spark.stop()
  }

}
