package com.inspur

import java.util.{Calendar, Date}

import org.apache.spark.sql.SparkSession

// One purchase record. NOTE: despite its name, `purchaseDate` holds the day of
// week ("1" = Monday .. "7" = Sunday) produced by dateOfWeek, not a calendar date.
// Field names are referenced by the SQL query in ConsumptionHabits2, so they must not change.
final case class Orders(userId:String,purchaseDate:String)

object ConsumptionHabits2 { // Consumption habits: Spark SQL implementation

  /**
   * Converts an epoch-millisecond timestamp to an ISO-style day-of-week string:
   * "1" = Monday .. "7" = Sunday.
   *
   * NOTE(review): uses the JVM default time zone and locale via
   * Calendar.getInstance() — confirm the input timestamps are meant to be
   * interpreted in local time.
   *
   * @param x epoch milliseconds
   * @return day of week as "1".."7"
   */
  def dateOfWeek(x:Long):String = {
    val cal = Calendar.getInstance()
    cal.setTime(new Date(x))
    // Calendar.DAY_OF_WEEK is Sunday=1 .. Saturday=7; subtracting 1 gives
    // Monday=1 .. Saturday=6 and Sunday=0, which we remap to 7.
    val shifted = cal.get(Calendar.DAY_OF_WEEK) - 1
    (if (shifted == 0) 7 else shifted).toString
  }

  /**
   * Reads tab-separated order records (userId \t epochMillis) from HDFS,
   * counts purchases per user per day of week, and writes the result as a
   * single CSV file back to HDFS.
   */
  def main(args: Array[String]): Unit = {
    val sess = SparkSession.builder()
      .appName("ConsumptionHabits2")
      .master("local")
      .getOrCreate()

    // Ensure the SparkSession is released even if the job fails.
    try {
      import sess.implicits._

      // Each input line: "<userId>\t<epochMillis>"; the timestamp is mapped
      // to a day-of-week string before registering the temp view.
      val orders = sess.read.textFile("hdfs://192.168.66.88:8020/0616/data/orders")
        .map(_.split("\t"))
        .map(fields => Orders(fields(0), dateOfWeek(fields(1).toLong)))

      orders.createOrReplaceTempView("orders")

      sess
        .sql("select userId, purchaseDate, count(1) from orders group by userId, purchaseDate order by userId, count(1)")
        .repartition(1) // collapse to a single output file
        .write.csv("hdfs://192.168.66.88:8020/0616/ConsumptionHabits_sparkSQL")
    } finally {
      sess.stop()
    }
  }

}
