package study.wsn

import org.apache.spark.sql.SparkSession
import java.util.Date
import java.util.Calendar
import java.text.ParseException
import java.text.SimpleDateFormat
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/** One order record: the buyer's id and a time value (epoch millis on read; reused to hold the derived day-of-week index). */
case class Order(buyerID: String, time: Long)
object BuyerConsumptionSQL {

  /** Entry point: runs the buyer consumption-habit statistics job. */
  def main(args: Array[String]): Unit = {
    startBySql()
  }

  /**
   * Computes, via Spark SQL, how many orders each buyer placed on each
   * day of the week, then ranks the days per buyer by order count (desc).
   *
   * Input file: tab-separated lines of "buyerID&lt;TAB&gt;epochMillis",
   * read from the local path below.
   */
  def startBySql(): Unit = {
    val sess = SparkSession.builder()
      .appName("BuyerConsumptionSQL")
      .master("local")
      .getOrCreate()
    try {
      import sess.implicits._
      val ds = sess.read.textFile("file:///A:/input/orders.txt")
        .map(_.split("\t"))
        // After this map, Order.time holds the day-of-week index, not millis.
        .map(arr => Order(arr(0), getDayOfWeek(arr(1).toLong)))
      // View named "orders", not "order": ORDER is a SQL keyword and would
      // require back-quoting everywhere it appears in the query below.
      ds.createTempView("orders")

      // Per (buyer, weekday) order counts, ranked per buyer by count desc.
      // NOTE(review): the inner ORDER BY is redundant work (the window
      // function re-sorts within each partition) but is kept so the
      // displayed row order matches the original job's output.
      val sqlStr =
        """select t.bd, t.tm, t.cn,
          |       rank() over(partition by t.bd order by t.cn desc) as rk
          |from (select buyerID bd, time tm, count(1) cn
          |      from orders
          |      group by buyerID, time
          |      order by buyerID, count(1) desc) t""".stripMargin
      sess.sql(sqlStr).show(1000, false)
    } finally {
      // Always release the local Spark context, even if the job fails.
      sess.stop()
    }
  }

  /**
   * Maps an epoch-millis timestamp to a day-of-week index:
   * Sunday = 0, Monday = 1, ..., Saturday = 6 (Calendar.SUNDAY == 1).
   *
   * NOTE(review): Calendar.getInstance() interprets the instant in the JVM
   * default time zone — confirm order timestamps are meant to be local time.
   */
  def getDayOfWeek(time: Long): Int = {
    val cal = Calendar.getInstance()
    cal.setTime(new Date(time))
    cal.get(Calendar.DAY_OF_WEEK) - 1
  }
}