package study.wsn

import java.util.Date
import java.util.Calendar
import java.text.ParseException
import java.text.SimpleDateFormat
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext


object BuyerConsumptionRDD {

  def main(args: Array[String]): Unit = {
    statByRdd() // side-effecting method: keep the explicit parentheses
  }

  /**
   * Counts each buyer's orders per weekday using the RDD API and prints the
   * result ordered by buyer id (ascending) and, within a buyer, by order
   * count (descending).
   *
   * Input file format (one order per line, tab-separated):
   *   buyerId \t epochMillis
   */
  def statByRdd(): Unit = {
    val conf = new SparkConf().setAppName("BuyerConsumptionRDD").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      sc.textFile("file:///A:/input/orders.txt")
        .map(_.split("\t"))
        // Key each order by (buyer, weekday); count with reduceByKey, which
        // combines on the map side instead of shuffling every record the way
        // groupBy would. The old `count(_ != null)` check was dead: grouped
        // tuples can never be null.
        .map(arr => ((arr(0), getDayOfWeek(arr(1).toLong)), 1))
        .reduceByKey(_ + _)
        // Single sort on a composite key: buyer ascending, count descending.
        // Two consecutive sortBy calls give no stability guarantee for the
        // secondary order, since the second sort reshuffles everything.
        .sortBy(t => (t._1._1, -t._2), ascending = true, numPartitions = 1)
        .map(t => s"${t._1._1}\t${getDayOfWeekShow(t._1._2)}\t${t._2}")
        .foreach(println)
        //.saveAsTextFile("file:///A:/output2/buyer")  // for cluster runs, switch to an HDFS path
    } finally {
      sc.stop() // always release the SparkContext, even if the job fails
    }
  }

  /**
   * Maps an epoch-millisecond timestamp to a weekday index in the JVM's
   * default time zone: 0 = Sunday .. 6 = Saturday
   * (Calendar.DAY_OF_WEEK is 1-based, hence the `- 1`).
   */
  def getDayOfWeek(time: Long): Int = {
    val cal = Calendar.getInstance()
    cal.setTimeInMillis(time) // no need for an intermediate java.util.Date
    cal.get(Calendar.DAY_OF_WEEK) - 1
  }

  /**
   * Maps a "yyyy-MM-dd" date string to a weekday index
   * (0 = Sunday .. 6 = Saturday).
   *
   * NOTE: SimpleDateFormat is not thread-safe; a fresh instance is created
   * per call on purpose.
   *
   * @throws java.text.ParseException if `day` does not match "yyyy-MM-dd"
   */
  def getDayOfWeek(day: String): Int = {
    val cal = Calendar.getInstance()
    cal.setTime(new SimpleDateFormat("yyyy-MM-dd").parse(day))
    cal.get(Calendar.DAY_OF_WEEK) - 1
  }

  /**
   * Returns the Chinese display name for a weekday index produced by
   * getDayOfWeek (0 = Sunday .. 6 = Saturday). Any index outside 0-5
   * (including 6) falls through to Saturday, matching the original
   * if/else chain's behavior.
   */
  def getDayOfWeekShow(count: Int): String = count match {
    case 0 => "星期天"
    case 1 => "星期一"
    case 2 => "星期二"
    case 3 => "星期三"
    case 4 => "星期四"
    case 5 => "星期五"
    case _ => "星期六"
  }
}