package com.inspur

import java.util.{Calendar, Date}

import org.apache.spark.{SparkConf, SparkContext}

object ConsumptionHabits { // consumption habits: orders per (customer, day-of-week)

  /**
   * Reads tab-separated order records ("customerId\tepochMillis"), maps each
   * order to the day of week of its timestamp (remapped so 1 = Monday ..
   * 7 = Sunday), counts orders per (customerId, dayOfWeek) pair, and writes
   * the result as "customerId\tdayOfWeek\tcount" — sorted by count descending,
   * then by customer id ascending (the second sort relies on Spark's stable
   * within-partition sort to keep the count ordering as a secondary key).
   *
   * @param args optional overrides: args(0) = input path, args(1) = output path;
   *             the original hard-coded HDFS paths remain the defaults.
   */
  def main(args: Array[String]): Unit = {
    val inputPath  = if (args.length > 0) args(0) else "hdfs://192.168.66.88:8020/0616/data/orders"
    val outputPath = if (args.length > 1) args(1) else "hdfs://192.168.66.88:8020/0616/ConsumptionHabits"

    val conf = new SparkConf()
      //      .setMaster("spark://192.168.66.88:7077")
      .setMaster("local")
      .setAppName("ConsumptionHabits")
    val sc = new SparkContext(conf)

    try {
      val orders = sc.textFile(inputPath)
        .map(_.split("\t"))
        .map(arr => (arr(0), arr(1)))

      orders
        .map { case (customerId, tsMillis) =>
          val cal = Calendar.getInstance()
          cal.setTime(new Date(tsMillis.toLong))
          // Calendar.DAY_OF_WEEK is 1 = Sunday .. 7 = Saturday; subtracting 1
          // and folding 0 to 7 yields ISO numbering (1 = Monday .. 7 = Sunday).
          val raw = cal.get(Calendar.DAY_OF_WEEK) - 1
          val dayOfWeek = if (raw == 0) 7 else raw
          (customerId, dayOfWeek.toString)
        }
        // reduceByKey aggregates map-side before the shuffle; the previous
        // groupBy(identity) + count(_ != null) moved every record across the
        // wire only to take the group size (tuples are never null).
        .map(pair => (pair, 1))
        .reduceByKey(_ + _)
        .sortBy(x => x._2, ascending = false)
        .sortBy(x => x._1._1, numPartitions = 1)
        .map { case ((customerId, day), count) => customerId + "\t" + day + "\t" + count }
        .saveAsTextFile(outputPath)
    } finally {
      sc.stop() // release cluster resources even if the job fails
    }
  }

}
//((11348,7),1)
//((11348,6),1)
//((11496,6),5)
//((11723,4),1)
//((11962,6),5)
//((11962,5),1)
//((12260,6),3)
//((12260,5),2)
//((12829,6),3)
//((12829,5),1)
//((13467,6),3)
//((13650,6),2)
//((14012,6),5)
//((14185,6),2)
//((14185,5),2)
//((15080,6),2)
//((15275,7),1)
//((15275,5),1)
//((15275,6),1)
//((15455,5),3)
//((15455,6),1)
//((15674,6),1)
//((15699,6),4)
//((15699,7),1)
//((15948,5),2)
//((15948,6),2)
//((16041,6),1)
//((16041,5),1)
//((16864,6),3)
//((16864,5),2)
//((17955,4),1)
//((17990,6),2)
//((18211,6),2)
//((18211,5),1)
//((18281,6),1)
//((18915,4),1)
//((18972,6),4)
//((19776,6),2)
//((19911,6),4)
//((20093,6),4)
//((20093,5),2)
//((20379,6),5)
//((20464,6),2)
//((20732,6),4)
//((20732,5),3)