package com.dtkavin.spark.demo

import java.text.SimpleDateFormat

import org.apache.spark.{Logging, SparkConf, SparkContext}

/**
  * Created by IntelliJ IDEA.
  * Programmer : John Zn
  * Date : 2016/4/13 0013
  * Time : 00:02
  * Description :
  */
class OrderDemo {
  // Intentionally empty: all logic lives in the companion object's main method.
}

object OrderDemo extends Logging {

  /**
    * Computes the total time each phone spent at each base station and prints
    * the per-phone results ordered by the phone's longest stay.
    *
    * Expects usr-info.txt lines as CSV: phone,timestamp,baseStationId,flag
    * where the timestamp is yyyyMMddHHmmss and flag > 0 marks a connect event
    * (its time is negated so that disconnect + connect sums to the stay
    * duration). NOTE(review): inferred from the parsing code below — confirm
    * against the actual input file.
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("UsrPassBy").setMaster("local[3]")
    val sc = new SparkContext(conf)

    // Captured by the map closure; each Spark task deserializes its own copy,
    // so the non-thread-safe SimpleDateFormat is not shared across threads.
    val dateFormat = new SimpleDateFormat("yyyyMMddHHmmss")

    logInfo("SparkContext created...")

    // ((phone, base), signedMillis) per record; connect events carry a
    // negative timestamp so reduceByKey(_ + _) yields the stay duration.
    val allUsrInfo = sc.textFile("D:\\data\\input-position\\usr-info.txt")
      .filter(!_.isEmpty)
      .map { line =>
        val arr = line.trim.split(",")
        val millis = dateFormat.parse(arr(1)).getTime
        val signed = if (arr(3).toInt > 0) -millis else millis
        ((arr(0), arr(2)), signed)
      }

    val res = allUsrInfo
      .reduceByKey(_ + _)                                           // total stay per (phone, base)
      .map { case ((phone, base), time) => (phone, (base, time)) }
      .groupBy(_._1)
      // BUG FIX: the original sorted by `_._2.toList(1)._2._2`, i.e. the
      // group's SECOND element, which throws IndexOutOfBoundsException for
      // any phone seen at only one base station. Sort by the phone's longest
      // stay instead (groups from groupBy are never empty, so .max is safe).
      .sortBy(_._2.map(_._2._2).max, ascending = false)

    println(res.collect().toBuffer)

    sc.stop()
    logInfo("done")
  }
}
