package day2


import org.apache.log4j.{Level, Logger}
//import org.apache.logging.log4j.core.time.Instant
import org.apache.spark.{SparkConf, SparkContext}

import java.text.{DecimalFormat, SimpleDateFormat}
import java.util.{Calendar, Date, TimeZone}
//import scala.collection.mutable.ListBuffer
//import java.time.Instant
//import java.time.ZoneId
//import java.time.format.DateTimeFormatter

object time_consume {
  /**
   * Reads a tab-separated consumption log, groups records by name, collects
   * each name's timestamps (sorted, in epoch millis) and sums its amounts,
   * then prints one line per name.
   *
   * Expected line layout (tab-separated): field 1 = name,
   * field 2 = epoch seconds, field 3 = decimal amount.
   * NOTE(review): field 0 is ignored — confirm against the actual log format.
   */
  def main(args: Array[String]): Unit = {

    /** Formats epoch-millisecond timestamps as "yyyy-MM-dd HH:mm:ss"
      * strings in the given time zone (UTC by default).
      */
    def convertTimestamps(timestamps: List[Long], timeZoneId: String = "UTC"): List[String] = {
      val dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
      dateFormat.setTimeZone(TimeZone.getTimeZone(timeZoneId))
      timestamps.map(ts => dateFormat.format(new Date(ts)))
    }

    System.setProperty("hadoop.home.dir", "D:\\hadoop")
    // Silence Spark's internal logging BEFORE the context is created so the
    // startup noise is suppressed too (the original set it after creation).
    Logger.getLogger("org").setLevel(Level.OFF)
    val sc = new SparkContext(new SparkConf().setAppName("Basic").setMaster("local"))

    // Key by name; value = (timestamps in millis, amount).
    // Build the single-element list directly instead of appending to an
    // empty List() as the original did.
    val file = sc.textFile("D:\\data\\HCIP\\files\\time_consume.log").map { line =>
      val arr = line.split("\t")
      (arr(1), (List(arr(2).toLong * 1000L), BigDecimal(arr(3))))
    }

    // Merge per key: concatenate and sort ALL timestamps, and sum amounts.
    // Bug fix: the original wrote `x._1 ++ y._1.sorted`, which (by
    // precedence) sorts only the right-hand list and leaves the merged
    // list unsorted; the intent is a fully sorted merged list.
    val file_time = file.reduceByKey {
      (x: (List[Long], BigDecimal), y: (List[Long], BigDecimal)) =>
        ((x._1 ++ y._1).sorted, x._2 + y._2)
    }

    file_time.foreach { entry =>
      println("名字:" + entry._1 + " " + convertTimestamps(entry._2._1).mkString(",") + " " + entry._2._2)
    }

    // Release local Spark resources (the original never stopped the context).
    sc.stop()
  }
}
