package active_device

import java.text.MessageFormat.format

import util.Util._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.hive._
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import  org.json4s.JsonAST._
import  org.apache.spark.sql.types._
object Week_active_device {

  /** Builds the "@#"-joined list of counter keys derived from one device record.
    *
    * Layout mirrors the drill-down hierarchy used downstream (split on "@#",
    * then counted per key):
    *   pk
    *   pk%gid
    *   pk%gid%country
    *   pk%gid%country%region
    *   pk%gid%country%region%city
    *   pk%location%country
    *   pk%location%country%region
    *   pk%location%country%region%city
    *
    * Used both for real records (with replaceNull-sanitised columns) and for
    * zero-count placeholders (every dimension = "Unknown").
    */
  private def dimensionKeys(pk: String, gid: String, country: String, region: String, city: String): String =
    Seq(
      pk,
      s"$pk%$gid",
      s"$pk%$gid%$country",
      s"$pk%$gid%$country%$region",
      s"$pk%$gid%$country%$region%$city",
      s"$pk%location%$country",
      s"$pk%location%$country%$region",
      s"$pk%location%$country%$region%$city"
    ).mkString("@#")

  def main(args: Array[String]): Unit = {
    // Validate args: a single reference date (e.g. 2015-05-25) is required.
    if (args.length == 0) {
      println("Usage: [2015-05-25|date]")
      sys.exit(1)
    }
    val dateStr = args(0)

    // Output directory for this ISO week; recreated from scratch on every run.
    val filePath = getConfig("active_device.hdfsdir") + "e_device_living_count_week/" + getWeek(dateStr)

    val sparkConf = new SparkConf().setAppName("week_active_device")
    val sc = new SparkContext(sparkConf)
    val hdfs = FileSystem.get(new Configuration())
    if (hdfs.exists(new Path(filePath))) hdfs.delete(new Path(filePath), true)

    val sqlContext = new HiveContext(sc)
    import sqlContext._

    // Weekly-active query bounded by [start-of-week, day-after-date) timestamps.
    val weekActiveSql = format(
      getConfig("active_device.week"),
      conv2ts(addDay(dateStr, 1).toString).toString,
      conv2ts(DayofWeek(dateStr).toString).toString)
    // NOTE: MessageFormat.format with no args still unescapes '' quoting in the
    // pattern, so the call is kept even though no placeholders are filled in.
    val pkSql = format(getConfig("active_device.pkmap"))

    val pk = sqlContext.sql(pkSql)
    // df columns: [ts: bigint, product_key, mac, country, region, city, gid]
    val df = sqlContext.sql(weekActiveSql)

    // Product keys with no activity this week: emit every dimension with an
    // "Unknown" placeholder and a zero count so they still appear in the output.
    val pkmap = pk.distinct.map(_.mkString("@#"))
      .subtract(df.select("product_key").distinct.map(_.mkString("@#")))
      .map(missingPk => dimensionKeys(missingPk, "Unknown", "Unknown", "Unknown", "Unknown"))
      .flatMap(_.split("@#"))
      .map(key => (key.split("%")(0), (key, 0L)))

    df.map(_.mkString("@#"))
      .map(_.split("@#"))
      // Deduplicate by (product_key, mac), keeping the record with the latest ts.
      .map(cols => (cols(1) + cols(2), cols))
      .reduceByKey((a, b) => if (a(0).toLong > b(0).toLong) a else b)
      .map(_._2)
      .map(cols => dimensionKeys(
        cols(1),
        replaceNull(cols(6)),
        replaceNull(cols(3)),
        replaceNull(cols(4)),
        replaceNull(cols(5))))
      .flatMap(_.split("@#"))
      .map((_, 1L))
      .reduceByKey(_ + _)
      // Re-key by product_key so zero-count placeholders merge with real counts.
      .map(kv => (kv._1.split("%")(0), kv))
      .++(pkmap)
      .map(t => (t._1, toJobejct(t._2, getWeek(dateStr).toString)))
      .reduceByKey((x, y) => x merge y)
      .map(kv => compact(render(kv._2)))
      .coalesce(1, shuffle = true)
      .saveAsTextFile(filePath)

    // Clean shutdown so executors are released and event logs are flushed.
    sc.stop()
  }
}
