package com.lvmama.monkey.calcula

import com.lvmama.monkey.common.logging.LazyLogging
import com.lvmama.monkey.common.utils.JDBCUtils.JDBCTemplate
import com.lvmama.monkey.common.utils.spark.SparkApplication
import com.lvmama.monkey.config.JobConfig
import com.lvmama.monkey.common.utils.Conversion._
import com.lvmama.monkey.common.utils.DateUtils.{dayOfWeek, getPastDate}
import com.lvmama.monkey.common.utils.MathUtils.decimal2
import org.apache.spark.rdd.RDD
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import com.lvmama.monkey.common.utils.JDBCUtils.ConnetionPool._


/**
  * Created by hejing on 2017/11/16.
  */
/**
  * Spark job that computes a week-over-week "purchasing power" report per product.
  *
  * Pipeline (all tables live in the `monkey` MySQL schema):
  *   1. `pro_predict`     -> max sales_num per product, ranked (`order`).
  *   2. `comment_detail`  -> last 14 days of purchase events, split into
  *      "last week" (days -1..-7) and "two weeks ago" (days -8..-14).
  *   3. Per-product/per-day counts are joined across the two weeks on the
  *      day-of-week so each day is compared with the same weekday one week earlier.
  *   4. Result is enriched with product name/category and the prediction table,
  *      a change rate and a fluctuation flag are derived, then the target table
  *      `pro_predict_calc` is truncated and re-filled.
  *
  * NOTE(review): relies on Spark 1.x semantics — `unionAll` matches columns by
  * POSITION, and `DataFrame.map` returns an RDD. Do not port blindly to Spark 2+.
  */
class PurchasingPowerCalcula(config: JobConfig) extends SparkApplication with LazyLogging{
  override var appName: String = "PurchasingPowerCalcula"
  override var sparkConfig: Map[String, String] = config.spark
  val JDBCDefault = JDBCTemplate.JDBCDefaultSet
  val connP = JDBCTemplate.getConProperties

  /** Runs the full calculation and overwrites `pro_predict_calc`. */
  def execute(): Unit = {
    sparkConfig += ("spark.app.name" -> appName)
    // NOTE(review): hard-coded local master overrides whatever config.spark
    // provides — confirm this is not meant to run on a cluster.
    sparkConfig += ("spark.master" -> "local[*]")
    //    sparkConfig+=("spark.default.parallelism" -> "40")
    //    sparkConfig +=("spark.sql.shuffle.partitions" -> "300" )
    withSparkContext {
      sc =>
        val sqlContext = new SQLContext(sc)
        import sqlContext.implicits._

        // Max predicted sales per product, ranked descending; `order` is the
        // rank (0 = largest max). zipWithIndex preserves the sort order.
        val maxDF: DataFrame = sqlContext.LoadFromMysql("pro_predict", "monkey").select(col("product_id"),col("sales_num")).groupBy("product_id")
          .agg("sales_num" -> "max").orderBy(col("max(sales_num)").desc)
          .map{x => (x.getAs[Long](0), x.getAs[Double](1))}
          .zipWithIndex()
          .map{x => (x._1._1, x._1._2, x._2)}
          .toDF("id","max","order")

        // Purchase events of the past 14 days (exclusive of today), with the
        // event date normalised to yyyyMMdd for string comparison/grouping.
        val mysqlDF: DataFrame = sqlContext.LoadFromMysql("comment_detail", "monkey").select(col("product_id"),
          date_format(col("oper_time"), "yyyyMMdd").as("date"),col("sense_type"))//.filter(col("product_id") < 60000)
          .filter(col("date") > getPastDate(-15)).filter(col("date") < getPastDate(0))
          .coalesce(12).na.drop()
        val needDF: DataFrame = mysqlDF.join(maxDF,mysqlDF("product_id")===maxDF("id"),"inner").drop("id")

        // --- Last week (days -1..-7): per-day and per-week purchase counts ---
        val countDayLast = needDF.filter(col("date") > getPastDate(-8)).groupBy("product_id","date").agg("sense_type" -> "count")
          .withColumnRenamed("count(sense_type)","countDayLast")
        val countWeekLast = needDF.filter(col("date") > getPastDate(-8)).groupBy("product_id").agg("sense_type" -> "count")
          .withColumnRenamed("product_id","id")
        // forOrder is a zero-valued column used only so later positional
        // unionAll with the synthetic date table lines up.
        val joinDay_week_last = countWeekLast.join(countDayLast,countDayLast("product_id")===countWeekLast("id"), "inner").drop("id")
          .withColumn("countWeekLast",col("count(sense_type)")).drop("count(sense_type)").withColumn("forOrder", col("countWeekLast")*0)

        // --- Two weeks ago (days -8..-14): same aggregation ---
        val countDay2Week = needDF.filter(col("date") < getPastDate(-7)).groupBy("product_id","date").agg("sense_type" -> "count")
          .withColumnRenamed("count(sense_type)","countDay2Week")
        val countWeek2Week = needDF.filter(col("date") < getPastDate(-7)).groupBy("product_id").agg("sense_type" -> "count")
          .withColumnRenamed("product_id","id")
        // NOTE(review): drop("id1") is a no-op (no such column) — the "id"
        // column is intentionally KEPT here and used by the groupBy below.
        val joinDay_week_2Week = countWeek2Week.join(countDay2Week,countDay2Week("product_id")===countWeek2Week("id"), "inner").drop("id1")
          .withColumn("countWeek2Week",col("count(sense_type)")).drop("count(sense_type)").drop("product_id").withColumn("forOrder", col("countDay2Week")*0)

        //6.对应14天的id，order — synthetic (id, order, date) rows for each of the
        // past 14 days so products with zero purchases on a day still appear.
        val uniqueID: DataFrame = maxDF.select(col("id"),col("order")).dropDuplicates(Seq("id"))
        // dateRDDs(0) == yesterday (getPastDate(-1)) ... dateRDDs(13) == 14 days ago.
        val dateRDDs: IndexedSeq[RDD[(Long, String, String)]] = (1 to 14).map { i =>
          uniqueID.rdd.map(x => (x(0).asInstanceOf[Long], x(1).toString, getPastDate(-i)))
        }
        // unionAll is positional in Spark 1.x: the column names come from the
        // first toDF, so naming every frame identically is equivalent to the
        // original date1.toDF("id","orders","dates").unionAll(date2.toDF())... chain.
        val DateTable_LastWeek = dateRDDs.take(7)
          .map(_.toDF("id", "orders", "dates"))
          .reduce(_ unionAll _)
          .withColumn("key1", col("id")*0).withColumn("key2", col("id")*0).withColumn("order" ,col("orders"))
          .drop("orders")
        val DateTable_2Week = dateRDDs.drop(7)
          .map(_.toDF("id", "orders", "dates"))
          .reduce(_ unionAll _)
          .withColumn("key1", col("id")*0).withColumn("key2", col("id")*0)
          .withColumn("order" ,col("orders")).drop("orders")

        // Merge real counts with the synthetic day rows, then aggregate so that
        // missing days resolve to the synthetic zero placeholders.
        val last = joinDay_week_last.unionAll(DateTable_LastWeek)
          .groupBy("product_id", "date").agg("countDayLast" -> "sum", "countWeekLast" -> "sum", "forOrder" -> "sum").withColumn("weekLast", dayOfWeek(col("date")))
          .withColumnRenamed("date","dateLast")
        val twoWeek = joinDay_week_2Week.unionAll(DateTable_2Week)
          .groupBy("id", "date").agg("countDay2Week" -> "sum", "countWeek2Week" -> "sum").withColumn("week", dayOfWeek(col("date")))
          .drop("forOrder").drop("sum(countDay2Week)")
        // Join last week to the week before on product + day-of-week, rank by
        // forOrder, and re-shape to the final column set.
        // NOTE(review): the getAs indices/types below depend on the exact
        // post-join column order — verify if any upstream select changes.
        val joinlast_twoWeek = last.join(twoWeek, last("product_id")===twoWeek("id") && last("weekLast")===twoWeek("week"))
          .drop("id").drop("week").drop("date")
          .withColumnRenamed("sum(countDayLast)","countDayLast")
          .withColumnRenamed("sum(countWeek2Week)","countWeek2Week")
          .withColumnRenamed("sum(countWeekLast)","countWeekLast")
          .withColumnRenamed("sum(forOrder)","forOrder")
          .orderBy(col("forOrder"))//forOrder越小表示max越大 (smaller forOrder == larger max)
          .map{x => (x.getAs[Long](0), x(1).toString,x.getAs[Long](2), x.getAs[Long](3),x.getAs[Double](4),x.getAs[Int](5),
          x.getAs[Long](6))}
          .zipWithIndex()
          .map(x => (x._1._1, x._1._2, x._1._3, x._1._4, x._1._5, x._1._6, x._1._7, x._2))
          .toDF("product_id", "dateLast", "countDayLast", "countWeekLast", "forOrder",
            "week", "count2Week", "sort_num")

        //原始的id，name，品类 — product id, name and category lookup.
        val category: DataFrame = sqlContext.LoadFromMysql("comment_detail", "monkey").select(col("product_id"),col("product_name"),col("category_id"))
          .na.drop().dropDuplicates(Seq("product_id")).coalesce(12)
        val predictDF: DataFrame = sqlContext.LoadFromMysql("pro_predict", "monkey").select(col("product_id").as("id"),col("date").as("preDATE"),
          col("week").as("preWEEK"),col("sales_num"),col("sense_type"))

        // Attach predictions per product/day-of-week for the last 7 days.
        val DateTable7 = DateTable_LastWeek.drop("key1").drop("key2").drop("order")
          .withColumn("week", dayOfWeek(col("dates"))).drop("dates").withColumnRenamed("id", "id14")
        val table = DateTable7.join(predictDF, DateTable7("id14")===predictDF("id")
          && DateTable7("week")===predictDF("preWEEK")).drop("id").drop("week")

        // Final shape: change_rate = (lastWeek - twoWeeksAgo) / twoWeeksAgo,
        // fluctuated: 1 = up, 2 = down, 3 = flat, -1 = no prior-week data.
        val table2 = table.join(category, category("product_id")===table("id14")).drop("product_id")
        val lastTable = joinlast_twoWeek.join(table2, joinlast_twoWeek("product_id")===table2("id14") &&
          joinlast_twoWeek("week")===table2("preWEEK")).drop("id14").drop("preWEEK")
          .withColumn("change_rate", decimal2((col("countWeekLast")-col("count2Week"))/col("count2Week")))
          .withColumn("fluctuated", when(col("change_rate")===0, 3).when(col("change_rate") > 0, 1).when(col("change_rate") < 0, 2).when(col("change_rate").isNull, -1))
          .withColumnRenamed("dateLast", "last_date").withColumnRenamed("countDayLast", "last_purchases").withColumnRenamed("preDATE", "date")
          .withColumnRenamed("countWeekLast", "last_amount").withColumnRenamed("count2Week", "twoweek_amount")
        // FIX: was "truncate from pro_predict_calc" — invalid SQL; MySQL syntax
        // is TRUNCATE TABLE, so the old statement failed and the Append below
        // would pile duplicate rows onto previous runs.
        val sql = "truncate table pro_predict_calc"
        delFromMysql(sql)
        lastTable.insertDF2Mysql("pro_predict_calc", "monkey", SaveMode.Append)



    }
  }
}

/**
  * Companion entry point: builds the job with the default [[JobConfig]] and
  * runs it. `PurchasingPowerCalcula()` executes the whole pipeline.
  */
object PurchasingPowerCalcula {
  val config = JobConfig()

  def apply(): Unit = {
    val job = new PurchasingPowerCalcula(config)
    job.execute()
  }
}
