package com.lvmama.monkey.buy

import com.lvmama.monkey.common.logging.LazyLogging
import com.lvmama.monkey.common.utils.JDBCUtils.JDBCTemplate
import com.lvmama.monkey.common.utils.spark.SparkApplication
import com.lvmama.monkey.config.JobConfig
import com.lvmama.monkey.common.utils.Conversion._
import org.apache.spark.sql.functions._
import org.apache.spark.sql._
import org.apache.spark.storage.StorageLevel
import com.lvmama.monkey.common.utils.DateUtils._
import com.lvmama.monkey.common.utils.MathUtils._
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.types.DoubleType

/**
  * Spark batch job that builds the purchasing-model training set from the
  * `monkey.comment_detail` MySQL table and writes it to HDFS as plain text.
  *
  * Created by hejing on 2017/10/31.
  */
class ChoosePurchasingTrainData(config: JobConfig) extends SparkApplication with LazyLogging{
  override var appName: String = "ChoosePurchasingTrainData"
  override var sparkConfig: Map[String, String] = config.spark
  val JDBCDefault = JDBCTemplate.JDBCDefaultSet
  val connP = JDBCTemplate.getConProperties

  /**
    * Runs the full pipeline: load comments, aggregate per product/day,
    * join the aggregates, and write the result to HDFS.
    */
  def execute(): Unit = {
    sparkConfig += ("spark.app.name" -> appName)
    // NOTE(review): master is hard-coded to local[*]; confirm this is intended
    // for non-development runs before deploying to a cluster.
    sparkConfig += ("spark.master" -> "local[*]")
    //    sparkConfig+=("spark.default.parallelism" -> "40")
    //    sparkConfig +=("spark.sql.shuffle.partitions" -> "300" )
    withSparkContext {
      sc =>
        val sqlContext = new SQLContext(sc)
        import sqlContext.implicits._

        // Part 1 — select the training set: raw comment rows keyed by
        // product and day (create_date formatted as yyyyMMdd).
        // senseTypeA duplicates senseType so one copy can be counted while
        // the other is averaged in the later aggregation.
        val mysqlDF: DataFrame = sqlContext.LoadFromMysql("comment_detail", "monkey")
          .select(
            col("product_id").as("productId"),
            date_format(col("create_date"), "yyyyMMdd").as("date"),
            col("sense_type").as("senseType"))
          .withColumn("senseTypeA", col("senseType"))
          .coalesce(12).na.drop()
          .orderBy(col("productId").desc, col("date").desc)
        mysqlDF.persist(StorageLevel.MEMORY_AND_DISK_SER)

        // Good-comment count per product/day: senseType == 2 counts as 1,
        // senseType 0 or 1 counts as 0.
        val senseType: DataFrame = mysqlDF
          .withColumn("a0", when(col("senseType") === 2, 1).otherwise(0))
          .groupBy("productId", "date")
          .agg("a0" -> "sum")
          .withColumnRenamed("sum(a0)", "good_comments")

        // Per product/day: average senseType (rounded via decimal2),
        // day-of-week derived from the date, and total comment count
        // `amounts` (good + neutral + bad).
        // NOTE(review): `.as("String")` is a column ALIAS, not a cast —
        // presumably `.cast("string")` was intended; confirm against the
        // dayOfWeek signature in DateUtils before changing.
        val cacheTable: DataFrame = mysqlDF
          .groupBy("productId", "date")
          .agg("senseType" -> "avg", "senseTypeA" -> "count")
          .orderBy(col("productId").desc)
          .withColumn("week", dayOfWeek(col("date").as("String")))
          .withColumnRenamed("date", "dates")
          .withColumnRenamed("productId", "productIds")
          .withColumnRenamed("count(senseTypeA)", "amounts")
          .withColumn("senseType", decimal2(col("avg(senseType)").cast(DoubleType)))
          .drop("avg(senseType)")

        // Join the two per-product/day aggregates and normalize column names.
        val finallTable: DataFrame = senseType
          .join(cacheTable,
            senseType("productId") === cacheTable("productIds") &&
              senseType("date") === cacheTable("dates"), "inner")
          .withColumn("good_comment", col("good_comments"))
          .withColumn("amount", col("amounts"))
          .drop("productIds").drop("dates")
          .drop("good_comments").drop("amounts")
          .orderBy(col("productId").desc, col("date").desc)
        finallTable.persist(StorageLevel.MEMORY_AND_DISK_SER)

        // Single source of truth for the two output locations (previously the
        // same literals were repeated four times below).
        val trainDataPath = "hdfs://hadoop/user/monkey/Purchasing/PurchasingTrainData"
        val resultPath = "hdfs://hadoop/user/monkey/Purchasing/PurchasingTrainDataResult"

        // Remove any previous outputs so saveAsTextFile does not fail on
        // already-existing paths.
        val output = new Path(trainDataPath)
        val output2 = new Path(resultPath)
        val hdfs = org.apache.hadoop.fs.FileSystem.get(new java.net.URI("hdfs://hadoop"), new org.apache.hadoop.conf.Configuration())
        if (hdfs.exists(output)) hdfs.delete(output, true)
        if (hdfs.exists(output2)) hdfs.delete(output2, true)

        // Dump the joined rows, then re-read them and strip the Row "[...]"
        // brackets to produce the final plain-text training file.
        // Fix: the original bound the Unit result of saveAsTextFile to an
        // unused val misleadingly named `parquetData`; the action is now
        // invoked for its side effect only.
        finallTable.rdd.saveAsTextFile(trainDataPath)
        sc.textFile(trainDataPath + "/part-*")
          .flatMap(line => line.split("\\["))
          .flatMap(line => line.split("\\]"))
          .filter(!_.trim.equals(""))
          .saveAsTextFile(resultPath)

        mysqlDF.unpersist()
        // NOTE(review): senseType was never persisted, so this unpersist is a
        // harmless no-op — kept for symmetry, could be removed.
        senseType.unpersist()
        finallTable.unpersist()
    }
  }
}

object ChoosePurchasingTrainData {
  /** Job configuration loaded once when the companion object initializes. */
  val config: JobConfig = JobConfig()

  /** Constructs the job with the default config and runs it to completion. */
  def apply(): Unit = {
    val job = new ChoosePurchasingTrainData(config)
    job.execute()
  }
}
