package com.bj58.test

import java.text.SimpleDateFormat
import java.util.{Calendar, Date}

import org.apache.spark.ml.classification.LogisticRegression
import org.apache.spark.ml.feature._
import org.apache.spark.ml.regression.LinearRegression
import org.apache.spark.mllib.classification.{LogisticRegressionWithLBFGS, LogisticRegressionWithSGD}
import org.apache.spark.mllib.evaluation.MulticlassMetrics
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.{LabeledPoint, LinearRegressionWithSGD}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.expressions.UserDefinedFunction
import org.apache.spark.sql.functions.udf
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.functions.expr
import org.apache.spark.sql.functions.lit
import util.CalendarUtil

import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization

import scala.util.Try

/**
  * Created by 6v on 2018/12/15.
  */
object AdIncomePredict {

  // Shared parser for "yyyyMMdd" date strings.
  // NOTE(review): SimpleDateFormat is NOT thread-safe; this singleton is
  // captured by a Spark UDF below, and executors run UDFs on multiple task
  // threads — concurrent parse() calls can silently corrupt results. Confirm
  // single-threaded use, or create one instance per call/thread instead.
  val formatter = new SimpleDateFormat("yyyyMMdd")


  /**
    * Entry point: trains a linear-regression model on historical ad income and
    * predicts income for future dates.
    *
    * Pipeline: read historical (date, income) CSV and a future-dates CSV, union
    * them, derive solar/lunar calendar features via the two UDFs defined below,
    * one-hot encode the categorical features, fit a LinearRegression on rows
    * before `ZdIncomePredict.date`, score rows on/after it, and write the daily
    * and per-month aggregated predictions out as CSV.
    *
    * NOTE(review): paths and output locations are hard-coded Windows paths;
    * parameterize via `args` before running anywhere else.
    */
  def main(args: Array[String]) {

    // Input files: historical income (date, income) and the dates to predict.
    val path = "C:\\Users\\lenovo\\Desktop\\ad_income.csv";
    val path_pre = "C:\\Users\\lenovo\\Desktop\\ad_income_pre.csv";
    //    val negativeFilePath = "C:\\Users\\lenovo\\Desktop\\b.txt";

    val sparkConf = new SparkConf().setAppName("AdIncomePredict").setMaster("local[4]")
    val sc = new SparkContext(sparkConf)
    val spark = SparkSession.builder.getOrCreate()

    // Historical data: headerless CSV parsed into (date, income).
    var df = spark.read.format("com.databricks.spark.csv")
//      .option("header", "true")
      .option("inferSchema", "true")
      .load(path).toDF("date","income")

    // Future dates to score. Income is unknown, so it is filled with 0.0 to
    // make the schema match the historical frame before the union.
    var df2 = spark.read.format("com.databricks.spark.csv")
      //      .option("header", "true")
      .option("inferSchema", "true")
      .load(path_pre).toDF("date")
    df2 = df2.withColumn("income",lit(0.0))
    df = df.union(df2)

    // Derive calendar features from the yyyyMMdd date. Each UDF returns a
    // fixed-length array that is unpacked into individual columns below.
    df = df.withColumn("lunar_date",solarToLunarUDF(col("date")))
      .withColumn("solar_date",solarUDF(col("date")))
    df = df.withColumn("solar_year",expr("solar_date[0]"))
    df = df.withColumn("solar_month",expr("solar_date[1]"))
    df = df.withColumn("solar_day",expr("solar_date[2]"))
    df = df.withColumn("week",expr("solar_date[3]"))
    df = df.withColumn("is_weekend",expr("solar_date[4]"))
    df = df.withColumn("lunar_year",expr("lunar_date[0]"))
    df = df.withColumn("lunar_month",expr("lunar_date[1]"))
    df = df.withColumn("lunar_day",expr("lunar_date[2]"))
    df = df.withColumn("is_festival",expr("lunar_date[3]"))
    df = df.withColumn("is_height",expr("lunar_date[4]"))
    df = df.withColumn("is_low",expr("lunar_date[5]"))
      .withColumn("heightest",expr("lunar_date[6]"))
    // NOTE(review): this withColumn is a no-op (replaces income with itself).
    df = df.withColumn("income",expr("income"))

    df.show(1000)
    println(s"schema:"+df.schema)
   /* df = df.filter(_.getAs[Integer]("date")<20181215)
    df.show(1200)

   val data = df.rdd.map(row=>{
      val label = row.getAs[Double]("income")
      val solar_year = row.getAs[Double]("solar_year")
      val solar_month = row.getAs[Double]("solar_month")
      val solar_day = row.getAs[Double]("solar_day")
      val week = row.getAs[Double]("week")
      val lunar_year = row.getAs[Double]("lunar_year")
      val lunar_month = row.getAs[Double]("lunar_month")
      val lunar_day = row.getAs[Double]("lunar_day")
      LabeledPoint(label,Vectors.dense(Array(solar_year,solar_month,solar_day,week)))
    }).cache()
    val numExamples = data.count()
    val model  = LinearRegressionWithSGD.train(data,100,1,1.0)
    println(s"model.weights: ${model.weights} model.intercept: ${model.intercept}")


    // Evaluate the model on the training samples
    val prediction = model.predict(data.map(_.features))
    val predictionAndLabel = prediction.zip(data.map(_.label))
    val print_predict = predictionAndLabel.take(20)
    println("prediction" + "\t" + "label")
    for (i <- 0 to print_predict.length - 1) {
      println(print_predict(i)._1 + "\t" + print_predict(i)._2)
    }
    // Compute the test error (RMSE)
    val loss = predictionAndLabel.map {
      case (p, l) =>
        val err = p - l
        err * err
    }.reduce(_ + _)
    val rmse = math.sqrt(loss / numExamples)
    println(s"Test RMSE = $rmse.")*/

    // Linear regression: one-hot encode categorical calendar features and
    // assemble them into a single feature vector.
    val colArray2 = Array("solar_year_encoder", "solar_month_encoder", "solar_day_encoder","week_encoder", "is_weekend","lunar_year_encoder", "lunar_month_encoder","lunar_day_encoder","is_festival","is_height","is_low","heightest")

    df = new OneHotEncoder().setInputCol("week").setOutputCol("week_encoder").transform(df)
     df = new OneHotEncoder().setInputCol("solar_year").setOutputCol("solar_year_encoder").transform(df)
    df = new OneHotEncoder().setInputCol("solar_month").setOutputCol("solar_month_encoder").transform(df)
    df = new OneHotEncoder().setInputCol("solar_day").setOutputCol("solar_day_encoder").transform(df)
    df = new OneHotEncoder().setInputCol("lunar_year").setOutputCol("lunar_year_encoder").transform(df)
    df = new OneHotEncoder().setInputCol("lunar_month").setOutputCol("lunar_month_encoder").transform(df)
    df = new OneHotEncoder().setInputCol("lunar_day").setOutputCol("lunar_day_encoder").transform(df)
    val vecDF: DataFrame = new VectorAssembler().setInputCols(colArray2).setOutputCol("features").transform(df)
   /* // Normalize each vector to unit L1 norm
    val normalizer = new Normalizer()
      .setInputCol("features")
      .setOutputCol("normFeatures")
      .setP(1.0)
    val l1NormData = normalizer.transform(vecDF)*/
  /* val scaler = new  StandardScaler()
     .setInputCol("features")
     .setOutputCol("scaledFeatures")
     .setWithStd(true)
     .setWithMean(false)


    // Compute summary statistics by fitting the StandardScaler.
    val scalerModel = scaler.fit(vecDF)

    // Normalize each feature to have unit standard deviation.
    val scaledData = scalerModel.transform(vecDF)*/


//    l1NormData.write.format("avro").save("E:\\1")


//    val Array(trainingDF, testDF) = vecDF.randomSplit(Array(0.9, 0.1))
    /* val lrModel = new LogisticRegression().setLabelCol("income").setFeaturesCol("features").fit(trainingDF)
 //    lrModel.setElasticNetParam(1)
     // Print the logistic regression coefficients and intercept
     println(s"Coefficients: ${lrModel.coefficients} Intercept: ${lrModel.intercept}")*/
    // Build the model. Train only on rows strictly before the cutoff date
    // (ZdIncomePredict.date, defined elsewhere in the project); rows on/after
    // the cutoff are the prediction set.
   val model =new LinearRegression().setFeaturesCol("features").setLabelCol("income").setFitIntercept(true)
      .setMaxIter(500).setRegParam(0.3).setElasticNetParam(1)// build the model
    val lrModel = model.fit(vecDF.filter(_.getAs[Integer]("date")<ZdIncomePredict.date))

    // Dump all model parameters
    lrModel.extractParamMap()
    // Print the coefficients and intercept for linear regression
    println(s"Coefficients: ${lrModel.coefficients} Intercept: ${lrModel.intercept}")


    // Score the future rows and persist daily predictions as CSV.
    val predictions = lrModel.transform(vecDF.filter(_.getAs[Integer]("date")>=ZdIncomePredict.date))
//    predictions.select("date","prediction").show()
    val res = predictions.select("date","prediction").filter(_.getAs[Integer]("date")>=ZdIncomePredict.date).cache()
    res.write.mode(SaveMode.Overwrite).format("com.databricks.spark.csv").save("E:\\1")

    // Monthly totals: group by the yyyyMM prefix of the date.
    res.groupBy(expr("substr(date,1,6)")).sum("prediction").repartition(1).write.mode(SaveMode.Overwrite).format("com.databricks.spark.csv").save("E:\\2")
    //    predictions.select("date","prediction").write.mode(SaveMode.Overwrite).format("com.databricks.spark.csv").save("E:\\1")
//   val mean =predictions.rdd.map(_.getAs[Double]("income")).mean()
//    val m = predictions.rdd.map(row=>{
//      val label = row.getAs[Double]("income")
//      Math.pow(label-mean,2)
//    }).sum()

//   val p =  predictions.rdd.map( row=>{
//     val label = row.getAs[Double]("income")
//     val point = row.getAs[Double]("prediction")
//     Math.pow(label-point,2)
//   }).sum
//    println(s"m:${m}")
//    println(s"p:${p}")
//    println(1-p/m)
//    predictions.selectExpr("date","income", "round(prediction,1) as prediction").show(1000)
    // Report training-fit diagnostics.
   val trainingSummary = lrModel.summary
    println(s"numIterations: ${trainingSummary.totalIterations}")
//    println(s"objectiveHistory: ${trainingSummary.objectiveHistory.toList}")
//    trainingSummary.residuals.show()
    println(s"RMSE: ${trainingSummary.rootMeanSquaredError}")
    println(s"r2: ${trainingSummary.r2}")
//    predictions.show(2000)
//    predictions.write.save("D:\\123")
 /*   val trainingData = df.rdd.map(row=>{
      val label = row.getAs[Double]("income")
      val solar_date = row.getAs[mutable.WrappedArray[Double]]("solar_date").toArray

      val indices = solar_date.indices.toArray

      println(s"solar_date:${solar_date(0)}")
      println(s"solar_date:${solar_date(1)}")
      println(s"solar_date:${solar_date(2)}")
      val point =Vectors.sparse(solar_date.size,indices,solar_date)
      println(s"point:${point}")
     val res =  LabeledPoint(label,point)
      println(s"LabeledPoint:${res}")
      res
    })
    val splits = trainingData.randomSplit(Array(0.6, 0.4), seed = 11L)
    val train =  splits(0).cache()
    val test = splits(1)
//    val model = new LogisticRegressionWithLBFGS().setNumClasses(2).run(train).setThreshold(0.01)
val model = new LogisticRegressionWithLBFGS().
  setNumClasses(10).
  run(train)
    model.weights
    model.intercept


//    model.weights
    val predictionAndLabels = test.map { case LabeledPoint(label, features) =>
      val prediction = model.predict(features)
      (prediction, label)
    }
    val metrics = new MulticlassMetrics(predictionAndLabels)
    val precision = metrics.accuracy
    println("Precision = " + precision)
*/
//    df.show(10)


  }
  /**
    * UDF expanding a solar (Gregorian) "yyyyMMdd" date string into numeric features:
    * [year - 2015, month, day, dayOfWeek (0=Sunday..6=Saturday), isWeekend (1.0/0.0)].
    * Unparseable input yields an all-zero array instead of failing the Spark task.
    */
  val solarUDF : UserDefinedFunction= udf((date: String) => {
    val value =
      Try {
        // FIX: the object-level shared `formatter` (SimpleDateFormat) is not
        // thread-safe, and Spark runs UDFs on multiple task threads per
        // executor JVM — concurrent parse() calls can corrupt results.
        // Build a fresh instance per invocation instead.
        val fmt = new SimpleDateFormat("yyyyMMdd")
        val myDate = fmt.parse(date)
        val c: Calendar = Calendar.getInstance
        c.setTime(myDate)
        // Calendar.DAY_OF_WEEK is 1=Sunday..7=Saturday; shift to 0..6.
        val week = c.get(Calendar.DAY_OF_WEEK)-1
        val isWeekend = if(week ==6 || week==0) 1.0 else 0.0
        // Year is offset from 2015 so the one-hot encoder sees small indices.
        Array(date.substring(0,4).toDouble-2015,date.substring(4,6).toDouble,date.substring(6,8).toDouble,week.toDouble,isWeekend)
      }.getOrElse(Array(0.0,0.0,0.0,0.0,0.0))
    value
  })

  /**
    * UDF mapping a solar "yyyyMMdd" date string to lunar-calendar features:
    * [lunarYear - 2015, lunarMonth, lunarDay, isFestival, isHeight, isLow, heightest].
    * Note the inverted flag: isFestival is 0.0 ON a festival and 1.0 otherwise.
    * Any failure (bad date, CalendarUtil error) yields an all-zero array.
    */
  val solarToLunarUDF : UserDefinedFunction= udf((date: String) => {
    Try {
      val lunar = CalendarUtil.solarToLunar(date)
      // Build zero-padded "MMdd" keys for both the solar and lunar calendars.
      val solarKey = date.substring(4, 8)
      val lunarMonth = if (lunar(1) > 9) "" + lunar(1) else "0" + lunar(1)
      val lunarDom = if (lunar(2) > 9) "" + lunar(2) else "0" + lunar(2)
      val lunarKey = lunarMonth + "" + lunarDom

      // Solar holidays: New Year's Day, Labour Day, National Day golden week.
      val solarFestivals = Set(
        "0101", "0501", "1001", "1002", "1003", "1004", "1005", "1006", "1007")
      // Lunar holidays: Dragon Boat, Mid-Autumn, and the Spring Festival window
      // (end of month 12 through the first six days of month 1).
      val lunarFestivals = Set(
        "0505", "0815",
        "1223", "1224", "1225", "1226", "1227", "1228", "1229", "1230", "1231",
        "0101", "0102", "0103", "0104", "0105", "0106")
      val isFestival =
        if (solarFestivals.contains(solarKey) || lunarFestivals.contains(lunarKey)) 0.0 else 1.0

      // Low/high traffic windows around the lunar new year, keyed on MMdd as a number.
      val lunarOrdinal = lunarKey.toDouble
      val isLow = if (lunarOrdinal >= 101 && lunarOrdinal < 107) 1.0 else 0.0
      val isHeight = if (lunarOrdinal >= 108 && lunarOrdinal < 123) 1.0 else 0.0
      val heightest = 0.0

      Array(lunar(0).toDouble - 2015, lunar(1).toDouble, lunar(2).toDouble,
        isFestival, isHeight, isLow, heightest)
    }.getOrElse(Array(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0))
  })

}
