package cn.itcast.czxy.BD18.ml

import cn.itcast.czxy.BD18.bean.BaseMode
import org.apache.spark.ml.clustering.KMeans
import org.apache.spark.ml.feature.VectorAssembler
import org.apache.spark.sql.{DataFrame, functions}

import scala.collection.immutable

/**
 * RFM (Recency / Frequency / Monetary) customer-value tag model.
 *
 * Pipeline:
 *   1. Aggregate the HBase order rows per member into R/F/M metrics.
 *   2. Score each metric on a 1..5 scale.
 *   3. Assemble the scores into a feature vector and cluster with KMeans (k = 7).
 *   4. Rank the cluster centers by their summed score (most valuable first) and
 *      join the rank against the level-5 tag rules, yielding (userId, tagsId).
 */
object Rfmmode1 extends BaseMode {
  override def setAppName: String = "Rfmmode1"

  // Level-4 tag id this model's level-5 tags hang under.
  override def setLeven4Id: Int = 131

  override def getNewTag(leve5: DataFrame, hbaseDF: DataFrame): DataFrame = {
    import spark.implicits._
    import org.apache.spark.sql.functions._

    // R: days since the member's most recent finished order.
    // NOTE(review): the hard-coded "- 300" offset looks like a dev-time shift so
    // historical test data lands inside the score ranges below — confirm before
    // relying on this in production.
    val getR = (datediff(current_timestamp(), from_unixtime(max("finishTime"))) - 300).as("recency")
    // F: number of orders the member has placed.
    val getF = count("orderSn").as("frequency")
    // M: total order amount (alias typo "monetsmy" fixed to "monetary"; the
    // alias is only referenced inside this method, so the rename is safe).
    val getM = sum("orderAmount").as("monetary")

    val getRFM: DataFrame = hbaseDF.groupBy("memberId").agg(getR, getF, getM)

    // Score each metric on a 1..5 scale. Smaller recency (more recent) scores
    // higher; larger frequency/monetary score higher. Values outside every
    // range (e.g. recency <= 0) fall through to null, which VectorAssembler
    // below cannot handle — a known edge to watch.
    val getRScore = when(col("recency").between(1, 3), 5)
      .when(col("recency").between(4, 6), 4)
      .when(col("recency").between(7, 9), 3)
      .when(col("recency").between(10, 15), 2)
      .when(col("recency") >= 16, 1)
      .as("recency")

    val getFScore = when(col("frequency") >= 200, 5)
      .when(col("frequency").between(150, 199), 4)
      .when(col("frequency").between(100, 149), 3)
      .when(col("frequency").between(50, 99), 2)
      .when(col("frequency").between(1, 49), 1)
      .as("frequency")

    val getMScore = when(col("monetary") >= 200000, 5)
      .when(col("monetary").between(100000, 199999), 4)
      .when(col("monetary").between(50000, 99999), 3)
      .when(col("monetary").between(10000, 49999), 2)
      .when(col("monetary") <= 9999, 1)
      .as("monetary")

    val getRFMScoreDF = getRFM.select('memberId, getRScore, getFScore, getMScore)

    // Assemble the three scores into one feature vector column for KMeans.
    val RFMFeature = new VectorAssembler()
      .setInputCols(Array("recency", "frequency", "monetary"))
      .setOutputCol("feature")
      .transform(getRFMScoreDF)

    // Cluster members into 7 groups on their RFM score vector.
    val km = new KMeans()
      .setK(7)
      .setMaxIter(14)
      .setFeaturesCol("feature")
      .setPredictionCol("featureOut")
      .fit(RFMFeature)
    val kmDF = km.transform(RFMFeature)

    // Rank the cluster centers by the sum of their coordinates (total RFM
    // score), descending: rank 0 is the most valuable cluster. The rank is
    // matched against the level-5 tag "rule" column.
    val clusterCentersSum =
      for (i <- km.clusterCenters.indices) yield (i, km.clusterCenters(i).toArray.sum)
    val clusterCentersSumSort = clusterCentersSum.sortBy(-_._2)

    // (cluster id, rank) pairs.
    val clusterCentersIndices =
      for (elem <- clusterCentersSumSort.indices) yield (clusterCentersSumSort(elem)._1, elem)

    val KMIndicesDF: DataFrame = clusterCentersIndices.toDF("featureOut", "rule")
    // Map each cluster id to the level-5 tag id whose rule equals its rank.
    val leve5KMIndicesDF: DataFrame = KMIndicesDF
      .join(leve5, KMIndicesDF("rule") === leve5("rule"))
      .select("featureOut", "id")

    // Attach the tag id to every member via the member's predicted cluster.
    val frame: DataFrame = kmDF
      .join(leve5KMIndicesDF, kmDF("featureOut") === leve5KMIndicesDF("featureOut"))
      .select('memberId.as("userId"), 'id.as("tagsId"))

    frame.repartition(4)
  }

  def main(args: Array[String]): Unit = {
    exec()
  }
}
