package cn.itcast.tags.models.statistics

import cn.itcast.tags.models.{AbstractModel, ModelType}
import cn.itcast.tags.tools.TagTools
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.expressions.Window

class PayTypeModel extends AbstractModel("PayTypeModel", ModelType.STATISTICS) {

  /**
   * Tags each member with their most frequently used payment type.
   *
   * @param businessDF order data; assumed to contain `memberid` and `paymentcode`
   *                   columns (shape inferred from usage below — confirm with source table)
   * @param tagDF      tag rule definitions consumed by [[TagTools.ruleMatchTag]]
   * @return DataFrame produced by rule-matching each member's dominant payment code
   */
  override def doTag(businessDF: DataFrame, tagDF: DataFrame): DataFrame = {

    import businessDF.sparkSession.implicits._
    import org.apache.spark.sql.functions._

    // Debug output — consider removing or guarding behind a log level in production,
    // since each show() triggers a Spark job.
    businessDF.printSchema()
    businessDF.show(10, false)

    tagDF.printSchema()
    tagDF.show(10, false)

    // For each member, keep only the payment code they used most often.
    // The secondary sort key makes the pick deterministic when counts tie:
    // row_number() over an ambiguous ordering would otherwise return an
    // arbitrary (run-dependent) winner.
    val paymentcodeDF: DataFrame = businessDF
      .groupBy($"memberid", $"paymentcode")
      .count()
      .withColumn("rank",
        row_number().over(
          Window.partitionBy($"memberid").orderBy($"count".desc, $"paymentcode".asc)
        )
      )
      .where($"rank" === 1)
      .select(
        $"memberid".as("id"),
        $"paymentcode"
      )
    paymentcodeDF.printSchema()
    paymentcodeDF.show(10, false)
    // NOTE: removed a discarded paymentcodeDF.count() here — it triggered a full
    // Spark job whose result was thrown away.

    // Match each member's dominant payment code against the tag rules.
    val modelDF: DataFrame = TagTools.ruleMatchTag(paymentcodeDF, "paymentcode", tagDF)

    modelDF.printSchema()
    modelDF.show(100, false)
    // (removed a second discarded .count() action for the same reason)
    modelDF
  }
}

object PayTypeModel {
  /** Entry point: runs the payment-type statistics model for tag id 350. */
  def main(args: Array[String]): Unit =
    new PayTypeModel().executeModel(350L)
}