package demo.spark.hurypoint.parser

import demo.spark.hurypoint.parser.VinParser.VinReport
import org.apache.spark.sql.{DataFrame, Dataset, Encoders, Row, SparkSession}
import org.apache.spark.sql.types.DataType

/**
 * VIN-code parser: aggregates log records by how many times each VIN code
 * appears, accumulating counts across successive [[parse]] calls.
 *
 * @param sparkSession active Spark session used for execution and implicits
 * @param structs      schema fields as (name, dataType, nullable) triples,
 *                     forwarded to [[EventParser]]
 */
class VinParser(
                 sparkSession: SparkSession,
                 structs: (String,DataType,Boolean)*
               ) extends EventParser[DataFrame](sparkSession,structs) {

  /**
   * Counts occurrences per VIN in `origin` and merges the result into the
   * running accumulator `dealDF` (declared in the parent [[EventParser]]).
   *
   * @param origin batch of log rows; must contain a "vin" column
   * @return the updated accumulator of (vin, count) rows
   */
  override def parse(origin: DataFrame): Dataset[Row] = {
    val countsByVin = origin.groupBy(origin.col("vin")).count()
    dealDF = dealDF.unionByName(countsByVin)
    dealDF
  }

  /**
   * Prints the top `lineCount` VINs ordered by total occurrence count,
   * descending. Rows with a null VIN are reported under the sentinel
   * "UN_REPORTED".
   *
   * @param lineCount maximum number of rows to show
   */
  override def report(lineCount: Integer): Unit = {
    import sparkSession.implicits._
    dealDF
      .groupBy("vin").sum("count")
      .toDF("vin", "totalCount")
      // Replace invalid (null) VIN codes in-plan; avoids the costly
      // DataFrame -> RDD -> DataFrame round trip, which also defeated
      // Catalyst optimization.
      .na.fill("UN_REPORTED", Seq("vin"))
      .orderBy($"totalCount".desc)
      // Bug fix: Encoders.bean requires a zero-arg constructor plus setters,
      // which a Scala case class does not have; Encoders.product is the
      // correct encoder for case classes.
      .as[VinReport](Encoders.product[VinReport])
      .show(lineCount)
  }
}

/** Companion object holding the report row type. */
object VinParser {

  /**
   * One row of the VIN report.
   *
   * @param vin        the VIN code (or the sentinel "UN_REPORTED" when absent)
   * @param totalCount total number of log occurrences for this VIN
   */
  case class VinReport(vin: String, totalCount: Long)
}

