package demo.spark.hurypoint.parser

import org.apache.spark.sql.types.{DataType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

/**
 * Base event parser backed by a Spark DataFrame.
 * Overrides the check method so that the instances being parsed must not be null.
 */
abstract class EventParser[Res <: Any](
                                        sparkSession: SparkSession,
                                        structs: Seq[(String, DataType, Boolean)]
                                      ) extends ParseAble[DataFrame, Res] {
  // Mutable state populated lazily by prepare(); both stay null until then
  // (check() reports readiness by testing them for null).
  var structType: StructType = _
  var dealDF: DataFrame = _

  /**
   * Validates this parser's state.
   *
   * Constructor-argument preconditions (a violation throws
   * `IllegalArgumentException` via `require`): `sparkSession` must be non-null
   * and `structs` must be non-null and non-empty.
   *
   * @return true once `prepare()` has populated both `structType` and `dealDF`
   */
  override def check(): Boolean = {
    require(sparkSession != null)
    require(structs != null && structs.nonEmpty)
    structType != null && dealDF != null
  }

  /**
   * Builds the schema and an empty working DataFrame on first call; later
   * calls are no-ops once `check()` passes.
   *
   * NOTE(review): `origin` and `args` are not used in this base
   * implementation — presumably consumed by subclass overrides; confirm
   * against callers.
   *
   * @param origin the incoming DataFrame (unused here)
   * @param args   extra arguments (unused here)
   */
  override def prepare(origin: DataFrame, args: Any*): Unit = {
    if (!check()) {
      // Turn each (name, dataType, nullable) triple into a StructField.
      this.structType = StructType(structs.map { case (name, dataType, nullable) =>
        StructField(name, dataType, nullable)
      })
      // Start from an empty DataFrame that carries the schema above.
      this.dealDF = sparkSession.createDataFrame(sparkSession.sparkContext.emptyRDD[Row], this.structType)
    }
    // Fail fast if initialisation did not leave the parser in a usable state.
    require(check())
  }

  /**
   * Prints the first `lineCount` rows of the working DataFrame to stdout.
   *
   * @param lineCount number of rows to show (boxed `Integer` kept for
   *                  interface compatibility; Predef unboxes it to `Int`)
   */
  def report(lineCount: Integer): Unit = {
    require(dealDF != null)
    dealDF.show(lineCount)
  }
}