import org.apache.spark.sql.SparkSession

/** Transformer that loads hourly account-time-feature parquet files from OSS,
  * converts each row into an account record, and appends the result into
  * the destination Hive/warehouse table.
  */
class AccountTransformer(spark: SparkSession, destTable: String) extends AbstractTransformer(spark, destTable) {

  /** Reads one hour partition of account-time-feature data and appends it to `destTable`.
    *
    * @param dt     partition date (e.g. "2024-01-01")
    * @param hour   partition hour
    * @param ifTest when true, read from the `_test` source directory instead of production
    */
  def transform(dt: String, hour: String, ifTest: Boolean = false): Unit = {
    import spark.implicits._

    // Test runs read from a separate OSS prefix so production data stays untouched.
    val pathIn =
      if (ifTest) s"oss://risk-ml-featurestore/account_time_feature_test/dt=$dt/hour=$hour"
      else s"oss://risk-ml-featurestore/account_time_feature/dt=$dt/hour=$hour"

    logger.info(s"start transform $dt/$hour to $destTable, src: $pathIn")

    // Row -> account record(s); flatMap lets the converter drop unparseable rows.
    val records = spark.read.parquet(pathIn).rdd.flatMap(TypeConverters.rowToAccountRecord)

    // NOTE(review): insertInto resolves columns by POSITION, not by name — the
    // record field order must match the destination table schema; verify if the
    // table definition changes.
    records.toDF().write.mode("append").insertInto(destTable)
  }
}
