import org.apache.log4j.Logger
import org.apache.spark.sql.SparkSession


/**
 * Transformer抽象基类，提供通用方法和工具
 */
/**
 * Abstract base class for transformers, providing shared utilities.
 *
 * @param spark     the active SparkSession used to run metadata queries
 * @param destTable name of the destination table; also used to scope the logger
 */
abstract class AbstractTransformer(spark: SparkSession, destTable: String) extends Transformer {

  // Logger named per destination table so log lines can be filtered by target.
  protected val logger = Logger.getLogger(s"trans_$destTable")

  /**
   * Discovers "operator" columns in the given table.
   *
   * Runs `desc <tableName>` and collects the lower-cased base names of all
   * columns whose name ends with the "_fm" suffix (the 3-character suffix
   * itself is stripped).
   *
   * @param tableName fully qualified table name to describe
   * @return list of lower-cased operator column base names
   */
  def getOperatorColumnsFromDestTable(tableName: String): List[String] = {
    val schemaRows = spark.sql(s"desc $tableName").collect()

    val operatorColumns = (for {
      row <- schemaRows
      colName = row.getAs[String]("col_name")
      if colName.endsWith("_fm")
    } yield colName.dropRight(3).toLowerCase).toList

    logger.info(s"get ${operatorColumns.size} operator columns from $tableName")
    operatorColumns
  }

  /**
   * Generic transform entry point; subclasses must provide the implementation.
   *
   * @param dt     partition date
   * @param hour   partition hour
   * @param ifTest when true, run in test mode (semantics defined by subclass)
   */
  def transform(dt: String, hour: String, ifTest: Boolean = false): Unit

}
