import scala.util.control.NonFatal

import org.apache.spark.sql.SparkSession
import scopt.OptionParser

/**
 * 主程序入口：转换OSS数据到目标表
 * 使用Scala UDAF替代Python的flatten+collect_list操作
 */
/**
 * Entry point: transforms OSS data into the destination table.
 * Replaces the Python flatten + collect_list pipeline with a Scala UDAF.
 */
object TransformOss2Final {

  /**
   * Parsed command-line configuration.
   *
   * @param dt        processing date, formatted YYYYMMDD
   * @param hour      processing hour, formatted HH
   * @param tableType "T" for trace table, "A" for account table
   * @param destTable fully-qualified destination table name
   * @param saltNum   salting factor used to spread skewed keys (trace table only)
   * @param ifTest    when true, run in debug/test mode
   */
  case class Config(
    dt: String = "",
    hour: String = "",
    tableType: String = "T", // trace table or A, account table
    destTable: String = "",
    saltNum: Int = 1,
    ifTest: Boolean = false
  )

  def main(args: Array[String]): Unit = {

    // Parse command-line arguments.
    val parser = new OptionParser[Config]("TransformOss2Final") {
      head("转换OSS数据到目标表", "1.0.0")

      opt[String]('d', "dt")
        .required()
        .action((x, c) => c.copy(dt = x))
        .text("处理日期，格式: YYYYMMDD")

      opt[String]('h', "hour")
        .required()
        .action((x, c) => c.copy(hour = x))
        .text("处理小时，格式: HH")

      opt[String]('e', "table-type")
        .action((x, c) => c.copy(tableType = x))
        .text("表类型，T: trace table, A: account table")

      opt[String]('t', "dest-table")
        .required()
        .action((x, c) => c.copy(destTable = x))
        .text("目标表名")

      opt[Int]('s', "salt-num")
        .action((x, c) => c.copy(saltNum = x))
        .text("加盐数，默认1")

      opt[Boolean]('g', "debug")
        .action((x, c) => c.copy(ifTest = x))
        .text("是否debug模式，默认false")

      help("help").text("显示帮助信息")

      // Reject an invalid table type at parse time, before a SparkSession
      // is created, instead of failing later inside run().
      checkConfig { c =>
        if (c.tableType == "T" || c.tableType == "A") success
        else failure(s"Invalid table type: ${c.tableType}")
      }
    }

    parser.parse(args, Config()) match {
      case Some(config) =>
        run(config)
      case None =>
        System.exit(1)
    }
  }

  /**
   * Builds the SparkSession, runs the transformation selected by
   * `config.tableType`, and always stops the session before exiting.
   */
  def run(config: Config): Unit = {
    // Create the SparkSession.
    val spark = SparkSession.builder()
      .appName("transform_oss2final_scala")
      .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .config("spark.kryo.registrator", "com.emr.etl.KryoRegistrator")
      .getOrCreate()

    // Calling System.exit inside the catch block would skip the finally
    // clause and leave the SparkSession running; instead record the exit
    // code, let finally stop Spark, and exit afterwards.
    var exitCode = 0

    try {
      // Create the transformer for the requested table type.
      val transformer = config.tableType match {
        case "T" =>
          new TraceTransformer(spark, config.destTable, config.saltNum)
        case "A" =>
          new AccountTransformer(spark, config.destTable)
        case _ =>
          // Defensive: normally unreachable because checkConfig validated it.
          throw new IllegalArgumentException(s"Invalid table type: ${config.tableType}")
      }

      // Execute the transformation.
      transformer.transform(
        config.dt,
        config.hour,
        config.ifTest
      )

      println("Transform completed successfully!")

    } catch {
      // NonFatal lets truly fatal errors (OOM, InterruptedException) propagate.
      case NonFatal(e) =>
        println(s"Transform failed with error: ${e.getMessage}")
        e.printStackTrace()
        exitCode = 1
    } finally {
      spark.stop()
    }

    if (exitCode != 0) System.exit(exitCode)
  }
}