package com.ecommerce.analysis

import com.ecommerce.utils.ConfigLoader
import com.ecommerce.db.MySQLHandler
import org.apache.log4j.{Logger, Level}
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions._
import java.net.InetAddress
import com.typesafe.config.Config

/**
 * Base class for Spark analysis jobs.
 *
 * Provides shared plumbing for concrete analyzers: reading the cleaned CSV
 * dataset from HDFS, persisting results to both CSV and MySQL, timestamp
 * conversion, and generating MySQL DDL from a DataFrame schema.
 */
abstract class BaseAnalyzer {
  import scala.util.control.NonFatal

  protected val logger: Logger = Logger.getLogger(this.getClass)
  protected val config: Config = ConfigLoader.loadConfig

  logger.setLevel(Level.INFO)

  /**
   * Implemented by subclasses: runs the concrete analysis over the cleaned
   * data and returns the result to be persisted.
   */
  def analyze(spark: SparkSession, cleanedData: DataFrame): DataFrame

  /**
   * Reads the cleaned CSV data from the path configured under
   * `hadoop.clean.data`. Schema inference is disabled, so every column
   * arrives as a string; subclasses cast as needed.
   *
   * @throws Exception re-thrown (after logging) if the read fails
   */
  protected def readCleanedData(spark: SparkSession): DataFrame = {
    val cleanDataPath = config.getString("hadoop.clean.data")
    logger.info(s"读取清洗后的数据路径: $cleanDataPath")
    try {
      spark.read
        .option("header", "true")
        .option("sep", ",")
        .option("inferSchema", "false")
        .csv(cleanDataPath)
    } catch {
      // NonFatal: let OutOfMemoryError / InterruptedException etc. propagate.
      case NonFatal(e) =>
        logger.error(s"读取清洗后的数据失败，路径: $cleanDataPath", e)
        throw e
    }
  }

  /**
   * Persists the result to a CSV directory under `hadoop.analysis.result`
   * and to the MySQL table `tableName` (table creation handled elsewhere).
   *
   * The two sinks are attempted independently: a CSV failure does not block
   * the MySQL write (and vice versa). Failures are logged, never re-thrown.
   */
  protected def saveResult(result: DataFrame, tableName: String, spark: SparkSession): Unit = {
    val csvOutputPath = s"${config.getString("hadoop.analysis.result")}/$tableName"
    try {
      result.write
        .mode("overwrite")
        .option("header", "true")
        .csv(csvOutputPath)
      logger.info(s"结果已保存至CSV: $csvOutputPath")
    } catch {
      case NonFatal(e) =>
        logger.error(s"保存CSV结果失败: ${e.getMessage}", e)
    }
    try {
      val localHost = InetAddress.getLocalHost
      logger.info(s"写入MySQL的主机信息: ${localHost.getHostName}(${localHost.getHostAddress})")
      if (MySQLHandler.testConnection()) {
        MySQLHandler.writeToMySQL(result, tableName, "overwrite")
        logger.info(s"结果已写入MySQL表: $tableName")
      } else {
        // A failed connection test used to skip the write silently; log it
        // so missing table data can be diagnosed.
        logger.warn(s"MySQL连接测试失败，跳过写入表: $tableName")
      }
    } catch {
      case NonFatal(e) =>
        logger.error(s"写入MySQL表 $tableName 失败: ${e.getMessage}", e)
    }
  }

  /** Public entry point delegating to [[saveResult]]. */
  def performSave(result: DataFrame, tableName: String, spark: SparkSession): Unit = {
    saveResult(result, tableName, spark)
  }

  /** Parses the string column `dateCol` into a timestamp using `format`, in place. */
  protected def convertToTimestamp(df: DataFrame, dateCol: String, format: String): DataFrame = {
    df.withColumn(dateCol, to_timestamp(col(dateCol), format))
  }

  /**
   * Builds a MySQL `CREATE TABLE` statement from the DataFrame schema,
   * mapping Spark types to MySQL column types. `primaryKey` is added as the
   * PRIMARY KEY only when it names an actual column (empty string = none).
   */
  protected def buildCreateTableSql(df: DataFrame, tableName: String, primaryKey: String): String = {
    val columns = df.schema.fields.map { field =>
      val fieldName = field.name
      // Spark's DataType.simpleString yields "int" and "bigint" — the old
      // "integer"/"long" patterns never matched, so integer columns silently
      // fell through to VARCHAR(255). Both spellings are kept for safety.
      val mysqlType = field.dataType.simpleString match {
        case "int" | "integer" => "INT"
        case "bigint" | "long" => "BIGINT"
        case "double" => "DOUBLE"
        case "float" => "FLOAT"
        case "string" => "VARCHAR(255)"
        case "boolean" => "TINYINT(1)"
        case "timestamp" => "DATETIME"
        case "date" => "DATE"
        case _ => "VARCHAR(255)"
      }
      s"`$fieldName` $mysqlType"
    }.mkString(", ")

    val primaryKeyClause = if (primaryKey.nonEmpty && df.schema.fieldNames.contains(primaryKey)) {
      s", PRIMARY KEY (`$primaryKey`)"
    } else {
      ""
    }

    s"CREATE TABLE `$tableName` ($columns$primaryKeyClause)"
  }

  /**
   * Creates the MySQL table for `df`'s schema if it does not already exist.
   * The JDBC connection is always closed in `finally`.
   *
   * @throws RuntimeException wrapping the cause when table creation fails
   */
  protected def createTableIfNotExists(df: DataFrame, tableName: String, primaryKey: String): Unit = {
    if (MySQLHandler.tableExists(tableName)) {
      logger.info(s"表[$tableName]已存在，无需创建")
    } else {
      logger.info(s"开始创建表[$tableName]")
      var connection: java.sql.Connection = null
      try {
        connection = MySQLHandler.getConnection()
        val createSql = buildCreateTableSql(df, tableName, primaryKey)
        logger.info(s"创建表SQL: $createSql")
        MySQLHandler.createTable(connection, createSql)
        logger.info(s"表[$tableName]创建成功")
      } catch {
        case NonFatal(e) =>
          logger.error(s"创建表[$tableName]失败", e)
          throw new RuntimeException(s"创建表[$tableName]失败", e)
      } finally {
        if (connection != null) connection.close()
      }
    }
  }
}