package com.orderstats.spark

import java.sql.{Connection, DriverManager, PreparedStatement}
import java.util.Properties

import scala.util.control.NonFatal

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions._
import org.apache.spark.sql.streaming.Trigger
import org.apache.spark.sql.types._

object RealTimeOrderConsumer {

  /** One order record as parsed from the CSV payload of a Kafka message.
    *
    * Fields mirror the CSV columns one-to-one; `isValid` is the raw "Y"/"N"
    * flag (kept as a String so the value round-trips to the database
    * unchanged).
    */
  case class OrderData(
    category: String,
    product: String,
    user: String,
    quantity: Int,
    orderTime: String,
    rating: Double,
    isValid: String
  )

  // SQL is hoisted to constants so the writer paths cannot drift apart.
  private val InsertOrderSql =
    """
      INSERT INTO orders (category, product_name, user_id, quantity, order_date, rating, is_valid)
      VALUES (?, ?, ?, ?, ?, ?, ?)
    """

  private val UpsertCategoryStatsSql =
    """
      INSERT OR REPLACE INTO category_order_stats (category, order_count, total_quantity, avg_rating, last_updated)
      VALUES (?, ?, ?, ?, datetime('now'))
    """

  /** Entry point: consumes CSV order events from the Kafka topic "orders"
    * and fans them out to three streaming sinks — a console echo, sliding-
    * window per-category statistics, and a raw-order archive — the latter
    * two persisted to a shared SQLite database.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("RealTimeOrderConsumer")
      .master("local[*]")
      .config("spark.sql.adaptive.enabled", "true")
      .config("spark.sql.adaptive.coalescePartitions.enabled", "true")
      .getOrCreate()

    import spark.implicits._

    // Schema of the CSV payload carried in the Kafka message value.
    val orderSchema = StructType(Array(
      StructField("category", StringType, true),
      StructField("product", StringType, true),
      StructField("user", StringType, true),
      StructField("quantity", IntegerType, true),
      StructField("orderTime", StringType, true),
      StructField("rating", DoubleType, true),
      StructField("isValid", StringType, true)
    ))

    // Source stream: subscribe to the "orders" topic, new messages only.
    val kafkaDF = spark
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", "localhost:9092")
      .option("subscribe", "orders")
      .option("startingOffsets", "latest")
      .load()

    // Parse the CSV value into typed columns and stamp the processing time
    // (used below as the aggregation window column).
    val orderDF = kafkaDF
      .select(from_csv(col("value").cast("string"), orderSchema, Map.empty[String, String]).as("data"))
      .select("data.*")
      .withColumn("timestamp", current_timestamp())

    // Sink 1: echo every parsed record to the console for debugging.
    val consoleQuery = orderDF.writeStream
      .outputMode("append")
      .format("console")
      .option("truncate", false)
      .trigger(Trigger.ProcessingTime("5 seconds"))
      .start()

    // Sink 2: 30s windows sliding every 10s, per category, valid orders only.
    // NOTE(review): the upsert below is keyed on category alone, so
    // overlapping windows overwrite each other and the table ends up holding
    // whichever window row was emitted last per category — confirm intended.
    val statsQuery = orderDF
      .filter(col("isValid") === "Y")
      .groupBy(
        window(col("timestamp"), "30 seconds", "10 seconds"),
        col("category")
      )
      .agg(
        count("*").as("order_count"),
        sum("quantity").as("total_quantity"),
        avg("rating").as("avg_rating")
      )
      .writeStream
      .outputMode("update")
      .foreachBatch { (batchDF: org.apache.spark.sql.Dataset[org.apache.spark.sql.Row], batchId: Long) =>
        println(s"=== 批次 $batchId 统计结果 ===")
        batchDF.show()

        // Aggregated rows are few (one per window/category), so collecting
        // to the driver is safe here.
        batchDF.collect().foreach { row =>
          updateDatabase(
            row.getAs[String]("category"),
            row.getAs[Long]("order_count"),
            row.getAs[Long]("total_quantity"),
            row.getAs[Double]("avg_rating")
          )
        }
      }
      .trigger(Trigger.ProcessingTime("10 seconds"))
      .start()

    // Sink 3: archive every raw order. All rows of a micro-batch share one
    // JDBC connection via saveOrdersToDatabase (previously a fresh SQLite
    // connection was opened and closed for every single row).
    val saveQuery = orderDF.writeStream
      .foreachBatch { (batchDF: org.apache.spark.sql.Dataset[org.apache.spark.sql.Row], batchId: Long) =>
        val orders = batchDF.collect().toSeq.map { row =>
          OrderData(
            row.getAs[String]("category"),
            row.getAs[String]("product"),
            row.getAs[String]("user"),
            row.getAs[Int]("quantity"),
            row.getAs[String]("orderTime"),
            row.getAs[Double]("rating"),
            row.getAs[String]("isValid")
          )
        }
        saveOrdersToDatabase(orders)
      }
      .trigger(Trigger.ProcessingTime("5 seconds"))
      .start()

    println("🚀 Spark实时数据消费者已启动")
    println("📊 正在处理来自Kafka的订单数据...")
    println("💾 数据将实时更新到数据库")

    // Block until any of the three queries terminates (or fails).
    spark.streams.awaitAnyTermination()
  }

  /** Opens a new JDBC connection to the SQLite database shared with the
    * Tomcat web application.
    */
  def getConnection(): Connection = {
    val url = "jdbc:sqlite:/var/lib/tomcat9/webapps/ROOT/orders_system.db"
    DriverManager.getConnection(url)
  }

  /** Closes statement then connection; the nested try guarantees the
    * connection is released even if closing the statement throws.
    */
  private def closeQuietly(stmt: PreparedStatement, conn: Connection): Unit = {
    try {
      if (stmt != null) stmt.close()
    } finally {
      if (conn != null) conn.close()
    }
  }

  /** Inserts a single order. Kept for API compatibility; delegates to the
    * batched variant.
    */
  def saveOrderToDatabase(order: OrderData): Unit =
    saveOrdersToDatabase(Seq(order))

  /** Inserts a micro-batch of orders over one shared connection and one
    * reused prepared statement.
    *
    * Writes are best-effort, matching the original per-row semantics: a
    * failing row is logged and skipped so the rest of the batch still lands.
    */
  private def saveOrdersToDatabase(orders: Seq[OrderData]): Unit = {
    if (orders.nonEmpty) {
      var conn: Connection = null
      var stmt: PreparedStatement = null
      try {
        conn = getConnection()
        stmt = conn.prepareStatement(InsertOrderSql)
        orders.foreach { order =>
          try {
            stmt.setString(1, order.category)
            stmt.setString(2, order.product)
            stmt.setString(3, order.user)
            stmt.setInt(4, order.quantity)
            stmt.setString(5, order.orderTime)
            stmt.setDouble(6, order.rating)
            stmt.setString(7, order.isValid)
            stmt.executeUpdate()
          } catch {
            // NonFatal so OOM / interrupts still propagate.
            case NonFatal(e) => println(s"保存订单失败: ${e.getMessage}")
          }
        }
      } catch {
        case NonFatal(e) => println(s"保存订单失败: ${e.getMessage}")
      } finally {
        closeQuietly(stmt, conn)
      }
    }
  }

  /** Upserts the aggregated statistics row for one category.
    *
    * @param category      product category (primary key of the stats table)
    * @param orderCount    number of valid orders in the emitted window
    * @param totalQuantity summed quantity over those orders
    * @param avgRating     mean rating over those orders
    */
  def updateDatabase(category: String, orderCount: Long, totalQuantity: Long, avgRating: Double): Unit = {
    var conn: Connection = null
    var stmt: PreparedStatement = null
    try {
      conn = getConnection()
      stmt = conn.prepareStatement(UpsertCategoryStatsSql)
      stmt.setString(1, category)
      stmt.setLong(2, orderCount)
      stmt.setLong(3, totalQuantity)
      stmt.setDouble(4, avgRating)
      stmt.executeUpdate()

      println(s"✅ 更新类别统计: $category - 订单数:$orderCount, 总量:$totalQuantity, 评分:${f"$avgRating%.1f"}")
    } catch {
      case NonFatal(e) => println(s"更新统计失败: ${e.getMessage}")
    } finally {
      closeQuietly(stmt, conn)
    }
  }
}