package org.example

import org.apache.spark.sql.SparkSession
import java.io.File

/**
 * Example: reading a Hudi table (created by Flink) with Spark.
 * Requires the Hudi Spark runtime bundle on the classpath:
 * org.apache.hudi:hudi-spark3.2-bundle_2.12
 */
object SparkHudiReader {

    /**
     * Entry point: builds a local SparkSession with Hudi extensions, then
     * reads the table both via a temporary SQL view and the DataFrame API.
     *
     * @param args optional first argument overrides the Hudi table base path
     *             (defaults to /tmp/hudi/default/user_behavior)
     */
    def main(args: Array[String]): Unit = {
        // Initialize the SparkSession with the Hudi catalog/extension wiring.
        val spark = SparkSession.builder()
            .appName("Hudi Spark Reader")
            .master("local[1]")  // run in local mode
            .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .config("spark.sql.extensions", "org.apache.spark.sql.hudi.HoodieSparkSessionExtension")
            .config("spark.sql.catalog.spark_catalog", "org.apache.spark.sql.hudi.catalog.HoodieCatalog")
            .config("spark.sql.hive.convertMetastoreParquet", "false")
            .config("spark.hadoop.hive.metastore.uris", "") // avoid attempting a Hive Metastore connection
            .getOrCreate()

        try {
            // Reduce log noise.
            spark.sparkContext.setLogLevel("WARN")

            println("Show all databases:")
            // NOTE: no trailing semicolon — spark.sql() parses a single statement
            // and rejects "show databases;" with a ParseException on most Spark 3.x versions.
            spark.sql("show databases").show()

            // Table base path; overridable via the first CLI argument.
            val tablePath = args.headOption.getOrElse("/tmp/hudi/default/user_behavior")

            println("Register the Hudi table as a temporary view, then query it:")
            spark.sql(
                s"""
                |CREATE TEMPORARY VIEW user_behavior_view
                |USING hudi
                |OPTIONS (
                |  path '$tablePath'
                |)
                |""".stripMargin)

            println("Query the temporary view:")
            spark.sql("SELECT * FROM user_behavior_view").show(false)

            println("Read the Hudi table via the DataFrame API:")
            val hudiDF = spark.read.format("hudi").load(tablePath)
            hudiDF.show(false)

            // Print the table's schema metadata.
            println("Table schema:")
            hudiDF.printSchema()
        } finally {
            // Always release the session (and its local cluster resources),
            // even if any of the queries above throws.
            spark.stop()
        }
    }
}