from pyspark.sql import SparkSession
from pyspark.sql.functions import col

from SparkSessionBase import SparkSessionBase  # assumed to be a project-local wrapper class


# Inherits from the SparkSessionBase helper class
class UserApi(SparkSessionBase):
    """Look up a single user's row in the Hive ``users`` table.

    Session construction is delegated to ``SparkSessionBase`` (project-local);
    the class-level SPARK_* attributes below are presumably read by its
    ``_create_spark_session`` helper — confirm against that class.
    """

    SPARK_URL = "yarn"
    SPARK_APP_NAME = 'UserApi'
    ENABLE_HIVE_SUPPORT = True

    def __init__(self):
        # Build the Spark session via the inherited factory helper.
        self.spark = self._create_spark_session()
        # Raise the log level to ERROR to cut down on Spark's chatty output.
        self.spark.sparkContext.setLogLevel("ERROR")

    def get_user_info(self, user_id):
        """Return a lazy DataFrame of rows from ``users`` matching *user_id*.

        No job runs until the caller triggers an action on the result.
        """
        users_df = self.spark.table('users')
        # filter() already keeps all columns, so the former trailing
        # .select("*") was redundant and has been dropped.
        return users_df.filter(col("user_id") == user_id)

    def run(self, user_id):
        """Fetch the user's rows and report whether a match exists.

        Returns the (non-empty, still lazy) DataFrame when at least one row
        matches, or ``None`` when no row does.
        """
        user_info = self.get_user_info(user_id)
        # take(1) pulls at most one row to the driver. The original
        # user_info.rdd.isEmpty() forced a DataFrame -> RDD conversion,
        # which deserializes rows and bypasses Catalyst optimization.
        if user_info.take(1):
            print("user success")
            return user_info
        print(f"No user found with ID: {user_id}")
        return None


if __name__ == '__main__':
    # Example invocation with a sample user id.
    api = UserApi()
    api.run(user_id="9jh7Q3keoOkdASlpECJ-ig")