from pyspark.sql import SparkSession
from pyspark.sql.functions import monotonically_increasing_id
from pyspark.sql.types import StructType,StructField,StringType,IntegerType,FloatType
from pyspark.sql.functions import count,mean,stddev,min,max,col,sum,when,avg
from pyspark.sql import functions as F

if __name__ == '__main__':
    # Analyses of the Hive table `fooddata`; each result is exported to MySQL
    # and mirrored as a Hive parquet table via export() below.
    spark = SparkSession.builder \
        .appName("sparkSQL") \
        .master("local[*]") \
        .config("spark.sql.shuffle.partitions", 2) \
        .config("spark.sql.warehouse.dir", "hdfs://192.168.88.161:8020/user/hive/warehouse") \
        .config("hive.metastore.uris", "thrift://192.168.88.161:9083") \
        .config("spark.jars", "/tmp/pycharm_project_88/spark/mysql-connector-java-5.1.32.jar") \
        .enableHiveSupport() \
        .getOrCreate()

    # Shared JDBC target; previously duplicated inline in 13 write blocks.
    jdbc_url = "jdbc:mysql://192.168.88.161:3306/bigdata?useSSL=false&useUnicode=true&charset=utf8mb4"

    def export(df, table):
        """Persist *df* into MySQL table *table* (overwrite), mirror it as a
        Hive parquet table of the same name, and show it as a sanity check.

        Any failure is logged and then re-raised so the full stack trace
        remains visible.
        """
        try:
            df.write.mode("overwrite") \
                .format("jdbc") \
                .option("url", jdbc_url) \
                .option("dbtable", table) \
                .option("user", "root") \
                .option("password", "123456") \
                .option("driver", "com.mysql.jdbc.Driver") \
                .save()

            # saveAsTable(name, format): second positional arg is the format.
            df.write.mode("overwrite").saveAsTable(table, "parquet")
            spark.sql("select * from " + table).show()
        except Exception as e:
            print("Error:", e)
            raise  # re-raise to surface the full stack trace

    fooddata = spark.read.table("fooddata")

    # Top-10 restaurants by average price, with their star rating.
    # (The unused `result1.toPandas()` from the original — a full driver-side
    # collect — was dropped.)
    top_ten_price = fooddata.orderBy(fooddata.avgPrice.desc()).limit(10)
    export(top_ten_price.select("title", "start", "avgPrice"), "maxPriceTop")

    # Restaurant count per broad type.
    export(fooddata.groupby("totalType").count(), "typeCount")

    # Average price per city.
    export(fooddata.groupby("city").agg(F.avg("avgPrice").alias("averagePrice")),
           "cityAvg")

    # Average comment volume per broad type.
    export(fooddata.groupby("totalType").agg(avg("totalComment").alias("commentAvg")),
           "typeComment")

    # Average taste / environment / service ratings per broad type.
    export(
        fooddata.groupby("totalType").agg(
            avg("tasterate").alias("avgTasterate"),
            avg("envsrate").alias("avgEnvsrate"),
            avg("serverate").alias("avgServerate"),
        ),
        "typeRate",
    )

    # Restaurant count per specific type.
    export(fooddata.groupby("type").count(), "specificType")

    # Highest average price per city.
    export(fooddata.groupby("city").agg(max("avgPrice").alias("maxAvgPrice")),
           "maxPriceCity")

    # Bucket restaurants into price bands.  NOTE: between() is inclusive on
    # both ends, so boundary values (15, 50, 100, 200) fall into the cheaper
    # band because when() clauses are evaluated in order.
    fooddata_with_category = fooddata.withColumn(
        "priceCategory",
        when(col("avgPrice").between(0, 15), "0 - 15元")
        .when(col("avgPrice").between(15, 50), "15 - 50元")
        .when(col("avgPrice").between(50, 100), "50 - 100元")
        .when(col("avgPrice").between(100, 200), "100 - 200元")
        .when(col("avgPrice").between(200, 500), "200 - 500元")
        .otherwise("500以上")
    )
    export(fooddata_with_category.groupby("priceCategory").count(), "categoryPrice")

    # Average price per broad type.
    export(fooddata.groupby("totalType").agg(avg("avgPrice").alias("allAvgPrice")),
           "typePrice")

    # Star-rating distribution.
    export(fooddata.groupby("start").count(), "startCount")

    # Combined rating (taste + environment + service), averaged per city.
    fooddata_with_mixrate = fooddata.withColumn(
        "mixrate",
        col("tasterate") + col("envsrate") + col("serverate")
    )
    export(fooddata_with_mixrate.groupby("city").agg(avg("mixrate").alias("avgMixrate")),
           "mixrateAvg")

    # Max / mean / min price per city.  The "Avf" spelling in the aliases is
    # kept from the original because downstream consumers read these column
    # names from MySQL — NOTE(review): likely a typo for "Avg"; fix together
    # with the consumers.
    export(
        fooddata.groupby("city").agg(
            max("avgPrice").alias("maxAvfPrice"),
            avg("avgPrice").alias("avgAvfPrice"),
            min("avgPrice").alias("minAvfPrice"),
        ),
        "mamCity",
    )

    # Top-10 addresses by total comment volume.
    total_comments_df = fooddata.groupby("address").agg(
        sum("totalComment").alias("sumTotalComment")
    )
    export(total_comments_df.orderBy(col("sumTotalComment").desc()).limit(10),
           "hotAddress")

    spark.stop()