#coding:utf8

#导包
from pyspark.sql import SparkSession
from pyspark.sql.functions import monotonically_increasing_id
from pyspark.sql.types import StructType,StructField,IntegerType,StringType,FloatType
from pyspark.sql.functions import count,mean,col,sum,when,max,min,avg,explode,split,row_number,year,month
from pyspark.sql.window import Window


# --- Shared sink configuration -------------------------------------------
# Every result table goes to the same MySQL database.  The original script
# used "master:3306" for the first five writes and dropped the port for the
# rest; unified here on the explicit-port form.
JDBC_URL = "jdbc:mysql://master:3306/bigdata?useSSL=false&useUnicode=true&charset=utf8"
JDBC_USER = "root"
JDBC_PASSWORD = "qinxiao123456"


def save_result(spark, df, table):
    """Persist *df* under *table* twice — to MySQL via JDBC and to the Hive
    warehouse as ORC — then show the Hive copy as a quick sanity check.

    :param spark: the active SparkSession (used for the verification query)
    :param df:    result DataFrame to persist
    :param table: target table name, identical in MySQL and Hive
    """
    df.write.mode("overwrite"). \
        format("jdbc"). \
        option("url", JDBC_URL). \
        option("dbtable", table). \
        option("user", JDBC_USER). \
        option("password", JDBC_PASSWORD). \
        option("encoding", "utf-8"). \
        save()
    df.write.mode("overwrite").saveAsTable(table, "orc")
    spark.sql("select * from " + table).show()


if __name__ == '__main__':
    # Build the session: local mode, 2 shuffle partitions (small data set),
    # Hive support pointed at the cluster metastore and warehouse.
    spark = SparkSession.builder.appName("sparkSQL").master("local[*]"). \
        config("spark.sql.shuffle.partitions", 2). \
        config("spark.sql.warehouse.dir", "hdfs://master:8020/user/hive/warehouse"). \
        config("hive.metastore.uris", "thrift://master:9083"). \
        enableHiveSupport(). \
        getOrCreate()

    # Source: movie data table from Hive.
    dian_ying_shu_ju = spark.read.table("dian_ying_shu_ju")

    # A movie carries several "-"-separated genres — one row per genre.
    explode_df = dian_ying_shu_ju.withColumn("type", explode(split(col("type"), "-")))
    # Likewise ","-separated countries — one row per country.
    explode_df2 = dian_ying_shu_ju.withColumn("country", explode(split(col("country"), ",")))

    # Requirement 1: number of movies per genre.
    result1 = explode_df.groupBy("type").agg(count("type").alias("type_count"))
    save_result(spark, result1, "dian_ying_leixing_shu")

    # Requirement 2: top-10 movies by total box office.
    result2 = dian_ying_shu_ju.orderBy(col("allBoxOffice").desc()).limit(10)
    save_result(spark, result2, "piao_fang_top")

    # Requirement 3: highest opening-day and total box office per genre.
    result3 = explode_df.groupBy("type").agg(
        max("firstBoxOffice").alias("max_firstBoxOffice"),
        max("allBoxOffice").alias("max_allBoxOffice")
    )
    save_result(spark, result3, "leixing_zuigao_piaofang")

    # Requirement 4: number of movies per country.
    result4 = explode_df2.groupBy("country").agg(count("country").alias("country_count"))
    save_result(spark, result4, "guo_jia_dian_ying_shu")

    # Requirement 5: star-rating buckets.  Rates are on a 0-100 scale and 0
    # means "unrated".  Any rate not covered below (0, (0,10) and 100) maps
    # to NULL — presumably such values do not occur; verify against the data.
    dian_ying_shu_ju = dian_ying_shu_ju.withColumn(
        "rateCategory",
        when((col("rate") >= 10) & (col("rate") < 20), "半星").
        when((col("rate") >= 20) & (col("rate") < 30), "1星").
        when((col("rate") >= 30) & (col("rate") < 40), "1.5星").
        when((col("rate") >= 40) & (col("rate") < 50), "2星").
        when((col("rate") >= 50) & (col("rate") < 60), "2.5星").
        when((col("rate") >= 60) & (col("rate") < 70), "3星").
        when((col("rate") >= 70) & (col("rate") < 80), "3.5星").
        when((col("rate") >= 80) & (col("rate") < 90), "4星").
        when((col("rate") >= 90) & (col("rate") < 100), "4.5星")
    )
    result5 = dian_ying_shu_ju.filter(col("rate") != 0).groupBy("rateCategory").count()
    save_result(spark, result5, "mingxing_dengji")

    # Requirement 6: per-genre top-10 titles by summed total box office.
    grouped_df = explode_df.groupBy("type", "title").agg(
        sum("allBoxOffice").alias("allBoxOffice")
    )
    window = Window.partitionBy("type").orderBy(col("allBoxOffice").desc())
    result6 = grouped_df.withColumn("row_num", row_number().over(window)). \
        filter(col("row_num") <= 10).drop("row_num")
    save_result(spark, result6, "mei_leixing_piaofang")

    # Requirement 7: rating distribution per genre (unrated rows excluded).
    result7 = explode_df.filter(col("rate") != 0). \
        groupBy("type", "rate").agg(count("rate").alias("rate_count"))
    save_result(spark, result7, "mei_leixing_pingfen")

    # Requirement 8: average duration per genre.
    result8 = explode_df.groupBy("type").agg(avg("duration").alias("avg_duration"))
    save_result(spark, result8, "leixing_pingjun_shichang")

    # Cast the release time to a proper date once, then derive year/month.
    # BUG FIX: the original cast the column but then derived "year" from the
    # un-cast DataFrame, discarding the cast.
    time_df = dian_ying_shu_ju.withColumn("releaseTime", col("releaseTime").cast("date"))
    year_df = time_df.withColumn("year", year(col("releaseTime")))
    month_df = time_df.withColumn("month", month(col("releaseTime")))

    # Requirement 9: average rating per release year.
    result9 = year_df.groupBy("year").agg(avg("rate").alias("year_rate"))
    save_result(spark, result9, "nianfen_pingfen")

    # Requirement 10: average rating per country.
    result10 = explode_df2.groupBy("country").agg(avg("rate").alias("avg_rate"))
    save_result(spark, result10, "guo_jia_pingfen")

    # Requirement 11: average rating per genre.
    result11 = explode_df.groupBy("type").agg(avg("rate").alias("avg_rate"))
    save_result(spark, result11, "leixing_pingfen")

    # Requirement 12: number of movies per release year.
    result12 = year_df.groupBy("year").agg(count("year").alias("year_count"))
    save_result(spark, result12, "nianfen_dianying_shu")

    # Requirement 13: number of movies per release month.
    # BUG FIX: the count column was mislabelled "year_count" in the original;
    # note this renames the column in the downstream table.
    result13 = month_df.groupBy("month").agg(count("month").alias("month_count"))
    save_result(spark, result13, "yue_fen_dian_ying_shu")

    # Requirement 14: duration buckets (minutes); 0 means "unknown" and maps
    # to NULL, then is filtered out.
    # BUG FIX: the original compared col("rate") for every bucket's upper
    # bound instead of col("duration").
    dian_ying_shu_ju = dian_ying_shu_ju.withColumn(
        "durationCategory",
        when(col("duration") == 0, None).otherwise(
            when((col("duration") >= 0) & (col("duration") < 50), "很短").
            when((col("duration") >= 50) & (col("duration") < 80), "较短").
            when((col("duration") >= 80) & (col("duration") < 120), "中").
            when((col("duration") >= 120) & (col("duration") < 150), "较长").
            otherwise('很长')
        )
    )
    result14 = dian_ying_shu_ju.filter(col("duration") != 0). \
        groupBy("durationCategory").count()
    save_result(spark, result14, "shichang_fen_lei")

    # Requirement 15: average total box office per genre.
    result15 = explode_df.groupBy("type").agg(
        avg("allBoxOffice").alias("avg_allBoxOffice")
    )
    save_result(spark, result15, "leixing_pingjun_piaofang")

    # Requirement 16: average total box office per country.
    result16 = explode_df2.groupBy("country").agg(
        avg("allBoxOffice").alias("avg_allBoxOffice")
    )
    save_result(spark, result16, "guo_jia_ping_jun_piao_fang")

    # Requirement 17: average total box office per release year.
    result17 = year_df.groupBy("year").agg(
        avg("allBoxOffice").alias("avg_allBoxOffice")
    )
    save_result(spark, result17, "nianfen_pingjun_piaofang")

    # Release the session cleanly.
    spark.stop()