# -*- coding:utf-8 -*-
# @Author: shenyuyu
# @Time: 2023/6/28 18:01
# @File: h_2_1.py
from pyspark.sql import SparkSession
from pyspark.sql.types import StructType, StringType, IntegerType

if __name__ == '__main__':
    # Build a local SparkSession and load a MovieLens-style ratings file
    # (tab-separated fields: user id, movie id, score, timestamp).
    spark = SparkSession.builder.appName("a").master("local[*]").getOrCreate()
    sc = spark.sparkContext
    rdd = sc.textFile("file:///tmp/pycharm_project_161/data/sql/u.data")
    # Cast score to int at ingestion so SQL comparisons (e.g. score > 3) and
    # aggregates (avg/min/max) are numeric rather than relying on implicit
    # string casts.
    rdd1 = rdd.map(lambda x: x.split("\t")).map(lambda x: [x[0], x[1], int(x[2]), x[3]])
    df = rdd1.toDF(["user", "movie", "score", "time"])
    df.createTempView("user_score")

    # todo query each user's average score
    # spark.sql("select user, avg(score) as score_avg from user_score group by user order by score_avg desc").show()
    # todo query each movie's average score
    # spark.sql("select movie, avg(score) as score_avg from user_score group by movie").show()
    # todo count ratings above the overall average score
    # spark.sql("select count(*) from user_score where score > (select round(avg(score), 2) as score_avg from user_score)").show()
    # todo among high ratings (>3), find the user who rated most often, and that user's average score
    # spark.sql("select avg(score) from user_score where user = (select user from user_score where score > 3 group by user order by count(*) desc limit 1)").show()
    # todo query each user's average, minimum and maximum score
    # spark.sql("select user, avg(score) score_avg, min(score) score_min, max(score) score_max from user_score group by user").show(n=200)
    # todo among movies rated more than 100 times, list the top 10 by average score
    # spark.sql("select movie, avg(score) score_avg from user_score group by movie having movie in (select movie from user_score group by movie having count(*) > 100) order by score_avg desc").show()

    # Release the Spark driver/executor resources when the script finishes.
    spark.stop()