from pyspark.sql import SparkSession
from pyspark.sql.functions import col, expr, concat, countDistinct, to_timestamp, year, min, max, month, avg, \
    weekofyear, count
from pyspark.sql.types import StructType, ArrayType, StringType, StructField, IntegerType, FloatType, BooleanType
from pyspark import find_spark_home

# Driver
# Driver: build (or reuse) a local SparkSession for this script.
spark = (
    SparkSession.builder
    .appName('Hello Spark')
    .master('local')
    .getOrCreate()
)
# Echo the Spark installation directory (uses pyspark's private helper).
print(find_spark_home._find_spark_home())

# (column name, Spark type) pairs for one Yelp review record; every field nullable.
_review_fields = [
    ("business_id", StringType()),
    ("cool", IntegerType()),
    ("date", StringType()),
    ("funny", IntegerType()),
    ("review_id", StringType()),
    ("stars", FloatType()),
    ("text", StringType()),
    ("useful", IntegerType()),
    ("user_id", StringType()),
]
review_schema = StructType(
    [StructField(name, dtype, True) for name, dtype in _review_fields]
)

review_df = spark.read.schema(review_schema).option("header", True).json("../dataset/review.json")

review_df.show(truncate=False)

# DataFrame API: number of reviews per year, derived from the "date" column.
yearly_counts = review_df.select("date") \
    .groupby(year("date").alias("year")) \
    .count()
yearly_counts.show(truncate=False)

# SQL equivalent of the DataFrame-API aggregation above.
# createOrReplaceTempView is idempotent: the original createTempView raises an
# AnalysisException if the view already exists, so the script could not be
# re-run inside the same Spark session.
review_df.createOrReplaceTempView("review")
spark.sql("""
select year(date) year,count(*) count
from review
group by year(date)
""").show(truncate=False)
