from pyspark.sql.session import SparkSession
# Explicit import instead of `import *`, which would shadow builtins (sum, max, min, ...).
from pyspark.sql.functions import count

# Build (or reuse) a local SparkSession — the entry point for the DataFrame/SQL API.
spark = SparkSession.builder.master('local').appName('demo1_sparksession').getOrCreate()

# Read the data and return a DataFrame.
# A DataFrame wraps an RDD, adding a table structure (schema) on top of it.
student_df = (
    spark.read
    .format('csv')
    .option('sep', ',')
    .schema('id string, name string, age int, sex string, clazz string')
    .load('../../data/students.txt')
)

# student_df.show()
# A DataFrame can be converted back to its underlying RDD.
student_rdd = student_df.rdd

# 1. Process the data with SQL.
# Register a temporary view so the DataFrame can be queried by name.
student_df.createOrReplaceTempView('students')

# NOTE(review): this call is lazy and its result is unused, so it performs no work;
# kept only to mirror the original demo — consider removing or calling .show().
spark.sql('select * from students')

# Running SQL returns a new DataFrame; .show() triggers execution and prints it.
spark.sql(
    'select clazz,count(1) as num from students group by clazz'
).show()


# 2. Process the data with the DSL (DataFrame API) — same aggregation as the SQL above.
(
    student_df
    .groupby('clazz')
    .agg(count('clazz').alias('num'))
    .show()
)

# Release the Spark session's resources once the demo is finished.
spark.stop()