from pyspark.sql import SparkSession, functions as F

if __name__ == '__main__':
    # Word-count demo: the same aggregation expressed two ways
    # (SQL over a temp view, then the DataFrame DSL).
    spark = SparkSession.builder. \
        appName("test"). \
        master("local[*]"). \
        getOrCreate()

    sc = spark.sparkContext

    # TODO 1: SQL-style processing.
    # Split each line into words; wrap each word in a one-element list so
    # toDF can build a single-column DataFrame from the RDD.
    rdd = sc.textFile("../data/input/words.txt").flatMap(lambda x: x.split(" ")).map(lambda x: [x])

    df = rdd.toDF(["word"])

    df.createOrReplaceTempView("words")

    spark.sql("select word, count(*) as cnt from words group by word order by cnt desc").show()

    # TODO 2: DSL-style processing.
    # text format yields one row per line in a single column named "value".
    df = spark.read.format("text").load("../data/input/words.txt")
    # explode the split line into one row per word, reusing the "value" column
    df2 = df.withColumn("value", F.explode(F.split(df["value"], " ")))
    df2.show()

    # BUG FIX: the keyword was misspelled "assending", which PySpark silently
    # ignores (kwargs.get("ascending", ...)), so the output was sorted
    # ascending instead of descending — unlike the SQL version above.
    df2.groupBy("value").count() \
        .withColumnRenamed("value", "word") \
        .withColumnRenamed("count", "cnt").orderBy("cnt", ascending=False) \
        .show()

    # Release the SparkSession's resources explicitly.
    spark.stop()
