package com.xunqi.spark;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import scala.Tuple2;

import java.util.Arrays;

import static org.apache.spark.sql.functions.*;

public class WordCount {

    /**
     * Computes the top-5 most frequent words of a text file three ways —
     * RDD API, DataFrame DSL, and Spark SQL — and prints each result.
     *
     * @param args optional; {@code args[0]} overrides the default input file path
     */
    public static void main(String[] args) {
        // Allow the input path to be supplied on the command line; fall back
        // to the original hard-coded location for backward compatibility.
        String inputPath = args.length > 0
                ? args[0]
                : "E://Projects//data-heart-mirror//data-mirror//txtfile//wikiOfSpark.txt";

        // Initialize the SparkSession (local mode, single driver JVM).
        SparkSession spark = SparkSession.builder().appName("WordCount").master("local").getOrCreate();

        // Read the text file: one Row per line, a single string column named "value".
        Dataset<Row> textData = spark.read().text(inputPath);

        // --- Word count via the RDD API ---
        JavaRDD<Row> rdd = textData.javaRDD();
        System.out.println("RDD方式结果：");
        // BUG FIX: Row.toString() wraps the line in brackets ("[...]"), which
        // corrupted the first and last token of every line; use getString(0)
        // to extract the raw "value" column instead.
        rdd.flatMap(line -> Arrays.asList(line.getString(0).split(" ")).iterator())
                // Drop empty tokens so the RDD result matches the DSL/SQL paths,
                // which both filter out "".
                .filter(word -> !word.isEmpty())
                .mapToPair(word -> new Tuple2<>(word, 1))
                .reduceByKey(Integer::sum)
                .mapToPair(Tuple2::swap)   // (count, word) so sortByKey orders by count
                .sortByKey(false)          // descending frequency
                .mapToPair(Tuple2::swap)   // back to (word, count) for printing
                // BUG FIX: the previous zipWithIndex() made tuple._2() the element
                // index, printing "(word,count): 0" instead of "word: count".
                .take(5)
                .forEach(tuple -> System.out.println(tuple._1() + ": " + tuple._2()));

        // --- Word count via the DataFrame DSL ---
        Dataset<Row> wordCounts = textData
                .select(explode(split(col("value"), " ")).as("word"))
                .filter(col("word").notEqual(""))
                .groupBy("word")
                .count()
                .orderBy(col("count").desc());

        System.out.println("DSL方式结果：");
        wordCounts.limit(5).show();

        // --- Word count via Spark SQL on a temp view ---
        textData.createOrReplaceTempView("text_data");

        Dataset<Row> sqlWordCounts = spark.sql(
                "SELECT word, COUNT(*) as count " +
                        "FROM (SELECT explode(split(value, ' ')) as word FROM text_data) " +
                        "WHERE word != '' " +
                        "GROUP BY word " +
                        "ORDER BY count DESC " +
                        "LIMIT 5"
        );

        System.out.println("SQL方式结果：");
        sqlWordCounts.show();

        // Release Spark resources before exiting.
        spark.stop();
    }
}
