package cn.itcast.hello;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQueryException;

import java.util.Arrays;
import java.util.concurrent.TimeoutException;

import static org.apache.spark.sql.functions.col;
/**
 * Author itcast
 * Desc Demonstrates a Structured Streaming WordCount written in Java:
 * reads lines from a TCP socket, splits them into words, and prints running
 * word counts to the console via both a SQL query and the DSL API.
 */
public class JavaSparkDemo04 {
    public static void main(String[] args) throws TimeoutException, StreamingQueryException {
        // 0. Prepare the environment.
        SparkSession spark = SparkSession.builder()
                .appName("JavaSparkDemo")
                .master("local[*]")
                // Keep shuffle partitions small for a local demo (default is 200).
                .config("spark.sql.shuffle.partitions", "4")
                .getOrCreate();
        spark.sparkContext().setLogLevel("WARN");

        // 1. Load a stream of lines from a TCP socket (e.g. `nc -lk 9999` on node1).
        Dataset<Row> lines = spark.readStream()
                .format("socket")
                .option("host", "node1")
                .option("port", 9999)
                .load();

        // 2. Process the data — split each input line into individual words.
        Dataset<String> ds = lines.as(Encoders.STRING());
        Dataset<String> wordsDS = ds.flatMap(
                (String line) -> Arrays.asList(line.split(" ")).iterator(),
                Encoders.STRING());

        // ==== SQL variant: count words via a temp view and a SQL query.
        wordsDS.createOrReplaceTempView("t_word");
        String sql = "select value, count(*) as counts " +
                "from t_word " +
                "group by value " +
                "order by counts desc";
        Dataset<Row> result1 = spark.sql(sql);

        // ==== DSL variant: same aggregation expressed with the Dataset API.
        Dataset<Row> result2 = wordsDS.groupBy("value")
                .count()
                .orderBy(col("count").desc());

        // 3. Output results. Complete mode is required because the queries
        // combine an aggregation with a global sort.
        result1.writeStream()
                .format("console")
                .outputMode(OutputMode.Complete())
                .start();
        result2.writeStream()
                .format("console")
                .outputMode(OutputMode.Complete())
                .start();

        // FIX: the original awaited only the second query, so a failure in the
        // first query was silently lost while main() stayed blocked. Block until
        // ANY active query terminates (normally or with an error) instead.
        spark.streams().awaitAnyTermination();

        // 4. Release resources.
        spark.stop();
    }
}
