package com.example.sql;

import com.example.entity.User;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.function.FilterFunction;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.Arrays;
import java.util.Iterator;

/**
 * Demonstrates basic Spark SQL {@code Dataset} transformations — filter, map,
 * flatMap, and a grouped count — over a local text file, printing each
 * intermediate result with {@code show()}.
 *
 * @author wangjinlong
 * @version 1.0
 * @date 2021/5/19 16:40
 */
public class SqlDemo {
    static SparkSession session;

    static {
        SparkConf conf = new SparkConf().setAppName("spark-demo").setMaster("local");
        // Kryo serialization is more compact/faster than default Java serialization;
        // classes shipped across the cluster should be registered with it.
        conf.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer");
        conf.registerKryoClasses(new Class[]{User.class});
        conf.set("spark.rdd.compress", "true");
        conf.set("spark.eventLog.enabled", "true");
        conf.set("spark.eventLog.dir", "/tmp/spark-events");

        // master and appName are already set on the SparkConf above; repeating
        // them on the builder (as the original did) was redundant.
        session = SparkSession.builder()
                .config(conf)
                .getOrCreate();
    }

    public static void main(String[] args) {
        test();
    }

    /**
     * Reads {@code data/file.txt} and shows, in order: the raw lines, lines
     * containing "furthest", lines prefixed with "sql-", the individual
     * whitespace-separated words, (word, 1) pairs, and the per-word count.
     * Stops the shared session when finished.
     */
    public static void test() {
        Dataset<String> textFile = session.read().textFile("data/file.txt");
        // Cached because four independent transformations below re-read it.
        textFile.cache();
        textFile.show();

        // Keep only lines containing the literal substring "furthest".
        // Explicit FilterFunction cast disambiguates the overloaded filter().
        Dataset<String> furthest = textFile.filter(
                (FilterFunction<String>) value -> value.contains("furthest"));
        furthest.show();

        // Prefix every line with "sql-".
        Dataset<String> dataset = textFile.map(
                (MapFunction<String, String>) value -> "sql-" + value,
                Encoders.STRING());
        dataset.show();

        // Split each line into whitespace-separated words (one row per word).
        Dataset<String> words = textFile.flatMap(
                (FlatMapFunction<String, String>) s -> Arrays.asList(s.split(" ")).iterator(),
                Encoders.STRING());
        words.show();

        // Pair each word with a count of 1. The original used flatMap over a
        // single-element list; map is the correct one-in/one-out operation.
        Dataset<Tuple2<String, Integer>> pairs = words.map(
                (MapFunction<String, Tuple2<String, Integer>>) s -> new Tuple2<>(s, 1),
                Encoders.tuple(Encoders.STRING(), Encoders.INT()));
        pairs.show();

        // Word count: "_1" is the column name Spark assigns to the tuple's
        // first field under the tuple encoder.
        Dataset<Row> count = pairs.groupBy("_1").count();
        count.show();

        // Release the cached dataset before shutting the session down.
        textFile.unpersist();
        session.stop();
    }
}
