import model.Info;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.Arrays;

/**
 * Word-count job: reads text files from {@code src/main/resources/data},
 * splits each line on spaces, maps every word to an {@code Info(word, cnt=1)}
 * bean, registers the resulting DataFrame as a temp view, aggregates counts
 * with Spark SQL, and writes the result as CSV to
 * {@code src/main/resources/result}.
 */
public class WordCountSql {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("RDDToDataset")
                .master("local[*]")
                .getOrCreate();

        JavaRDD<String> lines = spark.read().textFile("src/main/resources/data").javaRDD();
        // split(" ") yields empty tokens for leading/repeated spaces; filter
        // them out so blanks are not counted as words.
        JavaRDD<String> words = lines
                .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
                .filter(word -> !word.isEmpty());

        // Lambda replaces the anonymous Function class for consistency with
        // the flatMap above. The former per-record System.out.println debug
        // print is removed: it ran on executors for every single word,
        // polluting logs and slowing the job.
        JavaRDD<Info> stuRDD = words.map(word -> {
            Info stu = new Info();
            stu.setWord(word);
            stu.setCnt(1);
            return stu;
        });

        // Reflectively derive the schema from the Info JavaBean.
        Dataset<Row> stuDf = spark.createDataFrame(stuRDD, Info.class);
        stuDf.printSchema();
        stuDf.createOrReplaceTempView("info");
        // count(cnt) counts the rows per word; correct here because every row
        // carries a non-null cnt of 1.
        Dataset<Row> nameDf = spark.sql("select word,count(cnt) as cnt from info group by word");
        nameDf.show();
        // csv(path) already implies the csv format, so the redundant
        // .format("csv") call is dropped. coalesce(2) caps output to 2 files.
        nameDf.coalesce(2).write().mode(SaveMode.Overwrite).csv("src/main/resources/result");
        spark.stop();
    }
}
