package org.mcclone.jr.spark.es;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.HarFileSystem;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;
import org.elasticsearch.spark.rdd.api.java.JavaEsSpark;
import org.mcclone.jr.spark.Person;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class SparkSave {

    /**
     * Demo entry point: writes a small, hard-coded list of {@link Person}
     * records to the Elasticsearch index {@code test} using the
     * elasticsearch-hadoop Spark connector against a local ES node.
     *
     * @param args command-line arguments (unused)
     * @throws AnalysisException declared for signature compatibility with existing
     *                           callers/scripts; no Spark SQL analysis runs here
     * @throws IOException declared for signature compatibility; no file I/O runs here
     */
    public static void main(String[] args) throws AnalysisException, IOException {
        // es.nodes.wan.only=true restricts the connector to the configured node
        // instead of discovering cluster-internal addresses (needed when ES is
        // reachable only through the advertised host, e.g. Docker/localhost).
        SparkConf sparkConf = new SparkConf()
                .setAppName("writeEs")
                .setMaster("local[*]")
                .set("es.index.auto.create", "true")
                .set("es.nodes", "localhost")
                .set("es.port", "9200")
                .set("es.nodes.wan.only", "true");

        List<Person> personList = new ArrayList<>();
        personList.add(new Person("1111", "1", "1"));
        personList.add(new Person("2222", "1", "2"));

        // JavaSparkContext is AutoCloseable; the original leaked it. It also
        // called parallelize(personList) twice and discarded the first RDD —
        // the list is now parallelized exactly once.
        try (JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf)) {
            JavaEsSpark.saveToEs(javaSparkContext.parallelize(personList), "test");
        }

        // NOTE(review): the original constructed an unused FileSystem and an
        // unused, never-initialized HarFileSystem here (both leaked). They were
        // dead code and have been removed — confirm no follow-up HDFS work was
        // planned before dropping the org.apache.hadoop imports as well.
    }
}
