package com.demo.spark.sql;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.config.ReadConfig;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Demo: reads two MongoDB collections through the MongoDB Spark connector,
 * registers them as temp views, joins them with Spark SQL, and writes the
 * aggregated result back to MongoDB (the configured output collection).
 *
 * <p>Requires a reachable MongoDB instance at {@code hadoop-3:27017} and runs
 * Spark in local mode; this is a demo {@code main}, not a reusable API.
 */
public class DataFrameReadWriteMongo {
    public static void main(String[] args) {

        // NOTE(review): the app name "spark support hive" looks like a
        // copy-paste leftover from a Hive demo — confirm before renaming,
        // since it is visible in the Spark UI / history server.
        SparkSession sparkSession = SparkSession.builder().appName("spark support hive")
                .config("spark.sql.warehouse.dir", "/user/spark")
                // Default input/output collections for MongoSpark.load / MongoSpark.save.
                .config("spark.mongodb.input.uri", "mongodb://hadoop-3:27017/testdb.test1")
                .config("spark.mongodb.output.uri", "mongodb://hadoop-3:27017/testdb.test3")
                .master("local")
                .getOrCreate();

        // Ensure the Spark context (and its UI/executor resources) is released
        // even when the Mongo read/write or the SQL job throws.
        try {
            JavaSparkContext sparkContext = JavaSparkContext.fromSparkContext(sparkSession.sparkContext());
            sparkContext.setLogLevel("error");

            // First collection: loaded via the session-level input URI (testdb.test1).
            Dataset<Row> person_test1 = MongoSpark.load(sparkContext).toDF();
            person_test1.createOrReplaceTempView("person");
            person_test1.printSchema();

            // Second collection: loaded with an explicit per-read configuration.
            // NOTE(review): key styles are mixed — two fully-prefixed keys plus a
            // bare "collection". ReadConfig.create accepts both forms (the prefix
            // is stripped), but a single consistent style would be clearer.
            Map<String, String> readConfigMap = new HashMap<>();
            readConfigMap.put("spark.mongodb.input.uri", "mongodb://hadoop-3:27017");
            readConfigMap.put("spark.mongodb.input.database", "testdb");
            readConfigMap.put("collection", "test2");
            ReadConfig readConfig = ReadConfig.create(readConfigMap);

            Dataset<Row> person_test2 = MongoSpark.loadAndInferSchema(sparkSession, readConfig);
            person_test2.createOrReplaceTempView("person2");

            // Join the two views on name and aggregate; the result is written to
            // the session-level output URI (testdb.test3).
            Dataset<Row> sql = sparkSession.sql("select count(*) as count ,sum(t1.age) as sum from  person t1 inner join person2 t2 on t1.name=t2.name");
            MongoSpark.save(sql);
        } finally {
            sparkSession.stop();
        }
    }
}
