package com.demo.spark.sql;

import com.mongodb.spark.MongoSpark;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.ForeachFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.ArrayList;
import java.util.List;

public class DataFrameWriteMongo {

    /**
     * Reads space-separated (name, age) records from a text file, builds a
     * DataFrame with a two-column string schema, and writes it to MongoDB via
     * the MongoDB Spark Connector. The output collection is taken from the
     * {@code spark.mongodb.output.uri} session config (testdb.test2).
     *
     * @param args optional: {@code args[0]} overrides the default input file path
     */
    public static void main(String[] args) {

        // App name fixed: this job writes to MongoDB; the previous
        // "spark support hive" name was a copy-paste leftover.
        SparkSession sparkSession = SparkSession.builder().appName("DataFrameWriteMongo")
                .config("spark.sql.warehouse.dir", "/user/spark")
                .config("spark.mongodb.input.uri", "mongodb://hadoop-3:27017/testdb.test1")
                .config("spark.mongodb.output.uri", "mongodb://hadoop-3:27017/testdb.test2")
                .master("local")
                .getOrCreate();

        try {
            JavaSparkContext sparkContext = JavaSparkContext.fromSparkContext(sparkSession.sparkContext());
            // Log levels must be the uppercase names (ALL, DEBUG, ERROR, ...);
            // lowercase "error" is rejected by newer Spark versions.
            sparkContext.setLogLevel("ERROR");

            // Allow the input path to be supplied on the command line; fall back
            // to the original hard-coded location for backward compatibility.
            String inputPath = args.length > 0
                    ? args[0]
                    : "D:\\git\\hadoop-cloudera-demo\\src\\main\\resources\\person.txt";

            // Build the schema from "name age" — both columns nullable strings.
            String schemaString = "name age";
            List<StructField> fields = new ArrayList<>();
            for (String fieldName : schemaString.split(" ")) {
                fields.add(DataTypes.createStructField(fieldName, DataTypes.StringType, true));
            }
            StructType schema = DataTypes.createStructType(fields);

            // Parse each line into a Row, skipping blank or malformed lines so a
            // bad record cannot kill the job with ArrayIndexOutOfBoundsException.
            JavaRDD<Row> rows = sparkContext.textFile(inputPath)
                    .map(line -> line.split(" "))
                    .filter(parts -> parts.length >= 2)
                    .map(parts -> RowFactory.create(parts[0], parts[1]));

            Dataset<Row> dataFrame = sparkSession.createDataFrame(rows, schema);

            // Persist the DataFrame to the collection configured in
            // spark.mongodb.output.uri.
            MongoSpark.save(dataFrame);
        } finally {
            // Always release driver resources, even if the read or write fails.
            sparkSession.stop();
        }
    }
}
