package statistics;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import com.mongodb.spark.MongoSpark;
import org.bson.Document;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;

import java.util.HashMap;

/**
 * Minimal demo that loads a MongoDB collection into Spark via the MongoDB
 * Spark connector, registers it as a temporary SQL view, and prints a
 * projected column.
 *
 * <p>Reads from the collection configured by {@code spark.mongodb.input.uri};
 * writes nothing back to MongoDB.
 */
public class Spark_Mongo {
    public static void main(String[] args) {
        // SECURITY NOTE(review): credentials are hard-coded in the connection
        // URI below. Externalize them (env var, config file, secret manager)
        // before this leaves a local test setup.
        SparkSession spark = SparkSession.builder()
                .master("local[*]")
                .appName("MongoSparkConnectorIntro")
                .config("spark.mongodb.input.uri", "mongodb://root:jhmk123456@192.168.8.147:21234/local_test_new.test?serverSelectionTimeoutMS=5000&connectTimeoutMS=10000&authSource=admin&authMechanism=SCRAM-SHA-1")
                .getOrCreate();
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
        try {
            jsc.setLogLevel("WARN");

            // Load the configured collection as a DataFrame and expose it to
            // Spark SQL under the view name "implicit".
            Dataset<Row> implicitDS = MongoSpark.load(jsc).toDF();
            implicitDS.createOrReplaceTempView("implicit");

            // Project the nested field result.itemname and print it to stdout.
            spark.sql("select result.itemname from implicit").show();
        } finally {
            // Always release cluster resources, even if the query fails.
            // Close the JavaSparkContext wrapper before stopping the session
            // that owns the underlying SparkContext.
            jsc.close();
            spark.close();
        }
    }
}
