package site.yunnong.atvris.recommend.test;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import java.net.URL;

/**
 * @author zjh
 * @date 2021/9/15 19:38
 */
public class SimpleApp {

    /**
     * Smoke test for the local Spark setup: loads the bundled movie-sample CSV
     * from the classpath and prints its schema and first ten rows.
     *
     * @param args unused
     * @throws IllegalStateException if {@code sampledata/movies.csv} is not on the classpath
     */
    public static void main(String[] args) {
        // Silence Spark's verbose INFO/WARN logging; keep only errors.
        Logger.getLogger("org").setLevel(Level.ERROR);

        SparkConf conf = new SparkConf()
                .setMaster("local")
                .setAppName("featureEngineering")
                .set("spark.submit.deployMode", "client");

        // Resolve the sample file before starting Spark so a missing resource
        // fails fast with a clear message instead of an NPE on getPath().
        URL resource = SimpleApp.class.getResource("/sampledata/movies.csv");
        if (resource == null) {
            throw new IllegalStateException("sampledata/movies.csv not found on the classpath");
        }

        SparkSession spark = SparkSession.builder().config(conf).getOrCreate();
        try {
            Dataset<Row> movieSamples = spark.read()
                    .format("csv")
                    .option("header", "true")
                    .load(resource.getPath());
            System.out.println("Raw Movie Samples:");
            movieSamples.printSchema();
            movieSamples.show(10);
        } finally {
            // Release the local SparkContext; the original never stopped it.
            spark.stop();
        }
    }
}
