package misssad.simple_project;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

import model.Person;

import static org.apache.spark.sql.functions.col;

import java.util.Collections;

import org.apache.spark.sql.AnalysisException;

public class SimpleApp {

	
	public static void main(String[] args) throws AnalysisException {
		System.setProperty("hadoop.home.dir", "D:\\Program Files\\hadoop-2.7.6");
		
		SparkSession spark = SparkSession.builder().appName("Simple Application").getOrCreate();
/*		
		String logFile = "D:\\Program Files\\spark-2.3.1-bin-hadoop2.7\\README.md";
		Dataset<String> logData = spark.read().textFile(logFile).cache();
		long numAs = logData.filter(s -> s.contains("a")).count();
		long numBs = logData.filter(s -> s.contains("b")).count();
		System.out.println("Lines with a:" + numAs + ", lines with b: " + numBs);*/
		
//		Dataset<Row> df = spark.read().json("D:\\Program Files\\spark-2.3.1-bin-hadoop2.7\\examples\\src\\main\\resources\\people.json");
//		df.show();
		/*df.printSchema();
		df.select("name").show();
		df.select(col("name"), col("age").plus(1)).show();
		df.select(col("age").gt(21)).show();
		df.filter(col("age").gt(21)).show();
		df.groupBy("age").count().show();*/
		
//		df.createOrReplaceTempView("people");
		/*df.createGlobalTempView("people");
		Dataset<Row> sqlDF = spark.sql("select * from global_temp.people");
		sqlDF.show();
		spark.newSession().sql("select * from global_temp.people");*/
		
		
		
		spark.stop();
	}
}
