package org.eking.bigdata.spark;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.apache.spark.sql.types.StructField;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructType;
import org.apache.spark.api.java.function.Function;
import static org.apache.spark.sql.functions.col;

public class SparkML {

	/**
	 * Entry point: builds a small in-memory dataset (double label + free-text
	 * sentence) and the matching {@code StructType} schema, as scaffolding for
	 * a Spark ML example (e.g. tokenization / feature extraction).
	 *
	 * <p>NOTE(review): a large block of commented-out DataFrame/SQL example
	 * code was removed here — recover it from version control if needed.
	 *
	 * @param args command-line arguments (unused)
	 */
	public static void main(String[] args) {
		// Sample rows: each row is (label: double, sentence: String).
		List<Row> data = Arrays.asList(
				RowFactory.create(0.0, "Hi I heard about Spark"),
				RowFactory.create(0.0, "I wish Java could use case classes"),
				RowFactory.create(1.0, "Logistic regression models are neat"));

		// Schema matching the rows above: both columns non-nullable.
		StructType schema = new StructType(new StructField[] {
				new StructField("label", DataTypes.DoubleType, false,
						Metadata.empty()),
				new StructField("sentence", DataTypes.StringType, false,
						Metadata.empty()) });

		// TODO(review): 'data' and 'schema' are built but never used — the
		// SparkSession + spark.createDataFrame(data, schema) step is missing.
	}

}
