package org.sn.jdish.spark.sql;

import java.util.ArrayList;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;

/**
 * JSON data-source example: reads JSON from a file and from an in-memory
 * RDD, runs Spark SQL queries over temp tables, and writes JSON output.
 * 
 * @author snzigod@hotmail.com
 */
public class JSONDataSource {

	/**
	 * Entry point: creates a local SparkContext, runs the JSON demo, and
	 * guarantees the context is released.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		/*
		 * SparkConf: configures the run mode (local / standalone / yarn), the
		 * application name (visible in the Web UI) and any runtime resource
		 * requirements.
		 */
		SparkConf sparkConf = new SparkConf().setAppName("sparkSql").setMaster("local");

		/*
		 * SparkContext, built from the SparkConf, is the single gateway to the
		 * cluster; creating it also creates the task scheduler.
		 */
		JavaSparkContext javaSparkContext = new JavaSparkContext(sparkConf);
		try {
			readJson(javaSparkContext);
		} finally {
			// Close in a finally block so the context is released even if the
			// job above throws (the original leaked it on failure).
			javaSparkContext.close();
		}
	}

	/**
	 * Demonstrates the JSON data source: loads a JSON file and an in-memory
	 * RDD of JSON strings into DataFrames, registers them as temp tables,
	 * queries them with SQL, and writes one result back out as JSON.
	 *
	 * @param javaSparkContext an already-started context; NOT closed here —
	 *                         the caller owns its lifecycle
	 */
	public static void readJson(JavaSparkContext javaSparkContext) {
		SQLContext sqlContext = new SQLContext(javaSparkContext);

		// Load a JSON file into a DataFrame and expose it to SQL as "people".
		DataFrameReader dataFrameReader = sqlContext.read();
		DataFrame df = dataFrameReader.format("json").load("data/people.json");
		df.printSchema();
		df.show();
		// Register a temporary table for SQL queries.
		df.registerTempTable("people");

		DataFrame teenagers = sqlContext.sql("SELECT name FROM people WHERE age >= 13 AND age <= 19");
		List<String> teenagerNames = teenagers.toJavaRDD().map(new Function<Row, String>() {
			private static final long serialVersionUID = 1L;

			@Override
			public String call(Row row) {
				// Column 0 is "name": the query projects exactly one column.
				return "Name: " + row.getString(0);
			}
		}).collect();

		for (String name : teenagerNames) {
			System.out.println(name);
		}

		// Build an in-memory JSON data source.
		List<String> personInfoJSONs = new ArrayList<String>();
		personInfoJSONs.add("{\"name\":\"ZhangFa\",\"age\":32}");
		personInfoJSONs.add("{\"name\":\"Faker\",\"age\":12}");
		personInfoJSONs.add("{\"name\":\"Moon\",\"age\":62}");

		// Create a temp table from the RDD of JSON strings. json(RDD) already
		// implies the JSON format, so the redundant format("json") is dropped.
		JavaRDD<String> personInfosRDD = javaSparkContext.parallelize(personInfoJSONs);
		DataFrame personDataFrame = sqlContext.read().json(personInfosRDD);
		personDataFrame.printSchema();
		personDataFrame.show();
		// Register a temporary table for SQL queries (table name kept as
		// "student" for compatibility with the original example).
		personDataFrame.registerTempTable("student");

		DataFrame dataFrame = sqlContext.sql("select * from student");
		dataFrame.javaRDD().foreach(new VoidFunction<Row>() {
			private static final long serialVersionUID = 1L;

			@Override
			public void call(Row row) throws Exception {
				System.out.println(row);
			}
		});

		// Overwrite any previous output under the "student" directory.
		dataFrame.write().format("json").mode(SaveMode.Overwrite).save("student");
	}
}
