package misssad.simple_project;

import java.util.ArrayList;
import java.util.List;

import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoder;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import model.Person;

/**
 * Demonstrates two ways of turning an RDD into a Spark SQL Dataset/DataFrame:
 * 1) reflection-based: infer the schema from a {@code Person} JavaBean;
 * 2) programmatic: build a {@link StructType} schema by hand and apply it to an RDD of Rows.
 *
 * Expects the standard Spark example file {@code people.txt} (lines of "name, age")
 * and the Spark master supplied externally, e.g. {@code -Dspark.master=local}.
 */
public class SimpleRDDsIntoDataset {

	public static void main(String[] args) {
		// Run with -Dspark.master=local
		System.setProperty("hadoop.home.dir", "D:\\Program Files\\hadoop-2.7.6");
		SparkSession spark = SparkSession.builder().appName("RDDs转换").getOrCreate();

		// ---------- 1) Reflection-based schema inference ----------
		// Create an RDD of Person objects from a text file of "name, age" lines.
		JavaRDD<Person> peopleRDD = spark.read()
		.textFile("D:\\Program Files\\spark-2.3.1-bin-hadoop2.7\\examples\\src\\main\\resources\\people.txt")
		.javaRDD()
		.map(line -> {
			String[] parts = line.split(",");
			Person person = new Person();
			person.setName(parts[0]);
			person.setAge(Integer.parseInt(parts[1].trim()));
			return person;
		});
		// Apply the schema inferred from the Person JavaBean to get a DataFrame.
		Dataset<Row> peopleDF = spark.createDataFrame(peopleRDD, Person.class);
		// Register the DataFrame as a temporary view so it can be queried with SQL.
		peopleDF.createOrReplaceTempView("people");

		Dataset<Row> sql = spark.sql("select name from people where age between 13 and 19");
		// Map each result row to a display string; column 0 is "name".
		Dataset<String> teenagerNames = sql.map(
				(MapFunction<Row, String>) row -> "Name: " + row.getString(0),
				Encoders.STRING());
		teenagerNames.show();

		// ---------- 2) Programmatically specifying the schema ----------
		// Step 1: create an RDD of raw text lines from the original file.
		JavaRDD<String> peopleRDD2 = spark.sparkContext().textFile("D:\\Program Files\\spark-2.3.1-bin-hadoop2.7\\examples\\src\\main\\resources\\people.txt", 1).toJavaRDD();
		// The schema is encoded as a space-separated string of column names.
		String schemaString = "name age";
		// Step 2: build a StructField (nullable StringType) for each column name.
		List<StructField> fields = new ArrayList<>();
		for(String fieldName : schemaString.split(" ")){
			StructField field = DataTypes.createStructField(fieldName, DataTypes.StringType, true);
			fields.add(field);
		}

		StructType schema = DataTypes.createStructType(fields);
		// Step 3: convert each "name, age" line into a Row matching the schema.
		JavaRDD<Row> rowRDD = peopleRDD2.map((Function<String, Row>) record -> {
			String[] attributes = record.split(",");
			return RowFactory.create(attributes[0], attributes[1].trim());
		});

		// Step 4: apply the schema to the Row RDD to obtain a DataFrame.
		Dataset<Row> peopleDataFrame = spark.createDataFrame(rowRDD, schema);
		// Re-registering under the same name replaces the earlier "people" view.
		peopleDataFrame.createOrReplaceTempView("people");
		// Query the freshly created view.
		Dataset<Row> result = spark.sql("select * from people");
		// result now behaves like a table; print it.
		result.show();
		// Project the name column into a typed Dataset<String> for display.
		Dataset<String> nameDS = result.map((MapFunction<Row, String>) row -> "Name: " + row.getString(0), Encoders.STRING());
		nameDS.show();

		// Release the session's resources before exiting.
		spark.stop();
	}
}
