package Demo2;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.*;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

import java.util.ArrayList;
import java.util.List;

/**
 * Created by lenovo on 2017/11/13.
 */
/**
 * Spark SQL demo (Spark 1.x DataFrame API).
 *
 * <p>Pipeline:
 * <ol>
 *   <li>Builds an in-memory JSON RDD of student scores and registers it as
 *       temp table {@code student_info}.</li>
 *   <li>Queries the names of students with score &gt; 70 and collects them
 *       to the driver.</li>
 *   <li>Loads {@code people.json} (name/age) as temp table {@code student},
 *       selects the rows whose name is in the collected list.</li>
 *   <li>Joins the two datasets by name into (name, age, score) rows and
 *       writes them as JSON to {@code D://GoodStudent}.</li>
 * </ol>
 */
public class SparkSQL_JavaDemo1 {
    public static void main(String[] args) {
        // Windows-only workaround: point Hadoop at a local install so
        // winutils.exe can be found when writing output files.
        System.setProperty("hadoop.home.dir", "E://hadoop-liyadong//hadoop-2.7.1");

        SparkConf conf = new SparkConf()
                .setMaster("local[2]")
                .setAppName("SparkSQL_JavaDemo1")
                .set("spark.testing.memory", "2147480000");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            SQLContext sqlContext = new SQLContext(sc);

            // --- 1. In-memory JSON dataset of student scores ---
            List<String> studentInfoJSONs = new ArrayList<String>();
            studentInfoJSONs.add("{'name':'Justin','score':99}");
            studentInfoJSONs.add("{'name':'Andy','score':78}");
            studentInfoJSONs.add("{'name':'Michael','score':60}");
            JavaRDD<String> studentInfoRDD = sc.parallelize(studentInfoJSONs);
            DataFrame studentInfoDF = sqlContext.read().json(studentInfoRDD);
            studentInfoDF.registerTempTable("student_info");

            // --- 2. Names of students scoring above 70 ---
            String sql = "select name from student_info where score >70";
            DataFrame nameDF = sqlContext.sql(sql);
            nameDF.javaRDD().foreach(new VoidFunction<Row>() {
                @Override
                public void call(Row row) throws Exception {
                    System.out.println(row.getAs("name"));
                }
            });
            List<String> nameList = nameDF.toJavaRDD().map(new Function<Row, String>() {
                @Override
                public String call(Row row) throws Exception {
                    return row.getAs("name");
                }
            }).collect();

            // --- 3. Load people.json and select rows matching the names ---
            DataFrame df = sqlContext.read().json("D://spark实训//people.json");
            df.registerTempTable("student");
            // Build the IN (...) clause with a StringBuilder instead of
            // repeated String += (avoids O(n^2) copying). Single quotes in
            // names are doubled so a quote cannot break out of the literal.
            // NOTE(review): string-built SQL is still fragile; with a
            // SQLContext that supports it, a join on DataFrames would be safer.
            StringBuilder sql1 = new StringBuilder("select name,age from student where name in (");
            for (int i = 0; i < nameList.size(); i++) {
                if (i > 0) {
                    sql1.append(",");
                }
                sql1.append("'").append(nameList.get(i).replace("'", "''")).append("'");
            }
            sql1.append(")");
            System.out.println(sql1);

            DataFrame studentDF = sqlContext.sql(sql1.toString());

            // --- 4. Join (name -> age) with (name -> score) by name ---
            JavaPairRDD<String, Tuple2<String, String>> goodStudentRDD =
                    studentDF.javaRDD().mapToPair(new PairFunction<Row, String, String>() {
                        @Override
                        public Tuple2<String, String> call(Row row) throws Exception {
                            return new Tuple2<String, String>(
                                    String.valueOf(row.getAs("name")),
                                    String.valueOf(row.getAs("age")));
                        }
                    }).join(studentInfoDF.javaRDD().mapToPair(new PairFunction<Row, String, String>() {
                        @Override
                        public Tuple2<String, String> call(Row row) throws Exception {
                            return new Tuple2<String, String>(
                                    String.valueOf(row.getAs("name")),
                                    String.valueOf(row.getAs("score")));
                        }
                    }));

            // Flatten the join result (name, (age, score)) into Row(name, age, score).
            JavaRDD<Row> goodStudentRowRDD = goodStudentRDD.map(
                    new Function<Tuple2<String, Tuple2<String, String>>, Row>() {
                        @Override
                        public Row call(Tuple2<String, Tuple2<String, String>> v1) throws Exception {
                            return RowFactory.create(v1._1, v1._2._1, v1._2._2);
                        }
                    });

            goodStudentRowRDD.foreach(new VoidFunction<Row>() {
                @Override
                public void call(Row row) throws Exception {
                    System.out.println(row.get(0) + "," + row.get(1) + "," + row.get(2));
                }
            });

            // --- 5. Persist the joined rows as JSON ---
            // All three columns are strings because the pair RDDs above
            // stringified age/score via String.valueOf.
            List<StructField> fields = new ArrayList<StructField>();
            fields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
            fields.add(DataTypes.createStructField("age", DataTypes.StringType, true));
            fields.add(DataTypes.createStructField("score", DataTypes.StringType, true));

            StructType structType = DataTypes.createStructType(fields);
            DataFrame goodDF = sqlContext.createDataFrame(goodStudentRowRDD, structType);
            goodDF.write().format("json").mode(SaveMode.Append).save("D://GoodStudent");
        } finally {
            // Always release the Spark context, even if a stage fails.
            sc.close();
        }
    }
}
