package com.tod.spark.springbootspark.sql;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import scala.Tuple2;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.util.*;

/**
 * Spark JDBC data-source example: reads a student-info table and a
 * student-scores table from MySQL, joins them by student name, keeps
 * students scoring above 80, prints the result, and writes it back
 * to the {@code good_student_infos} table.
 */
public class JDBCDataSource {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("dataframe");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        SQLContext sqlContext = new SQLContext(jsc);

        // Typed map instead of the original raw double-brace initialization
        // (the anonymous HashMap subclass used raw types and pins a hidden
        // reference to its enclosing context).
        Map<String, String> options = new HashMap<>();
        options.put("url", "jdbc:mysql://*****:3306/databasename");
        options.put("dbtable", "******");
        options.put("user", "*****");
        options.put("password", "****");
        Dataset<Row> studentInfoDS = sqlContext.read().format("jdbc").options(options).load();

        // Re-point the same connection options at the scores table; the first
        // load() has already copied the previous values.
        options.put("dbtable", "%%%%%%%");
        Dataset<Row> studentScoresDS = sqlContext.read().format("jdbc").options(options).load();

        // BUG FIX: the info side of the join must come from studentInfoDS.
        // The original built BOTH pair RDDs from studentScoresDS, producing a
        // self-join of the scores table.
        JavaPairRDD<String, Integer> studentInfoPairRDD = studentInfoDS.javaRDD()
                .mapToPair(row -> new Tuple2<>(row.getString(0), row.getInt(1)));
        JavaPairRDD<String, Integer> studentScoresPairRDD = studentScoresDS.javaRDD()
                .mapToPair(row -> new Tuple2<>(row.getString(0), row.getInt(1)));
        // Joined value is (age, score) keyed by student name.
        JavaPairRDD<String, Tuple2<Integer, Integer>> studentsRDD =
                studentInfoPairRDD.join(studentScoresPairRDD);

        // Convert the JavaPairRDD into a JavaRDD<Row> of (name, age, score).
        JavaRDD<Row> studentRowsRDD = studentsRDD.map(
                (Function<Tuple2<String, Tuple2<Integer, Integer>>, Row>) tuple ->
                        RowFactory.create(tuple._1, tuple._2._1, tuple._2._2));

        // Keep only students whose score (column 2) is above 80.
        JavaRDD<Row> filteredStudentRowsRDD = studentRowsRDD.filter(row -> row.getInt(2) > 80);

        // Rebuild a Dataset with an explicit schema.
        List<StructField> structFields = new ArrayList<>();
        structFields.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        structFields.add(DataTypes.createStructField("age", DataTypes.IntegerType, true));
        structFields.add(DataTypes.createStructField("score", DataTypes.IntegerType, true));

        StructType structType = DataTypes.createStructType(structFields);
        Dataset<Row> studentsDS = sqlContext.createDataFrame(filteredStudentRowsRDD, structType);

        // collectAsList() compiles cleanly from Java; Dataset.collect()
        // returns Object in the Java API (Scala array erasure) and would
        // need a cast to Row[].
        studentsDS.collectAsList().forEach(row -> System.out.println(row));

        // Persist the filtered rows to MySQL. A PreparedStatement with bind
        // parameters replaces the original string-concatenated INSERT, which
        // was injection-prone AND inserted column 1 twice instead of
        // columns 1 and 2.
        // NOTE(review): foreachPartition would reuse one connection per
        // partition instead of opening one per row — consider for real loads.
        studentsDS.javaRDD().foreach(row -> {
            Class.forName("com.mysql.jdbc.Driver");
            String sql = "insert into good_student_infos values(?, ?, ?)";
            try (Connection conn = DriverManager.getConnection(
                         options.get("url"), options.get("user"), options.get("password"));
                 PreparedStatement stmt = conn.prepareStatement(sql)) {
                stmt.setString(1, row.getString(0));
                stmt.setInt(2, row.getInt(1));
                stmt.setInt(3, row.getInt(2));
                stmt.executeUpdate();
            }
        });

        // Release Spark resources.
        jsc.close();
    }
}
