package Demo2;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.SparkContext;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import scala.Tuple2;
import scala.tools.cmd.gen.AnyVals;

/**
 * Created by lenovo on 2017/11/20.
 * Spark SQL + MySQL example: reads the {@code people} table over JDBC, joins it by
 * name with an in-memory JSON dataset of student scores, and inserts the joined
 * (name, age, score) rows into the {@code goodStudent} MySQL table.
 */
public class SparkSQL_JavaJDBC {
    public static void main(String[] args) {
        System.setProperty("hadoop.home.dir", "E://hadoop-liyadong//hadoop-2.7.1");

        SparkConf conf = new SparkConf()
                .setAppName("SparkSQL_JavaJDBC")
                .setMaster("local[3]")
                .set("spark.testing.memory", "2147480000");
        JavaSparkContext sc = new JavaSparkContext(conf);
        SQLContext sparkSQL = new SQLContext(sc);

        // JDBC source options for the MySQL "people" table.
        HashMap<String, String> jdbcMap = new HashMap<String, String>();
        jdbcMap.put("url", "jdbc:mysql://localhost:3306/cppds");
        jdbcMap.put("dbtable", "people");
        jdbcMap.put("user", "root");
        jdbcMap.put("password", "root");

        DataFrame jdbcDF = sparkSQL.read().format("jdbc").options(jdbcMap).load();
        jdbcDF.registerTempTable("people");
        DataFrame peopleDF = sparkSQL.sql("select * from people");
        // Print each people row for inspection.
        peopleDF.javaRDD().foreach(new VoidFunction<Row>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Row row) throws Exception {
                System.out.println(row.get(0) + "*" + row.get(1));
            }
        });

        // In-memory JSON dataset of student scores, keyed by name.
        List<String> studentInfoJSONs = new ArrayList<String>();
        studentInfoJSONs.add("{'name':'Justin','score':99}");
        studentInfoJSONs.add("{'name':'Andy','score':78}");
        studentInfoJSONs.add("{'name':'Michael','score':60}");
        JavaRDD<String> studentInfoRDD = sc.parallelize(studentInfoJSONs);
        DataFrame studentInfoDF = sparkSQL.read().json(studentInfoRDD);

        // Inner join on name: (name, (age, score)).
        JavaPairRDD<String, Tuple2<String, String>> goodStudentRDD = peopleDF.javaRDD()
                .mapToPair(new PairFunction<Row, String, String>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public Tuple2<String, String> call(Row row) throws Exception {
                        return new Tuple2<String, String>(
                                String.valueOf(row.getAs("name")),
                                String.valueOf(row.getAs("age")));
                    }
                })
                .join(studentInfoDF.javaRDD().mapToPair(new PairFunction<Row, String, String>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public Tuple2<String, String> call(Row row) throws Exception {
                        System.out.println(row.getAs("name").toString() + "," + row.getAs("score").toString());
                        return new Tuple2<String, String>(
                                row.getAs("name").toString(),
                                row.getAs("score").toString());
                    }
                }));

        // Flatten the pair RDD into Rows of (name, age, score).
        // Typed as JavaRDD<Row>/Function<..., Row> instead of the original raw
        // JavaRDD and Function<..., Object>, removing unchecked warnings.
        JavaRDD<Row> goodStudentNameRDD = goodStudentRDD.map(
                new Function<Tuple2<String, Tuple2<String, String>>, Row>() {
                    private static final long serialVersionUID = 1L;
                    @Override
                    public Row call(Tuple2<String, Tuple2<String, String>> v1) throws Exception {
                        return RowFactory.create(v1._1, v1._2._1, v1._2._2);
                    }
                });

        // Schema for the joined rows; all columns kept as nullable strings.
        List<StructField> list = new ArrayList<StructField>();
        list.add(DataTypes.createStructField("name", DataTypes.StringType, true));
        list.add(DataTypes.createStructField("age", DataTypes.StringType, true));
        list.add(DataTypes.createStructField("score", DataTypes.StringType, true));
        StructType strucType = DataTypes.createStructType(list);
        DataFrame goodDF = sparkSQL.createDataFrame(goodStudentNameRDD, strucType);

        // Insert each joined row into MySQL.
        goodDF.javaRDD().foreach(new VoidFunction<Row>() {
            private static final long serialVersionUID = 1L;
            @Override
            public void call(Row row) throws Exception {
                String sql = "insert into goodStudent (name,age,score) values(?,?,?)";
                // try-with-resources closes both the statement and the per-row
                // connection (the original leaked both); a PreparedStatement with
                // placeholders replaces string-concatenated SQL, which was
                // injection-prone and NPE'd if createStatement() had failed.
                try (Connection con = SparkSQL_JavaJDBC.getCon();
                     PreparedStatement ps = con.prepareStatement(sql)) {
                    ps.setString(1, String.valueOf(row.getAs("name")));
                    ps.setString(2, String.valueOf(row.getAs("age")));
                    ps.setString(3, String.valueOf(row.getAs("score")));
                    ps.executeUpdate();
                }
            }
        });

        sc.stop();
    }

    /**
     * Opens a new JDBC connection to the local MySQL database.
     * Callers are responsible for closing the returned connection.
     *
     * @return an open connection to {@code jdbc:mysql://localhost:3306/cppds}
     * @throws RuntimeException if the MySQL driver cannot be loaded or the
     *         connection cannot be established
     */
    public static Connection getCon() {
        try {
            Class.forName("com.mysql.jdbc.Driver");
            return DriverManager.getConnection("jdbc:mysql://localhost:3306/cppds", "root", "root");
        } catch (ClassNotFoundException | SQLException e) {
            // Fail loudly instead of silently returning null (the original's empty
            // catch guaranteed a NullPointerException at the call site).
            throw new RuntimeException("Unable to open MySQL connection", e);
        }
    }
}
