package org.example.com.atguigu.day03;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.api.java.function.VoidFunction;
import scala.Tuple2;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Iterator;

/**
 * Parses "key value" lines into (String, Integer) pairs and writes them to a
 * MySQL table ({@code test2}) with one JDBC connection per partition, using
 * batched inserts flushed every {@link #BATCH_SIZE} rows.
 */
public class $1_Test2 {

    /** Flush the JDBC batch after this many buffered rows. */
    private static final int BATCH_SIZE = 1000;

    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setMaster("local[4]").setAppName("com.atguigu.day01.$01_RddCreate");
        // try-with-resources stops the SparkContext even if the job fails
        // (the original never closed it).
        try (JavaSparkContext sc = new JavaSparkContext(conf)) {

            JavaRDD<String> rdd1 = sc.parallelize(Arrays.asList("a 1", "b 2", "b 3", "c 1", "d 1", "e 5"));

            // Parse each "key value" line into a (key, value) tuple.
            JavaPairRDD<String, Integer> rdd2 = rdd1.mapToPair(new PairFunction<String, String, Integer>() {
                @Override
                public Tuple2<String, Integer> call(String s) throws Exception {
                    // Split once per record instead of twice.
                    String[] parts = s.split(" ");
                    return new Tuple2<>(parts[0], Integer.parseInt(parts[1]));
                }
            });

            // One connection per partition; rows are inserted in batches.
            rdd2.foreachPartition(new VoidFunction<Iterator<Tuple2<String, Integer>>>() {
                @Override
                public void call(Iterator<Tuple2<String, Integer>> tuple2Iterator) throws Exception {
                    // try-with-resources closes the statement and then the
                    // connection even when an earlier close/execute throws;
                    // the original finally leaked the connection whenever
                    // statement.close() threw first.
                    try (Connection connection = DriverManager.getConnection(
                                 "jdbc:mysql://hadoop102:3306/spark_test_db", "root", "root");
                         PreparedStatement statement =
                                 connection.prepareStatement("insert into test2 values(?,?)")) {
                        int buffered = 0;
                        while (tuple2Iterator.hasNext()) {
                            Tuple2<String, Integer> data = tuple2Iterator.next();
                            statement.setString(1, data._1);
                            statement.setInt(2, data._2);
                            statement.addBatch();
                            // Original tested i % 1000 == 0 with i starting at 0,
                            // so the very first row flushed a one-row batch.
                            // Count first, then flush on exact multiples.
                            if (++buffered % BATCH_SIZE == 0) {
                                statement.executeBatch();
                                statement.clearBatch();
                            }
                        }
                        // Flush the final partial batch.
                        statement.executeBatch();
                    }
                    // No catch block here on purpose: swallowing the exception
                    // (the original printed the stack trace and continued)
                    // silently lost data; letting it propagate makes Spark
                    // fail/retry the task.
                }
            });
        }
    }
}
