package com.xian.spark.jdbc;

import org.apache.spark.sql.*;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * Spark 2.x example job:
 * reads a table from one MySQL instance and writes it into another MySQL instance via JDBC.
 */
public class MysqlToMysql {

    /**
     * Reads the {@code testdb.person} table from a source MySQL instance and
     * appends its rows into table {@code t1} of a target MySQL instance,
     * using the Spark 2.x JDBC data source.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        // try-with-resources guarantees the SparkSession is closed even if
        // the job throws (the original only closed it on the happy path).
        try (SparkSession session = SparkSession.builder()
                .appName("MysqlToMysql")
                .master("local")
                .getOrCreate()) {

            // Source MySQL connection options.
            // NOTE(review): credentials are hard-coded; move them to
            // configuration or environment variables before production use.
            Map<String, String> options = new HashMap<>();
            options.put("driver", "com.mysql.jdbc.Driver");
            options.put("url", "jdbc:mysql://r71:3306/testdb?characterEncoding=utf8&useSSL=TRUE");
            options.put("user", "root");
            options.put("password", "123456");
            options.put("dbtable", "testdb.person");

            // Load the source table as a DataFrame.
            Dataset<Row> tableDS = session.read().format("jdbc").options(options).load();

            // Print the first rows for a quick sanity check (show() defaults to 20).
            tableDS.show();

            // Target MySQL connection settings.
            String url = "jdbc:mysql://r71:3306/db2?characterEncoding=utf8&useSSL=TRUE";
            String table = "t1";
            Properties pro = new Properties();
            pro.setProperty("user", "root");
            pro.setProperty("password", "123456");

            // Tip: run once with SaveMode.Overwrite to let Spark create the
            // table (then adjust column types by hand), and use SaveMode.Append
            // afterwards; in practice running Append twice also works.
            //tableDS.write().mode(SaveMode.Overwrite).jdbc(url, table, pro);
            tableDS.write().mode(SaveMode.Append).jdbc(url, table, pro);
        }
    }
}
