package com.ts.blog.batch.mysql;


import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;

import java.util.Properties;

/**
 * Demo that reads MySQL tables into Spark {@code Dataset<Row>}s via the JDBC
 * data source and prints their contents to stdout.
 *
 * @author yishiyong
 * @since 2018-12-25
 */
public class SparkMysqlDome {

    // Fixed: was getLogger(SparkMysql.class) — a copy-paste bug that tagged all log
    // output with the wrong class name. Kept public static (not private final) so any
    // existing external references to this field keep working.
    public static org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(SparkMysqlDome.class);

    /**
     * Entry point: starts a local SparkSession, reads the {@code blog} table from
     * the remote MySQL instance and prints it, then stops the session.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        SparkSession session = SparkSession.builder()
                .master("local[*]")
                .appName("Spark shell")
                .getOrCreate();
        try {
            readMySQLRemote(session);
        } finally {
            // Always release Spark resources, even when the read fails.
            session.stop();
        }
    }

    /**
     * Reads the full contents of the {@code blog} table through the SparkSession
     * JDBC reader and prints them to stdout.
     *
     * @param session an active SparkSession
     */
    private static void readMySQLRemote(SparkSession session) {
        String url = "jdbc:mysql://172.18.101.97:3306/lnaudit";
        String table = "blog";
        Properties connectionProperties = buildConnectionProperties();

        // Fixed: the original message claimed the "user_test" table of the "test"
        // database, which is not what this method reads.
        System.out.println("Reading table '" + table + "' from " + url);
        // Fixed: .option("header","true") is a CSV option and is ignored by the
        // JDBC source; .select("*") was redundant — jdbc() already returns all columns.
        Dataset<Row> jdbcDF = session.read().jdbc(url, table, connectionProperties);
        jdbcDF.show();
    }

    /**
     * Reads the full contents of the {@code sys_user} table through the legacy
     * SQLContext JDBC reader and prints them to stdout.
     *
     * @param sqlContext an active SQLContext
     */
    private static void readMySQL(SQLContext sqlContext) {
        String url = "jdbc:mysql://172.18.101.97:3306/lnaudit";
        String table = "sys_user";
        Properties connectionProperties = buildConnectionProperties();

        System.out.println("Reading table '" + table + "' from " + url);
        // Fixed: .select("*") after jdbc() was redundant.
        Dataset<Row> jdbcDF = sqlContext.read().jdbc(url, table, connectionProperties);
        jdbcDF.show();
    }

    /**
     * Builds the shared JDBC connection properties (user, password, driver).
     * Extracted because both read methods duplicated this setup verbatim.
     *
     * <p>NOTE(review): credentials are hard-coded; move them to configuration or
     * environment variables before this leaves a demo context.
     */
    private static Properties buildConnectionProperties() {
        Properties props = new Properties();
        props.put("user", "lnaudit");
        props.put("password", "lnaudit");
        // com.mysql.jdbc.Driver is the legacy (Connector/J 5.x) class name; with
        // Connector/J 8+ the class is com.mysql.cj.jdbc.Driver.
        props.put("driver", "com.mysql.jdbc.Driver");
        return props;
    }
}