package site.yunnong.atvris.recommend.offline.spark.featureing;

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;

import java.util.Properties;

/**
 * @author zjh
 * @date 2021/9/16 9:47
 */
/**
 * Spark driver that reads the {@code video} table from a local MySQL
 * database ({@code yunnong}) over JDBC and prints its contents.
 *
 * <p>Runs with a local master and a single shuffle partition, so it is
 * intended for local debugging / feature-building experiments only.
 */
public class JavaBuildData {

    /** JDBC URL of the source database. */
    private static final String JDBC_URL =
            "jdbc:mysql://localhost:3306/yunnong?useUnicode=true&characterEncoding=UTF-8&serverTimezone=Asia/Shanghai&allowMultiQueries=true";

    /** Table to read. */
    private static final String TABLE = "video";

    public static void main(String[] args) {
        // Silence Spark's verbose INFO logging; keep errors visible.
        Logger.getLogger("org").setLevel(Level.ERROR);
        SparkConf conf = new SparkConf()
                .setMaster("local")
                .setAppName("mysql")
                // Single shuffle partition: this is a tiny local job.
                .set("spark.sql.shuffle.partitions", "1");

        JavaSparkContext sparkContext = new JavaSparkContext(conf);
        try {
            SQLContext sqlContext = new SQLContext(sparkContext);
            // Read the MySQL table and print it.
            readMySQL(sqlContext);
        } finally {
            // Always stop the SparkContext, even if the read fails,
            // so the local Spark runtime shuts down cleanly.
            sparkContext.stop();
        }
    }

    /**
     * Reads all rows of the {@code video} table via Spark's JDBC data
     * source and displays them with {@link Dataset#show()}.
     *
     * @param sqlContext active SQL context bound to the running SparkContext
     */
    private static void readMySQL(SQLContext sqlContext) {
        // Connection properties: driver class, user, and password.
        // SECURITY NOTE(review): credentials were hardcoded in source.
        // They can now be overridden via MYSQL_USER / MYSQL_PASSWORD
        // environment variables; the hardcoded values remain only as a
        // backward-compatible fallback and should be removed before any
        // non-local use.
        Properties connectionProperties = new Properties();
        connectionProperties.put("driver", "com.mysql.cj.jdbc.Driver");
        connectionProperties.put("user", envOrDefault("MYSQL_USER", "root"));
        connectionProperties.put("password", envOrDefault("MYSQL_PASSWORD", "zjhzjh802200"));

        System.out.println("读取yunnong数据库中的表内容video");
        // Read the whole table; the jdbc() read already returns all
        // columns, so no extra select("*") projection is needed.
        Dataset<Row> jdbcDF = sqlContext.read().jdbc(JDBC_URL, TABLE, connectionProperties);
        // Print the first rows to stdout.
        jdbcDF.show();
    }

    /**
     * Returns the value of the given environment variable, or
     * {@code fallback} when it is unset or blank.
     *
     * @param name     environment variable name
     * @param fallback value used when the variable is absent or blank
     * @return the environment value or the fallback
     */
    private static String envOrDefault(String name, String fallback) {
        String value = System.getenv(name);
        return (value == null || value.isEmpty()) ? fallback : value;
    }
}
