package com.example.springbootspark.data;

import java.util.Arrays;
import java.util.Properties;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;

import scala.Tuple2;

/**
 * Demonstrates reading a MySQL table into a Spark {@code Dataset} over JDBC,
 * printing its row count and contents.
 *
 * <p>Created by Administrator on 2017/11/6.
 */
public class SparkMysql {
	// NOTE(review): "logsger" looks like a typo for "logger"; the field is public,
	// so the name is kept as-is to avoid breaking external references.
	public static org.apache.log4j.Logger logsger = org.apache.log4j.Logger.getLogger(SparkMysql.class);

	/**
	 * Entry point: creates a local SparkContext, reads the MySQL table, then
	 * stops the context.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		JavaSparkContext sparkContext = new JavaSparkContext(
				new SparkConf().setAppName("SparkMysql").setMaster("local[5]"));
		SQLContext sqlContext = new SQLContext(sparkContext);
		try {
			// Read and display the MySQL table.
			readMySQL(sqlContext);
		} finally {
			// Always release Spark resources, even if the read fails.
			sparkContext.stop();
		}
	}

	/**
	 * Reads the {@code module} table of the {@code office} MySQL database via
	 * Spark's JDBC data source, prints the row count, and shows the contents.
	 *
	 * @param sqlContext the Spark SQL context used to issue the JDBC read
	 */
	private static void readMySQL(SQLContext sqlContext) {
		// JDBC connection URL of the source database.
		String url = "jdbc:mysql://192.168.1.201:3306/office";
		// Name of the table to read.
		String table = "module";

		// Connection credentials and JDBC driver class.
		// NOTE(review): credentials are hard-coded; consider externalizing them
		// to configuration instead of committing them to source.
		Properties connectionProperties = new Properties();
		connectionProperties.put("user", "root");
		connectionProperties.put("password", "111111");
		connectionProperties.put("driver", "com.mysql.jdbc.Driver");

		// The original message claimed the test.user_test table was being read;
		// report what is actually read instead.
		System.out.println("Reading table " + table + " from " + url);
		// Read all rows. Typed as Dataset<Row> instead of the raw Dataset type;
		// the redundant select("*") is dropped (a full read already returns all columns).
		Dataset<Row> jdbcDF = sqlContext.read().jdbc(url, table, connectionProperties);
		// javaRDD() yields Row elements — the original's JavaRDD<String> was mistyped
		// and only compiled through the raw Dataset type.
		JavaRDD<Row> rows = jdbcDF.javaRDD();
		System.out.println("----------------" + rows.count() + "-----------");
		// Display the table contents.
		jdbcDF.show();
	}
}