package com.bocommlife.mi;

import java.util.Properties;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SparkSession;

public class ReadMysql {

	/**
	 * Entry point: builds a Spark context, runs the MySQL read job, and
	 * shuts the context down.
	 *
	 * @param args command-line arguments (unused)
	 * @throws Exception if the Spark job fails
	 */
	public static void main(String[] args) throws Exception {
		// Master is intentionally not set here so it can be supplied at
		// submit time via spark-submit --master (local or cluster).
		SparkConf conf = new SparkConf().setAppName("Jimmy's read sql spark");
		JavaSparkContext sc = new JavaSparkContext(conf);
		try {
			SQLContext sqlContext = new SQLContext(sc);
			readMySql(sqlContext);
		} finally {
			// Guarantee the context is released even when the job throws,
			// so the application does not leak cluster resources.
			sc.stop();
		}
	}
	
//	private static void readMySql(SparkSession sparkSession) {
//		String url = "jdbc:mysql://localhost:3306/scrapydb?user=root&password=root1234";
//		String table = "t_sinahealth_staged";
//		Properties connectionProperties = new Properties();
//		connectionProperties.put("user","root");
//		connectionProperties.put("password","root1234");
//		connectionProperties.put("driver","com.mysql.jdbc.Driver");
//		Dataset<Row> jdbcDF = sparkSession.jdbc(url,table,connectionProperties);
//		Dataset<Row> jdbcDF = sparkSession.sql("select title from t_sinahealth_staged").registerTempTable("t_sinahealth_staged");
//		jdbcDF.show();
//	}
	
	/**
	 * Reads the {@code t_sinahealth_staged} table from MySQL over JDBC,
	 * prints its row count, and writes that count result out as JSON
	 * under the relative path {@code abc}.
	 *
	 * @param sqlContext the Spark SQL context used for the JDBC read and the query
	 */
	private static void readMySql(SQLContext sqlContext) {
		// Keep credentials only in connectionProperties: embedding them in the
		// URL duplicates them and leaks them into logs / the Spark UI, which
		// display the connection URL.
		String url = "jdbc:mysql://129.1.18.74:3306/scrapydb";
		String table = "t_sinahealth_staged";
		Properties connectionProperties = new Properties();
		// NOTE(review): credentials are hard-coded; consider moving them to
		// configuration or a secrets store.
		connectionProperties.put("user", "root");
		connectionProperties.put("password", "root1234");
		// NOTE(review): com.mysql.jdbc.Driver is the legacy Connector/J 5.x
		// class name; with Connector/J 8+ this should be
		// com.mysql.cj.jdbc.Driver - confirm which driver jar is deployed.
		connectionProperties.put("driver", "com.mysql.jdbc.Driver");

		Dataset<Row> df = sqlContext.read().jdbc(url, table, connectionProperties);
		df.createOrReplaceTempView("abc");

		// Use a distinct variable for the aggregate: the original reassigned
		// df here, which obscured that the JSON written below is the single-row
		// count(1) result, not the full table.
		Dataset<Row> rowCount = sqlContext.sql("select count(1) from abc");
		rowCount.show();

		// Overwrite so a re-run does not fail with "path already exists".
		// NOTE(review): this persists the count result, not the table itself -
		// confirm that is the intended output.
		rowCount.write().format("json").mode("overwrite").save("abc");
	}
}
