package com.ruby.bigtable.spark;

import java.util.HashMap;
import java.util.Map;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.hive.HiveContext;

/**
 * One-shot ETL job: reads the {@code students} table from a MySQL database over
 * JDBC and appends it to a Hive table named {@code students} stored as Parquet.
 *
 * NOTE(review): connection details (host, db, user, password) are hard-coded
 * below; consider externalizing them to configuration rather than committing
 * credentials to source control.
 */
public class LoadMysql2Hive {

	/**
	 * Entry point: loads the MySQL source table as a DataFrame and appends it
	 * to the Hive {@code students} table in Parquet format.
	 *
	 * @param args command-line arguments (unused)
	 */
	public static void main(String[] args) {
		SparkConf conf = new SparkConf().setAppName("LoadMysql2Hive").setMaster("local");
		JavaSparkContext sc = new JavaSparkContext(conf);
		try {
			HiveContext hiveContext = new HiveContext(sc.sc());

			// JDBC connection options for the source MySQL table.
			Map<String, String> jdbcOptions = new HashMap<String, String>();
			jdbcOptions.put("url", "jdbc:mysql://192.168.159.1:3306/hive");
			jdbcOptions.put("dbtable", "students");
			jdbcOptions.put("user", "hive");
			jdbcOptions.put("password", "hive");

			// Read the MySQL table, then append it to the Hive "students"
			// table as Parquet (saveAsTable creates the table if absent).
			DataFrame mysqlDf = hiveContext.read().format("jdbc").options(jdbcOptions).load();
			mysqlDf.write().format("parquet").mode(SaveMode.Append).saveAsTable("students");
			System.out.println("success");
		} finally {
			// Always stop the context so the application releases its
			// resources and shuts down cleanly, even when the job fails.
			// This also removes the need for @SuppressWarnings("resource").
			sc.stop();
		}
	}

}
