package com.linkstec.spark;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.hive.HiveContext;

@SuppressWarnings("deprecation")
/**
 * Smoke test that connects Spark to a local Hive metastore and lists the
 * available databases and tables.
 *
 * <p>Despite the class name, this test exercises Hive (via {@link HiveContext}),
 * not MySQL. Uses the deprecated {@code HiveContext}/{@code JavaSparkContext}
 * API, hence the class-level suppression.
 */
@SuppressWarnings("deprecation")
public class SparkMysqlTest {
	public static void main(String[] args) {
	        SparkConf sparkConf = new SparkConf().setAppName("SparkHive").setMaster("local");
	        // Point Spark SQL at the local HDFS-backed Hive warehouse.
	        sparkConf.set("spark.sql.warehouse.dir", "hdfs://localhost:9000/user/hive/warehouse");
	        JavaSparkContext sc = new JavaSparkContext(sparkConf);
	        try {
	            // NOTE: do not use a plain SQLContext here — when deployed it cannot
	            // resolve the Hive databases/tables; HiveContext is required.
	            HiveContext hiveContext = new HiveContext(sc);
	            hiveContext.sql("use default");
//	            hiveContext.sql("CREATE TABLE IF NOT EXISTS default.src (key INT, value STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t' ");
	            hiveContext.sql("show databases").show();
	            hiveContext.sql("show tables").show();
//	            hiveContext.sql("select * from src limit 10").show();
	        } finally {
	            // Always release the SparkContext, even if a query above throws.
	            sc.stop();
	        }
	}
}