package main.scala.com.hive.spark.test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import java.util.HashMap
import java.util.ArrayList

/**
 * @author ZhiLi
 */
/**
 * Entry point: connects to a Hive server over JDBC via Spark SQL,
 * loads the `test` table from the `default` database, and prints every row.
 *
 * @author ZhiLi
 */
object SparkHive {
  def main(args: Array[String]): Unit = {
    println("helloworld")

    // Local two-core master: this is a development/test configuration,
    // not meant for cluster deployment.
    val sparkConf = new SparkConf().setAppName("HiveTest").setMaster("local[2]")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    // HiveServer2 endpoint; `default` is the database name.
    val url = "jdbc:hive2://192.168.40.129:10000/default"

    // NOTE(review): credentials are hard-coded in source — move them to
    // configuration or a credential store before this leaves development.
    val jdbcDF = sqlContext.read.format("jdbc").options(
      Map("url" -> url,
          "dbtable" -> "test",
          "user" -> "cloudera",
          "password" -> "cloudera",
          "driver" -> "org.apache.hive.jdbc.HiveDriver")).load()

    // `where("1=1")` is an always-true filter kept only to force a filtered
    // plan; the resulting RDD contains every row of the table.
    val tableRDD = jdbcDF.where("1=1").rdd

    // Print each row (runs on the executors; with local[2] the output
    // appears in this process's stdout).
    tableRDD.foreach { x => println(x) }

    sc.stop()
  }
}