package me.bigdata.spark

import org.apache.hadoop.hive.ql.metadata.HiveUtils
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by Administrator on 2017/5/29.
  */
/**
  * Batch job that loads a CSV file into a Spark DataFrame and registers it
  * as a global temporary view named "person" for Hive/SQL access.
  */
object SparkHiveApp {

  /** Intended row schema for the input CSV (one column per field, in order). */
  final case class Person(name: String, sex: String, road: String, tel: String, email: String)

  /**
    * Entry point.
    *
    * @param args optional; args(0) may supply the CSV path, otherwise the
    *             original hard-coded default path is used.
    */
  def main(args: Array[String]): Unit = {
    // Build the SparkSession directly with all configuration. Since Spark 2.0
    // the session owns the SparkContext, so there is no need to construct a
    // SparkContext by hand. The original code also created a StreamingContext
    // that was never started — dead code that only allocated resources — so it
    // has been removed.
    val spark = SparkSession.builder()
      .appName("hive-demo")
      .master("local[2]")
      .config("spark.streaming.stopGracefullyOnShutdown", "true")
      .enableHiveSupport()
      .getOrCreate()

    try {
      // Allow the CSV path to be overridden on the command line; default to
      // the original path for backward compatibility.
      val csvPath = if (args.nonEmpty) args(0) else "D:\\javaSpace\\spark-hive-demo1\\text.csv"

      // NOTE(review): read without header/schema options — columns will be
      // named _c0.._c4 and typed as string; map onto Person if typed access
      // is needed.
      val personDF = spark.read.csv(csvPath)

      // Global temp views are stored in the system database `global_temp` and
      // must be queried as `SELECT * FROM global_temp.person`. They survive
      // across sessions but die with the application.
      personDF.createGlobalTempView("person")
    } finally {
      // Release the SparkContext even if the read or view registration fails.
      spark.stop()
    }
  }
}
