import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.hive.HiveContext

/**
 * Minimal demo of using Spark's HiveContext as a data source:
 * creates a Hive table, loads a local delimited file into it, and
 * prints its contents.
 *
 * NOTE(review): `HiveContext` is the Spark 1.x API; on Spark 2.x+ prefer
 * `SparkSession.builder().enableHiveSupport()`. Kept as-is to match the
 * file's existing imports.
 */
object HiveSourceTest {
  def main(args: Array[String]): Unit = {
    // Master is hard-coded to "local" — this is a local smoke test, not
    // intended for cluster submission.
    val conf = new SparkConf().setAppName("HiveSourceTest").setMaster("local")
    val sc = new SparkContext(conf)
    //val sqlContext = new SQLContext(sc)
    // Data source: Hive

    val hiveContext: HiveContext = new HiveContext(sc)
    // IF NOT EXISTS makes the job re-runnable: without it the second run
    // fails with "table already exists".
    hiveContext.sql("create table if not exists aura.worker(id string,salary double,bon double,dep int) row format delimited fields terminated by ','")
    // LOCAL INPATH reads from the driver's filesystem; the file must exist
    // at /root/worker.txt on the machine running the driver.
    hiveContext.sql("load data local inpath '/root/worker.txt' into table aura.worker ")
    hiveContext.sql("select * from aura.worker").show()

    // Release the SparkContext so the JVM can exit cleanly.
    sc.stop()
  }
}
