package main.java

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
  * SparkReadHive
  *
  * Runs an aggregation query against a Hive table via SparkSession.
  *
  * @author zhangyimin
  * 2018-10-10 上午10:04
  * @version 1.0
  */
object SparkReadHive {

  /**
    * Entry point: queries the Hive table `default.people_info` for the
    * max/min of `high_num` grouped by `sex` and prints the result table.
    *
    * Requires a Hive metastore reachable from the local Spark session
    * (enabled via `enableHiveSupport()`).
    */
  def main(args: Array[String]): Unit = {

    val conf = new SparkConf()
    conf.setAppName("peopleInfo")
    conf.setMaster("local")

    // SparkSession with Hive support; the SparkContext is created internally,
    // so no explicit `new SparkContext(conf)` is needed.
    val spark = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()

    spark.sql("use default")
    val hiveDF = spark.sql("select sex, max(high_num) max_high,min(high_num) min_high  from people_info group by sex")

    // Dataset.show() prints the rows itself and returns Unit; the previous
    // `println(hiveDF.show)` additionally printed a stray "()".
    hiveDF.show()

    spark.stop()
  }

}
