package com.zhaosc.spark.sql.df

import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf

/**
 * Example Spark SQL program: reads `people.json` into a DataFrame,
 * shows the `name` column, then runs a custom aggregate UDF
 * (`MyAverage`, registered as "myAverage") over the `age` column via SQL.
 *
 * Fixes vs. previous revision:
 *  - removed the unused `SparkConf` (it carried a leftover "SecondSort"
 *    app name from another example and was never passed to the builder)
 *  - the session is now stopped in a `finally` so the SparkContext is
 *    released even if a stage fails
 */
object DataFrameOpsFromFile {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("DataFrameOpsFromFile")
      .config("spark.master", "local")
      .getOrCreate()

    try {
      // NOTE(review): path is relative to the working directory — TODO confirm
      val peopleDf = spark.read.json("people.json")
      peopleDf.select(peopleDf.col("name")).show()

      // Register the DataFrame as a temp view so it can be queried with SQL.
      peopleDf.createOrReplaceTempView("people")

      // MyAverage is a user-defined aggregate defined elsewhere in this project.
      spark.udf.register("myAverage", MyAverage)
      val result = spark.sql("SELECT myAverage(age) as people FROM people")
      result.show()
    } finally {
      // Release the SparkContext and associated resources.
      spark.stop()
    }
  }
}