package SQL

import java.util.Properties

import SQL.MyAverage
import org.apache.spark.sql.{DataFrame, SparkSession}

object UDAFDemo {

  /** Demo entry point: registers the `MyAverage` UDAF, loads the `employee`
    * table from MySQL over JDBC, and runs an aggregate query using the UDAF.
    *
    * Side effects: creates (or reuses) a local SparkSession, opens a JDBC
    * connection, prints the query result to stdout, and stops the session.
    */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession
      .builder().master("local[2]")
      .appName("Spark SQL basic example")
      .getOrCreate()
    // Direct accessor — no need to route through sqlContext.
    spark.sparkContext.setLogLevel("WARN")

    try {
      // Register the aggregate under the name referenced in the SQL below.
      spark.udf.register("myAverage", new MyAverage())

      // JDBC credentials. NOTE(review): hard-coded credentials are fine for a
      // demo only — move to configuration/secrets for real deployments.
      val connectionProperties = new Properties()
      connectionProperties.setProperty("user", "hive")
      connectionProperties.setProperty("password", "hive")

      val jdbcDF: DataFrame =
        spark.read.jdbc("jdbc:mysql://spark:3306/test", "employee", connectionProperties)

      // Expose the table to SQL, then aggregate with the custom UDAF.
      jdbcDF.createOrReplaceTempView("employee")
      val result = spark.sql("SELECT myAverage(age) as average_age FROM employee")
      result.show()
    } finally {
      // Always release the Spark session, even if the JDBC read or query fails.
      spark.stop()
    }

  }

}