package com.atguigu0.sql

import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * @description: Demo of registering and invoking a custom average UDAF (CustomerAvg) via Spark SQL.
 * @time: 2020/6/15 15:17
 * @author: baojinlong
 **/
object CustomerAvgTest {

  /**
   * Entry point: loads person data through three different DataFrame reader APIs,
   * registers the custom average UDAF `CustomerAvg` under the SQL name `myAvg`,
   * and runs it over the `age` column of the JSON data via Spark SQL.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("xx")
      .getOrCreate()

    // Ensure the SparkSession is released even if a read or query fails.
    try {
      // Load the same person data three ways: json reader, text reader, generic format/load.
      val jsonDF: DataFrame = spark.read.json("E:/qj_codes/big-data/Spark1015/SparkCoreDaemon/src/data/person.json")
      val textDF: DataFrame = spark.read.text("E:/qj_codes/big-data/Spark1015/SparkCoreDaemon/src/data/person.txt")
      val loadDF: DataFrame = spark.read.format("json").load("E:/qj_codes/big-data/Spark1015/SparkCoreDaemon/src/data/person.json")

      // FIX: the original code string-concatenated the DataFrame references, which
      // only prints their schema toString (e.g. "[value: string]") — not the data.
      // show() renders the actual contents, which is what a demo wants to inspect.
      textDF.show()
      loadDF.show()

      // SQL style. createOrReplaceTempView avoids an AnalysisException if a view
      // named "people" already exists in this session (createTempView would throw).
      jsonDF.createOrReplaceTempView("people")

      // Register the custom UDAF (defined elsewhere in this project) for use in SQL.
      spark.udf.register("myAvg", CustomerAvg)

      // Invoke the registered aggregate through Spark SQL.
      spark.sql("select myAvg(age) from people").show
    } finally {
      // Release the session/cluster resources.
      spark.stop()
    }
  }
}
