package com.itcast.spark

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.api.java.UDF1
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * DESC: Demonstrates how to define, register, and invoke user-defined
 * functions (UDFs) in Spark SQL — via the Java UDF1 interface, Scala
 * lambdas, SQL statements, and the DataFrame DSL (callUDF).
 */
/**
 * Wraps a single input line so `toDF()` can derive a one-column schema.
 *
 * @param line the raw text of one input line
 */
final case class Small(line: String)

object SparkSqlUDF {
  /**
   * Demonstrates three ways of registering and invoking Spark SQL UDFs:
   * the Java `UDF1` interface, a Scala lambda, and DSL-style `callUDF`.
   *
   * @param args optional: the first argument overrides the input file path
   *             (defaults to `./datasets/input/udf.txt`)
   */
  def main(args: Array[String]): Unit = {
    // App name now matches the object; "SparkSqlWordCount" was a copy-paste leftover.
    val conf: SparkConf = new SparkConf().setAppName("SparkSqlUDF").setMaster("local[*]")
    val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
    val sc: SparkContext = spark.sparkContext
    sc.setLogLevel("WARN")
    import spark.implicits._

    // 1) Read the input text file; the path may be overridden by the first CLI argument.
    val inputPath = args.headOption.getOrElse("./datasets/input/udf.txt")
    val data: RDD[String] = sc.textFile(inputPath)
    // 2) Wrap each line in the Small case class so toDF() can derive a schema.
    val dataDF: DataFrame = data.map(x => Small(x)).toDF()
    dataDF.printSchema()
    // 3) Preview the first rows of the DataFrame (uncomment to inspect).
    //dataDF.show(3, false)

    // Terminology:
    //   UDF  : one row in, one row out (applied per row, not per word)
    //   UDAF : many rows in, one row out (aggregation)
    //   UDTF : one row in, many rows out (table-generating)

    // 4) Register a UDF through the Java UDF1 interface — the return type
    //    must be given explicitly (StringType) with this overload.
    spark.udf.register("SmallToBigger", new UDF1[String, String] {
      override def call(t1: String): String = {
        t1.toUpperCase
      }
    }, StringType)
    dataDF.createOrReplaceTempView("table")
    spark.sql("select line,SmallToBigger(line) from table").show()
    /* Sample output:
       +----------+-----------------------+
       |      line|UDF:SmallToBigger(line)|
       +----------+-----------------------+
       |helloworld|             HELLOWORLD|
       |       abc|                    ABC|
       |     study|                  STUDY|
       | smallWORD|              SMALLWORD|
       +----------+-----------------------+ */

    // 5) Same UDF registered as a Scala lambda — the idiomatic form;
    //    the return type is inferred from the function.
    spark.udf.register("SmallToBigger1", (line: String) => line.toUpperCase)
    spark.sql("select line,SmallToBigger1(line) from table").show()
    // 6) Invoke a registered UDF from the DataFrame DSL via callUDF.
    dataDF.select($"line", callUDF("SmallToBigger1", $"line")).show()

    // 7) A UDF that squares an Int column, called from both the DSL and SQL.
    val df = Seq(("id1", 1), ("id2", 4), ("id3", 5)).toDF("id", "value")
    spark.udf.register("udfMmul", (value: Int) => value * value)
    df.select($"id", callUDF("udfMmul", $"value")).show()
    df.select('id, callUDF("udfMmul", 'value)).show()
    df.createOrReplaceTempView("dftable")
    spark.sql("select id,udfMmul(value) from dftable").show()

    // 8) A two-argument UDF multiplying two Int columns — the pattern to master.
    val df1 = Seq(("id1", 1, 2), ("id2", 4, 5), ("id3", 5, 6)).toDF("id", "value", "tuple")
    spark.udf.register("udfMmul11", (value1: Int, value2: Int) => value1 * value2)
    df1.select($"id", callUDF("udfMmul11", $"value", $"tuple")).show()

    spark.stop()
  }
}
