package com.etc.datefrme

import com.etc.StrCount
import org.apache.spark.sql.types.{StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, Row, SparkSession}

object UDF {

  /**
   * Registers a string-length UDF (`len`) and applies it via Spark SQL
   * over a temp view built from a Dataset[String].
   */
  def udfTest(): Unit = {
    val spark = SparkSession.builder().appName("UDF").master("local").getOrCreate()
    import spark.implicits._ // needed for the String encoder in createDataset
    val strings = Array("hello", "word", "hello", "jiege")
    val ds: Dataset[String] = spark.createDataset(strings)
    val frame = ds.toDF("name")
    frame.createOrReplaceTempView("user")
    spark.udf.register("len", (str: String) => str.length)
    spark.sql("select len(name) name from user").show()
    spark.stop()
  }

  /**
   * Same `len` UDF demo, but builds the DataFrame the low-level way:
   * an RDD[Row] plus an explicit StructType schema.
   */
  def udfDemo(): Unit = {
    val spark = SparkSession.builder().appName("UDF").master("local").getOrCreate()
    val strings = Array("hello", "word", "hello", "jiege")
    val rowRdd = spark.sparkContext.parallelize(strings).map(Row(_))
    val schema = StructType(Array(StructField("name", StringType, nullable = true)))
    val frame = spark.createDataFrame(rowRdd, schema)
    frame.createOrReplaceTempView("user")
    spark.udf.register("len", (str: String) => str.length)
    spark.sql("select len(name) name from user").show()
    spark.stop()
  }

  /**
   * Registers the custom StrCount UDAF and aggregates the number of
   * occurrences of each name via `group by`.
   */
  def Udaf(): Unit = {
    val spark = SparkSession.builder().appName("Udaf").master("local").getOrCreate()
    import spark.implicits._
    val strings = Array("hello", "word", "hello", "jiege")
    val ds: Dataset[String] = spark.createDataset(strings)
    val frame = ds.toDF("name")
    frame.createOrReplaceTempView("users")
    spark.udf.register("strcount", new StrCount)
    spark.sql("select name,strcount(name) as strcount from users group by name").show()
    spark.stop()
  }

  /**
   * Reads CSV-like lines from HDFS, keeps only records whose city/platform/version
   * fields match a broadcast whitelist, then counts records per "field0_field1" key.
   *
   * NOTE(review): field layout assumed from the original indices —
   * column 3 = city, 4 = platform, 5 = version; confirm against the input file.
   *
   * BUG FIX: the original filter used three bare `if (...) false` statements whose
   * results were discarded (an `if` without `else` in statement position is Unit),
   * so the lambda always returned the trailing `true` and NO rows were ever
   * filtered out. Rewritten as a single boolean expression. The length guard is
   * also corrected: indices 3..5 require at least 6 fields (the old
   * `strings.length <= 0` could never be true for a split of a non-empty line),
   * preventing ArrayIndexOutOfBoundsException on short rows.
   */
  def top3(): Unit = {
    val spark = SparkSession.builder().appName("top3").master("local[*]").getOrCreate()

    val frame = spark.read.textFile("hdfs://master:9000/input/uu.txt")
    import spark.implicits._
    val whitelist = Map(
      ("city", List("北京", "天津", "南京")),
      ("platform", List("Android")),
      ("version", List("1.0", "1.2", "1.5", "2.0"))
    )
    // Broadcast once so every executor task shares a single read-only copy.
    val bcFilters = spark.sparkContext.broadcast(whitelist)

    frame
      .filter { line =>
        val fields = line.split(",")
        fields.length >= 6 &&
          bcFilters.value("city").contains(fields(3)) &&
          bcFilters.value("platform").contains(fields(4)) &&
          bcFilters.value("version").contains(fields(5))
      }
      .map { line =>
        // Split once per record (the original re-split three times).
        val fields = line.split(",")
        (fields(0) + "_" + fields(1), fields(2))
      }
      .groupByKey(_._1)
      .count()
      .foreach(kv => println(kv._1 + "\t" + kv._2)) // println so records don't run together

    spark.stop() // was missing in the original; release the local session
  }

  def main(args: Array[String]): Unit = {
    //    udfTest()
    //    udfDemo()
    //    Udaf()
    top3()
  }
}
