package tech.spiro.spark

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.expr

object DataFrameExample extends Serializable {

  /** Appends the suffix "-a" to the input string; registered below as the SQL UDF `myUDF`. */
  def someUDF(str: String): String = s"$str-a"

  /**
   * Entry point: reads JSON data from the folder given in `args(0)`, groups rows by the
   * UDF-transformed `DEST_COUNTRY_NAME` column, sums the `count` column, and prints each
   * resulting row to stdout.
   *
   * @param args args(0) is the path to the data folder (expected to end with a path
   *             separator, since "data.json" is appended directly).
   */
  def main(args: Array[String]): Unit = {
    // Fail fast with a usage message instead of an opaque ArrayIndexOutOfBoundsException.
    require(args.nonEmpty, "Usage: DataFrameExample <pathToDataFolder>")
    val pathToDataFolder = args(0)

    // create SparkSession
    val spark = SparkSession.builder()
      .appName("Spark Example")
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .getOrCreate()

    try {
      // Eta-expand the method reference; the UDF's input/output types are inferred
      // from someUDF's signature.
      spark.udf.register("myUDF", someUDF _)

      val df = spark.read.json(pathToDataFolder + "data.json")
      df.groupBy(expr("myUDF(DEST_COUNTRY_NAME)"))
        .sum("count")
        .collect()
        .foreach(println)
    } finally {
      // Release the SparkSession (and underlying SparkContext) even if the job fails.
      spark.stop()
    }
  }
}
