package sparksqls.datatransfer

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

import java.nio.file.Paths

object WithTextFile {
  // Shared SparkSession: the entry point for DataFrame creation and SQL.
  val spark: SparkSession = SparkSession.builder
    .master("local")
    .appName("RDDtoDF")
    .getOrCreate() // side-effecting 0-arity call: keep the parentheses

  // Root of the local Spark installation holding the example data files.
  val base_path: String = "/Users/collinsliu/spark-3.5.0/"
  // Absolute path of the current working directory (kept for external callers).
  val abs_path: String = Paths.get("").toAbsolutePath.toString

  /** Schema carrier for the reflective route: `rdd.toDF()` derives the
    * DataFrame schema (name: String, age: Long) from this case class. */
  case class Person(name: String, age: Long)

  // Brings the implicit Encoders required by rdd.toDF() and Dataset.map into scope.
  import spark.implicits._

  def main(args: Array[String]): Unit = {
    val df1 = structuredToDF("/examples/src/main/resources/people.txt")
    showDF(df1, "people1")
    val df2 = customizedToDF("/examples/src/main/resources/people.txt")
    showDF(df2, "people2")
    spark.stop() // release the local SparkContext before the JVM exits
  }

  /**
   * Loads a comma-separated "name,age" text file and derives the DataFrame
   * schema by reflection over the [[Person]] case class.
   *
   * @param path file path relative to [[base_path]]
   * @return DataFrame with columns (name: String, age: Long)
   */
  def structuredToDF(path: String): DataFrame = {
    val rdd: RDD[Person] = spark.sparkContext.textFile(s"file:///$base_path$path")
      .map(_.split(","))
      .filter(_.length >= 2) // skip blank/malformed lines instead of throwing at f(1)
      .map(f => Person(f(0), f(1).trim.toLong)) // toLong matches Person.age: Long
    rdd.toDF()
  }

  /**
   * Loads the same "name,age" file but attaches an explicitly constructed
   * schema — the required approach when rows are built as generic [[Row]]s.
   *
   * @param path file path relative to [[base_path]]
   * @return DataFrame with columns (name: String, age: Int)
   */
  def customizedToDF(path: String): DataFrame = {
    // Explicit schema: name:String, age:int
    val schema: StructType = StructType(List(
      StructField("name", StringType),
      StructField("age", IntegerType)))
    val rdd = spark.sparkContext.textFile(s"file:///$base_path$path")
      .map(_.split(","))
      .filter(_.length >= 2) // guard against blank/malformed lines
      // String#split yields Array[String], so f(i) needs no toString.
      .map(f => Row(f(0).trim, f(1).trim.toInt))
    spark.createDataFrame(rdd, schema)
  }

  /**
   * Registers `df` as a temp view, selects the 13-19 age range via SQL,
   * and prints the matching names to stdout.
   *
   * @param df         DataFrame expected to have (name, age) columns
   * @param table_name temp-view name to register (internal, trusted value —
   *                   not user input, so interpolation into SQL is acceptable)
   */
  def showDF(df: DataFrame, table_name: String): Unit = {
    df.createOrReplaceTempView(table_name)
    val teenagers = spark.sql(s"SELECT * FROM $table_name WHERE age BETWEEN 13 AND 19")
    teenagers.map(f => s"Name: ${f(0)}").show()
  }

}
