package spark.example

import org.apache.spark.sql.{SparkSession,Row,DataFrame}
import org.apache.spark.sql.types._
import org.apache.spark.SparkContext

/** Demonstrates the two ways of converting an RDD to a DataFrame:
  * programmatically specifying the schema (`rddToDF`, `rddToDF1`) and
  * inferring it via reflection from a case class (`rddToDFCase`).
  *
  * Expects `args(0)` to point at a comma-separated "name,age" text file
  * (e.g. Spark's people.txt: "Michael, 29").
  */
object rddAndDataSetInteroperating {

  /** Record type used for the reflection-based schema inference.
    * NOTE(review): name is a typo for "Person"; kept as-is so any external
    * references to this public member keep compiling.
    */
  case class Persion(name: String, age: Int)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("rddAndDataSetInteroperating example")
      .master("yarn")
      .getOrCreate()
    val input = args(0)

    // 1) Schema specified programmatically via StructType.
    val dfProgramming = rddToDF(spark, input)
    println("Programmatically Specifying the Schema")
    dfProgramming.show(false)

    dfProgramming.createOrReplaceTempView("peopleProgramming")
    val resultProgramming = spark.sql("SELECT * FROM peopleProgramming")
    println("resultProgramming: ")
    // Needed for the Row => String Dataset encoder used by the maps below.
    import spark.implicits._
    resultProgramming
      .map(row => "Name: " + row(0) + "\t" + "Age: " + row(1).toString.toInt)
      .show(false)

    // 2) Schema inferred by reflection from the Persion case class.
    println("Inferring the Schema Using Reflection")
    val dfReflection = rddToDFCase(spark, input)
    dfReflection.show()

    dfReflection.createOrReplaceTempView("peopleReplace")
    val resultReflection = spark.sql("SELECT * from peopleReplace")
    println("resultReflection: ")
    resultReflection
      .map(row => "Name: " + row(0) + "\t" + "Age: " + row(1).toString.toInt)
      .show(false)

    // 3) Schema built from a whitespace-separated string of field names.
    val resultsProgramming1 = rddToDF1(spark, input)
    resultsProgramming1
      .map(row => "Name: " + row(0) + "\t" + "Age: " + row(1).toString.toInt)
      .show(false)

    // Release the session's resources before the driver exits.
    spark.stop()
  }

  /** Builds a DataFrame from `filePath` with a programmatically specified
    * schema of (name: String, age: Int). Each line must be "name,age";
    * `age` is trimmed and parsed as an Int (throws NumberFormatException
    * on malformed input).
    */
  def rddToDF(spark: SparkSession, filePath: String): DataFrame = {
    val schema = StructType(Seq(
      StructField("name", StringType, true),
      StructField("age", IntegerType, true)
    ))

    val rowRDD = spark.sparkContext.textFile(filePath, 2)
      .map(line => line.split(","))
      .map(fields => Row(fields(0), fields(1).trim.toInt))

    spark.createDataFrame(rowRDD, schema)
  }

  /** Builds a DataFrame whose schema (all StringType fields) is derived
    * from a field-name string, registers it as a temp view, and returns
    * the result of selecting everything from that view.
    *
    * FIX(review): the original split lines on " " while the other two
    * methods split the same input file on "," — on a comma-separated
    * "name,age" file that left a trailing comma in the name column.
    * Now splits on "," and trims, consistent with rddToDF/rddToDFCase.
    */
  def rddToDF1(spark: SparkSession, filePath: String): DataFrame = {
    val peopleRDD = spark.sparkContext.textFile(filePath)

    // "name age" -> Seq(StructField("name", ...), StructField("age", ...))
    val schemaString = "name age"
    val fields = schemaString.split(" ")
      .map(fieldName => StructField(fieldName, StringType, nullable = true))

    val schema = StructType(fields)
    val rowRDD = peopleRDD
      .map(_.split(","))
      .map(attributes => Row(attributes(0), attributes(1).trim))

    val peopleDF = spark.createDataFrame(rowRDD, schema)
    peopleDF.createOrReplaceTempView("peopleProgramming1")
    spark.sql("SELECT * FROM peopleProgramming1")
  }

  /** Builds a DataFrame from `filePath` by mapping each "name,age" line
    * to a Persion and letting Spark infer the schema via reflection
    * (requires the spark.implicits._ encoder import).
    */
  def rddToDFCase(spark: SparkSession, filePath: String): DataFrame = {
    import spark.implicits._
    val peopleDF = spark.sparkContext
      .textFile(filePath, 2)
      .map(line => line.split(","))
      .map(fields => Persion(fields(0), fields(1).trim().toInt))
      .toDF

    peopleDF
  }
}
