package spark.sql

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SparkSession
/** A scored (product, name) pair used to demo typed Datasets.
  *
  * @param name      token text
  * @param productId product the token is associated with
  * @param score     relevance score for this token/product pair
  */
final case class Token(name: String, productId: Int, score: Double)
/** Small demo of creating Datasets and DataFrames from local collections
  * and converting between the untyped (DataFrame) and typed (Dataset) APIs.
  */
object DatasetTEst {

  def main(args: Array[String]): Unit = {
    // Build the session once and take the SparkContext from it instead of
    // constructing a separate SparkContext — two independently-created
    // contexts/sessions are redundant and order-fragile.
    val sparkConf = new SparkConf().setAppName("LDATest").setMaster("local[8]")
    val spark = SparkSession.builder.config(sparkConf).getOrCreate()
    val sc = spark.sparkContext
    // Single import of the encoders/conversions (the original imported it twice).
    import spark.implicits._

    // Dataset / DataFrame creation from local Seqs.
    val ds = Seq("I am a shiny Dataset!").toDS
    ds.show()
    val df = Seq("I am an old grumpy DataFrame!").toDF
    df.show()
    // Name the single column "text" instead of the default "value".
    val df2 = Seq("I am an old grumpy DataFrame!").toDF("text")
    df2.show()
    // An RDD can be lifted to a Dataset the same way.
    val ds2 = sc.parallelize(Seq("hello")).toDS
    ds2.show()

    val data = Seq(
      Token("aaa", 100, 0.12),
      Token("aaa", 200, 0.29),
      Token("bbb", 200, 0.53),
      Token("bbb", 300, 0.42))

    val ds3 = data.toDS
    ds3.show()
    val df3 = data.toDF

    // Transform DataFrame into a typed Dataset.
    val ds4 = df3.as[Token]
    // printSchema prints to stdout and returns Unit; wrapping it in println
    // (as the original did) additionally prints "()".
    ds4.printSchema()
    // A DataFrame row is a generic Row; the Dataset row is a Token.
    df3.map(_.getClass.getName).show(false)
    ds4.map(_.getClass.getName).show(false)

    // Release the local cluster's resources.
    spark.stop()
  }

}
