package com.gjy.learning.scala

import org.apache.spark
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

/** Demo: converting an `RDD` of tuples into a `DataFrame` by mapping the
  * tuples onto a case class and using the `spark.implicits._` conversions.
  *
  * NOTE: the fields below are initialized lazily by the JVM on first access
  * of this object; any failure during SparkSession creation will surface as
  * an `ExceptionInInitializerError` at that point.
  */
object ImplicitDemo2 {

  // Local SparkSession using all available cores; shared by the fields below.
  val spark = SparkSession.builder()
    .appName("DataType Conversion")
    .master("local[*]")
    .getOrCreate()

  /** Schema carrier for the demo rows; `toDF()` derives column names
    * (name, age, city) from these fields via the implicit Encoder.
    */
  case class Person(name: String, age: Int, city: String)
  import spark.implicits._

  // Source data as plain tuples, parallelized into an RDD.
  val rdd: RDD[(String, Int, String)] = spark.sparkContext.parallelize(Seq(("Alice", 25, "New York"), ("Bob", 30, "San Francisco")))

  // Tuple RDD -> typed case-class RDD -> DataFrame with named columns.
  val df = rdd.map { case (name, age, city) => Person(name, age, city) }.toDF()

  /** Entry point so the demo is actually runnable: forces evaluation of
    * `df`, prints it, and releases the Spark context even if `show` fails.
    * (Previously the object had no entry point, `df` was never observed,
    * and the SparkSession was never stopped.)
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    try {
      df.show()
    } finally {
      spark.stop() // always shut down the local Spark context
    }
  }
}
