package SparkSQL

import org.apache.spark.sql.{Dataset, SparkSession}
import org.junit.Test

class DataSetTest {
  // Shared SparkSession for all tests in this class.
  // local[*] runs Spark in-process using all available cores.
  // NOTE(review): the session is never stopped — tolerable for short-lived
  // JUnit runs, but consider an @After hook calling spark.stop().
  val spark: SparkSession = SparkSession.builder()
    .master("local[*]")
    .appName("createDataSet")
    .getOrCreate()

  /**
   * Creates a Dataset from a Seq of case-class instances via `toDS`.
   * Unlike a DataFrame (Dataset[Row]), the result carries the element
   * type `Stu`, so column access is checked at compile time.
   */
  @Test
  def createDataSetTest01(): Unit = {
    import spark.implicits._
    // The Dataset is typed: each element is a Stu, not a generic Row.
    val ds: Dataset[Stu] = Seq(Stu(1, "zhangsan"), Stu(2, "lisi")).toDS
    ds.show()
  }

  /**
   * Creates a Dataset from a Seq of primitives via `toDS`.
   * The element type (Int) is preserved in the Dataset's type parameter.
   */
  @Test
  def createDataSetTest02(): Unit = {
    import spark.implicits._
    // The Dataset is typed: Dataset[Int], not Dataset[Row].
    val ds: Dataset[Int] = Seq(1, 2, 3, 4).toDS
    ds.show()
  }
}
/**
 * Sample student record used by the Dataset-creation tests.
 *
 * Marked `final`: case classes should not be extended by other case
 * classes (equality/copy semantics break under inheritance).
 *
 * @param Id   student id (keeps the original non-camelCase name `Id`
 *             so existing callers and derived column names stay valid)
 * @param name student name
 */
final case class Stu(Id: Int, name: String)