package com.imooc.spark

import com.imooc.spark.DataFrameRDDApp.SetLogger
import org.apache.spark.sql.SparkSession

/**
  * Created by zghgchao 2017/12/23 16:17
  * Demonstrates Spark Dataset usage: CSV -> DataFrame -> typed Dataset[Sales].
  */
object DataSetApp {

  /**
    * Entry point: reads a CSV file into an untyped DataFrame, converts it to a
    * strongly typed Dataset[Sales], and contrasts typed column access
    * (compile-time checked) with string-based column access (runtime checked).
    */
  def main(args: Array[String]): Unit = {
    SetLogger()
    val spark = SparkSession.builder()
      .master("local[2]")
      // Fixed: app name previously said "DataFrameRDDApp" (copy-paste from the
      // sibling example), which mislabels this job in the Spark UI and logs.
      .appName("DataSetApp").getOrCreate()

    val path = "src/data/sales.csv"

    // Parse the CSV file: first row is the header, column types are inferred.
    val salesDF = spark.read.option("header", "true").option("inferSchema", "true").csv(path)
    salesDF.show()

    // NOTE: the implicit Encoders from spark.implicits._ are required for
    // the DataFrame -> Dataset conversion and for typed map() below.
    import spark.implicits._

    // Typed view of the same data; CSV column names/types must match Sales.
    val salesDS = salesDF.as[Sales]
    salesDS.show()

    // Typed access: line.itemId is checked at compile time.
    salesDS.map(line => line.itemId).show()
    // Untyped, string-based access: a typo here only fails at runtime.
    salesDF.select("itemId").show()
    salesDS.select("transactionId").show()

    spark.stop()
  }

  /** One sales record, mirroring the columns of sales.csv. */
  case class Sales(transactionId: Int, customerId: Int, itemId: Int, amountPaid: Double)

}
