package com.darrenchan.spark.sql

import org.apache.spark.sql.SparkSession

/**
  * sales.csv:
  * transactionId,customerId,itemId,amountPaid
  * 111,1,1,100.0
  * 112,2,1,200.0
  * 113,3,1,300.0
  */
object DataSetApp {

  /**
    * Entry point: reads a header-bearing sales CSV into a DataFrame with
    * inferred column types, converts it to a typed Dataset[Sales], and
    * prints both the raw rows and the projected customerId column.
    *
    * @param args optional; args(0) overrides the input path (defaults to "sales.csv")
    */
  def main(args: Array[String]): Unit = {
    // Allow the CSV path to be supplied on the command line; fall back to
    // the original hard-coded default so existing invocations still work.
    val inputPath = args.headOption.getOrElse("sales.csv")

    val spark = SparkSession.builder().master("local[2]").appName("DataSetApp").getOrCreate()

    // NOTE: the implicit conversions are required for .as[Sales] and
    // for the Encoder used by the typed map below.
    import spark.implicits._

    try {
      // Parse the CSV: first line is the header
      // (transactionId,customerId,itemId,amountPaid) and column types are inferred.
      val df = spark.read.option("header", "true").option("inferSchema", "true").csv(inputPath)
      df.show()

      // Convert the untyped DataFrame to a typed Dataset and project one column.
      val ds = df.as[Sales]
      ds.map(_.customerId).show()
    } finally {
      // Ensure the SparkSession is released even if reading/parsing fails.
      spark.stop()
    }
  }

  /** One row of sales.csv; field names/types must match the CSV header and inferred schema. */
  final case class Sales(transactionId: Int, customerId: Int, itemId: Int, amountPaid: Double)
}
