package sparksql

import org.apache.spark.sql.SparkSession

object DataSetApp {

  /** Entry point: reads a sales CSV into an untyped DataFrame, converts it to a
    * typed Dataset[Sales], and prints both, plus a derived column
    * (itemId + transactionId) per row.
    *
    * @param args optional; args(0) overrides the CSV path (defaults to the
    *             original hard-coded location for backward compatibility)
    */
  def main(args: Array[String]): Unit = {
    // Allow the CSV location to come from the command line so the app is not
    // tied to one machine's filesystem layout; default preserves old behavior.
    val csvPath = args.headOption.getOrElse(
      "F:\\lzc\\SparkSQL\\sparksql\\src\\main\\scala\\sparksql\\sales.csv")

    val spark = SparkSession.builder()
      .appName("DataSetApp")
      .master("local[2]")
      .getOrCreate()

    try {
      val df = spark.read
        .option("header", true)      // first CSV line is the header row
        .option("inferSchema", true) // derive column types instead of all-String
        .csv(csvPath)

      import spark.implicits._
      // as[Sales] maps DataFrame columns onto case-class fields by name.
      val ds = df.as[Sales]

      df.show()
      // Int + Int per row; shown as a single-column Dataset[Int].
      ds.map(line => line.itemId + line.transactionId).show()
    } finally {
      // Guarantee the SparkSession is released even if an action above fails.
      spark.stop()
    }
  }

  /** Typed row for the sales CSV. Field names must match the CSV header
    * columns exactly for `df.as[Sales]` to resolve them.
    *
    * NOTE(review): "customedId" looks like a typo for "customerId", but it
    * must mirror the CSV header — confirm against the data before renaming.
    */
  final case class Sales(transactionId: Int, customedId: Int, itemId: Int, amountPaid: Double)

}
