package com.niit.spark.sql.test

import org.apache.spark.sql.SparkSession

/**
 * Date: 2025/5/12
 * Author: Ys
 * Description: Example of loading a CSV file with a header row into a Spark DataFrame.
 */
/**
 * Demonstrates reading a CSV file into a DataFrame using the DataFrameReader.
 *
 * Reads `input/sql/sales.csv`, treating the first line as column headers,
 * prints the resulting rows, and shuts the session down.
 */
object CSVToDataFrame {

  def main(args: Array[String]): Unit = {

    // Local-mode session using all available cores; suitable for this demo only.
    val spark = SparkSession
      .builder()
      .appName("CSV to DataFrame")
      .master("local[*]")
      .getOrCreate()
    // Suppress Spark's INFO/WARN noise so df.show() output is readable.
    spark.sparkContext.setLogLevel("ERROR")

    // When the CSV file has a header line, option("header", "true") tells the
    // reader to use it for column names instead of treating it as data.
    // NOTE: removed the unused `import spark.implicits._` — no Dataset
    // encoders or $"col" syntax are used in this example.
    val df = spark.read.option("header", "true").csv("input/sql/sales.csv")
    df.show()

    spark.stop()
  }
}
