import org.apache.spark.sql.SparkSession

import scala.util.control.NonFatal

object Csvreaderv {

  /** Reads a CSV file with a local Spark session and prints its schema,
    * the first 5 rows, and the total row count.
    *
    * @param args optional: `args(0)` overrides the default CSV file path.
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("CSV Reader")
      .master("local[*]")
      .getOrCreate()

    // Allow the path to be passed on the command line; fall back to the
    // original hard-coded location so existing invocations still work.
    val filePath = args.headOption.getOrElse("C:\\Users\\PC-608\\Desktop\\house.csv")

    try {
      val df = spark.read
        .option("header", "true")      // first line holds column names
        .option("inferSchema", "true") // extra pass over the data to detect column types
        .csv(filePath)

      println("Schema:")
      df.printSchema()

      println("\nFirst 5 rows:")
      df.show(5) // was show(100) — inconsistent with the message above

      println(s"\nTotal rows: ${df.count()}")
    } catch {
      // NonFatal: don't swallow fatal errors (OutOfMemoryError, InterruptedException, ...)
      case NonFatal(e) => println(s"Error reading file: ${e.getMessage}")
    } finally {
      spark.stop() // always release the local Spark context
    }
  }
}