package com.niit.spark.sql.test

import org.apache.spark.sql.functions.{avg, col, desc}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Date: 2025/5/12
 * Author: Ys
 * Description: Demonstrates casting a string-typed CSV column to double with withColumn.
 */
object CastColumn {

  /**
   * Entry point: reads `input/sql/employees3.csv` (header row present),
   * casts the string-typed "salary" column to double, and prints the result.
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder().appName("CastColumn").master("local[*]").getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    try {
      // csv() infers every column as string unless a schema is supplied,
      // so "salary" must be cast explicitly before numeric use.
      val df: DataFrame = spark.read.option("header", "true").csv("input/sql/employees3.csv")
      // withColumn(colName, colExpr): replaces the existing "salary" column
      // because the name matches; otherwise it would append a new column.
      df.withColumn("salary", col("salary").cast("double")).show()
    } finally {
      // Guarantee the session is released even if the read/transform throws.
      spark.stop()
    }
  }

}
