package com.niit.spark.sql.test

import org.apache.spark.sql.functions.{col, desc}
import org.apache.spark.sql.{DataFrame, SparkSession}

/**
 * Date: 2025/5/16
 * Author: Ys
 * Description: Demonstrates adding a derived column to a DataFrame with `withColumn`.
 */
object AddColumn {

  /**
   * Entry point: reads the employees CSV and shows a DataFrame with a derived
   * `new_salary` column (salary * 1.1).
   *
   * @param args unused command-line arguments
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder().appName("AddColumn").master("local[*]").getOrCreate()
    spark.sparkContext.setLogLevel("ERROR")

    try {
      // header=true: first CSV row supplies column names. Without inferSchema,
      // every column is read as string.
      val df: DataFrame = spark.read.option("header", "true").csv("input/sql/employees.csv")

      // Add a new column: salary multiplied by 1.1.
      // Cast explicitly to double rather than relying on Spark's implicit
      // string-to-numeric coercion, since the CSV column is typed as string.
      df.withColumn("new_salary", col("salary").cast("double") * 1.1).show()
    } finally {
      // Ensure the SparkSession is released even if the job above fails.
      spark.stop()
    }
  }

}
