package com.bigdata.spark.sql

import org.apache.spark.sql.{DataFrame,Dataset,SparkSession}
import org.apache.spark.sql.functions._
/**
 * @author Gerry chan
 * @version 1.0
 * 2021/03/16 20:28:00
 * DataFrame column operations with withColumn().
 *
 * Demonstrates adding columns via `withColumn` + `lit`, renaming via
 * `withColumnRenamed`, and the four equivalent column references:
 * symbol ('), interpolator ($), col(), and column().
 */
object DataFrameTest {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("DataFrameTest")
      .master("local[*]")
      .getOrCreate()

    spark.sparkContext.setLogLevel("WARN")
    import spark.implicits._

    try {
      // Each input line is expected to be "name,age".
      val ds: Dataset[String] = spark.read.textFile("datas/testData.csv")

      // Guard on split length so a malformed line (fewer than two fields)
      // does not throw ArrayIndexOutOfBoundsException when the job runs.
      val df: DataFrame = ds.map(_.split(","))
        .filter(_.length >= 2)
        .map(fields => (fields(0), fields(1)))
        .toDF("name", "age")
        .cache() // reused by several actions below, so cache once

      df.show()

      // withColumn adds a column; sql.functions.lit() wraps any literal
      // value in a Column object so it can be attached to every row.
      df.withColumn("sno", lit("22"))
        .withColumnRenamed("name", "newName")
        .show()

      // Column references: ' (symbol), $ (string interpolator), col, column.
      df.withColumn("sno1", 'age + 1).show()
      df.withColumn("sno2", $"age").show()
      df.withColumn("sno3", col("age") + 2).show()
      df.withColumn("sno4", column("age") + 3).show()
    } finally {
      // Always release the SparkSession (and its SparkContext), even if a
      // job above fails — the original leaked the session on every run.
      spark.stop()
    }
  }
}
