package com.gjy.learning.scala

import com.gjy.learning.scala.SparkReadOracle.oracleDFCreate
import org.apache.spark.sql.functions.col

import scala.math.BigDecimal.javaBigDecimal2bigDecimal

object Closure {

  import org.apache.spark.sql.SparkSession

  /** Demonstrates two ways Spark captures driver-side state in filters.
    *
    * 1. A `Column`-expression filter (`col("SAL") >= sal`): the value of
    *    `sal` is read once, when the expression tree is built, so mutating
    *    the variable afterwards does NOT change the already-built plan.
    * 2. A lambda filter (`row => ...`): the closure captures the variable
    *    itself and is serialized per job submission, so later mutations of
    *    `sal` can be observed by subsequent actions.
    *
    * Reads the `emp` table via the project helper [[oracleDFCreate]] and
    * prints each filter result twice, before and after reassigning `sal`.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("TaskOne")
      .master("local[*]")
      .getOrCreate()

    // Ensure the local Spark context is released even if a job fails
    // (previously the session was never stopped — resource leak).
    try {
      val emp = oracleDFCreate(spark, "emp")

      // Expression-based filter: `sal` is baked into the plan at build time.
      var sal = 1000
      val result = emp.filter(col("SAL") >= sal)
      result.show()
      sal = 2000
      result.show() // still uses the value captured when the plan was built (1000)

      // Lambda-based filter: the closure is re-serialized for each action,
      // so the second show() can see the updated value of `sal`.
      sal = 1000
      val result1 = emp.filter(row => {
        // NOTE(review): assumes column index 5 is SAL and has a DECIMAL type —
        // confirm against the `emp` table schema.
        val value = row.getDecimal(5).toInt
        // NOTE(review): strict `>` here vs `>=` in the expression filter above —
        // confirm the asymmetry is intentional and not a typo.
        value > sal
      })
      result1.show()
      sal = 2000
      result1.show()
    } finally {
      spark.stop()
    }
  }
}
