package cn.doitedu.dw_etl.entropy

import org.apache.spark.sql.SparkSession

object 信息熵计算 {

  /**
   * Entry point: computes the information entropy of the payout-rate
   * distribution for each feature column (sex, age, job) in the input CSV,
   * using the project-defined `entropy` UDAF.
   *
   * Expected CSV layout (with header row): columns `sex`, `sexp`, `age`,
   * `agep`, `job`, `jobp`, where the `*p` columns carry numeric values that
   * are cast to double before aggregation — assumed from the query below;
   * TODO confirm against the actual pay.csv schema.
   *
   * @param args optional; args(0) may override the input CSV path
   *             (defaults to the original hard-coded path, so existing
   *             invocations are unaffected).
   */
  def main(args: Array[String]): Unit = {

    // Allow the input path to be supplied on the command line while keeping
    // the original default — backward-compatible generalization.
    val inputPath =
      if (args.nonEmpty) args(0) else "dw_etl/data/entropy/input/pay.csv"

    val spark = SparkSession
      .builder()
      .appName("赔付率信息熵计算")
      .master("local")
      .getOrCreate()

    // header=true so columns are addressable by name; values are read as
    // strings and cast explicitly in the SQL below.
    val df = spark.read.option("header", true).csv(inputPath)

    // createOrReplaceTempView instead of createTempView: idempotent, avoids
    // TempTableAlreadyExistsException if "df" is already registered in this
    // session (e.g. on re-run in a shared/notebook session).
    df.createOrReplaceTempView("df")

    // NOTE(review): assumes EntropyUDAF implements the UDAF interface that
    // spark.udf.register accepts directly — defined elsewhere in the project.
    spark.udf.register("entropy", EntropyUDAF)

    val res = spark.sql(
      """
        |select
        |entropy(sex,cast(sexp as double)) as sex_entro,
        |entropy(age,cast(agep as double)) as age_entro,
        |entropy(job,cast(jobp as double)) as job_entro
        |from df
        |""".stripMargin)

    // truncate = false: print full (untruncated) entropy values.
    res.show(100, truncate = false)
    spark.close()
  }

}
