package com.li.spark0615.zhibiao

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.DoubleType

object Twelve {

  /**
   * Batch job: joins `ods.lx_job` postings with their category
   * (`ods.lx_category`) and company (`ods.lx_com`), keeps rows whose salary
   * lower bound is above 4000 and upper bound below 10000, and writes the
   * company contact details plus a `money_range` string ("min_max") to the
   * MySQL table `shtd_store.col_12`, overwriting any existing data.
   *
   * Fixes vs. the original:
   *  - the concatenated salary column now carries an explicit alias, so the
   *    JDBC writer no longer creates a MySQL column named
   *    `concat(money, _, maney_max)`;
   *  - the SparkSession is stopped in a `finally` block.
   */
  def main(args: Array[String]): Unit = {

    // Run HDFS/Hive operations as root (required by this cluster's setup).
    System.setProperty("HADOOP_USER_NAME", "root")

    val session = SparkSession
      .builder()
      .appName("li")
      .master("local[*]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://192.168.23.40:9083")
      .config("dfs.client.use.datanode.hostname", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .getOrCreate()

    try {
      val cate: DataFrame = session.table("ods.lx_category")
      val job: DataFrame  = session.table("ods.lx_job")

      job
        .as("a")
        .join(cate.as("b"), col("a.category_id") === col("b.id"))
        // NOTE(review): "maney_max" looks like a typo for "money_max", but it
        // must match the Hive source schema — confirm before renaming.
        .select(col("comid"), col("money"), col("maney_max"))
        .as("a")
        .join(session.table("ods.lx_com").as("b"), col("b.id") === col("a.comid"))
        // Cast the salary bounds to Double so the range filter below
        // compares numerically rather than lexicographically.
        .select(
          col("name"), col("address"), col("phone"), col("contact"),
          col("money").cast(DoubleType).as("money"),
          col("maney_max").cast(DoubleType).as("maney_max")
        )
        .where(col("money") > 4000 and col("maney_max") < 10000)
        // Alias the concatenation so the JDBC target gets a valid column name.
        .select(
          col("name"), col("address"), col("phone"), col("contact"),
          concat(col("money"), lit("_"), col("maney_max")).as("money_range")
        )
        .write
        .format("jdbc")
        // NOTE(review): credentials are hard-coded; consider moving them to
        // configuration / a secrets store.
        .option("url", "jdbc:mysql://192.168.23.40:3306/shtd_store?useSSL=false&characterEncoding=utf8")
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "col_12")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // Release the local Spark cluster resources even if the job fails.
      session.stop()
    }
  }
}
