package com.li.spark0615.zhibiao

import org.apache.hadoop.hive.ql.exec.UDF
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.spark.sql.functions._

/**
 * Spark job: reads `ods.lx_brand` from Hive, reduces each `url` to its last
 * dot-separated segment (e.g. "www.example.com" -> "com"), counts rows per
 * segment, and overwrites the result into the MySQL table `col_07`.
 */
object Seven {
  def main(args: Array[String]): Unit = {

    // Hive/HDFS access is performed as "root" regardless of the local OS user.
    System.setProperty("HADOOP_USER_NAME","root")

    val session = SparkSession
      .builder()
      .appName("li")
      .master("local[*]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://192.168.23.40:9083")
      .config("dfs.client.use.datanode.hostname", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .getOrCreate()

    // Keep only the text after the final '.' of a dot-separated string.
    // Null-safe: a NULL url yields NULL instead of throwing a
    // NullPointerException inside the executor (the original crashed on NULLs).
    val lastSegment = udf(
      (str: String) => {
        if (str == null) null
        else {
          val parts = str.split("\\.")
          parts(parts.length - 1)
        }
      }
    )

    try {
      session
        .table("ods.lx_brand")
        .withColumn("url", lastSegment(col("url")))
        // Filter on the group key BEFORE the shuffle: equivalent to filtering
        // after aggregation (empty/NULL keys are dropped either way) but avoids
        // shuffling rows that would be discarded.
        .where(col("url") =!= "")
        .groupBy(col("url"))
        .agg(count(col("*")) as "Num")
        .orderBy(col("Num") desc)
        .write
        .format("jdbc")
        .option("url", "jdbc:mysql://192.168.23.40:3306/shtd_store?useSSL=false&characterEncoding=utf8")
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "col_07")
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      // Release the SparkContext and its resources even if the job fails;
      // the original never stopped the session.
      session.stop()
    }
  }
}
