package com.li.spark0615.zhibiao

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, sum}

object Four {

  /** Entry point: connects to the remote Hive metastore, reads the Hive table
    * `lx_com`, keeps only rows with a non-empty `name`, sorts them by `name`
    * and prints the result to stdout.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {

    // Hadoop user to impersonate when talking to HDFS / the Hive metastore.
    System.setProperty("HADOOP_USER_NAME", "root")

    val session = SparkSession
      .builder()
      .appName("li")
      .master("local[*]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://192.168.23.40:9083")
      // Resolve datanodes by hostname — needed when the driver runs outside
      // the cluster's internal network.
      .config("dfs.client.use.datanode.hostname", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .getOrCreate()

    // FIX: the original `where(col("name"))` passed a non-boolean column
    // (AnalysisException at runtime), `orderBy()` had no sort expression, and
    // no action was triggered so the query never ran. The commented-out code
    // below shows the intended predicate: name must be non-empty.
    session.table("lx_com")
      .where(col("name") =!= "")
      .orderBy(col("name"))
      .show()

//    Two.readMysql(session,"lx_pay")
//      .groupBy(col("name"))
//      .agg(sum(col("money")) as "money" )
//      .orderBy(col("money") desc )
//      .where(col("name") =!= "")
//
//      .write
//      .format("jdbc")
//      .option("url", "jdbc:mysql://192.168.23.40:3306/shtd_store?useSSL=false")
//      .option("user", "root")
//      .option("password", "123456")
//      .option("dbtable", "col_03")
//      .save()

    // Release Spark resources before exiting.
    session.stop()
  }
}
