package com.li.spark0615.zhibiao

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.functions._


object One {

  /** Entry point: loads the `lx_com` table from MySQL, then counts rows
    * grouped by the first character of the contact name (substituting the
    * first character of "王二狗" when contact is empty or null) and prints
    * the top 100 groups ordered by count, descending.
    */
  def main(args: Array[String]): Unit = {

    // Impersonate root so HDFS access through the Hive metastore succeeds.
    System.setProperty("HADOOP_USER_NAME","root")

    val session = SparkSession
      .builder()
      .appName("li")
      .master("local[*]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://192.168.23.40:9083")
      .config("dfs.client.use.datanode.hostname", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .getOrCreate()

    try {
      // createTempView returns Unit — nothing useful to bind to a val here.
      readMysql(session, "lx_com").createTempView("lx_com")

      session.sql(
        """
          |select contact , count(*) as Num from ( select
          |if( contact="" or contact is null , substring("王二狗",1,1) , substring(contact,1,1) ) as contact
          |from
          |lx_com
          |)
          |group by contact
          |""".stripMargin)
        .orderBy(col("Num").desc) // `.desc` method call: postfix-operator form is deprecated
        .show(100)
    } finally {
      // Release the SparkContext and its resources even if the job fails.
      session.stop()
    }
  }

  /** Reads up to 600 000 rows of `table` from the MySQL `laxaio` database
    * over JDBC.
    *
    * @param session active SparkSession performing the read
    * @param table   MySQL table name; concatenated directly into the pushdown
    *                subquery, so it must come from trusted internal code only
    *                (not safe for untrusted input — SQL-injection risk)
    * @return DataFrame backed by the JDBC subquery
    */
  def readMysql(session:SparkSession,table:String):DataFrame={
    session
      .read
      .format("jdbc")
      .option("url", "jdbc:mysql://192.168.23.34:3306/laxaio?useSSL=false")
      .option("user", "root")
      .option("password", "123456")
      // NOTE(review): string-built SQL — acceptable only while `table` is a
      // compile-time constant; parameterize/whitelist before widening use.
      .option("dbtable", "( select * from  " + table + " limit 600000 ) temp")
      .load()
  }

}
