package com.li.spark0615.zhibiao

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
import org.apache.spark.sql.functions._

object Nine {

  // NOTE(review): hosts and credentials are hard-coded; move to config/env vars
  // before running anywhere but a throwaway dev cluster.
  // characterEncoding=utf8 is set on BOTH store URLs so text read from col_06
  // and written to col_09 round-trips without mojibake (the original reader
  // URL omitted it while the writer URL had it).
  private val ShtdStoreUrl =
    "jdbc:mysql://192.168.23.40:3306/shtd_store?useSSL=false&characterEncoding=utf8"
  private val LaxaioUrl =
    "jdbc:mysql://192.168.23.34:3306/laxaio?useSSL=false"
  private val DbUser     = "root"
  private val DbPassword = "123456"

  /**
   * Entry point: joins MySQL table col_06 (by state) with Hive company/area
   * data, ranks companies by click count, and overwrites MySQL table col_09
   * with (name, state, clicked).
   */
  def main(args: Array[String]): Unit = {

    // Run HDFS/Hive operations as root (local driver outside the cluster).
    System.setProperty("HADOOP_USER_NAME", "root")

    val session = SparkSession
      .builder()
      .appName("li")
      .master("local[*]")
      .enableHiveSupport()
      .config("hive.metastore.uris", "thrift://192.168.23.40:9083")
      // Resolve datanodes by hostname — needed when the driver cannot reach
      // the datanodes' internal IPs directly.
      .config("dfs.client.use.datanode.hostname", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .getOrCreate()

    // Ensure the session is stopped even if any stage of the pipeline throws,
    // so local Spark resources are always released.
    try {
      // MySQL source table col_06, exposed as a temp view for SQL-style joins.
      readMysql(session, "col_06").createTempView("temp")

      // limit(...) caps row counts for this dev run — presumably to keep the
      // local job small; confirm before using the results as full metrics.
      val comIndex = session.table("ods.lx_comindex").limit(10000)
      val area     = session.table("ods.lx_area").limit(10000)
      val comClick = session.table("ods.lx_com_click").limit(10000)

      // Company (id, name) enriched with its area's state. Left join keeps
      // companies whose city has no matching area row (state will be null);
      // distinct() collapses duplicates introduced by multi-row city matches.
      val companiesWithState = comIndex.as("comIndex")
        .join(area.as("area"), col("comIndex.city") === col("area.city"), "left")
        .select(col("comIndex.id"), col("comIndex.name"), col("area.state"))
        .distinct()

      companiesWithState.show(30)

      session
        .table("temp")
        .as("temp")
        // Inner join: keep only states present in both col_06 and the
        // company/area data.
        .join(companiesWithState.as("b"), col("temp.state") === col("b.state"))
        .select(col("temp.state"), col("b.id"), col("b.name"))
        .as("a")
        // Attach click counts by company id (inner join drops unclicked ids).
        .join(comClick.as("b"), col("a.id") === col("b.id"))
        // `.desc` instead of the deprecated postfix `desc` operator.
        .orderBy(col("clicked").desc)
        .select("a.name", "a.state", "b.clicked")
        .write
        .format("jdbc")
        .option("url", ShtdStoreUrl)
        .option("user", DbUser)
        .option("password", DbPassword)
        .option("dbtable", "col_09")
        // Overwrite drops and recreates col_09 on every run.
        .mode(SaveMode.Overwrite)
        .save()
    } finally {
      session.stop()
    }
  }

  /**
   * Reads `table` from the shtd_store MySQL database.
   *
   * @param session active SparkSession
   * @param table   source table name
   * @return the table's contents as a DataFrame
   */
  def readMysql(session: SparkSession, table: String): DataFrame =
    jdbcRead(session, ShtdStoreUrl, table)

  /**
   * Reads `table` from the laxaio MySQL database.
   *
   * @param session active SparkSession
   * @param table   source table name
   * @return the table's contents as a DataFrame
   */
  def read2Mysql(session: SparkSession, table: String): DataFrame =
    jdbcRead(session, LaxaioUrl, table)

  // Shared JDBC reader — `url` selects the target database; credentials are
  // the module-level constants above.
  private def jdbcRead(session: SparkSession, url: String, table: String): DataFrame =
    session
      .read
      .format("jdbc")
      .option("url", url)
      .option("user", DbUser)
      .option("password", DbPassword)
      .option("dbtable", table)
      .load()

}
