package com.dmall.scf.demo

import java.util.Properties

import com.dmall.scf.dto.ScoreModelDimension
import com.dmall.scf.utils.{MySQLUtils, SparkUtils}
import org.apache.spark.sql.{Dataset, SaveMode, SparkSession}

/**
 * @description
 * scf demo: read JSON with a local SparkSession, query it through a temp view,
 * and read/write a MySQL table over JDBC.
 * @author wangxuexing
 * @date 2019/12/22
 */
object SparkReadHive {
  // Simple class name of this object with the trailing '$' (appended by the
  // Scala compiler to object classes) removed; filterNot keeps the original
  // "drop every '$'" behavior while reading idiomatically.
  val CLASS_NAME = this.getClass.getSimpleName().filterNot(_ == '$')

  /**
   * Demo pipeline:
   *  1. build a local SparkSession with Hive support,
   *  2. load a JSON file, register it as a temp view and query it,
   *  3. show a MySQL table before/after appending the JSON rows via JDBC,
   *  4. run a join query against MySQL and map the result to beans.
   *
   * @param args unused command-line arguments
   */
  def execute(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("local-1576939514234")
      // If unset, defaults to <working dir>\spark-warehouse (e.g. C:\data\projects\parquet2dbs\spark-warehouse)
      .config("spark.sql.warehouse.dir", "H:\\data\\spark-ware")
      .enableHiveSupport() // enable Hive support
      .getOrCreate()

    import spark.implicits._

    // Load JSON and expose it to SQL as a temporary view.
    val df = spark.read.json("H:\\data\\people.json")
    df.createOrReplaceTempView("people")
    val peopleDf = spark.sql("select * from people")
    peopleDf.show()

    // Print each id on the executors. foreach returns Unit, so the result is
    // deliberately not bound to a val (the original bound it to `peopleId`).
    peopleDf.select("id").foreach(x => println(x.get(0)))

    // SECURITY NOTE(review): credentials and host are hard-coded for this demo;
    // move them to configuration / a secrets store before any real use.
    val connectionProperties = new Properties()
    connectionProperties.put("user", "db_pop_test")
    connectionProperties.put("password", "8tHfVWCLe7Giq8")
    val url = "jdbc:mysql://10.248.224.3:11202/dmall_scf?useUnicode=true&characterEncoding=UTF-8&autoReconnect=true&failOverReadOnly=false"

    // Snapshot of the MySQL table before the append.
    val jdbcDF2 = spark.read
      .jdbc(url, "dmall_scf.people", connectionProperties)
    println("======================before============================")
    jdbcDF2.show()

    // Append the JSON-derived rows into MySQL. The commented option shows how
    // column types could be pinned when Spark creates the table.
    peopleDf.write.mode(SaveMode.Append)//.option("createTableColumnTypes", "supplier_code varchar(64), supplier_name varchar(128), grab_date date, field_code varchar(128), field_name varchar(64), field_value varchar(512)")
      .jdbc(url, "dmall_scf.people", connectionProperties)

    // Snapshot after the append to show the new rows landed.
    val jdbcDF3 = spark.read
      .jdbc(url, "dmall_scf.people", connectionProperties)
    println("======================after============================")
    jdbcDF3.show()

    // Join query: active score models with their non-empty scoring dimensions.
    val sql = s"""    SELECT
                 |      sm.company_id,
                 |      sm.frequency,
                 |      sm.frequency_cron,
                 |      sm.`name` AS model_name,
                 |      sm.id AS score_model_id,
                 |      smd.id dimension_id,
                 |      smd.score_rule,
                 |      smd.statis_time_type,
                 |      smd.statis_days,
                 |      smd.statis_begin_day,
                 |      smd.statis_end_day
                 |    FROM
                 |      scfc_score_model sm
                 |        JOIN scfc_score_model_dimension smd ON smd.score_model_id = sm.id
                 |    WHERE
                 |      sm.`status` = 1
                 |      AND length(trim(smd.score_rule)) != 0
                 |      AND sm.yn = 1
                 |      AND smd.yn = 1""".stripMargin

    // Same query executed via the "query" option of the JDBC data source.
    val df5 = spark.read.format("jdbc").option("url", url)
      .option("user", "db_pop_test")
      .option("password", "8tHfVWCLe7Giq8")
      .option("driver", "com.mysql.jdbc.Driver")
      .option("query", sql)
      .load()
    println("======================df5============================")
    df5.show()

    // Same query again via the project helper, then mapped onto bean objects.
    val df6 = MySQLUtils.getDFFromMysql(spark, sql)
    println("======================df6============================")
    val beanList = SparkUtils.dataFrame2Bean[ScoreModelDimension](df6, classOf[ScoreModelDimension])
    beanList.foreach(x => println(x.frequencyCron))
    df6.show()
  }
}
