package com.dmall.scf.demo

import com.dmall.scf.dto.ScoreModelDimension
import com.dmall.scf.utils.{MySQLUtils, SparkUtils}
import org.apache.spark.sql.{Dataset, SparkSession}
import scala.collection.JavaConversions._

/**
 * Demo job: reads active score-model rows joined with their dimensions from
 * MySQL via Spark SQL, maps the result to [[ScoreModelDimension]] beans and
 * prints each model name.
 *
 * @author wangxuexing
 * @since 2020/1/5
 * @version 1.0
 */
object SparkReadMysql {

  /**
   * Entry point: builds a local SparkSession, loads score-model/dimension
   * rows from MySQL, converts them to `ScoreModelDimension` beans and prints
   * each model's name. The session is always stopped, even on failure.
   *
   * @param args command-line arguments (unused)
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("local-1576939514234")
      // If unset, defaults to <working-dir>\spark-warehouse (original note:
      // C:\data\projects\parquet2dbs\spark-warehouse on this machine).
      .config("spark.sql.warehouse.dir", "H:\\data\\spark-ware")
      .enableHiveSupport() // activate Hive support
      .getOrCreate()
    // NOTE(review): not visibly used below, but kept in case
    // SparkUtils.dataFrame2Bean resolves an implicit Encoder here — confirm.
    import spark.implicits._

    try {
      // Score models joined with their dimensions; only enabled (status = 1),
      // non-deleted (yn = 1) rows with a non-blank score_rule are selected.
      val sql = s"""    SELECT
                   |      sm.company_id,
                   |      sm.`name` AS model_name,
                   |      sm.id AS score_model_id,
                   |      sm.frequency,
                   |      sm.frequency_cron,
                   |      smd.id dimension_id,
                   |      smd.score_rule,
                   |      smd.statis_time_type,
                   |      smd.statis_days,
                   |      smd.statis_begin_day,
                   |      smd.statis_end_day
                   |    FROM
                   |      scfc_score_model sm
                   |        JOIN scfc_score_model_dimension smd ON smd.score_model_id = sm.id
                   |    WHERE
                   |      sm.`status` = 1
                   |      AND length(trim(smd.score_rule)) != 0
                   |      AND sm.yn = 1
                   |      AND smd.yn = 1""".stripMargin

      val df6 = MySQLUtils.getDFFromMysql(spark, sql)
      println("======================df6============================")
      // NOTE(review): iteration below presumably relies on the deprecated
      // scala.collection.JavaConversions implicit (java.util.List -> Scala);
      // consider migrating to JavaConverters/.asScala.
      val beanList = SparkUtils.dataFrame2Bean[ScoreModelDimension](df6, classOf[ScoreModelDimension])
      beanList.foreach(x => println(x.modelName))
    } finally {
      // Fix: the original never stopped the session, leaking Spark resources.
      spark.stop()
    }
  }
}
