package com.njbdqn.util

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object MySQLConnection {

  // Connection settings loaded once at object init from the "mysql" properties file.
  // NOTE(review): assumed to contain "url", "driver", "user", "password" — a missing
  // key now fails fast with a NoSuchElementException naming the key (see below).
  val param_map: Map[String, String] = ReadPropertiesFileTool.readProperty("mysql")

  /**
   * Loads the given MySQL table into a DataFrame via Spark's JDBC source.
   *
   * @param spark     active SparkSession used to build the reader
   * @param tableName name of the database table to read
   * @return lazily-loaded DataFrame backed by the JDBC connection
   */
  def readMySQL(spark: SparkSession, tableName: String): DataFrame = {
    // Map.apply throws NoSuchElementException identifying the missing key,
    // which is clearer than the former Option.get's generic error. The stray
    // `elems =` named argument on the varargs Map constructor is also removed.
    val options: Map[String, String] = Map(
      "url" -> param_map("url"),
      "driver" -> param_map("driver"),
      "user" -> param_map("user"),
      "password" -> param_map("password"),
      "dbtable" -> tableName
    )
    spark.read.format("jdbc").options(options).load()
  }

  /**
   * Writes the DataFrame to the given MySQL table, replacing its contents.
   *
   * @param spark     unused; kept for backward compatibility with existing callers
   *                  (the DataFrame carries its own session)
   * @param df        data to persist
   * @param tableName destination table name
   */
  def writeTable(spark: SparkSession, df: DataFrame, tableName: String): Unit = {
    val prop = new Properties()
    prop.put("user", param_map("user"))
    prop.put("password", param_map("password"))
    // SaveMode.Overwrite drops and recreates the target table on each call —
    // callers rely on this replace-all semantic.
    df.write.mode(SaveMode.Overwrite).jdbc(param_map("url"), tableName, prop)
  }
}
