package com.njbdqn.util

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

object MysqlConnection {
  // JDBC connection settings (url/driver/user/password) loaded once from the
  // "mysql" section of the properties file at object initialization.
  val param_map = ReadPropertiesFileTool.readProperty("mysql")

  /** Loads the given MySQL table into a DataFrame via the Spark JDBC source.
    *
    * @param spark     active SparkSession used to build the reader
    * @param tableName name of the database table to read
    * @return a DataFrame backed by the JDBC source (lazily evaluated by Spark)
    */
  def readTable(spark: SparkSession, tableName: String): DataFrame = {
    val options: Map[String, String] = Map(
      "url"      -> param_map("url"),
      "driver"   -> param_map("driver"),
      "user"     -> param_map("user"),
      "password" -> param_map("password"),
      "dbtable"  -> tableName
    )
    spark.read.format("jdbc").options(options).load()
  }

  /** Writes a DataFrame to the given MySQL table over JDBC.
    *
    * The save mode is now a parameter (backward-compatible: it defaults to
    * the original hard-coded SaveMode.Overwrite, which DROPS and recreates
    * the target table — pass SaveMode.Append etc. to avoid that).
    *
    * @param spark     kept for call-site compatibility; unused, since the
    *                  writer is derived from `df` itself
    * @param df        the DataFrame to persist
    * @param tableName destination table name
    * @param mode      Spark SaveMode to apply (default: Overwrite)
    */
  def writeTable(spark: SparkSession, df: DataFrame, tableName: String,
                 mode: SaveMode = SaveMode.Overwrite): Unit = {
    val prop = new Properties()
    prop.put("user", param_map("user"))
    prop.put("password", param_map("password"))

    df.write.mode(mode).jdbc(param_map("url"), tableName, prop)
  }
}
