package com.lvmama.monkey.common.utils

import com.lvmama.monkey.common.utils.JDBCUtils.JDBCTemplate
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}

/**
  * Created by hejing on 2017/10/12.
  */
object Conversion {
  // Default JDBC settings map; expected keys: "driver" (JDBC driver class) and "conn" (connection URL).
  val JDBCDefault = JDBCTemplate.JDBCDefaultSet
  // Shared JDBC connection properties (user/password/etc.) passed to every read/write.
  val connP = JDBCTemplate.getConProperties

  /** Looks up a required JDBC setting, failing fast with a descriptive message
    * instead of the bare `NoSuchElementException` that `Option.get` would throw
    * when the key is absent.
    */
  private def requiredSetting(key: String): String =
    JDBCDefault.getOrElse(
      key,
      throw new IllegalStateException(
        s"Missing JDBC setting '$key' in JDBCTemplate.JDBCDefaultSet"))

  /** Enriches [[DataFrame]] with a MySQL write helper. */
  implicit class ImplicitInsert(df: DataFrame) extends Serializable {
    /**
      * Writes the wrapped DataFrame to the given MySQL table via JDBC.
      *
      * @param tableName target table name
      * @param database  currently unused; kept for call-site compatibility — TODO confirm whether it should qualify the table
      * @param saveMode  Spark save mode (Append / Overwrite / ...)
      */
    def insertDF2Mysql(tableName: String, database: String, saveMode: SaveMode): Unit = {
      df.write.mode(saveMode)
        .option("driver", requiredSetting("driver"))
        .jdbc(requiredSetting("conn"), tableName, connP)
    }
  }

  /** Enriches [[SQLContext]] with MySQL read helpers. */
  implicit class ImplicitLoad(sqlCtx: SQLContext) extends Serializable {
    /**
      * Loads the given MySQL table as a DataFrame (single-partition read).
      *
      * @param tableName source table name
      * @param database  currently unused; kept for call-site compatibility — TODO confirm whether it should qualify the table
      */
    def LoadFromMysql(tableName: String, database: String): DataFrame =
      sqlCtx.read.jdbc(requiredSetting("conn"), tableName, connP)

    /**
      * Loads the given MySQL table in parallel, one partition per predicate.
      *
      * @param tableName  source table name
      * @param database   currently unused; kept for call-site compatibility
      * @param predicates WHERE-clause fragments; each fragment produces one partition
      */
    def LoadFromMysql(tableName: String, database: String, predicates: Array[String]): DataFrame =
      sqlCtx.read.jdbc(requiredSetting("conn"), tableName, predicates, connP)
  }

}
