package com.sxzjrj.utils

import org.apache.spark.rdd.RDD
import scalikejdbc.{DB, SQL}
import scalikejdbc.config.DBs

/**
  * Created by ljj on 2019/3/23
  *
  */
object JdbcUtils {

  // Initialise the ScalikeJDBC connection pool once per JVM (reads the
  // standard scalikejdbc config). Because this lives in the object body it
  // also runs on each Spark executor the first time the object is touched
  // inside a closure.
  DBs.setup()

  /**
    * Persists aggregated batch results into MySQL table `t_mzlybc`.
    *
    * Each RDD partition is written inside one local transaction, so a
    * partition either commits completely or rolls back as a whole.
    *
    * @param result keyed tuples `((key1, key2), (count, v1, v2, v3))`;
    *               the six values map positionally onto the six columns of
    *               `t_mzlybc` — NOTE(review): confirm column order against
    *               the table schema.
    */
  def saveData2Mysql(result: RDD[((String, String), (Int, Double, Double, Double))]): Unit = {
    result.foreachPartition { iter =>
      // Skip empty partitions so we never borrow a pooled connection
      // (and open a transaction) for nothing. Iterator.hasNext does not
      // consume any element.
      if (iter.hasNext) {
        DB.localTx { implicit session =>
          iter.foreach { case ((k1, k2), (cnt, v1, v2, v3)) =>
            // MySQL accepts VALUE as a synonym for VALUES in INSERT.
            SQL(
              """
                |insert into t_mzlybc value(?,?,?,?,?,?)
              """.stripMargin)
              .bind(k1, k2, cnt, v1, v2, v3)
              .update()
              .apply()
          }
        }
      }
    }
  }

  /**
    * Persists streaming micro-batch results into MySQL table
    * `t_mblybc_streaming_test`, accumulating on key collision.
    *
    * Uses `INSERT ... ON DUPLICATE KEY UPDATE` so repeated keys add the new
    * batch's numbers onto the stored `F_JZCS`, `F_SJ_JE`, `F_HS_JE` and
    * `F_MB_YYF` columns instead of failing. Each partition is written inside
    * one local transaction.
    *
    * @param result keyed tuples `((key1, key2), (count, v1, v2, v3))`; the
    *               four numeric values are bound twice — once for the insert
    *               branch and once for the additive update branch.
    */
  def saveDStreamData2Mysql(result: RDD[((String, String), (Int, Double, Double, Double))]): Unit = {
    result.foreachPartition { iter =>
      // Same empty-partition guard as saveData2Mysql: avoid a pointless
      // connection/transaction when the partition has no rows.
      if (iter.hasNext) {
        DB.localTx { implicit session =>
          iter.foreach { case ((k1, k2), (cnt, v1, v2, v3)) =>
            SQL(
              """
                |insert into t_mblybc_streaming_test
                |value(?,?,?,?,?,?) on duplicate key
                |update F_JZCS = F_JZCS + ?, F_SJ_JE = F_SJ_JE + ?, F_HS_JE = F_HS_JE + ?, F_MB_YYF = F_MB_YYF + ?
              """.stripMargin)
              // First six binds feed the insert branch; the last four feed
              // the additive ON DUPLICATE KEY UPDATE branch.
              .bind(k1, k2, cnt, v1, v2, v3,
                cnt, v1, v2, v3)
              .update()
              .apply()
          }
        }
      }
    }
  }

}
