package cn.bigdata.structstreaming.job

import cn.bigdata.structstreaming.BusInfo
import org.apache.spark.sql.ForeachWriter
import com.mchange.v2.c3p0.ComboPooledDataSource
import java.sql.Connection


object MysqlWriter {

  // Shared c3p0 pool, lazily created on first use (see getMysqlConnection).
  var pools: ComboPooledDataSource = null

  /**
   * Create and configure the c3p0 pool against the bus_info database.
   *
   * Fails fast with IllegalStateException if the JDBC driver class cannot be
   * loaded: silently continuing (the previous printStackTrace) left a
   * half-configured pool whose failure only surfaced later at getConnection.
   *
   * NOTE(review): credentials and host are hard-coded here — presumably a
   * demo/dev setup; move to configuration for anything real.
   */
  def initMysqlPool(): Unit = {
    pools = new ComboPooledDataSource()
    try pools.setDriverClass("com.mysql.jdbc.Driver")
    catch {
      case e: Exception =>
        // A pool without a driver is unusable; surface the problem immediately.
        throw new IllegalStateException("Failed to load MySQL JDBC driver", e)
    }
    // Plain literal: the former String.format call had no format specifiers
    // and was a no-op wrapper.
    pools.setJdbcUrl("jdbc:mysql://centos1:3306/bus_info")
    pools.setUser("root")
    pools.setPassword("12345678")
    pools.setMinPoolSize(5)
    pools.setMaxPoolSize(20)
    pools.setAcquireIncrement(5)
  }

  /**
   * Return a connection from the shared pool, initializing the pool on the
   * first call. Initialization is synchronized: Spark tasks on multiple
   * threads may race through the null check, and the old check-then-init
   * could build two pools (leaking one).
   */
  def getMysqlConnection: Connection = {
    this.synchronized {
      if (null == pools) {
        initMysqlPool()
      }
    }
    pools.getConnection
  }
}

class MysqlWriter extends ForeachWriter[BusInfo] with org.apache.spark.internal.Logging {

  // JDBC connection for this partition; acquired in open(), released in close().
  var mysqlConn: Connection = null

  /** Acquire a pooled connection for this partition/epoch; true = process rows. */
  override def open(partitionId: Long, epochId: Long): Boolean = {
    mysqlConn = MysqlWriter.getMysqlConnection
    log.info("open function get mysql connection ")
    true
  }

  /**
   * Insert one BusInfo row into lglat_info (lglat, deploy_num, create_time).
   *
   * The PreparedStatement is now closed in a finally block: the original
   * created one statement per row and never closed it, leaking server-side
   * statement handles for the lifetime of the connection.
   */
  override def process(value: BusInfo): Unit = {
    val lglat: String = value.lglat
    val deployNum: String = value.deployNum
    val preparedStatement = mysqlConn.prepareStatement("insert into lglat_info(lglat,deploy_num,create_time) values(?, ?, ?) ")
    try {
      preparedStatement.setString(1, lglat)
      preparedStatement.setString(2, deployNum)
      // create_time stored as epoch millis at write time.
      preparedStatement.setLong(3, System.currentTimeMillis())
      val execute = preparedStatement.execute
      log.info(s"process write to mysql ${execute.booleanValue()}")
    } finally {
      preparedStatement.close()
    }
  }

  /**
   * Return the connection to the pool. Guarded against null: if open() threw
   * before assigning mysqlConn, the original unguarded close() would NPE and
   * mask the real failure.
   */
  override def close(errorOrNull: Throwable): Unit = {
    if (mysqlConn != null) {
      mysqlConn.close()
    }
  }
}
