package com.bigdata.flink.producer

import java.sql.{Connection, DriverManager, PreparedStatement, ResultSet, SQLException}

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.source.{RichParallelSourceFunction, SourceFunction}

import scala.collection.mutable

class CustomMysqlSource extends RichParallelSourceFunction[mutable.HashMap[String,String]]{

  // JDBC resources: created in open(), used in run(), released in close().
  var connection : Connection = _

  var ps : PreparedStatement = _

  var resultSet : ResultSet = _

  /**
   * Opens the JDBC connection and prepares the query, once per parallel subtask.
   *
   * NOTE(review): credentials and the URL are hard-coded — move them to Flink
   * configuration / a secret store before this reaches production.
   */
  override def open(parameters: Configuration): Unit = {
    val driver = "com.mysql.jdbc.Driver"
    val url = "jdbc:mysql://192.168.201.100:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false"
    val user = "root"
    val password = "H123456h"
    Class.forName(driver)
    connection = DriverManager.getConnection(url,user,password)

    val sql = "select user_id, domain from user_domain_config"
    ps = connection.prepareStatement(sql)
  }

  /** Releases JDBC resources in reverse order of acquisition. */
  override def close(): Unit = {
    // Fix: the ResultSet opened in run() was previously never closed (leak).
    if(resultSet != null){
      resultSet.close()
    }

    if(ps != null){
      ps.close()
    }

    if(connection != null){
      connection.close()
    }
  }

  /**
   * Reads every row of user_domain_config and emits each row as a
   * mutable.HashMap with keys "userId" and "domain".
   *
   * This source is bounded: the query runs once and the method returns when
   * the ResultSet is exhausted.
   *
   * @param ctx Flink source context used to emit each row map downstream.
   */
  override def run(ctx: SourceFunction.SourceContext[mutable.HashMap[String, String]]): Unit = {
    try{
      resultSet = ps.executeQuery()
      while (resultSet.next()){
        // val instead of var: the binding is never reassigned.
        val map = new mutable.HashMap[String,String]()
        map += ("userId" -> resultSet.getString(1))
        map += ("domain" -> resultSet.getString(2))
        ctx.collect(map)
      }
    } catch {
      case ex: SQLException =>
        // NOTE(review): consider rethrowing so Flink fails/restarts the task;
        // as written, a SQL error silently ends the source after a stack trace.
        ex.printStackTrace()
    }
  }

  // No-op: run() is bounded and terminates on its own after one query.
  override def cancel(): Unit = {}


}
