package cn.tedu.stream.sink

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

import scala.util.control.NonFatal

object StreamMySqlSinkDemo {

  /** Entry point: builds a one-element Student stream and persists it to MySQL. */
  def main(args: Array[String]): Unit = {

    // Streaming execution environment for this job.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Brings the implicit TypeInformation instances needed by the Scala DataStream API.
    import org.apache.flink.api.scala._

    val studentStream: DataStream[Student] = env.fromElements(
      Student(8, "xiaoming", "beijing biejing", "female")
    )

    // Write every record of the stream into MySQL through the custom sink,
    // then launch the job (execute() is what actually triggers the pipeline).
    studentStream.addSink(new MySqlSink)
    env.execute()
  }

}

/**
 * Flink sink that inserts each incoming [[Student]] record into the MySQL
 * `student` table. One connection and one prepared statement are created
 * per task instance in `open()` and released in `close()`.
 *
 * NOTE(review): the JDBC URL and credentials are hard-coded — consider
 * moving them into job configuration.
 */
class MySqlSink extends RichSinkFunction[Student] {

  // JDBC resources are initialized in open(); vars are required because
  // RichSinkFunction's lifecycle assigns them after construction.
  var connection: Connection = null
  var ps: PreparedStatement = null

  /** Acquires the JDBC connection and prepares the insert statement (once per task). */
  override def open(parameters: Configuration): Unit = {
    val driver = "com.mysql.jdbc.Driver"
    val url = "jdbc:mysql://hadoop01:3306/jtlog"
    val username = "root"
    val password = "root"

    // Explicit driver registration (redundant on JDBC 4+, but harmless).
    Class.forName(driver)

    connection = DriverManager.getConnection(url, username, password)
    val sql = "insert into student(sid,name,address,gender) values(?,?,?,?)"
    ps = connection.prepareStatement(sql)
  }

  /**
   * Releases JDBC resources. The connection is closed even if closing the
   * statement throws (the original version leaked the connection in that case).
   */
  override def close(): Unit = {
    try {
      if (ps != null) ps.close()
    } finally {
      if (connection != null) connection.close()
    }
  }

  /** Called once per record: binds the Student fields and executes the insert. */
  override def invoke(stu: Student, context: SinkFunction.Context[_]): Unit = {
    try {
      ps.setInt(1, stu.id)
      ps.setString(2, stu.name)
      ps.setString(3, stu.address)
      ps.setString(4, stu.gender)
      ps.executeUpdate()
    } catch {
      // Log and keep the job running on recoverable failures only; fatal
      // errors (OutOfMemoryError, InterruptedException, ...) must propagate.
      case NonFatal(ex) => println(s"failed to insert student sid=${stu.id}: ${ex.getMessage}")
    }
  }
}