package org.yuanzheng.sink

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.yuanzheng.source.{CustomSource, StationLog}

/**
 * @author yuanzheng
 * @date 2020/6/16-21:55
 */

/**
 * A Flink sink that writes each [[StationLog]] record into the MySQL
 * `stationlog` table via plain JDBC.
 *
 * Lifecycle: `open()` establishes the connection and prepares the INSERT
 * statement once per task; `invoke()` executes one INSERT per record;
 * `close()` releases both resources.
 *
 * NOTE(review): connection URL and credentials are hard-coded; consider
 * passing them through the job configuration.
 */
class CustomSink extends RichSinkFunction[StationLog] {
  // JDBC resources — initialized in open(), released in close().
  var conn: Connection = _
  var pst: PreparedStatement = _

  /**
   * Writes a single record. Binds the six StationLog fields to the
   * prepared INSERT in column order and executes it immediately
   * (one round-trip per record — no batching).
   */
  override def invoke(in: StationLog): Unit = {
    pst.setString(1, in.sid)
    pst.setString(2, in.callOut)
    pst.setString(3, in.callIn)
    pst.setString(4, in.callType)
    pst.setLong(5, in.callTime)
    pst.setLong(6, in.duration)
    pst.executeUpdate()
  }

  /** Opens the JDBC connection and prepares the INSERT statement. */
  override def open(parameters: Configuration): Unit = {
    conn = DriverManager.getConnection("jdbc:mysql://localhost/yuanzheng?characterEncoding=UTF-8&useUnicode=true&useSSL=false", "root", "root")
    pst = conn.prepareStatement("INSERT INTO stationlog(sid, callOut, callIn, callType, callTime, duration) VALUES (?, ?, ?, ?, ?, ?)")
  }

  /**
   * Releases JDBC resources.
   *
   * Fix: the original unconditionally called `pst.close()` / `conn.close()`,
   * which throws a NullPointerException when `open()` failed before either
   * field was assigned, and leaked the connection if closing the statement
   * threw. Null-guard both, and close the connection in a `finally` so it is
   * released even when `pst.close()` fails.
   */
  override def close(): Unit = {
    try {
      if (pst != null) pst.close()
    } finally {
      if (conn != null) conn.close()
    }
  }
}

/**
 * Driver program: reads StationLog events from [[CustomSource]] and
 * persists them to MySQL through [[CustomSink]].
 */
object UserDefinedJdbcSink {

  def main(args: Array[String]): Unit = {
    // Implicit TypeInformation instances required by the Scala DataStream API.
    import org.apache.flink.streaming.api.scala._

    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // Wire source directly into the JDBC sink and launch the job.
    env
      .addSource(new CustomSource)
      .addSink(new CustomSink)

    env.execute()
  }
}
