package com.yeming.flink.practice

import java.sql.{Connection, DriverManager, PreparedStatement}

import com.yeming.flink.practice.source.{MyCustomerSource, StationLog}
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._

object CustomJdbcSink {

  // Generates random StationLog records and writes them into the local
  // MySQL table (t_station_log) through the custom JDBC sink below.
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallel subtask: one sink instance, one JDBC connection.
    env.setParallelism(1)

    val logStream: DataStream[StationLog] = env.addSource(new MyCustomerSource)
    logStream.addSink(new MyCustomJdbcSink)

    env.execute("CustomJdbcSink")
  }

  /**
   * Custom sink that writes each StationLog record into the MySQL table
   * t_station_log over a plain JDBC connection.
   *
   * One instance exists per parallel subtask; the connection and prepared
   * statement are created once in open() and reused for every record.
   */
  class MyCustomJdbcSink extends RichSinkFunction[StationLog] {

    // JDBC resources: initialized in open(), released in close().
    var conn:Connection = _
    var pst:PreparedStatement = _

    /**
     * Invoked once per incoming record: binds the StationLog fields to the
     * prepared INSERT and executes it immediately (no batching).
     *
     * @param value   the record to persist
     * @param context runtime context supplied by Flink (unused here)
     */
    override def invoke(value: StationLog, context: SinkFunction.Context[_]): Unit = {
      pst.setString(1, value.sid)
      pst.setString(2, value.callOut)
      pst.setString(3, value.callIn)
      pst.setString(4, value.callType)
      pst.setLong(5, value.callTime)
      pst.setLong(6, value.duration)
      pst.executeUpdate()
    }

    /**
     * Called once per parallel subtask before any record is processed.
     * Opens the JDBC connection and prepares the INSERT statement.
     *
     * NOTE(review): credentials and URL are hard-coded; consider moving them
     * to configuration before production use.
     *
     * @param parameters Flink configuration (unused)
     */
    override def open(parameters: Configuration): Unit = {
      conn = DriverManager.getConnection("jdbc:mysql://localhost/mytest","root","root")
      pst = conn.prepareStatement("insert into t_station_log (sid, call_out,call_in, call_type, call_time, duration) values (?,?,?,?,?,?)")
    }

    /**
     * Releases the JDBC resources. Null-guarded so a failed open() (leaving
     * either field unassigned) does not throw NPE here, and wrapped in
     * try/finally so the connection is closed even if closing the statement
     * throws.
     */
    override def close(): Unit = {
      try {
        if (pst != null) pst.close()
      } finally {
        if (conn != null) conn.close()
      }
    }
  }
}
