package com.shujia.sink

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._

import java.sql.{Connection, DriverManager, PreparedStatement}

object Demo04WCSinkMySQL {

  /**
   * Streaming word count: reads comma-separated words from a socket on
   * host "master" port 8888 (simulate with `nc -lk 8888`), keeps a running
   * count per word, and writes each updated count to MySQL via MyMySQLSink.
   */
  def main(args: Array[String]): Unit = {
    // Flink entry point; default parallelism equals the number of logical cores
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(2)

    // Unbounded DataStream (Flink's programming model, similar to Spark's DStream)
    // backed by the socket source
    val lines: DataStream[String] = env.socketTextStream("master", 8888)

    // split each line -> (word, 1) -> group by word -> running sum on position 1
    val wordCounts: DataStream[(String, Int)] = lines
      .flatMap(line => line.split(","))
      .map(word => (word, 1))
      .keyBy(kv => kv._1)
      .sum(1)

    // Upsert every updated (word, count) pair into MySQL
    wordCounts.addSink(new MyMySQLSink)

    env.execute()
  }

}

/**
 * Sink that upserts (word, count) tuples into the MySQL table `word_cnt`
 * using `REPLACE INTO`, so each word keeps only its latest count.
 *
 * Lifecycle: `open` runs once per parallel task instance to establish the
 * JDBC connection and prepare the statement; `invoke` runs once per record;
 * `close` releases JDBC resources when the task shuts down.
 */
class MyMySQLSink extends RichSinkFunction[(String, Int)] {
  var conn: Connection = _
  // Prepared once in open() and reused for every record. The original code
  // created a new PreparedStatement per invoke() and never closed it,
  // leaking a statement for each incoming record.
  var preSt: PreparedStatement = _

  override def open(parameters: Configuration): Unit = {
    // Load the MySQL JDBC driver and establish the connection once per task
    Class.forName("com.mysql.jdbc.Driver")
    conn = DriverManager.getConnection("jdbc:mysql://master:3306/student", "root", "123456")
    // REPLACE INTO deletes any existing row with the same key, then inserts
    preSt = conn.prepareStatement("replace into word_cnt values(?,?)")
  }

  override def close(): Unit = {
    // Null-guard: open() may have failed before either resource was created
    if (preSt != null) preSt.close()
    if (conn != null) conn.close()
  }

  override def invoke(value: (String, Int), context: SinkFunction.Context[_]): Unit = {
    // Bind the current (word, count) and execute the upsert
    preSt.setString(1, value._1)
    preSt.setInt(2, value._2)
    preSt.execute()
  }
}
