package com.codejiwei.sample

import com.alibaba.fastjson.{JSON, JSONObject}
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.common.serialization.{DeserializationSchema, SimpleStringSchema}
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.connector.jdbc.{JdbcConnectionOptions, JdbcExecutionOptions, JdbcSink, JdbcStatementBuilder}
import org.apache.flink.connector.kafka.sink.{KafkaRecordSerializationSchema, KafkaSink}
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.scala.{DataStream, OutputTag, StreamExecutionEnvironment}

import java.sql.PreparedStatement

/**
 * Author: jiwei01
 * Date: 2022/8/26 14:45
 * Package: com.codejiwei.sample
 * Description: Flink job that consumes JSON strings from the Kafka topic
 * `kafka2mysql_sample_source`, parses each record into a [[Book]] with
 * fastjson, and inserts the rows into the MySQL table `books` through the
 * JDBC sink. A side-output stream (tag "side") is forwarded to the Kafka
 * topic `kafka2mysql_sample_sink`.
 */
object Kafka2Mysql {

  // Explicit main instead of `extends App`: the App trait relies on delayed
  // initialization, which has well-known ordering pitfalls and is discouraged
  // for non-trivial entry points.
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallel instance keeps this sample deterministic and easy to trace.
    env.setParallelism(1)

    // Kafka source yielding raw JSON strings; starts from the latest offsets,
    // so records produced before the job starts are skipped.
    val bookJsonSource: KafkaSource[String] = KafkaSource.builder[String]()
      .setBootstrapServers("172.28.16.37:9092")
      .setTopics("kafka2mysql_sample_source")
      .setGroupId("zlink-sample")
      .setStartingOffsets(OffsetsInitializer.latest())
      .setValueOnlyDeserializer(new SimpleStringSchema())
      .build()

    // NOTE(review): fastjson deserializes via bean-style reflection — confirm
    // that a Scala case class like Book actually round-trips correctly here.
    val bookDS: DataStream[Book] = env
      .fromSource(bookJsonSource, WatermarkStrategy.noWatermarks(), "KafkaSource")
      .map(jsonStr => JSON.parseObject(jsonStr, classOf[Book]))

    // JDBC sink: batched (1000 rows / 200 ms) inserts with up to 5 retries.
    // NOTE(review): `com.mysql.jdbc.Driver` is the legacy Connector/J 5.x
    // class; with Connector/J 8+ this should be `com.mysql.cj.jdbc.Driver`.
    // NOTE(review): credentials are hard-coded — move them to configuration.
    val jdbcSink: SinkFunction[Book] = JdbcSink.sink(
      "insert into books (id, title, authors, year) values (?, ?, ?, ?)",
      new JdbcStatementBuilder[Book]() {
        override def accept(statement: PreparedStatement, book: Book): Unit = {
          statement.setLong(1, book.id)
          statement.setString(2, book.title)
          statement.setString(3, book.authors)
          statement.setInt(4, book.year)
        }
      },
      JdbcExecutionOptions.builder()
        .withBatchSize(1000)
        .withBatchIntervalMs(200)
        .withMaxRetries(5)
        .build(),
      new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
        .withUrl("jdbc:mysql://172.29.218.10:3306/flink_test")
        .withDriverName("com.mysql.jdbc.Driver")
        .withUsername("root")
        .withPassword("codejiwei")
        .build())

    // Side-output stream.
    // NOTE(review): nothing upstream emits to this tag — the map() above never
    // writes to a side output, so this stream is always empty. A
    // ProcessFunction that emits to the tag is required for it to carry data.
    val tag = new OutputTag[String]("side")
    val sideDS: DataStream[String] = bookDS.getSideOutput(tag)

    // Kafka sink receiving the (currently empty — see note above) side output.
    val kafkaSink: KafkaSink[String] = KafkaSink.builder[String]()
      .setBootstrapServers("172.28.16.37:9092")
      .setRecordSerializer(KafkaRecordSerializationSchema.builder()
        .setTopic("kafka2mysql_sample_sink")
        .setValueSerializationSchema(new SimpleStringSchema())
        .build())
      .build()

    bookDS.addSink(jdbcSink).name("Mysql Sink")
    sideDS.sinkTo(kafkaSink).name("Kafka Sink")

    env.execute("Kafka2MySQL")
  }
}
/**
 * One record of the `books` table / one JSON message from Kafka.
 *
 * @param id      primary key of the row
 * @param title   book title
 * @param authors author name(s), stored as a single string column
 * @param year    publication year
 */
case class Book(id: Long, title: String, authors: String, year: Int) extends Serializable {
  /** Human-readable rendering, e.g. for logging. */
  override def toString: String =
    s"Book id:$id, title:$title, authors:$authors, year:$year"
}
