package com.hrt.choperator

import java.sql.PreparedStatement

import org.apache.flink.connector.jdbc.{JdbcConnectionOptions, JdbcExecutionOptions, JdbcSink, JdbcStatementBuilder}
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment

/**
 * Flink streaming job that writes data into ClickHouse via the JDBC connector.
 */
/** Immutable record for one person row: primary key `id`, display `name`, and `age` in years. Marked `final` — case classes should not be extended (subclassing breaks the generated equals/hashCode contract). */
final case class PersonInfo(id: Int, name: String, age: Int)
object FlinkCH2 {
  /**
   * Entry point: reads comma-separated "id,name,age" lines from a TCP socket
   * and inserts them into the ClickHouse table `t_java` through Flink's JDBC sink.
   *
   * Optional arguments (previously ignored; defaults preserve old behavior):
   *  - args(0): socket host (default "node5")
   *  - args(1): socket port (default 9999)
   */
  def main(args: Array[String]): Unit = {
    val host: String = if (args.length > 0) args(0) else "node5"
    val port: Int    = if (args.length > 1) args(1).toInt else 9999

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)
    import org.apache.flink.streaming.api.scala._

    val rawLines: DataStream[String] = env.socketTextStream(host, port)

    // Parse "id,name,age". Malformed lines (wrong field count or non-numeric
    // id/age) are dropped instead of throwing, which would fail the whole job.
    val personDS: DataStream[PersonInfo] = rawLines.flatMap { line =>
      val fields: Array[String] = line.split(",")
      if (fields.length >= 3)
        try Some(PersonInfo(fields(0).trim.toInt, fields(1).trim, fields(2).trim.toInt))
        catch { case _: NumberFormatException => None }
      else None
    }

    val sink: SinkFunction[PersonInfo] = JdbcSink.sink(
      // Parameterized INSERT: values are bound via PreparedStatement below.
      "insert into t_java values (?,?,?)",
      // Maps each PersonInfo field onto the matching "?" placeholder.
      new JdbcStatementBuilder[PersonInfo] {
        override def accept(stmt: PreparedStatement, person: PersonInfo): Unit = {
          stmt.setInt(1, person.id)
          stmt.setString(2, person.name)
          stmt.setInt(3, person.age)
        }
      },
      // Flush every 2 records OR every 200 ms. Without a batch interval, a
      // trailing record below the batch size would sit unflushed indefinitely.
      new JdbcExecutionOptions.Builder()
        .withBatchSize(2)
        .withBatchIntervalMs(200L)
        .build(),
      // ClickHouse JDBC connection settings.
      new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
        .withUrl("jdbc:clickhouse://node1:8123/newdb")
        .withDriverName("ru.yandex.clickhouse.ClickHouseDriver")
        .withUsername("default")
        .withPassword("")
        .build()
    )

    personDS.addSink(sink)

    env.execute()
  }

}
