package com.shujia.sink

import java.sql.PreparedStatement

import org.apache.flink.connector.jdbc.JdbcStatementBuilder
import org.apache.flink.streaming.api.functions.sink.SinkFunction
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

import org.apache.flink.connector.jdbc.JdbcConnectionOptions
import org.apache.flink.connector.jdbc.JdbcSink

/**
  * Demo: stream student CSV records from a text file into MySQL
  * through Flink's JDBC sink.
  *
  * Input format per line: id,name,age,gender,clazz
  */
object Demo4JdbcSInk {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: one CSV record per line.
    val lines: DataStream[String] = env.readTextFile("data/students.txt")

    // Binds the fields of one CSV record to the '?' placeholders, in order.
    val statementBuilder: JdbcStatementBuilder[String] = new JdbcStatementBuilder[String] {
      override def accept(ps: PreparedStatement, record: String): Unit = {
        val fields: Array[String] = record.split(",")
        // Parse every field up front so a malformed record fails before any binding.
        val sid: Long       = fields(0).toLong
        val sname: String   = fields(1)
        val sage: Long      = fields(2).toLong
        val sgender: String = fields(3)
        val sclazz: String  = fields(4)

        // Positional binding: indices match the placeholder order in the INSERT.
        ps.setLong(1, sid)
        ps.setString(2, sname)
        ps.setLong(3, sage)
        ps.setString(4, sgender)
        ps.setString(5, sclazz)
      }
    }

    // JDBC connection settings for the target MySQL instance.
    val connectionOptions: JdbcConnectionOptions =
      new JdbcConnectionOptions.JdbcConnectionOptionsBuilder()
        .withUrl("jdbc:mysql://master:3306/bigdata?useUnicode=true&characterEncoding=UTF-8")
        .withDriverName("com.mysql.jdbc.Driver")
        .withUsername("root")
        .withPassword("123456")
        .build()

    // Assemble the JDBC sink: one INSERT per incoming record.
    val jdbcSink: SinkFunction[String] = JdbcSink.sink(
      "insert into students (id, name, age, gender, clazz) values (?,?,?,?,?)",
      statementBuilder,
      connectionOptions
    )

    lines.addSink(jdbcSink)

    // Launch the job; blocks until the (bounded) source is exhausted.
    env.execute()
  }
}
