package com.shujia.flink.sink

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

object Demo2SinkFunction {

  /**
    * Entry point: streams each line of data/students.txt into MySQL
    * through the custom [[MysqlSink]] defined below.
    */
  def main(args: Array[String]): Unit = {
    // Single task slot so the sink opens exactly one JDBC connection.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // One record per line of the student file.
    val lines: DataStream[String] = env.readTextFile("data/students.txt")

    // Attach the custom sink; invoke() runs once per record.
    lines.addSink(new MysqlSink)

    // Lazily-built pipeline only runs once execute() is called.
    env.execute()
  }

}

/**
  * Custom sink implementation.
  * SinkFunction: plain sink interface (only invoke).
  * RichSinkFunction: additionally provides open and close lifecycle methods,
  * which makes it suitable for managing external resources such as a JDBC connection.
  */

class MysqlSink extends RichSinkFunction[String] {

  // JDBC connection shared by every invoke() call within this task.
  var con: Connection = _

  // Prepared ONCE in open() and reused for every record.
  // The original code created a new PreparedStatement per invoke() and never
  // closed it — a per-record resource leak on the driver and the DB server.
  var stat: PreparedStatement = _

  /**
    * Called once per task before any invoke().
    * Loads the JDBC driver, opens the connection and prepares the insert statement.
    */
  override def open(parameters: Configuration): Unit = {
    println("open")

    // 1. Load the driver (optional with JDBC 4+, kept for compatibility).
    Class.forName("com.mysql.jdbc.Driver")
    // 2. Open the connection.
    con = DriverManager.getConnection("jdbc:mysql://master:3306/bigdata?useUnicode=true&characterEncoding=utf-8", "root", "123456")
    // 3. Prepare the statement once; parameters are rebound per record.
    stat = con.prepareStatement("insert into student (id,name,age,gender,clazz) values(?,?,?,?,?)")
  }

  /**
    * Called once per task after the last invoke().
    * Null-safe so a failure in open() does not trigger an NPE here.
    */
  override def close(): Unit = {
    println("close")
    if (stat != null) stat.close()
    if (con != null) con.close()
  }

  /**
    * Called once per record.
    *
    * @param stu     one CSV line: id,name,age,gender,clazz
    * @param context sink context (unused)
    */
  override def invoke(stu: String, context: SinkFunction.Context[_]): Unit = {

    val split: Array[String] = stu.split(",")

    // Skip malformed/blank lines instead of failing the whole job
    // with an ArrayIndexOutOfBoundsException.
    if (split.length < 5) {
      println(s"skip malformed record: $stu")
      return
    }

    // Rebind parameters on the shared statement.
    stat.setString(1, split(0))
    stat.setString(2, split(1))
    stat.setInt(3, split(2).toInt)
    stat.setString(4, split(3))
    stat.setString(5, split(4))

    // Execute the insert for this record.
    stat.execute()
  }
}
