package com.shujia.sink

import java.sql.{Connection, DriverManager, PreparedStatement}

import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.sink.{RichSinkFunction, SinkFunction}
import org.apache.flink.streaming.api.scala._

object Demo1Sink {

  /**
    * Reads student records from a local text file and writes each line
    * to MySQL through the custom [[MySinkFunction]] JDBC sink.
    */
  def main(args: Array[String]): Unit = {
    val environment: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Single parallel instance: one sink task, hence one DB connection.
    environment.setParallelism(1)

    val studentLines: DataStream[String] = environment.readTextFile("data/students.txt")
    studentLines.addSink(new MySinkFunction)

    // Lazily-built pipeline only runs once execute() is called.
    environment.execute()
  }

}

/**
  * RichSinkFunction: adds open() and close() lifecycle hooks
  * SinkFunction: only provides invoke()
  *
  */
class MySinkFunction extends RichSinkFunction[String]{

  // JDBC connection and reusable prepared statement; initialized in open(),
  // released in close(). Both may be null if open() failed partway.
  var conn: Connection = _
  var ps: PreparedStatement = _

  /**
    * Runs once per parallel subtask before any invoke() call.
    * Opens the JDBC connection and prepares the INSERT statement once,
    * so invoke() can reuse it for every record.
    *
    * @param parameters Flink runtime configuration (unused here)
    */
  override def open(parameters: Configuration): Unit = {
    // 1. Load the MySQL JDBC driver
    Class.forName("com.mysql.jdbc.Driver")

    // 2. Open the connection
    conn = DriverManager.getConnection("jdbc:mysql://master:3306/tour?useUnicode=true&characterEncoding=utf8","root","123456")

    // 3. Prepare the statement ONCE. The original code prepared a new
    // statement on every invoke() and never closed the previous one,
    // leaking a server-side statement handle per record.
    ps = conn.prepareStatement("insert into student(id,name,age,gender,clazz) values(?,?,?,?,?)")
  }

  /**
    * Runs once per parallel subtask on shutdown. Null guards protect
    * against an open() that failed before both resources were created.
    */
  override def close(): Unit = {
    if (ps != null) ps.close()
    if (conn != null) conn.close()
  }

  /**
    * Called once per record; binds the CSV fields and executes the insert.
    *
    * @param value   one record of the stream, expected as "id,name,age,gender,clazz"
    * @param context sink context (timestamps etc.), unused here
    */
  override def invoke(value: String, context: SinkFunction.Context[_]): Unit = {

    val splits: Array[String] = value.split(",")

    ps.setString(1,splits(0))
    ps.setString(2,splits(1))
    ps.setInt(3,splits(2).toInt)
    ps.setString(4,splits(3))
    ps.setString(5,splits(4))

    // executeUpdate() is the idiomatic call for INSERT statements
    ps.executeUpdate()

  }
}
