package com.sunzm.flink.datastream.scala.source

import java.sql.{Connection, DriverManager, PreparedStatement, ResultSet, SQLException}
import java.util.Properties

import org.apache.commons.lang3.RandomUtils
import org.apache.commons.lang3.time.DateFormatUtils
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.source.{RichSourceFunction, SourceFunction}
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment, _}
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.slf4j.{Logger, LoggerFactory}

/**
 * Demonstrates the various Flink DataStream sources: collection, elements,
 * socket, text file, two custom [[SourceFunction]]s (a timestamp generator and
 * a JDBC/MySQL poller) and a Kafka consumer. Uncomment the call you want to
 * try in [[main]]; by default the Kafka source runs.
 */
object ScalaFlinkSourceDemo {
  private val logger: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$"))
  // Toggle between a local environment (with web UI) and the environment
  // provided by the cluster at submission time.
  private val isLocal = true

  def main(args: Array[String]): Unit = {

    // 1. Create the execution environment.
    val env: StreamExecutionEnvironment =
      if (isLocal) StreamExecutionEnvironment.createLocalEnvironmentWithWebUI()
      else StreamExecutionEnvironment.getExecutionEnvironment

    logger.info("开始...")

    // Uncomment exactly one of the following to try a different source:

    //fromCollection(env)

    //fromElements(env)

    //socketTextStream(env)

    //readTextFile(env)

    //cusSourceSimple(env)

    //cusSourceMySQL(env)

    // Kafka source (default). NOTE(review): broker address and topic are
    // hard-coded demo values; externalize them for real deployments.
    val properties: Properties = new Properties
    properties.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "82.156.210.70:9093")
    properties.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink-test")
    properties.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")
    val dataStream: DataStream[String] =
      env.addSource(new FlinkKafkaConsumer[String]("my-topic", new SimpleStringSchema, properties))

    dataStream.print()

    // 2. Trigger job execution (nothing above runs until this call).
    env.execute(this.getClass.getSimpleName.stripSuffix("$"))

  }

  /** Builds a stream from an in-memory collection and prints it. */
  def fromCollection(env: StreamExecutionEnvironment): Unit = {
    val dataStream: DataStream[String] = env.fromCollection(Seq(
      "hello,java,hello,hadoop",
      "hello, spark,hello, flink")
    )

    dataStream.print()
  }

  /** Builds a stream from individual elements and prints it. */
  def fromElements(env: StreamExecutionEnvironment): Unit = {
    val dataStream: DataStream[Int] = env.fromElements(1, 3, 5, 7, 9)

    dataStream.print()
  }

  /** Reads lines from a TCP socket (e.g. `nc -lk 9999`) and prints them. */
  def socketTextStream(env: StreamExecutionEnvironment): Unit = {

    val dataStream: DataStream[String] = env.socketTextStream("82.156.210.70", 9999)

    dataStream.print()
  }

  /** Reads a UTF-8 text file line by line and prints it. */
  def readTextFile(env: StreamExecutionEnvironment): Unit = {
    val dataStream: DataStream[String] = env.readTextFile("data/spark/rdd/word.txt", "UTF-8")

    dataStream.print()
  }

  /** Attaches the simple custom timestamp source and prints its output. */
  def cusSourceSimple(env: StreamExecutionEnvironment): Unit = {
    val dataStream: DataStream[String] = env.addSource(new MyDataSourceSimple())

    dataStream.print()
  }

  /** Attaches the MySQL-polling custom source and prints its output. */
  def cusSourceMySQL(env: StreamExecutionEnvironment): Unit = {
    val dataStream: DataStream[String] = env.addSource(new MyDataSourceMySQL())

    dataStream.print()
  }

  /**
   * Minimal custom source: emits the current wall-clock time formatted as
   * "yyyy-MM-dd HH:mm:ss", then sleeps a random 100-20000 ms between emits.
   */
  private class MyDataSourceSimple extends SourceFunction[String] {
    // @volatile: cancel() is invoked from a different thread than run(),
    // so the flag change must be visible across threads.
    @volatile private var isRunning = true

    override def run(ctx: SourceFunction.SourceContext[String]): Unit = {
      while (isRunning) {
        // Format the current time and emit it downstream.
        val currentTimeMillis = System.currentTimeMillis
        val formatDateStr = DateFormatUtils.format(currentTimeMillis, "yyyy-MM-dd HH:mm:ss")
        ctx.collect(formatDateStr)
        // Throttle emission with a random pause; Flink cancels sources by
        // interrupting the thread, so treat interruption as a shutdown signal
        // instead of letting InterruptedException fail the task.
        try Thread.sleep(RandomUtils.nextInt(100, 20000))
        catch {
          case _: InterruptedException =>
            isRunning = false
            Thread.currentThread().interrupt() // preserve the interrupt status
        }
      }
    }

    override def cancel(): Unit = {
      isRunning = false
    }
  }

  /**
   * Custom source that polls a MySQL table every 10 seconds and emits each row
   * as "userId,name,age". Connection setup happens in open(), cleanup in
   * close(); cancel() only flips the running flag — closing JDBC resources
   * from the cancel thread would race with run() still iterating the ResultSet.
   */
  private class MyDataSourceMySQL extends RichSourceFunction[String] {
    // @volatile: written by cancel() on another thread, read by run()'s loop.
    @volatile private var isRunning: Boolean = true
    private var connection: Connection = _
    private var pstmt: PreparedStatement = _
    private val sql: String = "SELECT * FROM t_user"

    override def open(parameters: Configuration): Unit = {
      // NOTE(review): hard-coded demo credentials; move to configuration.
      connection = DriverManager.getConnection("jdbc:mysql://localhost:3306/mydb?useSSL=false", "root", "root")

      pstmt = connection.prepareStatement(sql)
    }

    override def run(ctx: SourceFunction.SourceContext[String]): Unit = {

      while (isRunning) {
        // Open a fresh ResultSet per poll and always close it — the original
        // kept it in a field and leaked each iteration's cursor until close().
        val rs: ResultSet = pstmt.executeQuery
        try {
          while (rs.next) {
            val userId = rs.getString("userId")
            val name = rs.getString("name")
            val age = rs.getString("age")
            ctx.collect(userId + "," + name + "," + age)
          }
        } finally {
          closeQuietly(rs)
        }
        // Pause between polls; exit cleanly if Flink interrupts us on cancel.
        try Thread.sleep(10000)
        catch {
          case _: InterruptedException =>
            isRunning = false
            Thread.currentThread().interrupt() // preserve the interrupt status
        }
      }

    }

    override def cancel(): Unit = {
      // Only signal the loop to stop; resource cleanup is done in close(),
      // which the Flink runtime calls after run() returns.
      isRunning = false
    }

    override def close(): Unit = {
      isRunning = false
      closeQuietly(pstmt)
      closeQuietly(connection)
    }

    /** Closes a JDBC resource, logging (not propagating) any SQLException. */
    private def closeQuietly(resource: AutoCloseable): Unit = {
      if (resource != null) {
        try resource.close()
        catch {
          case e: SQLException =>
            logger.warn("Failed to close JDBC resource", e)
        }
      }
    }
  }

}
