package com.baishancloud.log.common.source

import com.baishancloud.log.common.connector.file.FileSourceConnector
import com.baishancloud.log.common.connector.kafka.{KafkaSourceConnector, sourceNameP}
import com.baishancloud.log.common.env.StreamEnv
import com.baishancloud.log.common.util.ParamUtil.paramName
import org.apache.flink.api.common.eventtime.WatermarkStrategy
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.api.scala._
import org.apache.flink.connector.kafka.source.KafkaSource
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode
import org.apache.flink.streaming.api.functions.source.{RichSourceFunction, SourceFunction}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}
import org.apache.flink.streaming.util.serialization.JSONKeyValueDeserializationSchema

/**
 * 获取source的统一出口
 *
 * @author ziqiang.wang 
 * @date 2021/10/20 16:10
 */
object SourceUtil extends Serializable {


  /**
   * Builds a Kafka source stream that deserializes only the record value, as a String.
   *
   * @param parameterTool main-class input parameters
   * @param number        which numbered source to read; e.g. "1" selects the source configured
   *                      by parameters prefixed with "source1.". Use this to obtain several
   *                      distinct sources from one parameter set.
   * @return a [[DataStream]] of record values
   */
  def kafkaOnlyValue(parameterTool: ParameterTool, number: String = ""): DataStream[String] = {
    val source: KafkaSource[String] = KafkaSourceConnector.builder(parameterTool, number).build().sourceOnlyValue()
    StreamEnv.getStreamExecutionEnvironment.fromSource(source, WatermarkStrategy.noWatermarks(), parameterTool.get(paramName(sourceNameP, number), ""))
  }


  /**
   * Builds a Kafka source stream carrying richer message information.<br>
   * For the available fields see [[JSONKeyValueDeserializationSchema]].
   *
   * @param parameterTool   main-class input parameters
   * @param includeMetaData whether to include record metadata (offset/topic/partition); default false
   * @param number          which numbered source to read; e.g. "1" selects the source configured
   *                        by parameters prefixed with "source1.".
   * @return a [[DataStream]] of JSON [[ObjectNode]] records
   */
  def kafkaJsonObject(parameterTool: ParameterTool, includeMetaData: Boolean = false, number: String = ""): DataStream[ObjectNode] = {
    val source: KafkaSource[ObjectNode] = KafkaSourceConnector.builder(parameterTool, number).build().sourceObjectNode(includeMetaData)
    StreamEnv.getStreamExecutionEnvironment.fromSource(source, WatermarkStrategy.noWatermarks(), parameterTool.get(paramName(sourceNameP, number), ""))
  }

  /**
   * Builds a local-file source stream that reads only the value, as a String.
   *
   * @param parameterTool main-class input parameters
   * @param number        which numbered source to read; selects parameters prefixed
   *                      with e.g. "source1.".
   * @return a [[DataStream]] of file lines/values
   */
  def fileOnlyValue(parameterTool: ParameterTool, number: String = ""): DataStream[String] = {
    val source: RichSourceFunction[String] = FileSourceConnector.builder(parameterTool, number).build().sourceOnlyValue()
    StreamEnv.getStreamExecutionEnvironment.addSource(source)
  }

  /**
   * Builds a local-file source stream that reads both key and value.
   *
   * @param parameterTool main-class input parameters
   * @param number        which numbered source to read; selects parameters prefixed
   *                      with e.g. "source1.".
   * @return a [[DataStream]] of (key, value) pairs
   */
  def fileKeyValue(parameterTool: ParameterTool, number: String = ""): DataStream[(String, String)] = {
    val source: RichSourceFunction[(String, String)] = FileSourceConnector.builder(parameterTool, number).build().sourceKeyValue()
    StreamEnv.getStreamExecutionEnvironment.addSource(source)
  }


  /**
   * Debug source that continuously emits increasing numbers, parallelism 1.
   * Intended for local testing.
   *
   * @param delay delay between emissions in milliseconds; default 100
   * @return a [[DataStream]] of increasing [[Long]] values starting at 1
   */
  def numberEmit(delay: Long = 100): DataStream[Long] = {
    val env: StreamExecutionEnvironment = StreamEnv.getStreamExecutionEnvironment
    env.addSource(new SourceFunction[Long] {
      // cancel() is invoked from a different thread than run(); @volatile ensures
      // the emit loop observes the updated flag and actually stops.
      @volatile var flag: Boolean = true
      var number: Long = 1

      override def run(ctx: SourceFunction.SourceContext[Long]): Unit = {
        while (flag) {
          // Per the Flink SourceFunction contract, emission and state updates must
          // hold the checkpoint lock so checkpoints see a consistent position.
          ctx.getCheckpointLock.synchronized {
            ctx.collect(number)
            number += 1
          }
          Thread.sleep(delay)
        }
      }

      override def cancel(): Unit = {
        flag = false
      }
    })
      .setParallelism(1)
  }

  /**
   * Reads data from a socket, parallelism 1. Intended for local testing.<br>
   * The default line delimiter is \n.
   *
   * @param port     port number
   * @param hostname host name; default "localhost"
   * @return a [[DataStream]] of text lines read from the socket
   */
  def socket(port: Int, hostname: String = "localhost"): DataStream[String] = {
    val env: StreamExecutionEnvironment = StreamEnv.getStreamExecutionEnvironment
    env.socketTextStream(hostname, port).setParallelism(1)
  }


}
