package com.baishancloud.log.common.connector.file

import com.baishancloud.log.common.util.ParamUtil.paramName
import org.apache.flink.api.java.utils.ParameterTool
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.source.{RichSourceFunction, SourceFunction}

import java.io.{BufferedReader, FileReader, Serializable}
import scala.beans.BeanProperty

/**
 * Local-file source connector that simulates a Kafka source by emitting the
 * lines of a file one at a time, with a configurable delay between records.
 *
 * @author ziqiang.wang 
 * @date 2021/10/21 18:20
 */
class FileSourceConnector extends Serializable {

  /**
   * Required.<br>
   * Path of the local file to read; use an absolute path.
   */
  @BeanProperty var filePath: String = ""
  /**
   * Pause between two emitted records, in milliseconds. Default: 1 ms per record.
   */
  @BeanProperty var emitIntervalMs: Long = 1
  /**
   * Separator used to split each line into a key/value pair. Default: empty
   * string (no split intended).<br>
   * Set it when simulating key-value Kafka records via [[sourceKeyValue]].
   */
  @BeanProperty var dataSeparator: String = ""

  /**
   * Returns this connector parameter object.
   */
  def build(): FileSourceConnector = {
    this
  }


  /**
   * Simulates a Kafka source that carries only a value: each line of the file
   * is emitted as one record; the source stops at end-of-file.
   */
  def sourceOnlyValue(): RichSourceFunction[String] = {
    new RichSourceFunction[String] {

      // Created in open(); may still be null in close()/cancel() if open() never ran.
      var reader: BufferedReader = _
      // Run flag; cleared on EOF, close() or cancel() to stop the emit loop.
      var flag: Boolean = true

      override def open(parameters: Configuration): Unit = {
        reader = new BufferedReader(new FileReader(filePath))
      }

      override def run(ctx: SourceFunction.SourceContext[String]): Unit = {
        while (flag) {
          val line = reader.readLine()
          if (line != null) {
            ctx.collect(line)
            // Throttle only when a record was actually emitted; the original
            // also slept once more after detecting EOF.
            Thread.sleep(emitIntervalMs)
          } else {
            flag = false // EOF reached
          }
        }
      }

      override def close(): Unit = {
        flag = false
        // Guard against open() never having been called.
        if (reader != null) reader.close()
      }

      override def cancel(): Unit = {
        flag = false
        if (reader != null) reader.close()
      }
    }
  }

  /**
   * Simulates a Kafka source with both key and value: each line is split on
   * [[dataSeparator]]; the first token becomes the key and the remainder the
   * value. A line that does not contain the separator is emitted with an
   * empty-string value instead of crashing.
   */
  def sourceKeyValue(): RichSourceFunction[(String, String)] = {
    new RichSourceFunction[(String, String)] {

      // Created in open(); may still be null in close()/cancel() if open() never ran.
      var reader: BufferedReader = _
      // Run flag; cleared on EOF, close() or cancel() to stop the emit loop.
      var flag: Boolean = true

      override def open(parameters: Configuration): Unit = {
        reader = new BufferedReader(new FileReader(filePath))
      }

      override def run(ctx: SourceFunction.SourceContext[(String, String)]): Unit = {
        while (flag) {
          val line = reader.readLine()
          // BUG FIX: the original tested `line == null` here, which called
          // split() on a null reference at EOF and stopped the loop on the
          // very first real line — this source never emitted any record.
          if (line != null) {
            // Limit of 2: only the first separator splits, so the value may
            // itself contain the separator (Kafka-like key/value semantics).
            val split: Array[String] = line.split(dataSeparator, 2)
            val key = split(0)
            val value = if (split.length > 1) split(1) else ""
            ctx.collect((key, value))
            Thread.sleep(emitIntervalMs)
          } else {
            flag = false // EOF reached
          }
        }
      }

      override def close(): Unit = {
        flag = false
        // Guard against open() never having been called.
        if (reader != null) reader.close()
      }

      override def cancel(): Unit = {
        flag = false
        if (reader != null) reader.close()
      }
    }
  }

}


object FileSourceConnector extends Serializable {


  /**
   * Builder method: returns a file-source connector parameter object whose
   * properties can then be read by the caller.<br>
   * Example main argument: --source.filePath<br>
   * See the package object for the full parameter list.
   *
   * @param parameterTool parameter utility holding the arguments received by the main method
   * @param number        which numbered data source to build; distinguishes startup arguments.
   *                      E.g. "1" reads parameters prefixed with "source1." so that several
   *                      independent sources can be configured side by side.
   */
  def builder(parameterTool: ParameterTool, number: String = ""): FileSourceConnector = {
    // Resolve the concrete parameter keys once, up front.
    val filePathKey: String = paramName(sourceFilePathP, number)
    val intervalKey: String = paramName(sourceEmitIntervalMsP, number)
    val separatorKey: String = paramName(sourceDataSeparatorP, number)

    // The file path is mandatory — fail fast when it is missing.
    if (!parameterTool.has(filePathKey)) {
      throw new RuntimeException(s"未设置 --$filePathKey 参数，可能是在source后面加上了数字，但是初始化时没指定数字？")
    }

    val connector: FileSourceConnector = new FileSourceConnector
    connector.setFilePath(parameterTool.get(filePathKey))

    // Optional parameters keep the connector's defaults when absent.
    if (parameterTool.has(intervalKey)) {
      connector.setEmitIntervalMs(parameterTool.getLong(intervalKey))
    }
    if (parameterTool.has(separatorKey)) {
      connector.setDataSeparator(parameterTool.get(separatorKey))
    }

    connector
  }


}