package com.ydl.learning.flink.demo.pipeline.impl

import com.ydl.learning.flink.demo.pipeline.java.entity.JobConfig
import com.ydl.learning.flink.demo.pipeline.{BaseProcedureImpl, Source}
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

import scala.util.Try

/**
 *
 *
 * @author ydl
 * @since 2022/1/7
 */
class SourceImpl extends Source[String] with BaseProcedureImpl {
  // Populated by setConfig(); read by init()/checkConfig().
  private var jobConfig: JobConfig = _
  private var host: String = _
  private var port: String = _

  /**
   * Creates the streaming environment and returns a socket text-stream source
   * reading newline-delimited records from `host:port`.
   *
   * Precondition: `setConfig` has been called (and ideally `checkConfig`
   * passed), otherwise `jobConfig`/`host`/`port` are still null.
   *
   * @return the raw line stream from the configured socket
   */
  override def init(): DataStream[String] = {
    streamEnv = StreamExecutionEnvironment.getExecutionEnvironment
    streamEnv.setMaxParallelism(this.jobConfig.getMaxParallelism)
    streamEnv.setParallelism(this.jobConfig.getDefaultParallelism)
    // '\n' is the record delimiter; port was validated as numeric in checkConfig().
    streamEnv.socketTextStream(this.host, this.port.toInt, '\n')
  }

  /**
   * Stores the job configuration and extracts the socket connection settings.
   *
   * @param config job configuration carrying "host" and "port" entries
   */
  override def setConfig(config: JobConfig): Unit = {
    this.jobConfig = config
    this.host = config.get("host")
    this.port = config.get("port")
  }

  /**
   * Validates the configuration captured by setConfig.
   *
   * Fails fast on a missing host/port, and on a port that is not a valid
   * TCP port number — otherwise init() would die later with an opaque
   * NumberFormatException from `port.toInt`.
   *
   * @throws Exception if host/port are absent or port is not in [0, 65535]
   */
  override def checkConfig(): Unit = {
    if (this.host == null || this.port == null) {
      // Message previously named the wrong class ("SocketStreamInitImpl").
      throw new Exception("SourceImpl config error: 'host' and 'port' are required!")
    }
    val portNum = Try(this.port.trim.toInt).getOrElse(-1)
    if (portNum < 0 || portNum > 65535) {
      throw new Exception(s"SourceImpl config error: invalid port '${this.port}'!")
    }
  }

}
