package com.li.flink.datastream

import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.functions.source.{ParallelSourceFunction, RichParallelSourceFunction, SourceFunction}
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment

/**
 * Demonstrates the ways a Flink DataStream program can obtain input:
 * custom sources (plain / parallel / rich-parallel), sockets, files,
 * and in-memory collections. Source bodies are left as `???` stubs —
 * this object only illustrates how each source is wired into the
 * execution environment.
 */
object SourceOp {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // A custom source is attached via StreamExecutionEnvironment.addSource(sourceFunction).

    // Plain SourceFunction: always executed with parallelism 1.
    val basicSource = new SourceFunction[String] {
      override def run(ctx: SourceFunction.SourceContext[String]): Unit = ???
      override def cancel(): Unit = ???
    }

    // ParallelSourceFunction: may be executed with parallelism > 1.
    val parallelSource = new ParallelSourceFunction[String] {
      override def run(ctx: SourceFunction.SourceContext[String]): Unit = ???
      override def cancel(): Unit = ???
    }

    // RichParallelSourceFunction: parallel, with access to lifecycle
    // methods (open/close) and the runtime context via the Rich* base.
    val richParallelSource = new RichParallelSourceFunction[String] {
      override def run(ctx: SourceFunction.SourceContext[String]): Unit = ???
      override def cancel(): Unit = ???
    }

    //    env.addSource(basicSource).setParallelism(2) // parallelism cannot be raised on a plain SourceFunction
    env.addSource(parallelSource).setParallelism(2) // parallelism may be configured here
    env.addSource(richParallelSource)

    // Reading from files:
    //    env.readTextFile("xxx/xx/x")
    //    val fileInput = FileInputFormat[String]
    //    env.readFile(fileInput, "xxx/xx/x")

    // Socket-based source.
    env.socketTextStream("bigdata04", 9001)

    // Collection-based source, fed from an Iterator.
    val lineIterator = new Iterator[String] {
      override def hasNext: Boolean = ???
      override def next(): String = ???
    }
    env.fromCollection(lineIterator)
  }
}
