package com.wudl.flink.stream.source

import org.apache.flink.api.java.{DataSet, ExecutionEnvironment}
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment


object SourceCollertion {
  /**
   * Entry point: runs the collection-source demo job.
   * Uncomment `readTextFile()` to run the file-reading demo instead.
   */
  def main(args: Array[String]): Unit = {
    // readTextFile()
    // Explicit parentheses: the method has side effects (builds and runs a Flink job).
    readArrCollection()
  }

  /**
   * Batch demo: builds a DataSet from in-memory elements and prints it.
   *
   * Uses the batch `ExecutionEnvironment` (DataSet API). For a batch job
   * that ends in a `print()` sink, Flink triggers execution eagerly, so no
   * explicit `execute()` call is needed here.
   */
  def readArrCollection(): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment

    // 0. Create a DataSet from individual elements (fromElements).
    val ds0: DataSet[String] = env.fromElements("spark", "flink")
    ds0.print()
  }


  /**
   * 读取文件
   */
  def readTextFile(): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // 读取文件
    val txtStream = env.readTextFile("F:\\ideaWorkSpace2020\\demo\\Flink-wudl\\src\\main\\resources\\readme.txt")
    txtStream.print("arr")
    env.execute()
  }

}
