package com.shujia.flink.source

import org.apache.flink.streaming.api.scala._

/**
  * Demo: building a DataStream from an in-memory collection.
  *
  * A collection-backed source is a *bounded* stream — Flink processes the
  * finite elements and the job finishes. Useful for local testing without
  * an external source such as Kafka or a socket.
  */
object Demo1ListSource {

  def main(args: Array[String]): Unit = {
    // Obtain the streaming execution environment (local or cluster,
    // depending on how the job is launched).
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Bounded stream built from a local Scala collection.
    val lines: DataStream[String] =
      env.fromCollection(List("java,spark", "java,java", "spark,hadoop"))

    // Classic word count: split each line into words, pair each word
    // with a count of 1, group by the word, and keep a running sum.
    val words: DataStream[String] = lines.flatMap(line => line.split(","))
    val pairs: DataStream[(String, Int)] = words.map(word => (word, 1))
    val counts: DataStream[(String, Int)] = pairs
      .keyBy(pair => pair._1)
      .sum(1)

    // Sink: write the running counts to stdout.
    counts.print()

    // Trigger job execution — Flink builds the dataflow lazily, so
    // nothing runs until execute() is called.
    env.execute()
  }

}
