package com.shujia.tf

import org.apache.flink.api.common.functions.FlatMapFunction
import org.apache.flink.streaming.api.scala._
import org.apache.flink.util.Collector

/**
  * Demonstrates the two ways of expressing flatMap on a Flink DataStream:
  * the concise Scala lambda API and the explicit Java-style FlatMapFunction.
  */
object Demo2FlatMap {
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Source: a small bounded stream of comma-separated word lists.
    val linesDS: DataStream[String] = env.fromCollection(List("java,spark", "java,hadoop,flink", "spark,flink"))

    // Scala API: the lambda returns a collection, each element becomes a record.
    val scalaStyleDS: DataStream[String] = linesDS.flatMap(_.split(","))
    //scalaStyleDS.print()

    // Java API: implement FlatMapFunction explicitly and emit via the Collector.
    val javaStyleDS: DataStream[String] = linesDS.flatMap(new FlatMapFunction[String, String] {
      /**
        * Called once per input record; may emit zero or more output records.
        *
        * @param value a single input line
        * @param out   collector used to send records downstream; may be invoked
        *              multiple times per input
        */
      override def flatMap(value: String, out: Collector[String]): Unit =
        // Emit every comma-separated word as its own record.
        value.split(",").foreach(out.collect)
    })

    javaStyleDS.print()

    env.execute()
  }
}
