package com.shujia.flink.transformaction

import org.apache.flink.api.common.functions.FlatMapFunction
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.util.Collector

/**
 * Demonstrates the flatMap transformation on a Flink DataStream:
 * each comma-separated input line is split into individual words,
 * and every word is emitted downstream as its own element.
 */
object Demo2FlatMapOperator {
  def main(args: Array[String]): Unit = {

    // Entry point of a Flink program — comparable to SparkContext in Spark.
    val env = StreamExecutionEnvironment.getExecutionEnvironment

    // Bounded demo source: two comma-separated strings.
    val ds = env.fromCollection(List("java,spark,python", "a,v,sw,s"))

    // Scala lambda overload of flatMap: returning a collection per input
    // element flattens it, so each word becomes a separate stream record.
    // (Equivalent to a FlatMapFunction that calls collector.collect per word.)
    val wordsDS = ds.flatMap((line: String) => line.split(","))

    wordsDS.print()

    // Triggers the actual job execution; nothing runs before this call.
    env.execute("flatMap")
  }
}
