package com.shujia.flink.source

import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.streaming.api.scala._

/**
 * Demo: read a bounded stream from a text file and count records per class.
 */
object Demo2FileSource {
  def main(args: Array[String]): Unit = {
    // Create the Flink execution environment
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Batch execution mode — appropriate because the file source is bounded
    env.setRuntimeMode(RuntimeExecutionMode.BATCH)

    /**
     * File-based source --- bounded stream
     * NOTE(review): the original comment said "collection-based source",
     * but this actually reads from a file via readTextFile.
     */
    val lines: DataStream[String] = env.readTextFile("data/students.txt")

    // Map each CSV line to (clazz, 1); field index 4 is presumably the
    // class column — TODO confirm against data/students.txt
    val classPairs: DataStream[(String, Int)] = lines.map(line => (line.split(",")(4), 1))

    // Key by class name and sum the 1s to get a per-class count
    val classCounts: DataStream[(String, Int)] = classPairs.keyBy(_._1).sum(1)

    classCounts.print()

    // Trigger job execution
    env.execute()
  }
}
