package com.shujia.core

import org.apache.flink.streaming.api.scala._
import org.apache.flink.api.common.RuntimeExecutionMode

/**
  * Demo: counting students per class with Flink's DataStream API
  * running in BATCH execution mode.
  *
  * Reads "data/students.txt" (CSV; the 5th field, index 4, is the
  * class name), maps each line to (clazz, 1), keys by clazz and sums
  * the counts, then prints the result.
  */
object Demo2barch {

  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    /**
      * Runtime execution modes:
      * 1. STREAMING — the default; every record is processed and a
      *    result is emitted incrementally per record.
      * 2. BATCH — emits a single consolidated result; the source must
      *    be bounded.
      * 3. AUTOMATIC — picks the mode automatically based on whether
      *    the source is bounded or unbounded.
      */
    env.setRuntimeMode(RuntimeExecutionMode.BATCH)

    // Bounded file source — required for BATCH mode.
    val lines: DataStream[String] = env.readTextFile("data/students.txt")

    // Extract the class name (5th CSV field) and pair it with a count of 1.
    val clazzOnes: DataStream[(String, Int)] = lines.map { line =>
      val clazz = line.split(",")(4)
      (clazz, 1)
    }

    // Group by class and sum the counts (tuple position 1).
    val counts: DataStream[(String, Int)] =
      clazzOnes
        .keyBy(kv => kv._1)
        .sum(1)

    counts.print()

    // Trigger execution of the assembled dataflow.
    env.execute()
  }

}
