package com.shujia.flink.sink

import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.streaming.api.scala._

/**
 * Flink batch-over-streaming job: counts students per class from a text file
 * and writes the result out as "clazz,count" lines.
 */
object Demo1FIleSInk {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // The input file is bounded, so run the pipeline in batch execution mode.
    env.setRuntimeMode(RuntimeExecutionMode.BATCH)

    // A single parallel instance keeps the sink output in one file.
    env.setParallelism(1)

    // One student record per line; the class name is the 5th comma-separated field.
    val lines: DataStream[String] = env.readTextFile("data/students.txt")

    // Count students per class: emit (clazz, 1), key by clazz, keep a running sum.
    val classCounts: DataStream[(String, Int)] = lines
      .map(line => (line.split(",")(4), 1))
      .keyBy(_._1)
      .sum(1)

    // Render each (clazz, count) pair as a CSV-style line for the text sink.
    val rendered: DataStream[String] =
      classCounts.map { case (clazz, num) => s"$clazz,$num" }

    // NOTE(review): writeAsText is deprecated in newer Flink releases in favor
    // of FileSink; kept here to preserve behavior and avoid new dependencies.
    rendered.writeAsText("data/clazz_num")

    env.execute()

  }
}
