package com.csw.flink.core

import org.apache.flink.api.scala._

/**
  * Batch word count using the Flink DataSet API.
  *
  * Reads comma-separated words from `flink/data/words.txt`, counts
  * occurrences of each word, and writes `(word, count)` tuples as text
  * to `flink/wordcount/out`.
  *
  * NOTE(review): `writeAsText` fails if the output path already exists
  * (default WriteMode.NO_OVERWRITE) — delete the directory between runs,
  * or pass `WriteMode.OVERWRITE` if re-runs should clobber it.
  */
object Demo02BatchWordCount {
  def main(args: Array[String]): Unit = {

    // Create the Flink batch execution environment.
    val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment

    // Single parallel task so the output lands in one file (demo-friendly).
    env.setParallelism(1)

    // Read the input file line by line.
    val lineDS: DataSet[String] = env.readTextFile("flink/data/words.txt")

    // Split each line on commas into individual words.
    val words: DataSet[String] = lineDS.flatMap(_.split(","))

    // Pair each word with an initial count of 1.
    val kvDS: DataSet[(String, Int)] = words.map((_, 1))

    // Group by the word (tuple field 0)...
    val groupByDS: GroupedDataSet[(String, Int)] = kvDS.groupBy(0)

    // ...and sum the counts (tuple field 1) within each group.
    // (The original pipeline had a redundant identity `map` here; removed.)
    val resultDS: AggregateDataSet[(String, Int)] = groupByDS.sum(1)

    // Sinks in the DataSet API are lazy; the job runs on execute().
    resultDS.writeAsText("flink/wordcount/out")

    env.execute()
  }
}
