package com.xzc.wc

import com.xzc.apitest.source.SensorReading
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.functions.source.SourceFunction

import scala.util.Random

// Batch word count: reads a text file with Flink's DataSet API and prints per-word counts.
object WordCount {

  /**
   * Entry point for the batch word-count job.
   *
   * @param args optional; args(0) overrides the input file path
   *             (defaults to the bundled hello.txt sample when absent),
   *             keeping the original no-arg invocation working.
   */
  def main(args: Array[String]): Unit = {
    // Create a batch execution environment.
    val env = ExecutionEnvironment.getExecutionEnvironment

    // Input path: first CLI argument if provided, otherwise the original default sample file.
    val inputPath =
      if (args.nonEmpty) args(0)
      else "D:\\git\\learning_flink\\_01_试用\\src\\main\\resources\\hello.txt"
    val inputDataSet = env.readTextFile(inputPath)

    // Tokenize, group by word, and aggregate counts.
    // Split on runs of whitespace (\s+) so tabs and multiple spaces are handled,
    // and drop empty tokens so leading whitespace doesn't produce a bogus ("", n) entry
    // (split(" ") would emit empty strings for consecutive spaces).
    val resultDataSet: DataSet[(String, Int)] = inputDataSet
      .flatMap(_.split("\\s+"))
      .filter(_.nonEmpty)
      .map((_, 1))
      .groupBy(0) // key: the word (tuple field 0)
      .sum(1)     // value: sum of the 1s (tuple field 1)

    // Trigger execution and print the (word, count) pairs to stdout.
    resultDataSet.print()
  }
}
