package com.mjf.file

import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.core.fs.FileSystem
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.api.windowing.assigners.TumblingProcessingTimeWindows
import org.apache.flink.streaming.api.windowing.time.Time

/**
 * Flink DataStream word-count example: reads `/input.txt` from the classpath,
 * counts word occurrences in bounded (BATCH) execution mode, prints the result
 * and writes it to a local text file (overwriting any previous run).
 */
object WriteAsText {
  def main(args: Array[String]): Unit = {

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallelism so the sink produces one output file instead of a directory of parts.
    env.setParallelism(1)
    // Input is bounded, so run with the batch scheduler/shuffle behavior.
    env.setRuntimeMode(RuntimeExecutionMode.BATCH)

    // NOTE(review): getResource(...).getPath can return a percent-encoded path if the
    // resource path contains spaces/non-ASCII characters — verify against the actual resource.
    val inputPath: String = WriteAsText.getClass.getResource("/input.txt").getPath
    val outputPath: String = "flink-share-connector/connector-file/output.txt"

    // 从文本文件读取数据 (read data from a text file)
    val source: DataStream[String] = env.readTextFile(inputPath)

    val result: DataStream[(String, Int)] = source
      // Split on runs of non-word characters; drop the empty token that `split`
      // produces when a line starts with punctuation/whitespace (e.g. " hi" -> "", "hi"),
      // which would otherwise be counted as a word "".
      .flatMap(_.split("\\W+"))
      .filter(_.nonEmpty)
      .map(word => (word.toLowerCase, 1))
      .keyBy(_._1)
      // NOTE(review): a processing-time window in BATCH mode fires only at end-of-input,
      // effectively yielding one global count per word — confirm this is the intent.
      .window(TumblingProcessingTimeWindows.of(Time.seconds(10)))
      .sum(1)

    result.print()

    // 写出文件 (write results out to a file)
    // NOTE(review): writeAsText is deprecated in recent Flink versions in favor of FileSink;
    // kept here since this example exists to demonstrate writeAsText.
    result.writeAsText(outputPath, FileSystem.WriteMode.OVERWRITE)

    env.execute(WriteAsText.getClass.getName)

  }
}
