package flink_p1

import org.apache.flink.api.java.io.TextInputFormat
import org.apache.flink.core.fs.Path
import org.apache.flink.streaming.api.functions.source.FileProcessingMode
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment, createTypeInformation}

/**
 * Flink streaming example: continuously monitor a text file on HDFS and
 * print every line to stdout.
 *
 * Note on `FileProcessingMode`:
 *  - PROCESS_ONCE         — read the file a single time, then finish.
 *  - PROCESS_CONTINUOUSLY — keep watching the path; whenever the file
 *    changes, its FULL contents are re-read and re-emitted.
 *
 * `env.readTextFile(path)` is equivalent to
 * `readFile(format, path, FileProcessingMode.PROCESS_ONCE, -1, typeInfo)`.
 */
object FlinkTest_03_readhdfs {

  def main(args: Array[String]): Unit = {
    // Entry point into the Flink streaming runtime.
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Location of the source file on HDFS.
    val sourcePath = "hdfs://node1:9000/flink/data.txt"

    // Line-oriented input format rooted at the source path.
    val inputFormat = new TextInputFormat(new Path(sourcePath))

    // PROCESS_CONTINUOUSLY re-scans the target every 1000 ms; any change
    // triggers a full re-read of the file (not an incremental tail).
    val lines: DataStream[String] =
      env.readFile(inputFormat, sourcePath, FileProcessingMode.PROCESS_CONTINUOUSLY, 1000)

    // Sink: dump each record to stdout.
    lines.print()

    // Lazily-built pipeline only runs once execute() is called.
    env.execute("read hdfs app")
  }
}
