package com.lecosa.flink;

import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment}

/**
  * Flink streaming word count: reads a text file (HDFS by default),
  * splits lines into words, and prints a running count per word.
  */
object FileSourceTest2 {

  /**
    * Entry point: builds and runs a streaming word-count job.
    *
    * @param args optional; `args(0)` overrides the default HDFS input path
    */
  def main(args: Array[String]): Unit = {
    // Required for implicit TypeInformation instances used by the Scala DataStream API.
    import org.apache.flink.streaming.api.scala._

    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    // Allow the input path to be passed on the command line; fall back to the
    // original hard-coded HDFS location so existing invocations keep working.
    val inputPath: String =
      if (args.nonEmpty) args(0)
      else "hdfs://192.168.249.113:9000/flink/data/words.txt"

    val lines: DataStream[String] = env.readTextFile(inputPath)

    lines
      .flatMap(_.split(" ")) // line -> individual words
      .map((_, 1))           // word -> (word, 1)
      // NOTE(review): index-based keyBy/sum is deprecated in newer Flink versions;
      // consider keyBy(_._1) — kept as-is here because it changes the keyed-stream type.
      .keyBy(0)              // key by tuple field 0 (the word)
      .sum(1)                // running count per word
      .print()               // sink: emit results to stdout

    // Streaming programs are lazy: nothing executes until execute() is called.
    env.execute()
  }

}
