package com.gitee.broadcast

import java.io.File

import org.apache.flink.api.common.functions.RichMapFunction
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.configuration.Configuration

import scala.io.Source

object DistrbutedCacheDemo {
  /**
   * Demonstrates Flink's distributed cache: a student-name lookup file is
   * registered on the cluster, and each task loads it once in `open()` to
   * enrich score records (studentId, subject, score) with the student's name.
   */
  def main(args: Array[String]): Unit = {
    val env: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment

    // Register the student file in the distributed cache under the key "count1"
    // (third argument `true` marks it executable per the Flink API).
    env.registerCachedFile("D:\\data\\input\\sutdent.txt", "count1", true)

    // Prepare the score data: (studentId, subject, score).
    import org.apache.flink.api.scala._
    val scoreDataSet: DataSet[(Int, String, Int)] =
      env.fromCollection(List((1, "语文", 50), (2, "数学", 70), (5, "英文", 86)))

    // Replace each record's studentId with the student's name looked up from the cached file.
    val result: DataSet[(String, String, Int)] = scoreDataSet.map(
      new RichMapFunction[(Int, String, Int), (String, String, Int)] {
        // studentId -> studentName; built once per task instance in open().
        var studentMap: Map[Int, String] = _

        override def open(parameters: Configuration): Unit = {
          // Fetch the locally materialized copy of the cached file.
          val file: File = getRuntimeContext.getDistributedCache.getFile("count1")

          // Parse comma-separated lines of the form "id,name".
          // Close the Source in a finally block — the original leaked the file handle.
          val source = Source.fromFile(file)
          try {
            studentMap = source
              .getLines()
              .map { line =>
                val fields: Array[String] = line.split(",")
                (fields(0).toInt, fields(1))
              }
              .toMap
          } finally {
            source.close()
          }
        }

        override def map(in: (Int, String, Int)): (String, String, Int) = {
          val studentId: Int = in._1
          // getOrElse keeps the job running when a score references an unknown student.
          val studentName: String = studentMap.getOrElse(studentId, "null")
          (studentName, in._2, in._3)
        }

        override def close(): Unit = {
          // Nothing to release: the Source is closed in open(), and studentMap
          // is reclaimed by GC when the task ends.
        }
      })
    result.print()

  }

}
