package com.zheng.flink.study.dataset.filecache;

import org.apache.commons.io.FileUtils;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.DataSource;
import org.apache.flink.configuration.Configuration;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

/**
 * Flink distributed file cache example: registers a file with the execution
 * environment and reads it from each parallel task instance.
 *
 * @author zhengbo
 * @date 2019/12/18
 */
public class DistributeFileCache {

    /**
     * Registers a local file in Flink's distributed cache, then reads it from
     * each task's {@code open()} via the runtime context.
     *
     * @param args optional; {@code args[0]} overrides the default cache file URI
     * @throws Exception if the Flink job or file read fails
     */
    public static void main(String[] args) throws Exception {

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        // Register the file first; args[0] may override the default Windows path.
        String filePath = args.length > 0 ? args[0] : "file:///d:/etc/dbexport.txt";
        env.registerCachedFile(filePath, "flink-cache-file");

        // Create a data source from an in-memory collection.
        DataSource<String> dataSource = env.fromCollection(Arrays.asList("zhangsan", "lisi", "zhangyi", "zhengb"));

        // Read the cached file when the map operator opens on each task.
        dataSource.map(new RichMapFunction<String, String>() {

            private File cacheFile;

            @Override
            public void open(Configuration parameters) throws Exception {
                // Fetch the locally materialized copy of the registered file.
                cacheFile = getRuntimeContext().getDistributedCache().getFile("flink-cache-file");

                // Explicit charset: the no-charset overload is deprecated and
                // depends on the platform default encoding.
                List<String> list = FileUtils.readLines(cacheFile, StandardCharsets.UTF_8);

                for (String line : list) {
                    System.out.println(line);
                }
            }

            @Override
            public String map(String value) throws Exception {
                // Identity map; this example only demonstrates cache access in open().
                return value;
            }

        }).print();

    }
}
