package com.shujia;

import java.io.File;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

//由于每一个block块都会产生一个map任务，但是这里仅仅是模拟的，并不是实际hadoop的做法
// Simulates one map task per block file. This is only a simulation — not how
// Hadoop actually schedules map tasks.
// NOTE(review): the class name shadows java.util.Map; kept for compatibility
// with existing callers/build scripts.
public class Map {
    public static void main(String[] args) {

        // Fixed-size pool: at most 8 map tasks run concurrently.
        ExecutorService pool = Executors.newFixedThreadPool(8);
        // Directory holding the simulated input block files.
        File file = new File("data/blocks");
        // listFiles() returns null when the path does not exist or is not a directory.
        File[] files = file.listFiles();

        long start = System.currentTimeMillis();
        if (files != null) {
            int index = 0;
            // Submit one MyMap task per block file, tagged with its index.
            for (File blockFile : files) {
                pool.submit(new MyMap(blockFile, index));
                index++;
            }
        }

        // Fix: the original code took the end timestamp right after submission,
        // so it measured only the submit loop, not the actual map-task runtime.
        // Stop accepting new tasks and wait for all submitted tasks to finish
        // before measuring elapsed time.
        pool.shutdown();
        try {
            if (!pool.awaitTermination(1, TimeUnit.HOURS)) {
                System.err.println("Timed out waiting for map tasks to finish");
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
        }

        long end = System.currentTimeMillis();
        System.out.println((end - start) + "毫秒");
    }
}
