package com.lagou.mr.inputformat;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;


//负责读取数据，一次读取整个文件内容，封装成kv输出
/**
 * A {@link RecordReader} that reads an entire file as a single record:
 * the key is the file name and the value is the complete file contents.
 * Intended for use with a non-splittable whole-file InputFormat.
 *
 * <p>Not thread-safe; a new instance is created per task by the framework.
 */
public class CustomRecordReader extends RecordReader<Text, BytesWritable> {
    // Hadoop job configuration, captured from the task context in initialize().
    private Configuration configuration;
    // The split being read; for a whole-file format this covers exactly one file.
    private FileSplit split;

    // True until the single record has been emitted by nextKeyValue().
    private boolean isProgress = true;

    // Reused output key/value objects, per the standard Hadoop pattern:
    // key = file name, value = entire file contents as bytes.
    private BytesWritable value = new BytesWritable();
    private Text k = new Text();

    /**
     * Captures the split and configuration for later use.
     *
     * @param split   the input split (must be a {@link FileSplit})
     * @param context the task attempt context supplying the job configuration
     */
    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
        this.split = (FileSplit) split;
        configuration = context.getConfiguration();
    }

    /**
     * Reads the whole file on the first call and populates the key/value pair.
     *
     * @return {@code true} exactly once (the first call), {@code false} afterwards
     * @throws IOException if the file cannot be opened or fully read; propagated
     *                     to the framework so the task fails instead of silently
     *                     emitting an empty/partial record
     */
    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        // One record per split: the entire file is consumed in a single read.
        if (isProgress) {
            // Whole-file buffer; split length == file length for this format.
            // NOTE(review): caps file size at Integer.MAX_VALUE bytes, which is
            // inherent to BytesWritable — files larger than 2 GB are unsupported.
            byte[] contents = new byte[(int) split.getLength()];
            Path path = split.getPath();
            FileSystem fs = path.getFileSystem(configuration);
            FSDataInputStream fis = null;
            try {
                fis = fs.open(path);
                // Fill the buffer completely; throws if the stream ends early.
                IOUtils.readFully(fis, contents, 0, contents.length);

                // Populate the reusable output objects.
                value.set(contents, 0, contents.length);
                k.set(path.getName());
            } finally {
                // Close only the stream; the FileSystem instance is cached by
                // Hadoop and must not be closed here.
                IOUtils.closeStream(fis);
            }
            isProgress = false;
            return true;
        }

        return false;
    }

    /** @return the current key (the file name) */
    @Override
    public Text getCurrentKey() throws IOException, InterruptedException {
        return k;
    }

    /** @return the current value (the full file contents) */
    @Override
    public BytesWritable getCurrentValue() throws IOException, InterruptedException {
        return value;
    }

    /**
     * @return 0 before the single record is consumed, 1 afterwards
     */
    @Override
    public float getProgress() throws IOException, InterruptedException {
        return isProgress ? 0.0f : 1.0f;
    }

    /** Nothing to release: the input stream is closed inside nextKeyValue(). */
    @Override
    public void close() throws IOException {

    }
}
