package com.kkb.hbase.mr.demo2UDInputFormat;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * 自定义RecordReader 用来 MyInputFormat中
 */
/**
 * Custom {@link RecordReader} used by the custom InputFormat (MyInputFormat).
 *
 * <p>Because the file is configured as non-splittable, each split covers one
 * whole file. This reader emits exactly ONE record per split: the key is
 * {@link NullWritable} and the value is the complete file content as a
 * {@link BytesWritable}.</p>
 *
 * <p>NOTE: the entire file is buffered in memory, so this is only suitable
 * for small files (and is hard-limited to 2&nbsp;GB by Java array indexing).</p>
 */
public class MyRecodReader extends RecordReader<NullWritable, BytesWritable> {
    /** The split being read; spans the whole file since files are non-splittable. */
    private FileSplit fileSplit;
    /** Job configuration, needed to resolve the split's FileSystem. */
    private Configuration configuration;
    /** Reusable value holder returned by {@link #getCurrentValue()}. */
    private BytesWritable bytesWritable;
    /** True once the single record has been produced. */
    private boolean flag = false;

    /**
     * Captures the split and configuration and prepares the value holder.
     *
     * @param split   the input split; covers the entire file because the
     *                owning InputFormat marks files as non-splittable
     * @param context task context, used to obtain the job configuration
     * @throws IOException never thrown here, required by the contract
     * @throws InterruptedException never thrown here, required by the contract
     */
    @Override
    public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException {
        this.fileSplit = (FileSplit) split;
        this.configuration = context.getConfiguration();
        this.bytesWritable = new BytesWritable();
    }

    /**
     * Reads the whole file into memory as the single record of this split.
     *
     * @return {@code true} on the first call (record available),
     *         {@code false} on every subsequent call
     * @throws IOException if the file cannot be read, or if the split is
     *                     larger than {@link Integer#MAX_VALUE} bytes and
     *                     therefore cannot fit in a Java byte array
     * @throws InterruptedException never thrown here, required by the contract
     */
    @Override
    public boolean nextKeyValue() throws IOException, InterruptedException {
        if (flag) {
            // The single record has already been emitted.
            return false;
        }
        long length = fileSplit.getLength();
        // The whole split is buffered in one byte[]; Java arrays are
        // int-indexed, so a larger split would silently truncate via the
        // (int) cast. Fail loudly instead.
        if (length > Integer.MAX_VALUE) {
            throw new IOException(
                    "Split of " + fileSplit.getPath() + " is too large to buffer in memory: "
                            + length + " bytes");
        }
        byte[] splitContext = new byte[(int) length];
        Path path = fileSplit.getPath();
        FileSystem fileSystem = path.getFileSystem(configuration);
        // try-with-resources guarantees the stream is closed even when
        // readFully throws (the previous version leaked it on failure).
        try (FSDataInputStream inputStream = fileSystem.open(path)) {
            IOUtils.readFully(inputStream, splitContext, 0, (int) length);
        }
        // Hand the buffer to the reusable value object.
        bytesWritable.set(splitContext, 0, (int) length);
        flag = true;
        return true;
    }

    /** @return the key for the current record; always {@link NullWritable}. */
    @Override
    public NullWritable getCurrentKey() throws IOException, InterruptedException {
        return NullWritable.get();
    }

    /** @return the value for the current record: the full file content. */
    @Override
    public BytesWritable getCurrentValue() throws IOException, InterruptedException {
        return bytesWritable;
    }

    /**
     * Reports progress through the split.
     *
     * @return {@code 1.0} once the single record has been read, else {@code 0.0}
     */
    @Override
    public float getProgress() throws IOException, InterruptedException {
        return flag ? 1.0f : 0.0f;
    }

    /** No per-reader resources are held open between calls; nothing to close. */
    @Override
    public void close() throws IOException {
    }
}
