package org.egomsl.mw.mapred;

import java.io.Closeable;
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.egomsl.mw.HadoopRecord;
import org.egomsl.mw.record.Record;
import org.egomsl.mw.record.RecordScanner;
import org.egomsl.mw.region.RegionReader;

/**
 * Old-API ({@code org.apache.hadoop.mapred}) record reader that streams
 * {@link HadoopRecord}s out of a single ABase region directory.
 *
 * <p>The reader must be primed via {@link #initialize(InputSplit, JobConf)}
 * before {@link #next(BytesWritable, HadoopRecord)} is called; the split's
 * region directory is opened through a {@link RegionReader} and iterated with
 * a {@link RecordScanner}.
 *
 * <p>Not thread-safe: a single instance is expected to be driven by one map
 * task at a time, per the Hadoop RecordReader contract.
 */
public class ABaseRecordReader implements RecordReader<BytesWritable, HadoopRecord> {
    /** Scanner over the split's region; null until initialize() succeeds. */
    private RecordScanner regionRecordScanner;
    /** Most recently scanned record, wrapped for Hadoop; null when exhausted. */
    private HadoopRecord currentRecord;

    /**
     * Opens a scanner over the region directory named by the split.
     *
     * @param inputSplit must be an {@link ABaseInputSplit}
     * @param job        job configuration used to resolve the filesystem
     * @throws IOException if the split is not an {@link ABaseInputSplit}
     *                     or the region cannot be opened
     */
    public void initialize(InputSplit inputSplit, JobConf job)
            throws IOException {

        currentRecord = null;
        regionRecordScanner = null;

        // Fail fast on a foreign split type. Previously this was silently
        // ignored, leaving the scanner null so next() always returned false —
        // an empty job output with no hint of the misconfiguration.
        if (!(inputSplit instanceof ABaseInputSplit)) {
            throw new IOException("Expected split of type "
                    + ABaseInputSplit.class.getName() + " but got "
                    + (inputSplit == null ? "null" : inputSplit.getClass().getName()));
        }

        ABaseInputSplit aBaseInputSplit = (ABaseInputSplit) inputSplit;
        Path regionDir = new Path(aBaseInputSplit.getRegionDir());
        FileSystem fs = regionDir.getFileSystem(job);
        regionRecordScanner = new RegionReader(fs, regionDir).getScanner(true);
    }


    /**
     * Progress is not tracked for region scans (total record count is unknown
     * from this reader's view), so this always reports 0.
     */
    @Override
    public float getProgress() throws IOException {
        return 0;
    }

    /**
     * Advances to the next record, writing its key into {@code key}.
     *
     * @return true if a record was read; false when the scanner is exhausted
     *         or the reader was never initialized
     */
    @Override
    public boolean next(BytesWritable key, HadoopRecord value) throws IOException {
        if (regionRecordScanner == null || !regionRecordScanner.hashNext()) {
            currentRecord = null;
            return false;
        }
        Record record = regionRecordScanner.next();
        currentRecord = new HadoopRecord(record);
        key.set(currentRecord.getKey());
        // NOTE(review): only the key is populated here — the caller-supplied
        // `value` is never filled from `record`, so mappers receive the empty
        // HadoopRecord created by createValue(). HadoopRecord exposes no copy
        // method visible from this file; confirm whether value should be
        // populated and add the appropriate copy call.
        return true;
    }

    /** @return a fresh, empty key instance for the framework to reuse. */
    @Override
    public BytesWritable createKey() {
        return new BytesWritable();
    }

    /** @return a fresh, empty value instance for the framework to reuse. */
    @Override
    public HadoopRecord createValue() {
        return new HadoopRecord();
    }

    /**
     * Byte position is not tracked for region scans; always 0 (consistent
     * with {@link #getProgress()}).
     */
    @Override
    public long getPos() throws IOException {
        return 0;
    }

    /**
     * Releases the underlying region scanner. Previously a no-op, which
     * leaked whatever resources the scanner holds; the scanner is closed
     * defensively since RecordScanner's own close contract is not visible
     * from this file.
     */
    @Override
    public void close() throws IOException {
        if (regionRecordScanner instanceof Closeable) {
            ((Closeable) regionRecordScanner).close();
        }
        regionRecordScanner = null;
        currentRecord = null;
    }
}
