package bigo.lib;

import bigo.data.Matrix;
import bigo.data.MatrixMeta;
import bigo.data.Vector;
import java.io.File;
import java.io.IOException;
import java.util.TreeMap;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;

/**
 * Input format for matrices stored as Hadoop {@link SequenceFile}s of
 * (row index, row vector) pairs.
 *
 * @author songliu
 */
public class BinaryMatrixInputFormat extends SequenceFileInputFormat<IntWritable, Vector.Wrapper> {

    /**
     * Registers the matrix described by {@code meta} as an input of {@code job}.
     * Delegates to {@link TextMatrixInputFormat#addInputMatrix} since the input-path
     * bookkeeping is identical for the text and binary formats.
     *
     * @param job  the job to configure
     * @param meta metadata (including the path) of the input matrix
     * @throws IOException if the input path cannot be registered
     */
    public static void addInputMatrix(Job job, MatrixMeta meta) throws IOException {
        TextMatrixInputFormat.addInputMatrix(job, meta);
    }

    /**
     * Loads a matrix from locally cached sequence files found under
     * {@code meta.path}, assembling the rows in ascending key order.
     *
     * @param conf Hadoop configuration used to obtain the local file system
     * @param meta metadata whose {@code path} names a local directory of sequence
     *             files; files whose names start with '_' or '.' are skipped
     * @return the reassembled matrix, with rows sorted by their integer key
     * @throws IOException if the directory cannot be listed or a file cannot be read
     */
    public static Matrix loadCachedMatrix(Configuration conf, MatrixMeta meta) throws IOException {

        File[] parts = new File(meta.path).listFiles();
        if (parts == null) {
            // listFiles() returns null when the path does not exist or is not a
            // directory; fail with a clear message instead of an NPE in the loop.
            throw new IOException("Cannot list matrix directory: " + meta.path);
        }

        // TreeMap keeps rows ordered by their integer row index.
        TreeMap<Integer, Vector> matrixStore = new TreeMap<Integer, Vector>();
        for (File f : parts) {
            if (f.getName().startsWith("_") || f.getName().startsWith(".")) {
                // Skip Hadoop bookkeeping files (_SUCCESS, _logs, .crc, ...).
                continue;
            }
            SequenceFile.Reader r = new SequenceFile.Reader(
                    FileSystem.getLocal(conf), new Path(f.getPath()), conf);
            try {
                IntWritable key = new IntWritable();
                Vector.Wrapper value = new Vector.Wrapper();
                while (r.next(key, value)) {
                    matrixStore.put(key.get(), (Vector) value.get());
                }
            } finally {
                // Close even when next() throws; the original leaked the reader
                // on mid-file read errors.
                r.close();
            }
        }

        Matrix ret = new Matrix();
        for (Vector v : matrixStore.values()) {
            ret.addRowVector(v);
        }
        return ret;
    }
}
