package bigo;

import bigo.lib.InputMatrix;
import bigo.data.Matrix;
import bigo.data.MatrixMeta;
import bigo.data.SmartMatrix;
import bigo.data.Vector;
import bigo.lib.BigOMapper;
import bigo.lib.BinaryMatrixInputFormat;
import bigo.lib.BinaryMatrixOutputFormat;
import bigo.lib.CachedMatrix;
import bigo.lib.InputFormat;
import bigo.lib.OutputFormat;
import bigo.lib.OutputMatrix;
import bigo.lib.TextMatrixInputFormat;
import bigo.lib.TextMatrixOutputFormat;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.util.ToolRunner;

/**
 * This Driver is used to solve the Matrix multiplication with one Large matrix
 * read as Mapreduce input and one very small matrix that is fit for memory storage
 * @author Song Liu (sl9885@bristol.ac.uk)
 */
public class LargeASmallBMultiplicationDriver extends BigODriver {

    /** Input format used to read the large matrix A as text records. */
    @InputFormat
    public static Class<?> input = TextMatrixInputFormat.class;
    /** Output format used to write the result matrix C as text records. */
    @OutputFormat
    public static Class<?> output = TextMatrixOutputFormat.class;
    /** The 'large' matrix, streamed through MapReduce one vector at a time. */
    @InputMatrix
    public static MatrixMeta A;
    /** The 'small' matrix, distributed via the cache and held in memory by each mapper. */
    @CachedMatrix(overwrite = true, binary = false)
    public static MatrixMeta B;
    /** The result matrix. */
    @OutputMatrix
    public static MatrixMeta C;

    /** No-arg constructor, required for reflective instantiation by the framework. */
    public LargeASmallBMultiplicationDriver() {
    }

    /**
     * Construct an instance by given parameters.
     * <p>
     * Note: the metadata is stored in static fields, so only one driver
     * configuration per JVM is supported at a time.
     *
     * @param A the 'larger' matrix, read as MapReduce input
     * @param B the 'smaller' matrix, loaded from the distributed cache
     * @param C the result
     */
    public LargeASmallBMultiplicationDriver(MatrixMeta A,
            MatrixMeta B,
            MatrixMeta C) {

        LargeASmallBMultiplicationDriver.A = A;
        LargeASmallBMultiplicationDriver.B = B;
        LargeASmallBMultiplicationDriver.C = C;
    }

    /**
     * Mapper that multiplies each incoming vector of the large matrix A with
     * the in-memory small matrix B. Each output vector is independent, so no
     * reducer logic is involved here.
     */
    @bigo.lib.Mapper
    public static class Mapper extends BigOMapper {

        /** The small matrix B, loaded once per task in {@link #setup}. */
        protected Matrix multiplier;

        /**
         * Multiplies one vector of A with the cached matrix B and emits the
         * result under the same key.
         *
         * @param key     row (or column) index of the incoming vector
         * @param value   wrapped vector of the large matrix A
         * @param context MapReduce context used to emit the product vector
         */
        @Override
        protected void map(IntWritable key,
                Vector.Wrapper value, Context context) throws IOException, InterruptedException {
            // Wrap B so the multiply routines see it in the correct orientation.
            SmartMatrix m = new SmartMatrix(multiplier, B.isTransposed);

            if (A.isTransposed) {
                // if the A is transposed, then calculate by column
                context.write(key, new Vector.Wrapper(((Vector) value.get()).multiplyWithT(m)));
            } else {
                context.write(key, new Vector.Wrapper(((Vector) value.get()).multiplyWith(m)));
            }
        }

        /**
         * Loads the cached small matrix B once per mapper task. The
         * {@code cache.binary} configuration flag selects binary vs. text
         * deserialization (defaults to text).
         */
        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            super.setup(context);
            multiplier =
                    LargeASmallBMultiplicationDriver.B.loadFromCache(
                    context, context.getConfiguration().getBoolean("cache.binary", false));
            System.out.println("loading cached matrix done!");
        }
    }

    /** No extra job configuration is needed for this driver. */
    @Override
    public void init(Job job) {
    }

    /** No pre-run work is needed for this driver. */
    @Override
    public void preRun(Job job) {
    }

    /**
     * Entry point. Expected arguments:
     * {@code <matrixA> <matrixB> <matrixC> <m> <k> <n>}
     * computing C(m x n) = A(m x k) * B(k x n). If the first matrix name
     * starts with a lowercase letter, the first matrix is treated as the
     * cached (in-memory) one instead of the second.
     *
     * @param args command-line arguments as described above
     * @throws Exception if the MapReduce job fails
     */
    public static void main(String[] args) throws Exception {
        // Fail fast with a usage message instead of an
        // ArrayIndexOutOfBoundsException on a short argument list.
        if (args.length < 6) {
            System.err.println(
                    "Usage: LargeASmallBMultiplicationDriver <matrixA> <matrixB> <matrixC> <m> <k> <n>");
            System.exit(1);
        }

        int m = Integer.parseInt(args[3]), k = Integer.parseInt(args[4]),
                n = Integer.parseInt(args[5]);


        //see if we used the first one as the cache matrix
        if (Character.isLowerCase(args[0].charAt(0))) {
            //use the first one as the cache matrix, adjust meta accordingly
            // NOTE(review): the streamed matrix gets dimensions (n, k) here,
            // not (k, n) — presumably intentional for the swapped roles;
            // confirm against the MatrixMeta conventions.
            ToolRunner.run(new Configuration(),
                    new LargeASmallBMultiplicationDriver(
                    new MatrixMeta(args[1], n, k),
                    new MatrixMeta(args[0], m, k),
                    new MatrixMeta(args[2], m, n)),
                    args);

        } else {
            //typical large A small B
            ToolRunner.run(new Configuration(),
                    new LargeASmallBMultiplicationDriver(
                    new MatrixMeta(args[0], m, k),
                    new MatrixMeta(args[1], k, n),
                    new MatrixMeta(args[2], m, n)),
                    args);
        }


    }
}
