/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package bigo.lib;

import bigo.data.ArrayVector;
import bigo.data.MatrixMeta;
import bigo.data.Vector;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

/**
 * Output format that writes {@code (IntWritable row index, Vector.Wrapper row)}
 * pairs as delimited text lines — dense vectors as comma-separated values,
 * other vectors as {@code index:value} pairs — with optional output compression.
 *
 * @author songliu
 */
public class TextMatrixOutputFormat extends TextOutputFormat<IntWritable, Vector.Wrapper> {

    /**
     * Record writer that renders each (row index, vector) pair as one text line,
     * delegating key/value separation and line termination to a
     * {@link TextOutputFormat.LineRecordWriter}.
     */
    protected static class Writer extends RecordWriter<IntWritable, Vector.Wrapper> {

        /** Delegate that writes "key<separator>value\n" to the stream. */
        private final LineRecordWriter<Text, Text> w;

        /**
         * @param dos       destination stream (may already be compression-wrapped)
         * @param separator text placed between the row key and the row values
         */
        public Writer(DataOutputStream dos, String separator) {
            w = new LineRecordWriter<Text, Text>(dos, separator);
        }

        /** Uses the delegate's default key/value separator. */
        public Writer(DataOutputStream dos) {
            w = new LineRecordWriter<Text, Text>(dos);
        }

        /**
         * Writes one matrix row. Dense rows ({@code ArrayVector}) are emitted as
         * values only; all other vectors are emitted sparsely as
         * {@code index:value} pairs. Every entry carries a trailing comma,
         * preserving the established on-disk format.
         *
         * @param key   row index
         * @param value wrapped row vector
         * @throws IOException if the underlying stream fails
         */
        @Override
        public synchronized void write(IntWritable key, Vector.Wrapper value)
                throws IOException {
            // Method-local buffer: StringBuilder, since no cross-thread
            // sharing occurs (the method itself is synchronized).
            StringBuilder sb = new StringBuilder();

            Vector v = (Vector) value.get();
            if (v instanceof ArrayVector) {
                // Dense row: positions are implicit, write values only.
                for (Integer i : v.keySet()) {
                    sb.append(v.get(i)).append(',');
                }
            } else {
                // Sparse row: write explicit index:value pairs.
                for (Integer i : v.keySet()) {
                    sb.append(i).append(':').append(v.get(i)).append(',');
                }
            }

            w.write(new Text(key.toString()), new Text(sb.toString()));
        }

        @Override
        public void close(TaskAttemptContext context) throws IOException,
                InterruptedException {
            w.close(context);
        }
    }

    /**
     * Creates a {@link Writer} over the task's default work file, honoring the
     * job's output-compression settings (gzip by default when compression is
     * enabled). The key/value separator is a single space.
     *
     * @param job the task attempt context
     * @return a record writer for this task's output file
     * @throws IOException if the output file cannot be created
     */
    @Override
    public RecordWriter<IntWritable, Vector.Wrapper> getRecordWriter(
            TaskAttemptContext job) throws IOException, InterruptedException {

        Configuration conf = job.getConfiguration();
        boolean isCompressed = getCompressOutput(job);

        CompressionCodec codec = null;
        String extension = "";
        if (isCompressed) {
            Class<? extends CompressionCodec> codecClass = getOutputCompressorClass(
                    job, GzipCodec.class);
            // ReflectionUtils.newInstance is generic — no cast needed.
            codec = ReflectionUtils.newInstance(codecClass, conf);
            extension = codec.getDefaultExtension();
        }
        Path file = getDefaultWorkFile(job, extension);
        FileSystem fs = file.getFileSystem(conf);
        // overwrite = false: fail rather than clobber an existing output file.
        FSDataOutputStream fileOut = fs.create(file, false);
        DataOutputStream out = isCompressed
                ? new DataOutputStream(codec.createOutputStream(fileOut))
                : fileOut;
        return new Writer(out, " ");
    }

    /**
     * Records the matrix metadata in the job configuration and directs the
     * job's output to the matrix path.
     * NOTE(review): the configuration key is the matrix path itself
     * ({@code meta.path}) — unusual but preserved; confirm readers look the
     * metadata up under the same key.
     *
     * @param job  the job to configure
     * @param meta metadata describing the output matrix (including its path)
     */
    public static void setOutputMatrix(Job job, MatrixMeta meta) {
        job.getConfiguration().set(meta.path, meta.toString());
        setOutputPath(job, new Path(meta.path));
    }
}
