package bigo.data;

import bigo.lib.BinaryMatrixInputFormat;
import bigo.lib.TextMatrixInputFormat;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.net.URI;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;

/**
 *
 * @author songliu
 */
/**
 * Metadata describing a matrix stored on HDFS: its path, dimensions, and
 * whether it is transposed. Also provides helpers to ship the matrix through
 * the Hadoop distributed cache ({@link #addCacheMatrix}) and to load it back
 * on a task node ({@link #loadFromCache}).
 *
 * <p>Serialized form (see {@link #toString()} / {@link #MatrixMeta(String)}):
 * tab-separated {@code path, numRows, numColumns, isTransposed}.
 */
public class MatrixMeta {

    private static final Logger LOG = Logger.getLogger(MatrixMeta.class.getName());

    /** Field delimiter of the serialized form. */
    private static final String FIELD_SEPARATOR = "\t";

    public String path;
    public int numRows;
    public int numColumns;
    public boolean isTransposed;

    public MatrixMeta() {
    }

    /**
     * Creates metadata for a non-transposed matrix.
     *
     * @param path       HDFS-relative path of the matrix data
     * @param numRows    number of rows
     * @param numColumns number of columns
     */
    public MatrixMeta(String path, int numRows, int numColumns) {
        this(path, numRows, numColumns, false);
    }

    /**
     * Creates metadata for a matrix.
     *
     * @param path         HDFS-relative path of the matrix data
     * @param numRows      number of rows
     * @param numColumns   number of columns
     * @param isTransposed whether the stored data is the transpose
     */
    public MatrixMeta(String path, int numRows, int numColumns, boolean isTransposed) {
        this.path = path;
        this.numRows = numRows;
        this.numColumns = numColumns;
        this.isTransposed = isTransposed;
    }

    /**
     * Parses the tab-separated form produced by {@link #toString()}.
     * The trailing {@code isTransposed} field is optional and defaults
     * to {@code false} when absent.
     *
     * @param initString tab-separated {@code path, numRows, numColumns[, isTransposed]}
     * @throws IllegalArgumentException if fewer than three fields are present
     * @throws NumberFormatException    if a dimension field is not an integer
     */
    public MatrixMeta(String initString) {
        // Split once instead of re-splitting per field.
        String[] fields = initString.split(FIELD_SEPARATOR);
        if (fields.length < 3) {
            throw new IllegalArgumentException(
                    "Expected at least 3 tab-separated fields, got: " + initString);
        }
        this.path = fields[0];
        this.numRows = Integer.parseInt(fields[1]);
        this.numColumns = Integer.parseInt(fields[2]);
        this.isTransposed = fields.length > 3 && Boolean.parseBoolean(fields[3]);
    }

    /**
     * Compresses the local matrix directory, uploads the archive to HDFS, and
     * registers it with the job's distributed cache (symlinked as
     * {@code path + ".tgz"} on task nodes).
     *
     * <p>Best-effort: failures are logged, not rethrown, preserving the
     * original contract of this method.
     *
     * @param job       job whose distributed cache receives the archive
     * @param overwrite if {@code true}, any existing remote archive is removed
     *                  and replaced; if {@code false} and the remote archive
     *                  already exists, this method is a no-op
     */
    public void addCacheMatrix(Job job, boolean overwrite) {
        try {
            String localFile = "./tmp/" + path + ".tgz";
            String remoteFile = path + ".tgz";
            // Ensure the local staging directory exists (mkdir is a no-op if present).
            new File("tmp").mkdir();

            // Skip the upload entirely when the archive is already on HDFS.
            FileContext fs = FileContext.getFileContext(job.getConfiguration());
            if (!overwrite && fs.util().exists(new Path(remoteFile))) {
                return;
            }

            // Compress the matrix directory into a local tarball.
            ProcessBuilder pb = new ProcessBuilder("tar", "-zc", "-f",
                    localFile, path);
            int tarExit = pb.start().waitFor();
            if (tarExit != 0) {
                LOG.log(Level.WARNING, "tar exited with code {0} while compressing {1}",
                        new Object[]{tarExit, path});
            }

            // Upload via FsShell, removing a stale copy first when overwriting.
            FsShell shell = new FsShell(job.getConfiguration());
            if (overwrite) {
                shell.run(new String[]{"-rmr", remoteFile});
            }
            shell.run(new String[]{"-put", localFile, remoteFile});

            // Register with the distributed cache; the "#" fragment makes the
            // archive appear under its bare name in each task's working dir.
            job.addCacheFile(new URI(new Path(fs.getWorkingDirectory(),
                    remoteFile).toUri().toString() + "#" + remoteFile));

        } catch (InterruptedException ex) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            LOG.log(Level.SEVERE, "Interrupted while caching matrix " + path, ex);
        } catch (Exception ex) {
            LOG.log(Level.SEVERE, "Failed to add matrix " + path + " to distributed cache", ex);
        }
    }

    /**
     * Extracts the distributed-cache archive in the task's working directory
     * and loads the matrix from it.
     *
     * @param context task context supplying the configuration
     * @param binary  if {@code true}, load via {@link BinaryMatrixInputFormat},
     *                otherwise via {@link TextMatrixInputFormat}
     * @return the loaded matrix, or {@code null} if extraction or loading failed
     */
    public Matrix loadFromCache(JobContext context, boolean binary) {
        try {
            // Decompress the archive symlinked into the task working directory.
            ProcessBuilder pb = new ProcessBuilder("tar", "xzvf",
                    this.path + ".tgz", this.path);
            int tarExit = pb.start().waitFor();
            if (tarExit != 0) {
                LOG.log(Level.WARNING, "tar exited with code {0} while extracting {1}",
                        new Object[]{tarExit, this.path});
            }
            if (!binary) {
                return TextMatrixInputFormat.loadMatrix(
                        context.getConfiguration(), this);
            } else {
                return BinaryMatrixInputFormat.loadCachedMatrix(
                        context.getConfiguration(), this);
            }
        } catch (InterruptedException ex) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            LOG.log(Level.SEVERE, "Interrupted while loading matrix " + this.path, ex);
        } catch (Exception ex) {
            LOG.log(Level.SEVERE, null, ex);
        }
        return null;
    }

    /** Serializes as tab-separated {@code path, numRows, numColumns, isTransposed}. */
    @Override
    public String toString() {
        return path + "\t" + numRows + "\t" + numColumns + "\t" + isTransposed;
    }
}
