package org.hadoop;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.zip.GZIPInputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.util.ReflectionUtils;

/**
 * Utility for reading files from HDFS: recursively lists directory trees and
 * opens files as {@link InputStream}s, transparently unwrapping gzip data and
 * rendering SequenceFiles as tab-separated text records.
 *
 * NOTE(review): the static {@code conf}/{@code fs} fields make this class
 * non-thread-safe; callers must serialize access (unchanged from original).
 */
public class HadoopInputStream {
    static Configuration conf;
    static FileSystem    fs = null;

    /**
     * Recursively collects every regular file under {@code dir}, mapping its
     * fully-qualified path string to its length in bytes.
     *
     * @param dir   directory to walk; prefixed with {@code fs.default.name}
     *              when it does not already start with {@code "hdfs:"}
     * @param paths accumulator map; created lazily if {@code null}
     * @return the populated {@code paths} map
     * @throws RuntimeException wrapping any listing failure (cause preserved)
     */
    public static HashMap<String, Long> getPaths(String dir, HashMap<String, Long> paths)
            throws Exception {
        if (conf == null) {
            conf = new Configuration();
        }
        if (paths == null) {
            paths = new HashMap<String, Long>();
        }
        // FileSystem.get() returns a JVM-wide cached instance. The original
        // code closed it in a finally block inside this *recursive* method,
        // so the inner call closed the FileSystem while the outer call was
        // still iterating with it (and broke later getStream() calls too).
        // Fix: recurse via a private worker and never close the cached FS.
        fs = FileSystem.get(conf);
        collectPaths(dir, paths);
        return paths;
    }

    /** Recursive worker for {@link #getPaths}; assumes conf/fs are set. */
    private static void collectPaths(String dir, HashMap<String, Long> paths) {
        try {
            if (!dir.startsWith("hdfs:"))
                dir = conf.get("fs.default.name") + dir;
            FileStatus[] fileList = fs.listStatus(new Path(dir));
            if (fileList == null)
                return; // listStatus may yield null for a missing path
            for (FileStatus status : fileList) {
                if (status.isDir())
                    collectPaths(status.getPath().toString(), paths);
                else
                    paths.put(status.getPath().toString(), status.getLen());
            }
        } catch (Exception e) {
            // Preserve the underlying failure as the cause.
            throw new RuntimeException("getPaths", e);
        }
    }

    /**
     * Opens {@code spath} and returns a stream positioned at byte 0:
     * a {@link GZIPInputStream} when the file starts with the gzip magic,
     * a {@link TextRecordInputStream} when it is a SequenceFile ("SEQ"),
     * otherwise the raw {@link FSDataInputStream}.
     *
     * @param spath path of the file to open
     * @return decoded or raw input stream (caller must close it)
     * @throws IOException if the file cannot be opened or probed
     */
    public static InputStream getStream(String spath) throws IOException {
        if (conf == null) {
            conf = new Configuration();
        }

        fs = FileSystem.get(conf);
        Path p = new Path(spath);
        FSDataInputStream i = fs.open(p);
        try {
            switch (i.readShort()) { // first two bytes, big-endian
                case 0x1f8b: // gzip magic, RFC 1952
                    i.seek(0);
                    return new GZIPInputStream(i);
                case 0x5345: // 'S' 'E' — candidate SequenceFile header "SEQ"
                    if (i.readByte() == 'Q') {
                        i.close();
                        return new TextRecordInputStream(fs.getFileStatus(p));
                    }
                    break;
            }
        } catch (EOFException e) {
            // File shorter than the magic prefix: treat it as raw data.
        } catch (IOException e) {
            i.close(); // don't leak the open stream on a probe failure
            throw e;
        }
        i.seek(0);
        return i;
    }

    /**
     * Presents a SequenceFile as a text stream: one line per record, formatted
     * as {@code key.toString() + '\t' + value.toString() + '\n'} (UTF-8).
     */
    private static class TextRecordInputStream extends InputStream {
        SequenceFile.Reader r;
        @SuppressWarnings("rawtypes")
        WritableComparable  key;
        Writable            val;

        DataInputBuffer     inbuf;  // bytes of the current rendered record
        DataOutputBuffer    outbuf; // scratch buffer for rendering a record

        public TextRecordInputStream(FileStatus f) throws IOException {
            r = new SequenceFile.Reader(fs, f.getPath(), conf);
            key = ReflectionUtils.newInstance(r.getKeyClass().asSubclass(WritableComparable.class),
                    conf);
            val = ReflectionUtils.newInstance(r.getValueClass().asSubclass(Writable.class), conf);
            inbuf = new DataInputBuffer();
            outbuf = new DataOutputBuffer();
        }

        @Override
        public int read() throws IOException {
            int ret;
            // Refill from the next record when the current line is exhausted.
            if (null == inbuf || -1 == (ret = inbuf.read())) {
                if (!r.next(key, val)) {
                    return -1; // no more records
                }
                // UTF-8 explicitly: the original used the platform default
                // charset, making output machine-dependent.
                byte[] tmp = key.toString().getBytes(StandardCharsets.UTF_8);
                outbuf.write(tmp, 0, tmp.length);
                outbuf.write('\t');
                tmp = val.toString().getBytes(StandardCharsets.UTF_8);
                outbuf.write(tmp, 0, tmp.length);
                outbuf.write('\n');
                inbuf.reset(outbuf.getData(), outbuf.getLength());
                outbuf.reset();
                ret = inbuf.read();
            }
            return ret;
        }

        @Override
        public void close() throws IOException {
            // Original never closed the reader, leaking its file handle.
            r.close();
        }
    }

}
