package org.joy.hpc.analyzer.examples.sogou;

import java.io.IOException;
import java.text.DecimalFormat;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.RecordReader;

/**
 * Converts the Sogou corpus into the key/value pairs recognized by Hadoop:
 * the key is the document id and the value is the parsed document.
 * 
 * @author Jeremy Chow(coderplay@gmail.com)
 */
public class SogouRecordReader implements
		RecordReader<Text, SogouDocumentInput> {

	private static final Log LOG = LogFactory.getLog(SogouRecordReader.class
			.getName());

	/** Bytes consumed between two throughput log messages (1 MiB). */
	private static final long SPEED_LOG_INTERVAL = 1024 * 1024;

	private CompressionCodecFactory compressionCodecs = null;
	private long start; // first byte of this split within the file
	private long end;   // one past the last byte of this split
	private long pos;   // current absolute position within the file

	// Throughput bookkeeping. NOTE: these were previously static, which let
	// concurrent readers in the same JVM corrupt each other's measurements
	// and produced bogus speeds whenever a new split began at a different
	// offset; per-instance state fixes both problems.
	private long lastCheckPoint = System.currentTimeMillis();
	private long lastCheckPos;

	// Reused formatter for the throughput log line. DecimalFormat is not
	// thread-safe, but next() is synchronized, so per-instance reuse is safe.
	private final DecimalFormat speedFormat = new DecimalFormat(".00");

	private SogouCorpusReader in;

	/**
	 * Opens the split's file and positions the reader at the start of the
	 * split. When the file is compressed, the whole file is consumed by this
	 * reader (compressed input is not splittable), so {@code end} is set to
	 * {@link Long#MAX_VALUE}.
	 *
	 * @param job   the job configuration
	 * @param split the file split to read
	 * @throws IOException if the file cannot be opened or seeked
	 */
	public SogouRecordReader(Configuration job, FileSplit split)
			throws IOException {
		start = split.getStart();
		end = start + split.getLength();
		final Path file = split.getPath();
		compressionCodecs = new CompressionCodecFactory(job);
		final CompressionCodec codec = compressionCodecs.getCodec(file);

		// open the file and seek to the start of the split
		FileSystem fs = file.getFileSystem(job);
		FSDataInputStream fileIn = fs.open(file);
		try {
			if (codec != null) {
				in = new SogouCorpusReader(codec.createInputStream(fileIn), job);
				end = Long.MAX_VALUE;
			} else {
				if (start != 0)
					fileIn.seek(start);
				in = new SogouCorpusReader(fileIn, job);
			}
		} catch (IOException e) {
			// Don't leak the underlying stream if reader setup fails.
			fileIn.close();
			throw e;
		}
		this.pos = start;
		this.lastCheckPos = start; // baseline speed measurement at split start
	}

	public Text createKey() {
		return new Text();
	}

	public SogouDocumentInput createValue() {
		return new SogouDocumentInput();
	}

	public long getPos() throws IOException {
		return pos;
	}

	/**
	 * Get the progress within the split
	 */
	public float getProgress() throws IOException {
		if (start == end) {
			return 0.0f;
		} else {
			return Math.min(1.0f, (pos - start) / (float) (end - start));
		}
	}

	/**
	 * Close the input stream
	 */
	public void close() throws IOException {
		if (in != null) {
			in.close();
		}
	}

	/**
	 * Reads the next document into {@code value} and its id into {@code key}.
	 *
	 * @return true if a document was read, false at the end of the split or
	 *         the underlying stream
	 * @throws IOException if reading the corpus fails
	 */
	public synchronized boolean next(Text key, SogouDocumentInput value)
			throws IOException {
		maybeLogSpeed();

		if (pos < end) {
			long docPos = in.nextDoc(value);
			if (docPos < 0)
				return false;
			key.set(value.getDocumentId());
			// nextDoc reports an offset relative to the split start.
			pos = start + docPos;
			return true;
		}
		return false;
	}

	/** Logs read throughput roughly once per megabyte consumed. */
	private void maybeLogSpeed() {
		if (pos - lastCheckPos > SPEED_LOG_INTERVAL) {
			long elapsedTime = System.currentTimeMillis() - lastCheckPoint;
			// Guard against a zero-millisecond interval (Infinity speed).
			if (elapsedTime > 0) {
				double speed = (pos - lastCheckPos) / 1024.0
						/ (elapsedTime / 1000.0);
				LOG.info("speed:" + speedFormat.format(speed) + "k/s");
			}
			lastCheckPoint = System.currentTimeMillis();
			lastCheckPos = pos;
		}
	}

}
