package com.myorg.util;

import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.PushbackInputStream;
import java.nio.charset.StandardCharsets;

import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.util.ReflectionUtils;

public class CompressionHelper {
	// GZIP member header magic: 0x1F 0x8B, followed by the DEFLATE
	// compression-method byte 0x08 (RFC 1952).
	private final static byte first = (byte) 0x1F;
	private final static byte second = (byte) 0x8B;
	private final static byte third = (byte) 0x08;

	/**
	 * Compresses {@code msgBody} and {@code delimiter} (each with a trailing
	 * newline appended) as two consecutive gzip members written onto
	 * {@code targetOutputStream}. The target stream is flushed but left open so
	 * the caller can append further members.
	 *
	 * @param msgBody            payload text to compress (UTF-8)
	 * @param delimiter          delimiter text compressed as a second gzip member
	 * @param targetOutputStream destination stream; never closed by this method
	 * @return the position of {@code targetOutputStream} after both writes
	 * @throws IOException if compression or writing fails
	 */
	public static long compressAndSave(String msgBody, String delimiter, FSDataOutputStream targetOutputStream)
			throws IOException {
		HdfsHelper helper = new HdfsHelper();
		Configuration conf = helper.getConfig();
		CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(
				org.apache.hadoop.io.compress.GzipCodec.class, conf);
		Compressor compressor = null;
		try {
			compressor = CodecPool.getCompressor(codec);
			writeGzipMember(codec, compressor, msgBody + "\n", targetOutputStream);
			writeGzipMember(codec, compressor, delimiter + "\n", targetOutputStream);
		} finally {
			// Guarded: getCompressor may have thrown before assignment.
			if (compressor != null) {
				CodecPool.returnCompressor(compressor);
			}
		}
		return targetOutputStream.getPos();
	}

	/**
	 * Writes {@code text} (UTF-8) as one gzip member to {@code target}.
	 * Calls {@code finish()} rather than {@code close()} on the compression
	 * stream so the shared target stream stays open for subsequent members.
	 */
	private static void writeGzipMember(CompressionCodec codec, Compressor compressor, String text,
			FSDataOutputStream target) throws IOException {
		InputStream is = null;
		BufferedInputStream bis = null;
		try {
			// Explicit UTF-8: the original used the platform default charset.
			is = new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
			bis = new BufferedInputStream(is);
			CompressionOutputStream out = codec.createOutputStream(target, compressor);
			IOUtils.copyBytes(bis, out, 4096, false);
			target.flush();
			out.finish();
		} finally {
			// Null-safe close even if an exception occurred before assignment.
			IOUtils.closeStream(bis);
			IOUtils.closeStream(is);
		}
	}

	/**
	 * Gzip-decompresses {@code is} and concatenates all decoded lines
	 * (line terminators are dropped by {@code readLine} and not re-added).
	 *
	 * @param is gzip-compressed input; wrapped but not explicitly closed here
	 * @return the concatenated decompressed text
	 * @throws IOException if decompression fails
	 */
	public static StringBuilder decompress(InputStream is) throws IOException {
		HdfsHelper helper = new HdfsHelper();
		StringBuilder builder = new StringBuilder();
		Configuration conf = helper.getConfig();
		CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(
				org.apache.hadoop.io.compress.GzipCodec.class, conf);
		Decompressor decompressor = null;
		BufferedInputStream bis = null;
		BufferedReader rd = null;
		try {
			decompressor = CodecPool.getDecompressor(decompressorCodecOf(codec));
			bis = new BufferedInputStream(is);
			CompressionInputStream cis = codec.createInputStream(bis, decompressor);
			// Explicit UTF-8: the original used the platform default charset.
			rd = new BufferedReader(new InputStreamReader(cis, StandardCharsets.UTF_8));
			String line;
			while ((line = rd.readLine()) != null) {
				builder.append(line);
			}
			return builder;
		} finally {
			// All guarded/null-safe: any of these may be unassigned if an
			// earlier statement in the try block threw.
			if (decompressor != null) {
				CodecPool.returnDecompressor(decompressor);
			}
			IOUtils.closeStream(rd);
			IOUtils.closeStream(bis);
		}
	}

	/** Trivial indirection kept for readability at the call site. */
	private static CompressionCodec decompressorCodecOf(CompressionCodec codec) {
		return codec;
	}

	/**
	 * Returns the index of the first complete gzip magic sequence
	 * (0x1F 0x8B 0x08) within the first {@code length} bytes of
	 * {@code arrByte}, or -1 if none. Iterative replacement for the original
	 * recursive version, which allocated a subarray copy per candidate.
	 * Only {@code length} bytes are scanned so stale data left in the buffer
	 * from a previous, larger read can no longer produce false matches.
	 */
	private static int search(byte[] arrByte, int length) {
		for (int i = 0; i + 2 < length; i++) {
			if (arrByte[i] == first && arrByte[i + 1] == second && arrByte[i + 2] == third) {
				return i;
			}
		}
		return -1;
	}

	/**
	 * Peeks at the next 3 bytes of {@code in} and reports whether they are the
	 * gzip magic sequence. All bytes read are pushed back, so the stream
	 * position is unchanged.
	 *
	 * @param in stream whose pushback buffer must hold at least 3 bytes
	 * @return true iff the next 3 bytes are 0x1F 0x8B 0x08
	 * @throws IOException on read/unread failure
	 */
	public static boolean isGZ3byte(PushbackInputStream in) throws IOException {
		byte[] arrByte = new byte[3];
		byte[] token = new byte[] { first, second, third };
		// NOTE(review): a single read() may legally return fewer than 3 bytes
		// even before EOF, which reports false — preserved from the original.
		int numBytes = in.read(arrByte);
		boolean ret = numBytes == 3 && ArrayUtils.isEquals(arrByte, token);
		// Guard: read() returns -1 at EOF; the original unconditionally called
		// unread(arrByte, 0, -1), which throws IndexOutOfBoundsException.
		if (numBytes > 0) {
			in.unread(arrByte, 0, numBytes);
		}
		return ret;
	}

	/**
	 * Consumes bytes from {@code in} up to the next gzip magic sequence; the
	 * magic itself (and everything after it in the read buffer) is pushed back.
	 * A magic sequence straddling two read buffers is not detected (pre-existing
	 * limitation, preserved).
	 *
	 * @return the number of bytes counted as skipped
	 * @throws IOException on read/unread failure
	 */
	public static long readUptoNextGZ3byte(PushbackInputStream in) throws IOException {
		long numBytesRead = 0;
		byte[] arrByte = new byte[ConstantsToBeExternalized.BUFFER_SIZE_FOR_DECOMPRESSION];
		int numBytes;
		while ((numBytes = in.read(arrByte)) > 0) {
			// Bound the scan to the bytes read this pass (see search()).
			int ind = search(arrByte, numBytes);
			if (ind != -1) {
				// NOTE(review): only `ind` bytes are net-consumed (the byte at
				// `ind` is pushed back) yet `ind + 1` is counted — preserved
				// as-is; verify this off-by-one against callers.
				numBytesRead += ind + 1;
				// Equivalent to unreading subarray(arrByte, ind, numBytes)
				// without the intermediate copy.
				in.unread(arrByte, ind, numBytes - ind);
				break;
			}
			numBytesRead += numBytes;
		}
		return numBytesRead;
	}

	/**
	 * Reads one gzip member from {@code in} — the 3-byte header plus everything
	 * up to (but excluding) the next gzip magic sequence — and returns it as an
	 * in-memory stream. Bytes belonging to the following member that were read
	 * into the buffer are pushed back onto {@code in}.
	 *
	 * @return holder with the member's bytes and the byte count recorded
	 * @throws IOException on read/unread failure
	 */
	public static StreamFeedToDecompressor parse(PushbackInputStream in) throws IOException {
		StreamFeedToDecompressor ret = new StreamFeedToDecompressor();
		long numBytesRead = 0;
		final ByteArrayOutputStream out = new ByteArrayOutputStream();
		byte[] arrByte = new byte[ConstantsToBeExternalized.BUFFER_SIZE_FOR_DECOMPRESSION];
		byte[] header = new byte[3];
		// Fix: the original ignored the return value of read() and always
		// emitted 3 bytes — at EOF or on a short read it wrote the stale magic
		// bytes the buffer had been pre-initialized with.
		int headerBytes = in.read(header);
		if (headerBytes > 0) {
			out.write(header, 0, headerBytes);
			numBytesRead += headerBytes;
		}
		int numBytes;
		while ((numBytes = in.read(arrByte)) > 0) {
			// Bound the scan to the bytes read this pass (see search()).
			int ind = search(arrByte, numBytes);
			if (ind != -1) {
				// NOTE(review): `ind` bytes are emitted but `ind + 1` counted —
				// preserved as-is; verify against callers.
				numBytesRead += ind + 1;
				in.unread(arrByte, ind, numBytes - ind);
				out.write(arrByte, 0, ind);
				break;
			}
			numBytesRead += numBytes;
			out.write(arrByte, 0, numBytes);
		}
		// ByteArrayOutputStream flush()/close() are no-ops; omitted.
		ret.setIn(new ByteArrayInputStream(out.toByteArray()));
		ret.setNumByteRead(numBytesRead);
		return ret;
	}

}
