package com.bj58.ecdata.hadoop.fourthinfo;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionCodecFactory;
import org.apache.hadoop.io.compress.CompressionInputStream;

public class Test {

	/**
	 * Reads a compressed HDFS file and prints its contents line by line.
	 *
	 * <p>The compression codec is resolved from the file extension via
	 * {@link CompressionCodecFactory} (e.g. {@code .snappy} maps to
	 * {@code SnappyCodec}). When running with Snappy, the native library must be
	 * on the loader path: set {@code LD_LIBRARY_PATH} to a directory containing
	 * {@code libsnappy.so} (Linux), or {@code PATH} to a directory containing
	 * {@code snappy.dll} (Windows).
	 *
	 * @param file HDFS file URI, e.g.
	 *             {@code hdfs://hadoop-master-node:9000/user/hadoop/wordcount/output/part-r-00000.snappy}
	 * @throws Exception if the file cannot be opened or read
	 */
	public void testReadOutput_Snappy2(String file) throws Exception {
		Configuration conf = new Configuration();
//		conf.set("fs.default.name", "hdfs://hadoop-master-node:9000");

		// Configure compression BEFORE handing conf to FileSystem/codec lookup.
		// (These map/reduce output settings do not affect this read path, but are
		// kept for parity with the job configuration they were copied from.)
		// Compress Map output
		conf.set("mapred.compress.map.output", "true");
		conf.set("mapred.map.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");

		// Compress MapReduce output
		conf.set("mapred.output.compress", "true");
		// Fixed key: the codec property is "mapred.output.compression.codec",
		// not "mapred.output.compression".
		conf.set("mapred.output.compression.codec", "org.apache.hadoop.io.compress.SnappyCodec");

		FileSystem fs = FileSystem.get(conf);
		Path path = new Path(file);

		CompressionCodecFactory factory = new CompressionCodecFactory(conf);
		CompressionCodec codec = factory.getCodec(path);
		if (codec == null) {
			System.out.println("Cannot find codec for file " + file);
			return;
		}

		// try-with-resources guarantees both streams are closed even when the
		// reader constructor or a read throws. (The original finally block
		// closed only the reader and NPE'd if its construction had failed,
		// leaking the underlying CompressionInputStream.)
		try (CompressionInputStream in = codec.createInputStream(fs.open(path));
				BufferedReader br = new BufferedReader(
						new InputStreamReader(in, StandardCharsets.UTF_8))) {
			String line;
			while ((line = br.readLine()) != null) {
				System.out.println(line);
			}
		}
	}

	/**
	 * Entry point stub; invoke {@link #testReadOutput_Snappy2(String)} with an
	 * HDFS file URI to exercise the reader.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		// Intentionally empty: call testReadOutput_Snappy2 with a real HDFS URI.
	}

}
