package com.nightsoul.hadoop1.test;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.MapFileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

public class MapFileReadTest2 {

	/**
	 * Reads every key/value pair from the MapFiles found under the directory
	 * given as the first command-line argument and prints each pair to stdout
	 * as {@code key<TAB>value}.
	 *
	 * <p>Note: when pointing this at a MapReduce job's output directory, the
	 * {@code _SUCCESS} file and {@code logs} directory must be removed first,
	 * because {@code MapFileOutputFormat.getReaders} treats every child entry
	 * as a MapFile and fails otherwise.
	 *
	 * @param args args[0] is the URI of the directory containing the MapFiles
	 * @throws Exception on any I/O or reflection failure
	 */
	public static void main(String[] args) throws Exception {
		// Fail fast with a usage hint instead of an ArrayIndexOutOfBoundsException.
		if (args.length < 1) {
			System.err.println("Usage: MapFileReadTest2 <mapfile-dir-uri>");
			System.exit(1);
		}
		String uri = args[0];
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(URI.create(uri), conf);
		Path dir = new Path(URI.create(uri));

		MapFile.Reader[] readers = MapFileOutputFormat.getReaders(fs, dir, conf);
		try {
			for (MapFile.Reader reader : readers) {
				// Instantiate key/value holders of whatever types this MapFile
				// declares instead of hard-casting the key to IntWritable, so
				// MapFiles with other key types work too. Every MapFile key
				// implements WritableComparable.
				WritableComparable<?> key =
						(WritableComparable<?>) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
				Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);

				while (reader.next(key, value)) {
					System.out.printf("%s\t%s\n", key, value);
				}
			}
		} finally {
			// Close ALL readers even when iterating an earlier one threw; the
			// original per-reader finally leaked every reader after a failure.
			for (MapFile.Reader reader : readers) {
				IOUtils.closeStream(reader);
			}
		}
	}

}
