package org.shj.hadoop.fs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.zookeeper.common.IOUtils;

/**
 * Demonstrates writing and reading a Hadoop {@code SequenceFile}: a splittable,
 * optionally compressed binary key/value container format.
 * @author shj
 */
public class SequenceFileDemo {

	/** Sample lines cycled through as values when writing the file. */
	public static final String[] DATA = {
			"One, two, buckle my shoe",
			"Three, four, shut the door",
			"Five ,six, pick up sticks",
			"Seven, eight, lay them straight",
			"nine ,ten, abig fat hen"
	};

	/** HDFS path of the demo SequenceFile (shared by writer and reader). */
	private static final String FILE_PATH = "/user/shj/test.txt";

	/** NameNode URI used as {@code fs.defaultFS} for both operations. */
	private static final String DEFAULT_FS = "hdfs://192.168.56.101:9000";

	/**
	 * Writes 100 records to {@link #FILE_PATH} as a RECORD-compressed SequenceFile.
	 * Keys count down from 100; values cycle through {@link #DATA}.
	 *
	 * @throws Exception if the HDFS write fails
	 */
	public static void writeFile() throws Exception {
		// Add "-DHADOOP_USER_NAME=shj" to the JVM arguments to avoid:
		// org.apache.hadoop.security.AccessControlException: Permission denied: user=Administrator
		Configuration cfg = new Configuration();
		cfg.set("fs.defaultFS", DEFAULT_FS);

		Path p = new Path(FILE_PATH);
		IntWritable key = new IntWritable();
		Text value = new Text();

		// Writer is Closeable, so try-with-resources replaces the manual
		// finally { IOUtils.closeStream(writer) } and never leaks on failure.
		try (SequenceFile.Writer writer = SequenceFile.createWriter(cfg,
				Writer.file(p),
				Writer.keyClass(IntWritable.class),
				Writer.valueClass(Text.class),
				// RECORD compression is enabled, so viewing the raw file shows binary garbage.
				Writer.compression(CompressionType.RECORD))) {
			for (int i = 0; i < 100; i++) {
				key.set(100 - i);
				value.set(DATA[i % DATA.length]);
				// getLength() is the current byte offset, useful for eyeballing record boundaries.
				System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
				writer.append(key, value);
			}
		}
	}

	/**
	 * Reads back every record from {@link #FILE_PATH}, printing each record's
	 * starting byte position (with a {@code *} when a sync marker was just crossed),
	 * followed by its key and value.
	 *
	 * @throws Exception if the HDFS read fails
	 */
	public static void readFile() throws Exception {
		Configuration cfg = new Configuration();
		cfg.set("fs.defaultFS", DEFAULT_FS);

		// Passing the user here ("shj") avoids the AccessControlException described
		// in writeFile() without needing the -DHADOOP_USER_NAME JVM argument.
		FileSystem fs = FileSystem.get(new URI(FILE_PATH), cfg, "shj");

		Path p = new Path(FILE_PATH);

		// NOTE: the (fs, path, conf) Reader constructor is deprecated, but it is kept
		// because it is the form that uses the FileSystem carrying the "shj" user above.
		// Reader is Closeable, so try-with-resources handles cleanup.
		try (SequenceFile.Reader reader = new SequenceFile.Reader(fs, p, cfg)) {
			// Key/value classes are recorded in the file header; instantiate them reflectively.
			Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), cfg);
			Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), cfg);

			long position = reader.getPosition();
			while (reader.next(key, value)) {
				String syncSeen = reader.syncSeen() ? "*" : "";
				System.out.printf("[%s%s]\t%s\t%s\n", position, syncSeen, key, value);
				position = reader.getPosition(); // offset where the next record starts
			}
		}
	}

	public static void main(String[] args) throws Exception {
//		writeFile();
		readFile();
	}
}
