package com.six.compress.seqfile;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;

import java.io.IOException;
import java.util.Random;

/**
 * Demo that writes a BLOCK-compressed Hadoop SequenceFile of
 * ({@link IntWritable}, {@link FloatWritable}) records to a hard-coded
 * local path, printing each record's current byte offset in the file.
 */
public class THT_testSequenceFile2 {

	// Number of records to append (80000 * 120 = 9,600,000).
	private static final int RECORD_COUNT = 80000 * 120;

	// Fixed seed so the generated float sequence is reproducible across runs.
	private static final long RANDOM_SEED = 100000000;

	/**
	 * Creates the SequenceFile writer and appends {@code RECORD_COUNT}
	 * sequential int keys (starting at 1) paired with pseudo-random floats.
	 *
	 * @param args unused
	 * @throws IOException if the file cannot be created, written, or closed
	 */
	public static void main(String[] args) throws IOException {
		// NOTE(review): destination path is hard-coded; resolved against the
		// default filesystem configured in core-site.xml (likely local FS here).
		String uri = "/home/hdfs/v.seq";
		Configuration conf = new Configuration();
		Path path = new Path(uri);

		// Writable instances are reused across iterations — append() copies
		// their serialized bytes, so mutation per loop pass is safe.
		IntWritable key = new IntWritable();
		FloatWritable value = new FloatWritable();

		// Writer options: output file, key class, and value class.
		SequenceFile.Writer.Option optPath = SequenceFile.Writer.file(path);
		SequenceFile.Writer.Option optKey = SequenceFile.Writer.keyClass(IntWritable.class);
		SequenceFile.Writer.Option optVal = SequenceFile.Writer.valueClass(FloatWritable.class);

		Random random = new Random(RANDOM_SEED);

		// try-with-resources closes the writer and, unlike
		// IOUtils.closeStream, propagates an IOException thrown on close —
		// important for a writer, where a failed close can mean lost data.
		try (SequenceFile.Writer writer = SequenceFile.createWriter(conf, optPath, optKey, optVal,
				Writer.compression(SequenceFile.CompressionType.BLOCK))) {
			for (int i = 0; i < RECORD_COUNT; i++) {
				key.set(1 + i);
				value.set(random.nextFloat());
				// getLength() reports the current file position before the append.
				System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key, value);
				writer.append(key, value);
			}
		}
	}
}
