package com.lj.hadoop;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.Writer;
import org.apache.hadoop.io.Text;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;

/**
 * Demo: writes 100 records to a Hadoop {@link SequenceFile} at the path given
 * as the first command-line argument. Keys are descending {@link IntWritable}s
 * (100 down to 1); values cycle through the {@code DATA} lines. The Hadoop
 * {@link Configuration} is obtained from a Spring context bean named
 * {@code hadoopConfiguration}.
 */
public class SequenceFileWriterDemo {

	private static final String[] DATA = { "One,two,buckle my shoe",
			"Three,four,shut the door", "FIve,six,pick up sticks ",
			"Seven,eight,lay them straight", "Nine,ten a big fat hen" };

	/**
	 * Entry point.
	 *
	 * @param args args[0] is the target URI of the SequenceFile to create
	 * @throws IOException if the writer cannot be created or a record cannot
	 *                     be appended
	 */
	public static void main(String[] args) throws IOException {
		// Fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException when no path is supplied.
		if (args.length < 1) {
			System.err.println("Usage: SequenceFileWriterDemo <output-uri>");
			System.exit(1);
		}
		String uri = args[0];
		ApplicationContext ctx = new ClassPathXmlApplicationContext(
				"classpath:spring/applicationContext.xml");
		Configuration conf = ctx.getBean("hadoopConfiguration",
				Configuration.class);
		Path path = new Path(uri);
		IntWritable key = new IntWritable();
		Text value = new Text();
		SequenceFile.Writer writer = null;
		try {
			// Option-based factory; replaces the deprecated
			// createWriter(fs, conf, path, keyClass, valueClass) overload.
			writer = SequenceFile.createWriter(conf, Writer.file(path),
					Writer.keyClass(key.getClass()),
					Writer.valueClass(value.getClass()));

			for (int i = 0; i < 100; i++) {
				key.set(100 - i);
				value.set(DATA[i % DATA.length]);
				// getLength() reports the current file position, i.e. the
				// byte offset at which this record will be written.
				System.out.printf("[%s]\t%s\t%s\n", writer.getLength(), key,
						value);
				// BUG FIX: the original loop only printed and never wrote
				// the record, so the output file ended up empty.
				writer.append(key, value);
			}
		} finally {
			// Null-safe close; flushes buffered records to the file.
			IOUtils.closeStream(writer);
		}

	}
}
