package cn.mingyuan.hadoop.fileoperation;

import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
/**
 * 
 * Displays a file from a Hadoop filesystem on standard output twice, by using seek
 * @author mingyuan
 *
 */
public class FSDataInputStreamTest {

	/**
	 * Opens an HDFS file and demonstrates {@link FSDataInputStream} positioning:
	 * streams the file to stdout, seeks to offset 6 and streams the remainder,
	 * then re-reads it with {@code read(byte[])} in 10-byte chunks and finally
	 * with {@code readFully}.
	 *
	 * @param args unused
	 * @throws IOException if the filesystem cannot be contacted or the file read
	 */
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		Path path = new Path("hdfs://localhost:9000/data/newfile.txt");
		FileSystem fs = FileSystem.get(path.toUri(), conf);
		// try-with-resources guarantees the stream closes even if a read throws;
		// the original only closed on the success path.
		try (FSDataInputStream in = fs.open(path)) {
			IOUtils.copyBytes(in, System.out, 4096, false);
			in.seek(6); // reposition to absolute byte offset 6
			System.out.println("\npos now:" + in.getPos());
			System.out.println("\n-------\n");
			IOUtils.copyBytes(in, System.out, 4096, false);

			in.seek(0);
			System.out.println("\n---read test----");
			byte[] buffer = new byte[10];
			int bytesRead;
			// Decode only the bytes actually read: a short final read would
			// otherwise print stale bytes left over from the previous iteration.
			while ((bytesRead = in.read(buffer)) != -1) {
				System.out.println(new String(buffer, 0, bytesRead, StandardCharsets.UTF_8));
			}

			in.seek(0);
			// NOTE(review): available() returns an int and may under-report the
			// remaining bytes for large or remote files — acceptable for this
			// small demo file, but do not rely on it for sizing in general.
			byte[] buffer2 = new byte[in.available()];
			in.readFully(buffer2);
			System.out.println(new String(buffer2, StandardCharsets.UTF_8));
		}
	}

}
