package io;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.Logger;

/**
 * Reads a file addressed by a URL — including {@code hdfs://} URLs, enabled via
 * {@link FsUrlStreamHandlerFactory} — and copies its contents to standard output.
 *
 * <p>Usage: {@code java io.URLCat [url]} — if no argument is given, a default
 * HDFS path is read.
 */
public class URLCat {
	private final static Logger log = Logger.getLogger(URLCat.class);

	/** File read when no command-line argument is supplied (original hard-coded path). */
	private static final String DEFAULT_URL = "hdfs://node1:9000/user/hadoop/test.txt";

	static {
		// Required on Windows so Hadoop can locate its native binaries (winutils).
		System.setProperty("hadoop.home.dir", "D:/soft/hadoop-2.7.0");
		// Let java.net.URL understand the hdfs:// scheme.
		// NOTE: setURLStreamHandlerFactory may be invoked at most once per JVM;
		// a second call throws an Error, hence it lives in a static initializer.
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
	}

	/**
	 * Opens the URL (first argument, or {@link #DEFAULT_URL}) and streams its
	 * bytes to {@code System.out}. I/O failures are logged, not rethrown.
	 *
	 * @param args optional; {@code args[0]} is the URL to read
	 */
	public static void main(String[] args) {
		// Generalized: allow the source URL to be passed on the command line,
		// falling back to the original hard-coded path for compatibility.
		String url = (args != null && args.length > 0) ? args[0] : DEFAULT_URL;
		// try-with-resources closes the stream even on failure (replaces the
		// manual finally + IOUtils.closeStream of the original).
		try (InputStream in = new URL(url).openStream()) {
			// 4096-byte copy buffer (the original's 4069 was a typo of 4096);
			// 'false' keeps System.out open after the copy.
			IOUtils.copyBytes(in, System.out, 4096, false);
		} catch (IOException e) {
			log.error("IO错误", e);
		}
	}

}
