package com.jc.demo.hadoop.hdfs;

import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;

import java.io.InputStream;
import java.net.URL;

/**
 * 前期准备：
 * [jevoncode@s1 ~]# hdfs dfs -mkdir /opt/
 * [jevoncode@s1 ~]# hdfs dfs -mkdir /opt/command/
 * [jevoncode@s1 ~]# hdfs dfs -put sougouword.txt /opt/command/word.txt
 * <p>
 * <p>
 * 方法一：动态参数
 * 命令如下：上传至hadoop服务器
 * [jevoncode@s1 ~]# export HADOOP_CLASSPATH=jc-demo-hadoop-0.0.1.0-SNAPSHOT-development.jar
 * [jevoncode@s1 ~]# hadoop com.jc.demo.hadoop.hdfs.URLCat hdfs://ns/opt/command/word.txt
 * 其中ns是hdfs-site.xml配置的主机名，用于高可用
 *
 * <p>
 * 方法二：远程访问
 * 直接执行main方法，使用hdfsHost做参数，可远程访问
 */
public class URLCat {

    /** Default HDFS file to print when no URI argument is supplied (method 2: remote access). */
    private static final String DEFAULT_HDFS_URI =
            "hdfs://s1.jevoncode.com:9000/opt/command/word.txt";

    static {
        // Teach java.net.URL the hdfs:// scheme. setURLStreamHandlerFactory may be
        // called at most once per JVM, hence the static initializer.
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /**
     * Copies an HDFS file to standard output.
     *
     * <p>Method 1 (dynamic argument): pass the hdfs:// URI as {@code args[0]}.
     * Method 2 (remote access): run with no arguments to use {@link #DEFAULT_HDFS_URI}.
     *
     * @param args optional; {@code args[0]} is the hdfs:// URI of the file to print
     * @throws Exception if the URI is malformed or the stream cannot be opened/read
     */
    public static void main(String[] args) throws Exception {
        String uri = args.length > 0 ? args[0] : DEFAULT_HDFS_URI;
        // try-with-resources guarantees the stream is closed even if copyBytes throws
        try (InputStream in = new URL(uri).openStream()) {
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}