package Hdfs;

import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URL;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import ch.qos.logback.core.net.SyslogOutputStream;

public class HelloHDFS {

	// Kept public for backward compatibility with any external readers;
	// made final because the logger is never reassigned.
	public static final Log log = LogFactory.getLog(HelloHDFS.class);

	/**
	 * Demonstrates basic HDFS operations through the Hadoop {@code FileSystem} API:
	 * mkdirs / exists / delete (recursive) / listStatus / copyToLocalFile.
	 *
	 * @param args unused
	 * @throws Exception any HDFS failure is simply propagated (demo code)
	 */
	public static void main(String[] args) throws Exception {
		/* Reading a plain HTTP URL:
		 *
		 * URL url = new URL("http://www.baidu.com");
		 * InputStream inputStream = url.openStream();
		 * IOUtils.copyBytes(inputStream, System.out, 4096, true); // true = auto-close the stream
		 */

		/* Reading an hdfs:// URL. java.net.URL only understands http by default,
		 * so the HDFS stream handler factory must be registered first:
		 *
		 * URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
		 * URL url = new URL("hdfs://192.168.56.100:9000/hello.txt");
		 * InputStream inputStream = url.openStream();
		 * IOUtils.copyBytes(inputStream, System.out, 4096, true); // true = auto-close the stream
		 */

		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", "hdfs://192.168.56.100:9000");

		// try-with-resources closes the FileSystem even when a call below throws;
		// the original only closed it on the success path (resource leak on error).
		try (FileSystem fileSystem = FileSystem.get(conf)) {

			boolean success = fileSystem.mkdirs(new Path("/xiaoxiwen"));
			log.info("创建文件是否成功:" + success);

			success = fileSystem.exists(new Path("/xiaoxiwen"));
			log.info("文件是否存在:" + success);

			// Second argument = recursive delete.
			success = fileSystem.delete(new Path("/xiaoxiwen"), true);
			log.info("删除文件是否成功：" + success);

			/* Writing to HDFS through an output stream
			 * (copyBytes with true auto-closes both streams):
			 *
			 * FSDataOutputStream out = fileSystem.create(new Path("/test.data"), true);
			 * FileInputStream in = new FileInputStream("d:/test.txt");
			 * IOUtils.copyBytes(in, out, 4096, true);
			 */

			/* The same write done with a manual read/write loop:
			 *
			 * FSDataOutputStream out = fileSystem.create(new Path("/test2.data"));
			 * FileInputStream in = new FileInputStream("d:/test.txt");
			 * byte[] buf = new byte[4096];
			 * int len = in.read(buf);
			 * while (len != -1) {
			 *     out.write(buf, 0, len);
			 *     len = in.read(buf);
			 * }
			 * in.close();
			 * out.close();
			 */

			// List the contents of the HDFS root directory.
			FileStatus[] statuses = fileSystem.listStatus(new Path("/"));
			log.info(statuses.length);
			for (FileStatus status : statuses) {
				log.info(status.getPath());
				log.info(status.getPermission());
				log.info(status.getReplication());
			}

			/* Upload a local file into HDFS:
			 *
			 * fileSystem.copyFromLocalFile(new Path("d://input.txt"), new Path("/"));
			 */

			// Copy a file from HDFS to the local file system.
			// delSrc=false keeps the HDFS source; useRawLocalFileSystem=true writes
			// via the raw local FS (no .crc checksum side file) — this demo runs
			// on Windows.
			// BUG FIX: the original destination "/d://output.txt" was a malformed
			// Windows path (leading slash + doubled separator after the drive
			// letter); the correct form is "d:/output.txt".
			fileSystem.copyToLocalFile(false, new Path("/wcoutput/part-r-00000"),
					new Path("d:/output.txt"), true);
		}
	}
}
