package org.jst4me.hadoop;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.jst4me.common.exceptions.ArgumentException;
import org.jst4me.hadoop.HDFS_Configuration;
import org.jst4me.hadoop.util.HDFSAccessInterface;
import org.jst4me.hadoop.util.HDFSAccessInterfaceImpl;

import java.io.*;

/**
 * Small HDFS demo driver.
 *
 * <p>{@code main} appends a local file to a file on HDFS via {@link HDFSAccessInterface};
 * the two private helpers demonstrate a raw {@code FileSystem} copy and a
 * download-to-local-file round trip. Local paths are hard-coded Windows paths,
 * so this demo only runs as-is on a Windows machine with those files present.
 */
public class HDFS_Demo1 {

    /**
     * Appends the local file {@code E:\dtinone\ppt\debug.log} to the HDFS file
     * {@code /user/hadoop/shell_test5.sh}, then closes the HDFS client.
     *
     * @param args unused
     * @throws Exception if the HDFS client fails to append or close
     */
    public static void main(String[] args) throws Exception {
        HDFSAccessInterface util = new HDFSAccessInterfaceImpl();
        try {
            // NOTE(review): util is presumably already initialized by its constructor;
            // an explicit init("hdfs://...") call may be required — confirm against
            // HDFSAccessInterfaceImpl before running.
            util.append("E:\\dtinone\\ppt\\debug.log", "/user/hadoop/shell_test5.sh");
        } finally {
            // BUGFIX: close the client even when append() throws, so the
            // underlying HDFS connection is never leaked.
            util.close();
        }
    }

    /**
     * Copies {@code shell_test5.sh} from the configured local working directory
     * into {@code /user/hadoop/} on the configured HDFS cluster.
     *
     * @throws IOException if the copy or the close fails
     */
    private static void hdfsCopy() throws IOException {
        FileSystem hdfs = HDFS_Configuration.getFileSystem();
        try {
            hdfs.copyFromLocalFile(
                    new Path(HDFS_Configuration.Local_Work_Dir + "/" + "shell_test5.sh"),
                    new Path(HDFS_Configuration.HDFS_Server_And_Port + "/user/hadoop/shell_test5.sh"));
        } finally {
            // BUGFIX: release the FileSystem handle even if the copy fails.
            hdfs.close();
        }
    }

    /**
     * Downloads {@code /user/hadoop/debug.log} as an {@link InputStream} and
     * writes it to the local file {@code e:\test.log}.
     *
     * @param util an initialized HDFS access client
     * @throws Exception if the download or the local write fails
     */
    private static void downToInputStreamTest(HDFSAccessInterface util) throws Exception {
        // BUGFIX: try-with-resources closes all three streams even when the copy
        // loop throws mid-way; the original leaked every stream on any IOException.
        try (InputStream in = util.downToInputStream("/user/hadoop/debug.log");
             BufferedInputStream bis = new BufferedInputStream(in);
             FileOutputStream fos = new FileOutputStream(new File("e:\\test.log"))) {
            byte[] buffer = new byte[4096];
            int len;
            while ((len = bis.read(buffer)) != -1) {
                fos.write(buffer, 0, len);
            }
            fos.flush();
        }
    }
}
