package hdfs;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.net.URI;
import java.nio.charset.StandardCharsets;

/**
 * Small HDFS client demo: connects to a remote NameNode and writes a test file.
 *
 * <p>Writes "hao" followed by "nishishui" to {@code /chen/test/test.log} on the
 * cluster at 192.168.0.78:8020, creating (or overwriting) the file.
 */
public class WriteHdfs {
    public static void main(String[] args) throws Exception {
        // Run as the "hdfs" superuser so the write is not rejected by HDFS permissions.
        System.setProperty("HADOOP_USER_NAME", "hdfs");

        Configuration conf = new Configuration();
        // BUG FIX: these settings must be applied BEFORE FileSystem.get(); the
        // original set them after obtaining the FileSystem, so the instance
        // (and its DFSClient) never saw them.
        conf.setBoolean("dfs.support.append", true);
        // Small/test cluster: don't try to swap in a replacement datanode when one
        // fails mid-write — there may be no spare datanode to replace it with.
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
        conf.setBoolean("dfs.client.block.write.replace-datanode-on-failure.enable", true);

        Path p = new Path("/chen/test/test.log");

        // BUG FIX: use try-with-resources so fs/out/bout are always closed, even
        // on failure. The original leaked all three on any exception path.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.0.78:8020"), conf);
             FSDataOutputStream out = fs.create(p);
             BufferedOutputStream bout = new BufferedOutputStream(out)) {
            // Explicit charset: getBytes() without one uses the platform default.
            bout.write("hao".getBytes(StandardCharsets.UTF_8));
            // BUG FIX: the original called out.write(...) AFTER bout.close() had
            // already closed the underlying stream, which throws "stream closed".
            // Both payloads now go through the buffered stream before it closes.
            bout.write("nishishui".getBytes(StandardCharsets.UTF_8));
        }
    }
}
