package cc.lucien.hadoop;

import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Demo program that connects to an HDFS NameNode and writes a short
 * UTF-8 text file named {@code test} into the user's home directory.
 *
 * <p>Connection target and user are hard-coded for the lab environment
 * (NameNode at 10.24.130.132:9000, user {@code hadoop}).
 */
public class MyWrite {
    public static void main(String[] args) {
        // Run HDFS operations as the "hadoop" user.
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        // Create a configuration object pointing at the lab NameNode.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://10.24.130.132:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        String fileName = "test";
        // try-with-resources guarantees the stream and filesystem are
        // closed even if the write fails partway through (the original
        // leaked both on any exception before close()).
        try (FileSystem fs = FileSystem.get(conf);
             FSDataOutputStream os = fs.create(new Path(fileName))) {
            // Print filesystem info for the lab report.
            System.out.println("fs=" + fs);
            // Content to write. Encode explicitly as UTF-8: the default
            // charset is platform-dependent and would corrupt the
            // Chinese text on non-UTF-8 JVMs.
            byte[] buff = "Hello Hadoop! 这是做实验的内容!\n".getBytes(StandardCharsets.UTF_8);
            os.write(buff, 0, buff.length);
            System.out.println("Create " + fileName);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
