package com.bigdata.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.*;
import java.nio.charset.StandardCharsets;


public class HDFS {
    /**
     * Reads {@code pom.xml} from HDFS and prints it to stdout if it exists;
     * otherwise uploads the local {@code pom.xml} to HDFS under the same name.
     *
     * @param args unused command-line arguments
     * @throws IOException declared for HDFS access failures (currently caught
     *                     and reported inside the method)
     */
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration(); // Hadoop client configuration
        conf.set("fs.defaultFS", "hdfs://master:9000");
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        String fileName = "pom.xml";
        Path file = new Path(fileName);

        // try-with-resources guarantees the FileSystem handle (and the streams
        // below) are closed even when reading or writing fails partway through.
        try (FileSystem fs = FileSystem.get(conf)) {
            if (fs.exists(file)) {
                // File exists on HDFS: stream it to stdout line by line.
                try (FSDataInputStream in = fs.open(file);
                     BufferedReader br = new BufferedReader(
                             new InputStreamReader(in, StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = br.readLine()) != null) {
                        // println, not printf: printf would interpret any '%' in
                        // the file content as a format specifier (and would not
                        // restore the newline that readLine() strips).
                        System.out.println(line);
                    }
                }
            } else {
                // File missing on HDFS: upload the local pom.xml in its place.
                try (InputStream in = new BufferedInputStream(new FileInputStream("pom.xml"));
                     FSDataOutputStream out = fs.create(file)) {
                    // close=false: the enclosing try-with-resources closes both
                    // streams; the original passed 'true' and then closed 'out'
                    // a second time.
                    IOUtils.copyBytes(in, out, 4096, false);
                }
                System.out.println("create " + fileName + " successful");
            }
        } catch (Exception e) {
            // main() is the program boundary, so a broad catch is acceptable
            // here; report the failure rather than crash with a raw stack dump
            // from the JVM. NOTE(review): consider rethrowing or System.exit(1)
            // so callers/scripts observe a non-zero exit code.
            e.printStackTrace();
        }
    }
}
