package com.sjsd.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;

import java.io.FileInputStream;
import java.io.FileOutputStream;
public class HDFS {

    /**
     * Downloads a file from HDFS to the local filesystem.
     *
     * @param hdfsFile path of the source file in HDFS
     * @param hdfsUrl  HDFS endpoint used as {@code fs.defaultFS} (e.g. hdfs://host:8020)
     * @param fileName local destination path
     * @throws Exception if the HDFS file does not exist, or on any I/O failure
     */
    public static void hdfsReadFile(String hdfsFile, String hdfsUrl, String fileName) throws Exception {
        // Build a FileSystem handle pointed at the given HDFS endpoint.
        Configuration cfg = new Configuration();
        cfg.set("fs.defaultFS", hdfsUrl);

        // try-with-resources guarantees the FileSystem and both streams are
        // closed even when the copy throws (the original leaked all three).
        try (FileSystem fs = FileSystem.get(cfg)) {
            // Fail fast if the source file is missing in HDFS.
            if (!fs.exists(new Path(hdfsFile))) {
                throw new Exception("文件不存在");
            }

            // Stream the HDFS file down to the local destination.
            try (FSDataInputStream in = fs.open(new Path(hdfsFile));
                 FileOutputStream out = new FileOutputStream(fileName)) {
                IOUtils.copyBytes(in, out, 2048);
            }
        }
    }

    /**
     * Uploads a local file to HDFS, refusing to overwrite an existing target.
     *
     * @param fileName local source path
     * @param hdfsUrl  HDFS endpoint used as {@code fs.defaultFS}
     * @param hdfsFile destination path in HDFS
     * @throws Exception if the HDFS target already exists, or on any I/O failure
     */
    public static void hdfsWrite(String fileName, String hdfsUrl, String hdfsFile) throws Exception {
        // Build a FileSystem handle pointed at the given HDFS endpoint.
        Configuration cfg = new Configuration();
        cfg.set("fs.defaultFS", hdfsUrl);

        // try-with-resources closes the FileSystem and both streams on every
        // path; closing the output stream also flushes the final HDFS block
        // (the original never closed it, risking a truncated upload).
        try (FileSystem fileSystem = FileSystem.get(cfg)) {
            // Refuse to clobber an existing HDFS file.
            if (fileSystem.exists(new Path(hdfsFile))) {
                throw new Exception("目标文件已经存在");
            }

            // Stream the local file up to HDFS.
            try (FileInputStream in = new FileInputStream(fileName);
                 FSDataOutputStream out = fileSystem.create(new Path(hdfsFile))) {
                IOUtils.copyBytes(in, out, 2048);
            }
        }
    }
}