
package ltd.hxya.novel.common.utils;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@Slf4j
public class HDFSUtils {
    // Base HDFS URL (scheme://host:port).
    // FIX(review): was "hdfs://192.168.65.39/:8020" — the stray '/' before the port made
    // ":8020" part of the URI path instead of the port, so the NameNode port was never set.
    public static String hdfsUrl = "hdfs://192.168.65.39:8020";
    // Target directory inside HDFS where files are created.
    public static String path = "/test/";
    // Suffix appended to every generated file name.
    public static String fileSuffix = ".txt";

    /** Utility class — not meant to be instantiated. */
    private HDFSUtils() {
    }

    /**
     * Opens a connection to HDFS at {@link #hdfsUrl} as user {@code "root"}.
     * <p>
     * Each call creates a fresh {@link FileSystem}; the caller is responsible for
     * releasing it via {@link #close(FileSystem)}.
     *
     * @return a connected {@link FileSystem}
     * @throws URISyntaxException   if {@link #hdfsUrl} is not a valid URI
     * @throws IOException          if the connection to HDFS fails
     * @throws InterruptedException if the connecting thread is interrupted
     */
    public static FileSystem getFileSystem() throws URISyntaxException, IOException, InterruptedException {
        log.info("开始建立与HDFS的连接");
        Configuration configuration = new Configuration();
        return FileSystem.get(new URI(hdfsUrl), configuration, "root");
    }

    /**
     * Creates (or overwrites) {@code path + fileName + fileSuffix} on HDFS and writes
     * {@code json} to it as UTF-8.
     * <p>
     * FIX(review): the output stream is now closed (previously it was leaked, so the
     * written bytes might never be flushed to HDFS), and the {@link FileSystem} is
     * released even when the write fails.
     *
     * @param fileName base file name (without directory or suffix)
     * @param json     content to write
     * @throws IOException          if creating or writing the file fails
     * @throws URISyntaxException   if {@link #hdfsUrl} is not a valid URI
     * @throws InterruptedException if connecting to HDFS is interrupted
     */
    public static void createFile(String fileName, String json) throws IOException, URISyntaxException, InterruptedException {
        FileSystem fileSystem = getFileSystem();
        try (FSDataOutputStream fsDataOutputStream = fileSystem.create(new Path(getDefaultFullPath(fileName)))) {
            fsDataOutputStream.write(json.getBytes(StandardCharsets.UTF_8));
        } finally {
            close(fileSystem);
        }
    }

    /**
     * Builds the full HDFS path for a file name: {@code path + fileName + fileSuffix}.
     *
     * @param fileName base file name
     * @return the full path, e.g. {@code "/test/foo.txt"}
     */
    public static String getDefaultFullPath(String fileName) {
        return path + fileName + fileSuffix;
    }

    /**
     * Appends {@code json} once to an already-open output stream, encoded as UTF-8.
     * <p>
     * FIX(review): the original wrote {@code json} twice in a row — every appended
     * payload was duplicated. The stream is not closed here; that is the caller's job.
     *
     * @param fsDataOutputStream open stream to append to
     * @param json               content to append
     * @throws IOException if the write fails
     */
    public static void append(FSDataOutputStream fsDataOutputStream, String json) throws IOException {
        fsDataOutputStream.write(json.getBytes(StandardCharsets.UTF_8));
    }

    /**
     * Releases the given {@link FileSystem} connection.
     *
     * @param fileSystem the connection to close
     * @throws IOException if closing fails
     */
    public static void close(FileSystem fileSystem) throws IOException {
        fileSystem.close();
        log.info("关闭与HDFS的连接");
    }
}
