package deal;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class SendJsonToHdfs {

    /** HDFS NameNode address. NOTE(review): hard-coded; consider externalizing to config. */
    private static final String DEFAULT_FS = "hdfs://192.168.88.131:8020";

    /**
     * Uploads a local JSON file to HDFS, overwriting any existing target file.
     * Failures are printed and swallowed (best-effort contract preserved from the
     * original implementation); callers cannot observe an error.
     *
     * @param pastJsonFilePath     path of the local JSON file to upload
     * @param pasthdfsJsonFilePath destination path on HDFS
     */
    public static void send(String pastJsonFilePath, String pasthdfsJsonFilePath) {
        try {
            sendJsonToHdfs(pastJsonFilePath, pasthdfsJsonFilePath);
        } catch (IOException e) {
            // NOTE(review): swallowing keeps the original best-effort behavior;
            // prefer a logger (SLF4J) or rethrowing once callers can handle it.
            e.printStackTrace();
        }
    }

    /**
     * Copies the local file at {@code localFilePath} to {@code hdfsFilePath} on the
     * configured HDFS cluster, replacing any existing file at the destination.
     *
     * @param localFilePath local source file path
     * @param hdfsFilePath  HDFS destination path
     * @throws IOException if the copy or the HDFS connection fails
     */
    private static void sendJsonToHdfs(String localFilePath, String hdfsFilePath) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", DEFAULT_FS);

        // Fix: the original opened a FileInputStream it never used (copyFromLocalFile
        // reads the source via its Path argument) and never closed the FileSystem.
        // try-with-resources now closes the FileSystem handle instead.
        // NOTE(review): FileSystem.get may return a JVM-wide cached instance; closing is
        // correct for this standalone utility, but verify if other code shares the cache.
        try (FileSystem fs = FileSystem.get(conf)) {
            Path hdfsPath = new Path(hdfsFilePath);
            // delSrc=false keeps the local file; overwrite=true replaces an existing
            // HDFS file, superseding the manual exists()/delete() sequence.
            fs.copyFromLocalFile(false, true, new Path(localFilePath), hdfsPath);
            System.out.println("JSON 文件成功发送到HDFS：" + hdfsFilePath);
        }
    }
}
