package src.mian.java;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.io.OutputStream;
import java.io.InputStream;

/**
 * Standalone utility: downloads a single file from HDFS to the local filesystem.
 *
 * <p>Connects to the cluster named by {@code hdfsUri}, verifies the source file
 * exists, and streams its bytes to the local destination path. All I/O errors
 * are reported via a stack trace on stderr.
 */
public class copyToLocal {
    public static void main(String[] args) {
        String hdfsUri = "hdfs://192.168.254.100:8020"; // URI of the Hadoop cluster (NameNode RPC endpoint)
        String inputFilePath = "/test/hello.txt"; // HDFS path of the file to download
        String outputLocalFilePath = "/D:/hadoop/hello.txt"; // local destination path
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsUri);
        // try-with-resources so the FileSystem handle is always closed
        // (the original obtained it without ever closing it — resource leak).
        try (FileSystem fs = FileSystem.get(conf)) {
            Path inputPath = new Path(inputFilePath);
            // Check that the file to download exists before opening streams.
            if (!fs.exists(inputPath)) {
                System.out.println("Input file does not exist");
                return;
            }
            // Copy the HDFS file's contents to the local file; both streams
            // are closed automatically, even on failure.
            try (InputStream in = fs.open(inputPath);
                 OutputStream out = new java.io.FileOutputStream(outputLocalFilePath)) {
                byte[] buffer = new byte[4096];
                int bytesRead;
                // read() returns -1 at end of stream — the idiomatic EOF sentinel.
                while ((bytesRead = in.read(buffer)) != -1) {
                    out.write(buffer, 0, bytesRead);
                }
            }

            System.out.println("File downloaded successfully");

        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}