import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.InputStream;

public class HDFSFileDownloader {

    // Source file on HDFS (a Hive warehouse output part-file).
    private static final String HDFS_FILE_PATH = "/user/hive/warehouse/dblab.db/job_action/000000_0";
    // Destination path on the local filesystem.
    private static final String LOCAL_FILE_PATH = "/usr/local/bigdatacase/dataset/job_action2.output";

    /**
     * Downloads {@code HDFS_FILE_PATH} from HDFS and writes it to
     * {@code LOCAL_FILE_PATH}, then prints a confirmation message.
     * Any {@link IOException} is caught and its stack trace printed;
     * the JVM exits normally either way.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // Default Configuration picks up core-site.xml / hdfs-site.xml
        // from the classpath to locate the HDFS namenode.
        Configuration config = new Configuration();

        // try-with-resources closes all three resources in reverse
        // declaration order even when the copy fails partway through.
        // (The previous version closed the streams only on the success
        // path, leaking them on any IOException raised during the loop.)
        try (FileSystem fs = FileSystem.get(config);
             FSDataInputStream inputStream = fs.open(new Path(HDFS_FILE_PATH));
             OutputStream outputStream = new FileOutputStream(new File(LOCAL_FILE_PATH))) {

            // Buffered copy; 8 KiB is a more typical I/O chunk than 1 KiB.
            byte[] buffer = new byte[8192];
            int bytesRead;
            // InputStream.read(byte[]) signals end-of-stream with -1;
            // comparing against -1 (not "> 0") is the documented contract.
            while ((bytesRead = inputStream.read(buffer)) != -1) {
                outputStream.write(buffer, 0, bytesRead);
            }

            System.out.println("File downloaded successfully to: " + LOCAL_FILE_PATH);

        } catch (IOException e) {
            // Best-effort tool: report the failure and fall through.
            e.printStackTrace();
        }
    }
}
