package com.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

/**
 * Small HDFS demo client: reads a text file from HDFS and prints it to
 * stdout, with a helper for uploading a local file into HDFS.
 */
public class HadoopMain {

    /**
     * Opens {@code /user/hp/file.txt} on the configured cluster and prints
     * each line to standard output.
     *
     * @param args unused
     * @throws Exception if the filesystem cannot be reached or the read fails
     */
    public static void main(String[] args) throws Exception {
        // try-with-resources closes the reader, the HDFS input stream, and the
        // FileSystem handle in reverse order — the original leaked all three.
        try (FileSystem fs = getFileSystem();
             FSDataInputStream inputStream = fs.open(new Path("/user/hp/file.txt"));
             BufferedReader br = new BufferedReader(
                     // Explicit UTF-8: the no-charset constructor uses the
                     // platform default, which varies between machines.
                     new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
            br.lines().forEach(System.out::println);
        }
    }

    /**
     * Uploads the local file {@code data/file.txt} into {@code /user/hp}
     * on HDFS.
     *
     * @throws IOException if the filesystem cannot be created or the copy fails
     */
    private static void extracted() throws IOException {
        try (FileSystem fs = getFileSystem()) {
            // Upload the local file to HDFS.
            fs.copyFromLocalFile(new Path("data/file.txt"), new Path("/user/hp"));
        }
    }

    /**
     * Builds a {@link FileSystem} handle pointing at the HA cluster
     * {@code hdfs://mycluster}.
     *
     * @return an open HDFS {@link FileSystem}; callers are responsible for
     *         closing it
     * @throws IOException if the filesystem cannot be created
     */
    private static FileSystem getFileSystem() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://mycluster"); // point at your HA cluster
//        conf.set("user.name", "vagrant");

        // Alternatively, point directly at a single NameNode (non-HA):
//         conf.set("fs.defaultFS", "hdfs://hadoop01:9000");

        return FileSystem.get(conf);
    }

}