package cn.lagou.dw.flume.interceptor.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

/**
 * @className ReadHDFSText
 * @description Reads the holiday/workday list from HDFS for the custom Flume interceptor
 * @author admin
 * @date 2022/9/19 22:00
 */
public class ReadHDFSText {
    /**
     * Read the file contents from HDFS and return them as a list of lines.
     */
    public static List<String> readTxtFile() {
        Configuration conf = new Configuration();
        String remoteFilePath = "/user/data/holidayAndWork.txt";
        List<String> holidays = new ArrayList<>();
        // try-with-resources closes the FileSystem, stream and reader even if reading fails
        try (FileSystem fs = FileSystem.get(new URI("hdfs://centos7-1:9000"), conf, "root");
             FSDataInputStream fis = fs.open(new Path(remoteFilePath));
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(fis, StandardCharsets.UTF_8))) {
            // Collect one entry per line from the holiday/workday file
            String lineTxt;
            while ((lineTxt = reader.readLine()) != null) {
                holidays.add(lineTxt);
            }
        } catch (IOException | URISyntaxException | InterruptedException e) {
            e.printStackTrace();
        }
        return holidays;
    }
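
    /**
     * Minimal usage sketch, assuming the NameNode at hdfs://centos7-1:9000 is reachable
     * and /user/data/holidayAndWork.txt exists; prints each loaded entry for a quick check.
     */
    public static void main(String[] args) {
        for (String holiday : readTxtFile()) {
            System.out.println(holiday);
        }
    }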
}
