/*******************************************************************************
 * Package: com.hadoop.hdfs
 * Type:    WordCountHdfs
 * Date:    2024-05-13 20:57
 *
 * Copyright (c) 2024 LTD All Rights Reserved.
 *
 * You may not use this file except in compliance with the License.
 *******************************************************************************/
package com.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

/**
 * Word-count over a raw HDFS file: reads an input text file whose words are
 * separated by {@code '|'}, tallies each word's occurrences, and writes the
 * counts back to HDFS as one {@code "word count"} line per entry.
 *
 * @author Songxianyang
 * @date 2024-05-13 20:57
 */
public class WordCountHdfs {
    public static void main(String[] args) throws Exception {
        Configuration configuration = new Configuration();
        URI uri = new URI("hdfs://localhost:9000");
        // Connect to HDFS as the user "SongXianYang".
        FileSystem fs = FileSystem.get(uri, configuration, "SongXianYang");
        Path in = new Path("/hadooptest11/wordCountHdfs.txt");
        // word -> number of occurrences
        Map<String, Integer> map = new HashMap<>();
        try {
            // List files at the input path; second arg false = non-recursive.
            RemoteIterator<LocatedFileStatus> files = fs.listFiles(in, false);
            while (files.hasNext()) {
                LocatedFileStatus file = files.next();
                // try-with-resources ensures both streams are closed even if
                // reading throws (the original leaked them on any error), and
                // UTF-8 is pinned so results don't depend on the platform charset.
                try (FSDataInputStream inputStream = fs.open(file.getPath());
                     BufferedReader reader = new BufferedReader(
                             new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
                    String line;
                    while ((line = reader.readLine()) != null) {
                        // '|' is a regex metacharacter, hence the escape.
                        String[] split = line.split("\\|");
                        System.out.println("------>>>>>" + Arrays.asList(split));
                        for (String s : split) {
                            // merge() replaces the get()==null / put() dance.
                            map.merge(s, 1, Integer::sum);
                        }
                    }
                }
            }

            Path out = new Path("/hadooptest11/wordCountHdfsOut.txt");
            // Closed automatically, flushing the output even on failure paths.
            try (FSDataOutputStream fsDataOutputStream = fs.create(out)) {
                for (Map.Entry<String, Integer> entry : map.entrySet()) {
                    String value = entry.getKey() + " " + entry.getValue() + "\n";
                    // Explicit charset: pre-JDK18, getBytes() uses the platform default.
                    fsDataOutputStream.write(value.getBytes(StandardCharsets.UTF_8));
                }
            }
        } finally {
            // Release the FileSystem handle no matter how we exit.
            fs.close();
        }
    }
}
