package com.galeno.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

/**
 * @author galeno
 * @Title:
 * @Description: 统计字母个数
 * @date 2021/7/2220:30
 */
public class WordCountHdfs {

    public static void main(String[] args) throws IOException {
        new WordCountHdfs().countWorld();
    }

    /**
     * Obtains an HDFS {@link FileSystem} handle from the default classpath
     * configuration ({@code core-site.xml} etc.).
     *
     * @return the configured file system
     * @throws IOException if the file system cannot be reached
     */
    public static FileSystem getFileSystem() throws IOException {
        return FileSystem.get(new Configuration());
    }

    /**
     * Reads {@code /wc.txt} from HDFS, counts each whitespace-separated word,
     * prints every tally to stdout and writes it to {@code /wcres.txt}.
     *
     * <p>NOTE(review): the name {@code countWorld} (sic, "World" vs "Word") is
     * kept unchanged for backward compatibility with existing callers.
     *
     * @throws IOException if reading from or writing to HDFS fails
     */
    public void countWorld() throws IOException {
        // try-with-resources closes every stream and the FileSystem handle
        // even when an exception is thrown mid-way; the original closed them
        // only on the happy path and leaked them all on any failure.
        // UTF-8 is specified explicitly instead of the platform default.
        try (FileSystem fs = getFileSystem();
             BufferedReader br = new BufferedReader(new InputStreamReader(
                     fs.open(new Path("/wc.txt")), StandardCharsets.UTF_8));
             BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
                     fs.create(new Path("/wcres.txt")), StandardCharsets.UTF_8))) {

            Map<String, Integer> counts = countWords(br);

            // Plain loop instead of forEach so an IOException propagates to
            // the caller; the original swallowed it with printStackTrace()
            // inside the lambda, silently producing a truncated result file.
            for (Map.Entry<String, Integer> entry : counts.entrySet()) {
                String line = "单词" + entry.getKey() + ":" + entry.getValue();
                System.out.println(line);
                bw.write(line);
                bw.newLine();
            }
        }
    }

    /**
     * Tallies whitespace-separated words from the given reader.
     *
     * <p>Splits on {@code \s+} rather than a single space so that runs of
     * spaces or tabs do not yield empty tokens, which the original code
     * counted as a bogus "" word.
     *
     * @param br source of text lines (not closed by this method)
     * @return word -> occurrence count
     * @throws IOException if reading fails
     */
    private static Map<String, Integer> countWords(BufferedReader br) throws IOException {
        Map<String, Integer> counts = new HashMap<>();
        String line;
        while ((line = br.readLine()) != null) {
            for (String word : line.split("\\s+")) {
                if (!word.isEmpty()) {
                    counts.merge(word, 1, Integer::sum);
                }
            }
        }
        return counts;
    }
}
