package cn.com.coding.common.utils;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.springframework.stereotype.Component;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * 从hadoop中读取文件
 *
 * @author inke219223m
 */
@Component
public class ReadFile {
    //    public static List<ArrayList<String>> ReadFromHDFS(String file) throws IOException {
    public static LinkedHashMap<String, String> ReadFromHDFS(String file) throws IOException {
//        List<ArrayList<String>> res =  new ArrayList<>();

        LinkedHashMap<String, String> res = new LinkedHashMap<>();

        System.setProperty("HADOOP_USER_NAME", "codingce");
        Configuration conf = new Configuration();
        conf.set("dfs.replication", "2");
        conf.set("dfs.client.socket-timeout", "300000");
        //添加此配置信息即可
        conf.set("dfs.client.use.datanode.hostname", "true");

        ArrayList<String> list = new ArrayList();
        int i = 0;
        StringBuffer buffer = new StringBuffer();
        FSDataInputStream fsr = null;
        BufferedReader bufferedReader = null;
        String lineTxt = null;

        try {
            FileSystem fs = FileSystem.get(URI.create(file), conf, "codingce");
            fsr = fs.open(new Path(file));
            bufferedReader = new BufferedReader(new InputStreamReader(fsr));
            while ((lineTxt = bufferedReader.readLine()) != null) {
                String[] arg = lineTxt.split("\t");
//                list.add(arg[0]);
//                list.add(arg[1]);
//                res.add(list);

                res.put(arg[1], arg[0]);
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (bufferedReader != null) {
                try {
                    bufferedReader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return res;

    }

    public static void main(String[] args) throws IOException {
//        ReadFile readFile = new ReadFile();
//        List<ArrayList<String>> ll = ReadFromHDFS("hdfs://8.130.17.56:8020/fly/out2/part-r-00000");
//        for (int i = 0; i < ll.size(); i++) {
//            System.out.println(ll.get(i));
//            for (int j = 0; j < ll.get(i).size(); j++) {
//                System.out.print(ll.get(i).get(j) + "\t");
//            }
//            System.out.println();
//        }

        LinkedHashMap<String, String> map = ReadFromHDFS("hdfs://8.130.17.56:8020/fly/out2/part-r-00000");

        Iterator iter = map.keySet().iterator();
        //数据展示
        System.out.println("数据展示");
        while (iter.hasNext()) {
            Object key = iter.next();
            Object val = map.get(key);
            System.out.println(key + "\t" + val);
        }

    }

}
