package HDFS;

import org.apache.hadoop.net.DNSToSwitchMapping;

import java.util.ArrayList;
import java.util.List;

/**
 * @Author lixin
 * @Date 2023/3/10 10:25
 */
public class Demo2_RackAware implements DNSToSwitchMapping {

    /** Keyword marking that a node was passed by hostname (e.g. "node103"). */
    public static final String KEYWORD = "node";

    /**
     * Resolves each node name to its rack path.
     *
     * <p>Accepts either hostname form ("node103") or dotted-quad IP form
     * ("192.168.145.103"); in both cases the trailing numeric host id is
     * extracted and mapped: hosts 103-104 go to {@code /rack1}, all others
     * to {@code /rack2}.
     *
     * @param names node names (hostnames starting with {@link #KEYWORD}, or IPs)
     * @return a rack path per input name, in the same order
     * @throws NumberFormatException if the extracted host id is not numeric
     */
    @Override
    public List<String> resolve(List<String> names) {
        // Program to the interface; presize to the known result count.
        List<String> rackList = new ArrayList<>(names.size());

        for (String name : names) {
            // Loop-scoped extraction of the numeric host identifier.
            final String ipStr;
            if (name.startsWith(KEYWORD)) {
                // Hostname form: strip the "node" prefix ("node103" -> "103").
                ipStr = name.substring(KEYWORD.length());
            } else {
                // IP form: take the last octet ("192.168.145.103" -> "103").
                ipStr = name.substring(name.lastIndexOf('.') + 1);
            }

            // NOTE(review): assumes plain "nodeNNN" or dotted-quad input;
            // an FQDN like "node103.example.com" would yield "103.example.com"
            // here and throw NumberFormatException — confirm input format.
            int ip = Integer.parseInt(ipStr);

            // Business rule: host ids 103-104 live on rack1, the rest on rack2.
            rackList.add(ip >= 103 && ip <= 104 ? "/rack1" : "/rack2");
        }

        return rackList;
    }

    @Override
    public void reloadCachedMappings() {
        // Stateless mapper: nothing cached, nothing to reload.
    }

    @Override
    public void reloadCachedMappings(List<String> names) {
        // Stateless mapper: nothing cached, nothing to reload.
    }
}
