package com.ideal.hadoopadmin.api.yarn;

import com.ideal.hadoopadmin.crontab.property.Properties;
import com.ideal.tools.scheduler.DbFairScheduler;
import com.ideal.tools.ssh.common.CommonProperties;
import com.ideal.tools.ssh.context.ClusterContext;
import com.ideal.tools.ssh.entity.LinuxMachine;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class YarnAPI {

    /**
     * Pushes the YARN fair-scheduler queue configuration for the given cluster
     * by delegating to {@link DbFairScheduler#initYarnFairScheduleFile}.
     *
     * @param context cluster connection context carrying machine list and properties
     */
    public static void dbFairScheduler(ClusterContext context) {
        new DbFairScheduler().initYarnFairScheduleFile(context);
    }

    /**
     * Builds a throwaway {@link ClusterContext} with a single test machine and
     * runs {@link #dbFairScheduler(ClusterContext)} against it.
     *
     * <p>NOTE(review): this is test scaffolding — the IP, login name, password and
     * public key below are hardcoded credentials committed in source. They should
     * be moved to an external (non-committed) configuration before this class is
     * used anywhere near production.
     */
    public void call() {
        // Test-only ClusterContext: destination path for the RM scheduler XML is
        // intentionally left blank here.
        Map<String, String> propertyMap = new HashMap<>();
        propertyMap.put(CommonProperties.RM_SCHEDULE_XML_DES_PATH, "");

        CommonProperties commonProperties = new CommonProperties(propertyMap);
        ClusterContext context = new ClusterContext(commonProperties);

        // Test-only machine parameters (see security note above).
        List<LinuxMachine> machineList = new ArrayList<>();
        LinuxMachine nn1 = new LinuxMachine().initIP("10.5.24.151").initLoginName("I-Hadoop")
                .initPassWord("ideal123").initMachineType(LinuxMachine.MachineType.WebAPP)
                .initPubKey("AAAAC3NzaC1lZDI1NTE5AAAAIFceht+lsORHJXhdlnB6+zQJ3Z3vfme546mAuzqmbtIy");
        machineList.add(nn1);
        context.setMachineList(machineList);

        // Static method — call it directly instead of via a new instance.
        dbFairScheduler(context);
    }
}
