package com.hugedata.idc.task;

import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.List;

import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;

import com.hugedata.cdnserver.common.CommonUnderTask;
import com.hugedata.cdnserver.util.SystemUtils;
import com.hugedata.idc.utils.HDFSUtils;
import com.hugedata.imsp.jabberc.IMSPClient;
import com.hugedata.imsp.jabberc.service.node.IDCNode;
import com.hugedata.util.PropUtils;

public class UnderTracerouteTask
{
    private static final Logger LOGGER = Logger.getLogger(UnderTracerouteTask.class);

    /**
     * Entry point: runs {@link #execute()} once and logs any I/O failure.
     *
     * @param args unused — all inputs come from configuration properties and HDFS
     */
    public static void main(String[] args)
    {
        try
        {
            execute();
        }
        catch (IOException e)
        {
            // FIX: previously logged with an empty message, making failures hard to grep.
            LOGGER.error("UnderTracerouteTask execution failed", e);
        }
    }

    /**
     * Dispatches the traceroute task through the PD interface. The task must be
     * issued per area / ISP, so one task is created for each available IDC node.
     * <p>
     * Flow: verify MCS connectivity, fetch the list of available nodes, copy the
     * IP list file from HDFS to the local Hadoop tmp dir, then create one task
     * per node using that file. The local copy is always deleted afterwards.
     *
     * @throws IOException if the IP list cannot be copied from HDFS
     */
    public static void execute() throws IOException
    {
        String taskName = "下发traceroute任务";
        String toolType = "traceroute_tool_type";

        Configuration conf = HDFSUtils.getConf();
        IMSPClient client = SpackClient.getSpackClient();
        if (!SpackClient.isMCSConnected())
        {
            // Without an MCS connection no task can be delivered; bail out early.
            LOGGER.error("can not connect MCS, terminate current UnderTracerouteTask");
            return;
        }

        String beginDate = SystemUtils.getCfg(SystemUtils.CONFIG_PATH).getProperty("under_traceroute_begin_time");
        // Fetch the nodes currently able to run this tool type.
        List<IDCNode> nodeList = CommonUnderTask.getAvailableNode(client, taskName, toolType);
        LOGGER.info(taskName + ",可用节点数：" + nodeList.size());

        String ipListPath = PropUtils.getProp("ipListPath");
        LOGGER.info("ipListPath=" + ipListPath);

        String hadoopTmpDir = conf.get("hadoop.tmp.dir");
        LOGGER.info("hadoopTmpDir=" + hadoopTmpDir);
        String localIpListPath = hadoopTmpDir + "/iplist.txt";
        // Stage the IP list locally so it can be attached to each task.
        HDFSUtils.readfileFromHdfs(ipListPath, localIpListPath);
        File ipListFile = new File(localIpListPath);

        try
        {
            for (IDCNode nodeInfo : nodeList)
            {
                String area = nodeInfo.getAreaCode();
                String isp = nodeInfo.getIspCode();
                LOGGER.info("area=" + area);
                LOGGER.info("isp=" + isp);

                int resultCode = CommonUnderTask.createTask(client, taskName, toolType, ipListFile, nodeInfo, beginDate);
                if (resultCode != 0)
                {
                    LOGGER.error("向区域:" + area + ",ISP:" + isp + "节点下发Traceroute任务失败");
                }
                else
                {
                    // FIX: the success path was logged at ERROR level; use INFO.
                    LOGGER.info("向区域:" + area + ",ISP:" + isp + "节点下发Traceroute任务成功");
                }
            }
        }
        finally
        {
            // FIX: delete the local temp copy even if createTask throws,
            // so a failed run does not leak the staged file.
            FileUtils.deleteQuietly(ipListFile);
        }
    }

}
