package cqupt.spark_log.main;

import java.io.File;
import java.io.FileFilter;
import java.io.IOException;

import cqupt.spark_log.contrant.Contant;
import cqupt.spark_log.fileio.FileFilterMan;
import cqupt.spark_log.info.NodeEnergyInfo;
import cqupt.spark_log.utils.Utils;
import jxl.Workbook;
import jxl.write.WritableWorkbook;
/**
 * Aggregates the energy consumption of each cluster node for one workload run
 * and writes the result to an Excel workbook. (Currently unused.)
 * @author TomWang
 *
 */
public class NodeEnergyMain {

	/**
	 * Walks the experiment directory, picks every run directory matching
	 * "{scheduler}-{shuffle}-{index}" whose index is >= beginIndex, locates the
	 * workload sub-directory inside it, and feeds the workload log file to
	 * {@link NodeEnergyInfo} which accumulates per-node energy figures into an
	 * Excel workbook saved next to the experiment data.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		// Experiment parameters.
		String workLoadName = "sort";
		String schduler = "ENERGY_AWARE";
		int sparkShufle = 50;
		int beginIndex = 2; // skip warm-up runs with an index below this
		String experimentPath = Utils.getExperimentPath(Contant.BASE_PATH);
		File file = new File(experimentPath);
		// Matches run directories named like "ENERGY_AWARE-50-3".
		FileFilter filter1 = new FileFilterMan(schduler + "-" + sparkShufle + "-\\d+");
		FileFilter filter2 = new FileFilterMan(workLoadName);
		try {
			// BUG FIX: the original concatenated the string literal "experimentPath"
			// instead of the variable, so the workbook landed in the working
			// directory as e.g. "experimentPathsort.xls". Use the (parent, child)
			// constructor so the separator is handled for us.
			WritableWorkbook book = Workbook.createWorkbook(new File(experimentPath, workLoadName + ".xls"));
			NodeEnergyInfo nodeEnergyInfo = new NodeEnergyInfo(book);
			if (file.exists()) {
				File[] files = file.listFiles(filter1);
				// listFiles returns null if the path is not a directory or an
				// I/O error occurs — guard against an NPE.
				if (files != null) {
					for (File tmpFile : files) {
						String name = tmpFile.getName();
						String[] names = name.split("-");
						// Guard against directory names that do not carry a run
						// index (would otherwise throw ArrayIndexOutOfBounds).
						if (names.length < 3) {
							continue;
						}
						String index = names[2];
						if (Integer.parseInt(index) < beginIndex) {
							continue;
						}
						File[] childFiles = tmpFile.listFiles(filter2);
						// Only process runs with exactly one workload directory.
						if (childFiles != null && childFiles.length == 1) {
							File childFile = childFiles[0];
							// BUG FIX: use the platform separator instead of the
							// hard-coded Windows "\\" so the path also works on Linux.
							String path = childFile.getCanonicalPath() + File.separator + workLoadName;
							// Read the log file, compute the energy figures, and
							// stage them in the workbook.
							nodeEnergyInfo.save(path, index);
						}
					}
				}
				nodeEnergyInfo.writeAndclose();
			}
		} catch (IOException e1) {
			e1.printStackTrace();
		}
	}
}
