package com.bff.gaia.client.combine.planutil;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;


import java.io.IOException;
import java.util.List;

public class PlanComputeUtil {

	// Utility class: all members are static, so prevent instantiation.
	private PlanComputeUtil() {
	}

	/**
	 * Computes the total HDFS space consumed by the files/directories a job processes.
	 *
	 * @param filePaths file or directory paths the job works on; only entries whose
	 *                  path string contains "hdfs" are counted
	 * @return total space consumed in bytes, or 0 if the list is null/empty or an
	 *         I/O error occurs while querying HDFS
	 */
	public static long getWorkFileSize(List<String> filePaths) {
		// Null-safe guard: treat a missing or empty path list as zero work.
		if (filePaths == null || filePaths.isEmpty()) {
			return 0L;
		}
		return computeWorkFileSize(filePaths);
	}

	// Sums getSpaceConsumed() over all HDFS paths in the list.
	private static long computeWorkFileSize(List<String> filePaths) {
		Configuration conf = new Configuration();
		// "fs.default.name" has been deprecated in Hadoop for years;
		// "fs.defaultFS" is the current key for the default filesystem URI.
		conf.set("fs.defaultFS", GetConfigure.getHDFSMasterAddress());

		long fileSize = 0L;
		try {
			// Note: FileSystem.get() returns a cached, shared instance —
			// deliberately not closed here, closing would affect other users.
			FileSystem fs = FileSystem.get(conf);
			for (String path : filePaths) {
				// Only HDFS paths are measured; non-HDFS entries are skipped.
				if (path.contains("hdfs")) {
					fileSize += fs.getContentSummary(new Path(path)).getSpaceConsumed();
				}
			}
		} catch (IOException e) {
			// Keep the original contract of returning 0 on failure, but surface
			// the error instead of swallowing it silently.
			System.err.println("PlanComputeUtil: failed to compute work file size: " + e);
			fileSize = 0L;
		}
		return fileSize;
	}

	/**
	 * Estimates the RAM allocated to a job in bytes: parallelism × slot size.
	 * Slot size is a placeholder until the real slot value is wired in.
	 *
	 * @param parallizer degree of parallelism (number of slots)
	 * @param slotSize   size of one slot in GB
	 * @return total RAM in bytes
	 */
	public static long getRAMSize(int parallizer, float slotSize) {
		// Cast the slot term to long before multiplying so the whole product
		// is computed in long arithmetic, avoiding int overflow.
		return parallizer * (long) (slotSize * 1024 * 1024 * 1024);
	}
}