/*
 * Title:        CloudScheduler Toolkit
 * Description:  a Toolkit for Modeling and Simulation of Job Scheduling and Resource Provisioning in Cloud System
 * Licence:      GPL - http://www.gnu.org/copyleft/gpl.html
 * Author:       Fuhui Wu
 *
 * Copyright (c) 2013-2014, The National University of Defense Technology, China
 */

package org.nudt.jCloud.workflow;

import java.io.File;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.jdom2.input.SAXBuilder;

/**
 * Builds a {@link Workflow} DAG from a Pegasus DAX XML description file.
 * Each {@code <job>} element becomes a vertex; each {@code <child>} element
 * contributes weighted edges whose weight is the data-transfer time
 * (bytes moved between parent and child, divided by the link bandwidth).
 */
public class WorkflowGenFromXML implements WorkflowGenerator{
	/** Namespace of the Pegasus DAX schema used by the workflow description files. */
	private static final Namespace DAX_NS =
			Namespace.getNamespace("http://pegasus.isi.edu/schema/DAX");

	/** Path of the DAX XML file to parse. */
	private String fileName;
	/** Link bandwidth in bytes per second (constructor argument is in Mbit/s). */
	private double bandwidth;
	
	/**
	 * @param fileName  path of the DAX XML workflow description file
	 * @param bandwidth link bandwidth in Mbit/s; stored internally in bytes/s
	 */
	public WorkflowGenFromXML(String fileName, double bandwidth){
		this.fileName = fileName;
		// Mbit/s -> bytes/s: multiply by 1024*1024 bits, divide by 8 bits per byte.
		this.bandwidth = bandwidth*1024*1024/8;
	}

	/**
	 * Parses the DAX file and returns the resulting workflow.
	 * On any parse error the partial workflow built so far is returned
	 * (best-effort contract preserved from the original implementation).
	 */
	@Override
	public Workflow generate() {
		Workflow workflow = new Workflow();
		try{
			SAXBuilder builder = new SAXBuilder();
			// Harden against XXE: DAX files are plain data and need no DTDs
			// or external entities.
			builder.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
			builder.setExpandEntities(false);
			Document doc = builder.build(new File(fileName));
			parseJDOM(doc, workflow);
		} catch(Exception e){
			// Preserve the full stack trace instead of only the localized message.
			e.printStackTrace();
		}
		return workflow;
	}

	/**
	 * Populates {@code workflow} from the parsed DAX document: one DAG vertex
	 * per {@code <job>} element, plus weighted data-dependency edges declared
	 * by the {@code <child>} elements.
	 */
	private void parseJDOM(Document doc, Workflow workflow) {
		Element root = doc.getRootElement();
		List<Element> jobList = root.getChildren("job", DAX_NS);
		/*
		 * 1) outputFileSet maps every output file name to the node(s) producing it
		 *    and the produced size. An output file may be transferred from multiple
		 *    nodes, as in a data-gathering situation for example.
		 * 2) inputFileSet maps every node to its input files and their sizes.
		 */
		Map<String, Map<Node, Double>> outputFileSet = new HashMap<>();
		Map<Node, Map<String, Double>> inputFileSet = new HashMap<>();
		for(Element job : jobList){
			String jobId = job.getAttributeValue("id");
			String name = job.getAttributeValue("name");
			double runtime = Double.parseDouble(job.getAttributeValue("runtime"));
			Node node = new Node(jobId, runtime);
			workflow.getDAG().addVertex(node);
			
			// Jobs marked level 2 or 5 are wired to the synthetic exit node
			// (presumably the leaf levels of the supported DAX files — TODO confirm).
			String level = job.getAttributeValue("level");
			if("2".equals(level) || "5".equals(level)){
				workflow.getDAG().setEdgeWeight(
						workflow.getDAG().addEdge(node, workflow.getExitNode()), 0);
			}
			
			// Hard-coded entry/exit task names of the supported scientific
			// workflows (Montage, CyberShake, Epigenomics, LIGO, SIPHT, ...).
			switch(name){
				case "mProjectPP":
				case "ExtractSGT":
				case "fastQSplit":
				case "TmpltBank":
				case "Patser":
				case "Transterm":
				case "Findterm":
				case "RNA_Motif":
				case "Blast":{
					// Entry tasks: edge from the synthetic entry node, zero weight.
					workflow.getDAG().setEdgeWeight(
							workflow.getDAG().addEdge(workflow.getEntryNode(), node), 0);
					break;
				}
				
				case "mJPEG":
				case "ZipSeis":
				case "ZipPSA":
				case "pileup":
				case "SRNA_annotate":{
					// Exit tasks: edge to the synthetic exit node, zero weight.
					workflow.getDAG().setEdgeWeight(
							workflow.getDAG().addEdge(node, workflow.getExitNode()), 0);
					break;
				}				
			}
			
			// Cache all input and output files declared by this job's <uses> children.
			for(Element use : job.getChildren()){
				// Literal-first equals also guards against a missing "link" attribute.
				String link = use.getAttributeValue("link");
				if("output".equals(link)){
					String outputFile = use.getAttributeValue("file");
					double size = Double.parseDouble(use.getAttributeValue("size"));
					outputFileSet.computeIfAbsent(outputFile, k -> new HashMap<>())
							.put(node, size);
				}else if("input".equals(link)){
					String inputFile = use.getAttributeValue("file");
					double size = Double.parseDouble(use.getAttributeValue("size"));
					inputFileSet.computeIfAbsent(node, k -> new HashMap<>())
							.put(inputFile, size);
				}
			}
		}
		
		// Create the data-dependency edges declared by the <child> elements.
		List<Element> childList = root.getChildren("child", DAX_NS);
		for(Element child : childList){
			Node childNode = workflow.getNode(child.getAttributeValue("ref"));
			// Every declared parent gets an edge, even when no file flows over it
			// (weight stays 0.0).
			Map<Node, Double> parentNodeSet = new HashMap<>();
			for(Element parent : child.getChildren()){
				parentNodeSet.put(workflow.getNode(parent.getAttributeValue("ref")), 0.0);
			}
			Map<String, Double> iFileSet = inputFileSet.get(childNode);
			if(iFileSet != null){ // a child job with no inputs has no entry at all
				for(Map.Entry<String, Double> input : iFileSet.entrySet()){
					Map<Node, Double> nodeSet = outputFileSet.get(input.getKey());
					if(nodeSet == null){
						continue; // file produced outside the workflow (initial input)
					}
					if(nodeSet.size() == 1){
						// Single producer: the whole input size travels from it.
						// merge() (instead of get()+put()) also handles a producer
						// that was not listed among the declared <parent> refs,
						// which would previously throw a NullPointerException.
						for(Node producer : nodeSet.keySet()){
							parentNodeSet.merge(producer, input.getValue(), Double::sum);
						}
					}else{
						// Multiple producers (data gathering): each contributes
						// the portion it actually produced.
						for(Map.Entry<Node, Double> producer : nodeSet.entrySet()){
							parentNodeSet.merge(producer.getKey(), producer.getValue(),
									Double::sum);
						}
					}
				}
			}
			// Edge weight = bytes transferred / bandwidth = transfer time in seconds.
			for(Map.Entry<Node, Double> parent : parentNodeSet.entrySet()){
				workflow.getDAG().setEdgeWeight(
						workflow.getDAG().addEdge(parent.getKey(), childNode),
						parent.getValue()/bandwidth);
			}
		}
	}

	/** @return the link bandwidth in bytes per second */
	public double getBandwidth() {
		return bandwidth;
	}

	/**
	 * Sets the link bandwidth. NOTE(review): unlike the constructor, this value
	 * is stored as-is (bytes/s), not converted from Mbit/s — kept to preserve
	 * the original contract; confirm callers pass bytes/s here.
	 */
	public void setBandwidth(double bandwidth) {
		this.bandwidth = bandwidth;
	}
		
}
