package dataflowscheme.engine.core;

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;

import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;

import dataflowScheme.Block;
import dataflowScheme.Connection;
import dataflowScheme.DataConnection;
import dataflowScheme.DataflowSchemePackage;
import dataflowScheme.PE;
import dataflowScheme.Port;
import dataflowScheme.PortSet;
import dataflowScheme.SynchroConnection;

/**
 * Main class of the data-flow runtime engine, intended for executing a
 * data-flow scheme; the supported program arguments are described in the main method.
 * 
 * @author <a href="mailto:misiak7@gmail.com">Michal Antolik</a>
 *
 */
public class Runner {

	static Logger log = Logger.getLogger(Runner.class);

	/** URI (file path) of the data-flow scheme to execute */
	private final String schemeURI;

	/** parser to process IEvent */
	private final EventParser eventParser;
	/** parser to process IExecutionEvent */
	private final ExecutionEventParser executionEventParser;

	/**Event Queue in duo with Event Parser: deliver
	 * signal and data tokens to processing entities (Task, Storage) */
	private final LinkedBlockingQueue<IEvent> eventQueue;

	/**Execution Event Queue in duo with Execution Event Parser: have
	 * at disposal thread pool for submitting requests about Task and Storage execution. */
	private final LinkedBlockingQueue<IExecutionEvent> executionEventQueue;

	/** synchronization latch, used to block the main thread until a Stopper releases it */
	private final CountDownLatch doneSignal = new CountDownLatch(1);

	/** instance of Activity checker to be run in a separate thread */
	private final ActivityChecker activityChecker;

	// the lists of tasks and storages forming the data-flow graph
	private final List<CTask> cTasks = new ArrayList<CTask>();
	private final List<CStorage> cStorages = new ArrayList<CStorage>();

	// if statistics should be generated and printed (false = statistics are not generated)
	private boolean stats = false;

	/**
	 * Initializes the log4j logger, creates the parsers and the event queues.
	 *
	 * @param schemeURI URI of a data-flow scheme
	 * @param properties parameters passed as program arguments
	 */
	public Runner(String schemeURI, Properties properties) {
		this.schemeURI = schemeURI;

		// initialize logger (from file when given, otherwise with defaults)
		configureLogging(properties);

		// set logging level for log4j Logger
		Logger.getRootLogger().setLevel(getLevel(properties));

		log.info("\n\n****** Multithreaded Engine started ******");

		// prepare the queues used by the parsers
		eventQueue = new LinkedBlockingQueue<IEvent>();
		executionEventQueue = new LinkedBlockingQueue<IExecutionEvent>();

		stats = getStatsValue(properties);

		// create parser instances
		activityChecker = new ActivityChecker(doneSignal);
		eventParser = new EventParser(eventQueue, activityChecker, stats);
		executionEventParser = new ExecutionEventParser(executionEventQueue, activityChecker, stats);
	}

	/**
	 * Configures log4j from the file named by the "log4jFile" property when that
	 * file exists and is readable; in every other case (property missing, file
	 * missing, any error while reading it) the built-in defaults are used.
	 *
	 * @param properties program properties, may contain "log4jFile"
	 */
	private static void configureLogging(Properties properties) {
		BasicConfigurator.resetConfiguration();
		String log4jFile = properties.getProperty("log4jFile");
		if (log4jFile != null) {
			try {
				if (new File(log4jFile).exists()) {
					// load log4j properties from file
					PropertyConfigurator.configure(log4jFile);
					return;
				}
			} catch (Exception e) {
				// fall through to the default settings below
			}
		}
		// use default settings for log4j Logger
		PropertyConfigurator.configure(log4jProperties);
	}

	/**
	 * Loads the EMF model (Task, Storage, connections...) from the XML scheme file.
	 *
	 * @return EMF Resource containing the scheme
	 */
	private Resource loadResourceFromFile() {
		// Create a resource set.
		ResourceSet resourceSet = new ResourceSetImpl();

		// Register the default resource factory -- only needed for stand-alone!
		resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap()
				.put(Resource.Factory.Registry.DEFAULT_EXTENSION,
						new XMIResourceFactoryImpl());
		// Register the scheme package for stand-alone use. The package belongs
		// in the package registry keyed by its namespace URI; the previous code
		// put the EPackage into the extension-to-factory map, which is the
		// wrong registry (and would class-cast-fail if the extension matched).
		resourceSet.getPackageRegistry().put(DataflowSchemePackage.eNS_URI,
				DataflowSchemePackage.eINSTANCE);

		// Get the URI of the model file.
		URI fileURI = URI.createFileURI(
				new File(schemeURI).getAbsolutePath());

		return resourceSet.getResource(fileURI, true);
	}

	/**
	 * Executes the data-flow scheme: starts the parser threads, builds the
	 * data-flow graph, fires the trigger and waits until {@link #doneSignal}
	 * is released (by a Stopper or by the activity checker). Afterwards the
	 * parsers are shut down gracefully and optional statistics are printed.
	 */
	public void run() {
		try {
			// initialize threads for parsers (each parser runs in a separate thread)
			Thread eventParserThread = new Thread(eventParser, "Event Parser");
			Thread executorThread = new Thread(executionEventParser, "Executor");
			Thread activityThread = new Thread(activityChecker, "Activity Checker");

			eventParserThread.start();
			executorThread.start();

			long startTime = System.currentTimeMillis();

			// build the data-flow graph; the trigger is used as its root node
			CTrigger cTrigger = buildDataflowGraph();
			long afterDFgraphBuildTime = System.currentTimeMillis();

			// start activity checker
			activityThread.start();

			// START scheme execution
			cTrigger.run();
			// wait until any stopper will receive signals
			doneSignal.await();

			long endTime = System.currentTimeMillis();

			// --finish program phase
			// put end events to queues to end parsers in a nice safety way
			eventQueue.add(new EventParserEndEvent(eventParser));
			executionEventQueue.add(new ExecutorEndEvent());
			// wait until parsers will really end
			eventParserThread.join();
			executorThread.join();

			// perform finalization on all sources referenced from eStorages
			StorageSourcesHandler.getInstance().finalizeStorageInstances();

			// print data which were not send over arcs
			processUnusedData();

			if (stats) {
				log.info("Program takes: " + formatDuration(endTime - startTime));
				log.info("   - init phase: " + formatDuration(afterDFgraphBuildTime - startTime));
				log.info("   - run  phase: " + formatDuration(endTime - afterDFgraphBuildTime));

				log.info("Statistics:\n" + ExecutionEPstats.getInstance().printStats());
			}

			activityThread.join();
			if (activityChecker.wasDeadLock) {
				log.info("THE END Inactivity recognized\n\n");
			} else {
				log.info("THE END successfully\n\n");
			}

		} catch (DFinitException e) {
			log.error(e.getMessage(), e);
		} catch (Exception e) {
			log.error("Exception caught in engine end.", e);
		}
	}

	/**
	 * Formats a duration in milliseconds as mm:ss.SSS.
	 * <p>
	 * Replaces the former Calendar/SimpleDateFormat approach, which interpreted
	 * the duration as an epoch timestamp in the local time zone and therefore
	 * reported wrong minute values in zones with a non-whole-hour UTC offset.
	 *
	 * @param millis non-negative duration in milliseconds
	 * @return the formatted duration
	 */
	private static String formatDuration(long millis) {
		return String.format("%02d:%02d.%03d",
				millis / 60000, (millis % 60000) / 1000, millis % 1000);
	}

	/**
	 * Entry point. The first argument is the path to the data-flow scheme; the
	 * remaining arguments are key=value properties:
	 * logLevel=[DEBUG,INFO,WARN,ERROR,OFF] (default INFO)
	 * stats=[YES,NO] (default NO)
	 * log4jFile="path to log4j.properties"
	 *
	 * @param args program arguments, args[0] must be the scheme path
	 */
	public static void main(String[] args) {
		if (args.length == 0) {
			System.err.println("Path to dataflow scheme has to be set in argument.");
			// was missing: falling through used to throw ArrayIndexOutOfBoundsException at args[0]
			return;
		}

		Properties properties = new Properties();
		for (int i = 1; i < args.length; i++) {
			if (args[i].contains("=")) {
				// limit 2 keeps '=' characters inside the value intact
				String[] parts = args[i].split("=", 2);
				properties.put(parts[0], parts[1]);
			}
		}

		Runner runner = new Runner(args[0], properties);
		runner.run();
	}

	/**
	 * Gets the root element of the data-flow scheme.
	 *
	 * @param resource EMF resource
	 * @return the root Block element
	 * @throws DFinitException when the resource is empty or its root is not a Block
	 */
	private Block getBlockElement(Resource resource) throws DFinitException {
		EList<EObject> list = resource.getContents();
		if (list.isEmpty()) {
			throw new DFinitException("Data-flow scheme doesn't contain root element Block");
		}
		EObject obj = list.get(0); // only one block can be in scheme
		if (obj instanceof Block) {
			return (Block) obj;
		}
		throw new DFinitException("Data-flow scheme doesn't contain root element Block");
	}

	/**It checks referenced classes of Task and Storage instances if
	 * classes were put to the class-path and if they have a correct structure.
	 * Created CTask/CStorage wrappers are added to {@link #cTasks}/{@link #cStorages}
	 * and to the given lookup maps.
	 *
	 * @param entities processing entities loaded from the scheme
	 * @param tasks out-map from scheme Task to its runtime CTask
	 * @param storages out-map from scheme Storage to its runtime CStorage
	 * @throws DFinitException when a referenced class cannot be initialized
	 */
	private void initProcessingEntities(EList<PE> entities,
			Map<dataflowScheme.Task, CTask> tasks,
			Map<dataflowScheme.Storage, CStorage> storages) throws DFinitException {

		for (PE pe : entities) {
			if (pe instanceof dataflowScheme.Task) {
				// initialize cTasks
				dataflowScheme.Task eTask = (dataflowScheme.Task) pe;
				CTask task = new CTask(eTask, eventQueue, executionEventQueue);
				task.init();
				cTasks.add(task);
				tasks.put(eTask, task);
			} else if (pe instanceof dataflowScheme.Storage) {
				// initialize cStorages
				dataflowScheme.Storage eStorage = (dataflowScheme.Storage) pe;
				CStorage storage = new CStorage(eStorage, eventQueue, executionEventQueue);
				storage.init();
				cStorages.add(storage);
				storages.put(eStorage, storage);
			}
		}
	}

	/**
	 * Creates and initializes a CStopper for every scheme Stopper; each one is
	 * wired to {@link #doneSignal} so that it can end the scheme execution.
	 *
	 * @param eStoppers stopper elements loaded from the scheme
	 * @param stoppers out-map from scheme Stopper to its runtime CStopper
	 */
	private void initStoppers(EList<dataflowScheme.Stopper> eStoppers, Map<dataflowScheme.Stopper, CStopper> stoppers) {
		// initialize cStoppers
		for (dataflowScheme.Stopper eStopper : eStoppers) {
			CStopper cStopper = new CStopper(eStopper, doneSignal);
			cStopper.init();
			stoppers.put(eStopper, cStopper);
		}
	}

	/**
	 * First processes Task and Storage processing entities, afterwards
	 * stopper and trigger entities. Finally, according to the connections
	 * loaded from the XML scheme, the data-flow graph is built.
	 *
	 * @return the trigger, which is the root node of the graph
	 * @throws DFinitException when the scheme cannot be turned into a valid graph
	 */
	private CTrigger buildDataflowGraph() throws DFinitException {
		Resource resource = loadResourceFromFile();

		// create temporary maps for faster access from eTask to cTask
		Map<dataflowScheme.Task, CTask> tasks = new HashMap<dataflowScheme.Task, CTask>();
		Map<dataflowScheme.Storage, CStorage> storages = new HashMap<dataflowScheme.Storage, CStorage>();
		Map<dataflowScheme.Stopper, CStopper> stoppers = new HashMap<dataflowScheme.Stopper, CStopper>();

		// root ecore element for data-flow scheme
		Block block = getBlockElement(resource);

		// initialize PEs
		EList<PE> entities = block.getEntity();
		initProcessingEntities(entities, tasks, storages);

		// initialize cTrigger
		CTrigger trigger = new CTrigger(block.getTrigger(), eventQueue);
		trigger.init();

		// initialize cStoppers
		EList<dataflowScheme.Stopper> eStoppers = block.getStopper();
		initStoppers(eStoppers, stoppers);

		// build data-flow graph from connection list
		EList<Connection> conns = block.getConnection();
		buildDataflowGraphFromConnections(conns, tasks, storages, stoppers, trigger);

		return trigger;
	}

	/**
	 * Wires both endpoints of every connection together. Each connection is
	 * registered symmetrically: on the consuming (in) entity and on the
	 * producing (out) entity.
	 *
	 * @param conns connections loaded from the scheme; only SynchroConnection
	 *              and DataConnection instances are expected
	 * @throws DFinitException when a port is not attached to a connectable entity
	 */
	private void buildDataflowGraphFromConnections(
							EList<Connection> conns,
							Map<dataflowScheme.Task, CTask> tasks,
							Map<dataflowScheme.Storage, CStorage> storages,
							Map<dataflowScheme.Stopper, CStopper> stoppers,
							CTrigger trigger) throws DFinitException {

		for (Connection con : conns) {
			Port inPort;
			Port outPort;

			if (con instanceof SynchroConnection) {
				inPort = ((SynchroConnection) con).getInPort();
				outPort = ((SynchroConnection) con).getOutPort();
			} else {
				inPort = ((DataConnection) con).getInPort();
				outPort = ((DataConnection) con).getOutPort();
			}

			IConnectableEntity inEntity = getInputWiredEntity(inPort, tasks, storages, stoppers);
			IConnectableEntity outEntity = getOutputWiredEntity(outPort, tasks, storages, trigger);

			inEntity.addConnection(inPort.getName(), outEntity, outPort.getName());
			outEntity.addConnection(outPort.getName(), inEntity, inPort.getName());
		}
	}

	/**
	 * A ---> B, this is A (the producing side of a connection).
	 *
	 * @param outPort port on the producing side
	 * @param tasks lookup map for tasks
	 * @param storages lookup map for storages
	 * @param trigger the scheme trigger (a possible producer)
	 * @return the connectable entity owning the port
	 * @throws DFinitException when no entity owns the port
	 */
	private IConnectableEntity getOutputWiredEntity(
			Port outPort,
			Map<dataflowScheme.Task, CTask> tasks,
			Map<dataflowScheme.Storage, CStorage> storages,
			CTrigger trigger) throws DFinitException {

		IConnectableEntity outEntity = null;
		EObject outContainer = outPort.eContainer();

		if (outContainer instanceof PortSet) {
			// ports of Tasks and Storages are grouped in a PortSet
			outContainer = outContainer.eContainer();
			if (outContainer instanceof dataflowScheme.Task) {
				outEntity = tasks.get(outContainer);
			} else {
				outEntity = storages.get(outContainer);
			}
		} else if (outContainer instanceof dataflowScheme.Trigger) {
			outEntity = trigger;
		}

		if (outEntity == null) {
			throw new DFinitException("Connection is not leading to any connectable entity");
		}

		return outEntity;
	}

	/**
	 * A ---> B, this is B (the consuming side of a connection).
	 *
	 * @param inPort port on the consuming side
	 * @param tasks lookup map for tasks
	 * @param storages lookup map for storages
	 * @param stoppers lookup map for stoppers (a possible consumer)
	 * @return the connectable entity owning the port
	 * @throws DFinitException when no entity owns the port
	 */
	private IConnectableEntity getInputWiredEntity(
			Port inPort,
			Map<dataflowScheme.Task, CTask> tasks,
			Map<dataflowScheme.Storage, CStorage> storages,
			Map<dataflowScheme.Stopper, CStopper> stoppers) throws DFinitException {

		IConnectableEntity inEntity = null;
		EObject inContainer = inPort.eContainer();

		if (inContainer instanceof PortSet) {
			// ports of Tasks and Storages are grouped in a PortSet
			inContainer = inContainer.eContainer();
			if (inContainer instanceof dataflowScheme.Task) {
				inEntity = tasks.get(inContainer);
			} else {
				inEntity = storages.get(inContainer);
			}
		} else if (inContainer instanceof dataflowScheme.Stopper) {
			inEntity = stoppers.get(inContainer);
		}

		if (inEntity == null) {
			throw new DFinitException("Connection is not leading to any connectable entity");
		}

		return inEntity;
	}

	/**
	 * Prints a warning about data which were not sent although the program ended.
	 */
	private void processUnusedData() {
		for (CTask cTask : cTasks) {
			if (!cTask.producedData.isEmpty()) {
				log.warn("Produced data of '" + cTask.ePe.getName() + "' were not send.");
				for (String port : cTask.producedData.keySet()) {
					log.warn("   - Port: " + port);
				}
			}
			if (!cTask.signals2send.isEmpty()) {
				log.warn("Signals of '" + cTask.ePe.getName() + "' were not send.");
				for (String port : cTask.signals2send) {
					log.warn("   - Port: " + port);
				}
			}
		}

		for (CStorage cStorage : cStorages) {
			if (!cStorage.producedData.isEmpty()) {
				log.warn("Produced data of '" + cStorage.ePe.getName() + "' were not send.");
				for (String port : cStorage.producedData.keySet()) {
					log.warn("   - Port: " + port);
				}
			}
			if (!cStorage.signals2send.isEmpty()) {
				log.warn("Signals of '" + cStorage.ePe.getName() + "' were not send.");
				for (String port : cStorage.signals2send) {
					log.warn("   - Port: " + port);
				}
			}
		}
	}

	/** default log4j properties (console appender on WARN level) */
	public static final Properties log4jProperties = new Properties();
	static {
		log4jProperties.put("log4j.rootCategory", "WARN, stdout");
		log4jProperties.put("log4j.appender.stdout", "org.apache.log4j.ConsoleAppender");
		log4jProperties.put("log4j.appender.stdout.layout", "org.apache.log4j.PatternLayout");
		log4jProperties.put("log4j.appender.stdout.layout.ConversionPattern", "%5p [%t] (%F:%L) - %m%n");
	}

	/**
	 * Resolves the log4j level from the "logLevel" property: DEBUG, INFO,
	 * WARN, ERROR or OFF (case-insensitive); any other or missing value
	 * yields INFO.
	 */
	private static Level getLevel(Properties p) {
		String logLevel = p.getProperty("logLevel", "info");
		if (logLevel.equalsIgnoreCase("debug")) {
			return Level.DEBUG;
		} else if (logLevel.equalsIgnoreCase("info")) {
			return Level.INFO;
		} else if (logLevel.equalsIgnoreCase("warn")) {
			return Level.WARN;
		} else if (logLevel.equalsIgnoreCase("error")) {
			return Level.ERROR;
		} else if (logLevel.equalsIgnoreCase("off")) {
			return Level.OFF;
		}
		return Level.INFO;
	}

	/**
	 * Reads the "stats" property: any value other than "NO" (case-insensitive)
	 * enables statistics; the default is NO, i.e. statistics disabled.
	 */
	private static boolean getStatsValue(Properties p) {
		return !p.getProperty("stats", "NO").equalsIgnoreCase("NO");
	}
}
