package dataflow.core;

import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;

import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.common.util.URI;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.resource.Resource;
import org.eclipse.emf.ecore.resource.ResourceSet;
import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;

import dataflowScheme.Block;
import dataflowScheme.Connection;
import dataflowScheme.DataConnection;
import dataflowScheme.DataflowSchemePackage;
import dataflowScheme.PE;
import dataflowScheme.Port;
import dataflowScheme.PortSet;
import dataflowScheme.SynchroConnection;

public class Runner {

	/** Shared logger; kept package-visible to preserve original accessibility. */
	static final Logger log = Logger.getLogger(Runner.class);

	/** Lazily created singleton; guarded by the synchronized getInstance(). */
	static private Runner instance = null;

	/**
	 * Returns the engine singleton, creating it on first use.
	 * Synchronized so concurrent first calls cannot create two instances.
	 */
	static public synchronized Runner getInstance(){
		if(instance == null){
			instance = new Runner();
		}
		return instance;
	}

	/** @return the event parser of the singleton engine instance */
	static public EventParser getEventParser(){
		return getInstance().eventParser;
	}

	/** @return the execution-event parser of the singleton engine instance */
	static public ExecutionEventParser getExecutionEventParser(){
		return getInstance().executionEventParser;
	}

	private final EventParser eventParser;
	private final ExecutionEventParser executionEventParser;

	// Queue feeding the event-parser thread.
	private final LinkedBlockingQueue<IEvent> eventQueue;
	// Queue feeding the executor thread.
	private final LinkedBlockingQueue<ExecutionEvent> executionEventQueue;

	// Counted down by a Stopper when the dataflow program has finished;
	// run() blocks on it before shutting the worker threads down.
	private final CountDownLatch doneSignal = new CountDownLatch(1);

	/**
	 * Private: instances are obtained via {@link #getInstance()}.
	 * Configures log4j from "log4j.properties" in the working directory and
	 * creates the two event queues together with their parsers.
	 */
	private Runner() {
		super();

		BasicConfigurator.resetConfiguration();
		PropertyConfigurator.configure("log4j.properties");

		log.info("\n\n****** Multithreaded Engine started ******");

		eventQueue = new LinkedBlockingQueue<IEvent>();
		executionEventQueue = new LinkedBlockingQueue<ExecutionEvent>();

		eventParser = new EventParser(eventQueue);
		executionEventParser = new ExecutionEventParser(executionEventQueue);
	}

	/**
	 * Loads the dataflow model, instantiates and wires all processing
	 * entities, starts the engine threads, and blocks until a Stopper
	 * releases {@link #doneSignal}. Errors are logged and abort the run.
	 */
	public void run() {
		try{
			// Create a resource set.
			ResourceSet resourceSet = new ResourceSetImpl();

			// Register the default resource factory -- only needed for stand-alone!
			resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap()
					.put(Resource.Factory.Registry.DEFAULT_EXTENSION,
							new XMIResourceFactoryImpl());

			// Register the package -- only needed for stand-alone!
			// FIX: an EPackage belongs in the package registry keyed by its
			// namespace URI; the previous code put it into the
			// extension-to-factory map (keyed by eNAME), where it was never
			// usable as a Resource.Factory.
			resourceSet.getPackageRegistry().put(
					DataflowSchemePackage.eNS_URI,
					DataflowSchemePackage.eINSTANCE);

			// Get the URI of the model file.
			URI fileURI = URI.createFileURI(new File(
					"models/default.dataflowscheme").getAbsolutePath());

			// Demand load the resource for this file.
			Resource resource = resourceSet.getResource(fileURI, true);

			// Model element -> runtime entity lookup tables, used below to
			// resolve connection endpoints back to their runtime objects.
			Map<dataflowScheme.Task, Task> tasks = new HashMap<dataflowScheme.Task, Task>();
			Map<dataflowScheme.Storage, Storage> storages = new HashMap<dataflowScheme.Storage, Storage>();
			Map<dataflowScheme.Stopper, Stopper> stoppers = new HashMap<dataflowScheme.Stopper, Stopper>();

			EList<EObject> list = resource.getContents();
			EObject obj = list.get(0);
			if(obj instanceof Block){
				Block block = (Block)obj;

				/** initialize PEs (processing entities: Tasks and Storages) */
				EList<PE> entities = block.getEntity();
				for(PE pe : entities){
					if(pe instanceof dataflowScheme.Task){
						dataflowScheme.Task eTask = (dataflowScheme.Task)pe;
						Task task = new Task(eTask, eventQueue, executionEventQueue);
						if(!task.init()){
							log.error("Task '" + eTask.getName() + "' failed while initializing.");
							return;
						}

						tasks.put(eTask, task);
					} else if (pe instanceof dataflowScheme.Storage){
						dataflowScheme.Storage eStorage = (dataflowScheme.Storage)pe;
						Storage storage = new Storage(eStorage, eventQueue, executionEventQueue);
						if(!storage.init()){
							log.error("Storage '" + eStorage.getName() + "' failed while initializing.");
							return;
						}

						storages.put(eStorage, storage);
					}
				}

				/** initialize Trigger (the single event source of the block) */
				dataflowScheme.Trigger eTrigger = block.getTrigger();
				Trigger trigger = new Trigger(eTrigger, eventQueue);
				trigger.init();

				/** initialize Stoppers (each can release doneSignal) */
				EList<dataflowScheme.Stopper> eStoppers = block.getStopper();
				for(dataflowScheme.Stopper eStopper : eStoppers){
					Stopper stopper = new Stopper(eStopper, doneSignal);
					stopper.init();
					stoppers.put(eStopper, stopper);
				}

				/** add connections between the runtime entities */
				for(Connection con : block.getConnection()){
					Port inPort = null;
					Port outPort = null;
					IConnectableEntity inEntity = null;
					IConnectableEntity outEntity = null;

					if(con instanceof SynchroConnection){
						inPort = ((SynchroConnection)con).getInPort();
						outPort = ((SynchroConnection)con).getOutPort();
					} else {
						inPort = ((DataConnection)con).getInPort();
						outPort = ((DataConnection)con).getOutPort();
					}

					// Resolve the "in" endpoint: a port inside a PortSet belongs
					// to a Task or Storage; otherwise it may sit on a Stopper.
					EObject inContainer = inPort.eContainer();
					if(inContainer instanceof PortSet){
						inContainer = inContainer.eContainer();
						if(inContainer instanceof dataflowScheme.Task){
							inEntity = tasks.get(inContainer);
						} else {
							inEntity = storages.get(inContainer);
						}
					} else if (inContainer instanceof dataflowScheme.Stopper){
						inEntity = stoppers.get(inContainer);
					}

					if(inEntity == null){
						log.error("Connection is not leading to any connectable entity");
						return;
					}

					// Resolve the "out" endpoint: same PortSet rule, but the
					// non-PortSet case here is the Trigger, not a Stopper.
					EObject outContainer = outPort.eContainer();
					if(outContainer instanceof PortSet){
						outContainer = outContainer.eContainer();
						if(outContainer instanceof dataflowScheme.Task){
							outEntity = tasks.get(outContainer);
						} else {
							outEntity = storages.get(outContainer);
						}
					}else if(outContainer instanceof dataflowScheme.Trigger){
						outEntity = trigger;
					}

					if(outEntity == null){
						log.error("Connection is not leading to any connectable entity");
						return;
					}

					// Wire both directions so each side knows its peer.
					inEntity.addConnection(inPort.getName(), outEntity, outPort.getName());
					outEntity.addConnection(outPort.getName(), inEntity, inPort.getName());

				}

				/** start program */
				Thread eventParserThread = new Thread(eventParser, "Event Parser");
				Thread executorThread = new Thread(executionEventParser, "Executor");
				eventParserThread.start();
				executorThread.start();

				trigger.run();

				// Block until a Stopper signals that the program is done.
				doneSignal.await();

				//finish program: poison-pill both worker queues
				eventQueue.add(new EventParserEndEvent(eventParser));
				executionEventQueue.add(new ExecutorEndEvent());

				//finalize storages
				StorageHandler.getInstance().finalizeStorageInstances();

				eventParserThread.join();
				executorThread.join();

				log.info("THE END successfully\n\n");
			}

		}catch(InterruptedException e){
			// FIX: restore the interrupt status instead of swallowing it in
			// the generic handler below.
			Thread.currentThread().interrupt();
			log.error("Engine interrupted while waiting for completion.", e);
		}catch(Exception e){
			log.error("Exception caught in engine end.", e);
		}
	}

	public static void main(String[] args) {
		// FIX: use the singleton accessor instead of "new Runner()" so the
		// static getEventParser()/getExecutionEventParser() accessors return
		// the parsers of the engine that is actually running (previously they
		// lazily built a second, idle Runner instance).
		Runner.getInstance().run();
	}
}
