package textEngine;

import domain.EclapRecord;
import domain.NameRawRecord;
import gate.Annotation;
import gate.AnnotationSet;
import gate.Corpus;
import gate.Factory;
import gate.Gate;
import gate.ProcessingResource;
import gate.corpora.DocumentContentImpl;
import gate.corpora.DocumentImpl;
import gate.creole.ANNIEConstants;
import gate.creole.ResourceInstantiationException;
import gate.creole.SerialAnalyserController;
import gate.util.GateException;
import gate.util.Out;
import java.io.File;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import manager.Phase1;


/**
 * Runs the GATE/ANNIE information-extraction pipeline over ECLAP record field
 * values and extracts every "Person" annotation as a {@link NameRawRecord}.
 *
 * <p>Not thread-safe: {@code Gate.init()} and plugin registration are global,
 * process-wide operations.
 */
public class NameFinder {

	/**
	 * Analyses the field values of the given records with ANNIE and returns one
	 * raw name record per person name found.
	 *
	 * @param eclapRecordList records whose field values are analysed
	 * @return the person names found, wrapped as {@link NameRawRecord}s
	 * @throws GateException if GATE initialisation or pipeline execution fails
	 * @throws MalformedURLException if the ANNIE plugin directory cannot be
	 *         converted to a URL
	 */
	public List<NameRawRecord> findNames(List<EclapRecord> eclapRecordList) throws GateException, MalformedURLException {
		if (Phase1.printOut > 0) {
			System.out.println("==Text Engine: Start to find Names (from " + eclapRecordList.size() + " actual records)");
		}
		long startTime = System.currentTimeMillis();
		Corpus corpus = initAnnie();

		Map<EclapRecord, DocumentImpl> mappa = insertValues(corpus, eclapRecordList);

		// Build an ANNIE pipeline and attach the corpus to be processed.
		SerialAnalyserController pipeline = (SerialAnalyserController) Factory.createResource(
				"gate.creole.SerialAnalyserController", Factory.newFeatureMap(),
				Factory.newFeatureMap(), "ANNIE_" + Gate.genSym());
		pipeline.setCorpus(corpus);

		loadAllAnnieProcessingResource(pipeline);

		pipeline.execute();
		long endTime = System.currentTimeMillis();
		if (Phase1.printOut > 0) {
			System.out.println("==Text Engine: finding names phase ended (in " + (endTime - startTime) + " ms)");
		}
		return getResults(mappa);
	}

	/**
	 * Collects the "Person" annotations from each processed document and turns
	 * them into {@link NameRawRecord}s carrying the matched substring of the
	 * record's field value.
	 *
	 * @param mappa maps each analysed record to the document built from its field value
	 * @return the raw name records extracted from all documents
	 */
	private List<NameRawRecord> getResults(Map<EclapRecord, DocumentImpl> mappa) {
		if (Phase1.printOut > 0) {
			System.out.println("==Text Engine: start to create raw names list");
		}
		List<NameRawRecord> nameRawRecords = new ArrayList<NameRawRecord>();

		for (Map.Entry<EclapRecord, DocumentImpl> entry : mappa.entrySet()) {
			EclapRecord rec = entry.getKey();
			DocumentImpl doc = entry.getValue();

			AnnotationSet defaultAnnotSet = doc.getAnnotations();

			// Only "Person" annotations are of interest here.
			Set<String> annotTypesRequired = new HashSet<String>();
			annotTypesRequired.add("Person");
			Set<Annotation> people = new HashSet<Annotation>(defaultAnnotSet.get(annotTypesRequired));

			for (Annotation currAnnot : people) {
				long startPosition = currAnnot.getStartNode().getOffset().longValue();
				long endPosition = currAnnot.getEndNode().getOffset().longValue();
				if (endPosition != -1 && startPosition != -1) {
					// Creation timestamp stored on the raw record.
					Long data = System.currentTimeMillis();
					// Compute the matched substring once instead of twice.
					String foundName = rec.getFieldValue().substring((int) startPosition, (int) endPosition);
					NameRawRecord nameRec = new NameRawRecord(rec.getAxoid(), rec.getTypeOfTable(),
							rec.getId(), rec.getFieldType(), data, foundName);
					if (Phase1.printOut > 1) {
						Out.prln("-Found and added to raw list: " + foundName + " (found in field: " + rec.getFieldType() + ")");
					}
					nameRawRecords.add(nameRec);
				}
			}
		}
		if (Phase1.printOut > 0) {
			System.out.print("==Text Engine: creating raw names list phase ended");
		}
		return nameRawRecords;
	}

	/**
	 * Loads every processing resource shipped with the ANNIE plugin into the
	 * pipeline, in the order defined by {@link ANNIEConstants#PR_NAMES}.
	 * The order matters: later resources (e.g. taggers) require the output of
	 * earlier ones (e.g. the tokeniser).
	 *
	 * @param pipeline the controller the resources are added to
	 * @throws ResourceInstantiationException if a resource cannot be created
	 */
	private void loadAllAnnieProcessingResource(SerialAnalyserController pipeline) throws ResourceInstantiationException {
		for (String prName : ANNIEConstants.PR_NAMES) {
			ProcessingResource pr = (ProcessingResource) Factory.createResource(prName, Factory.newFeatureMap());
			pipeline.add(pr);
		}
	}

	/**
	 * Wraps each non-empty record field value in its own GATE document, adds the
	 * document to the corpus and records the record-to-document association.
	 *
	 * @param corpus the corpus the new documents are added to
	 * @param eclapRecordList the records whose field values are to be analysed
	 * @return a map from each analysed record to the document built from it
	 */
	private Map<EclapRecord, DocumentImpl> insertValues(Corpus corpus, List<EclapRecord> eclapRecordList) {
		Map<EclapRecord, DocumentImpl> mappa = new HashMap<EclapRecord, DocumentImpl>();
		for (EclapRecord rec : eclapRecordList) {
			String fieldValue = rec.getFieldValue();
			// Skip records with nothing to analyse.
			if (fieldValue == null || fieldValue.equals("")) {
				continue;
			}
			if (Phase1.printOut > 2) {
				Out.prln("Valore Eclap value-field analizzato: " + fieldValue);
			}
			DocumentImpl doc = new DocumentImpl();
			doc.setContent(new DocumentContentImpl(fieldValue));
			corpus.add(doc);
			mappa.put(rec, doc);
		}
		return mappa;
	}

	/**
	 * Initialises GATE, registers the ANNIE plugin directory and creates an
	 * empty corpus.
	 *
	 * @return a freshly created, empty corpus
	 * @throws GateException if GATE cannot be initialised or the plugin cannot be registered
	 * @throws MalformedURLException if the plugin directory cannot be turned into a URL
	 */
	private Corpus initAnnie() throws GateException, MalformedURLException {
		Gate.init();
		Corpus corpus = (Corpus) Factory.createResource("gate.corpora.CorpusImpl");
		// The ANNIE plugin must be registered before its resources can be instantiated.
		Gate.getCreoleRegister().registerDirectories(
				new File(Gate.getPluginsHome(), ANNIEConstants.PLUGIN_DIR).toURI().toURL());
		return corpus;
	}
}
