package loader;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.regex.Pattern;

import edu.stanford.nlp.ling.HasWord;

import accessories.tools.Preprocess;

import rainbownlp.core.Artifact;
import rainbownlp.core.Setting;
import rainbownlp.util.FileUtil;
import rainbownlp.util.HibernateUtil;


/**
 * Loads the SemEval Affective Text corpus (trial/test XML releases) into the
 * rainbownlp artifact store as Document -> Sentence -> Word artifacts,
 * persisting each level via {@link HibernateUtil}.
 */
public class SemEvalLoader {

	/**
	 * Entry point. By convention of this task the 1000-sentence "test"
	 * release is used for training and the 250-sentence "trial" release for
	 * testing, hence the swapped file names below.
	 *
	 * @param args mode selector; the split is chosen by the SECOND argument
	 *             ("test" loads the test split, anything else trains)
	 */
	public static void main(String[] args)
	{
		// we use 1000 test for train and 250 trials for test
		String dataRoot = tools.Configuration.getValue("ProjectDataFilesRoot");
		String input_text_file_test = dataRoot +
				"/AffectiveText.trial/affectivetext_trial.xml";
		String input_text_file_train = dataRoot +
				"/AffectiveText.test/affectivetext_test.xml";

		// NOTE(review): the mode flag is read from args[1], so a lone "test"
		// argument (args[0]) is silently ignored — confirm this matches how
		// the loader is launched; args[0] may be reserved for something else.
		Setting.TrainingMode = !(args.length > 1 && args[1].equals("test"));

		SemEvalLoader doc_proc = new SemEvalLoader();
		String inputFile = Setting.TrainingMode ? input_text_file_train
		                                        : input_text_file_test;
		try {
			doc_proc.loadDocuments(inputFile);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}


	/**
	 * Parses the SemEval XML file line by line, creating one Document
	 * artifact per instance line and loading its sentences and words.
	 *
	 * @param input_text_file path of the affectivetext XML file
	 * @throws IOException if the file cannot be read
	 */
	private void loadDocuments(String input_text_file) throws IOException {

		List<String> lines = FileUtil.loadLineByLine(input_text_file);

		for (String line : lines)
		{
			// Skip the <corpus ...> / </corpus> wrapper lines; every other
			// line is a single SemEval instance.
			if (line.matches("<corpus.*") || line.matches("</corpus.*"))
				continue;

			SemEvalSent s = new SemEvalSent(line);
			// The SemEval instance id doubles as the artifact's file-path key.
			String file_path_or_id = s.getId().toString();

			Artifact new_doc =
					Artifact.getInstance(Artifact.Type.Document, file_path_or_id, 0);

			loadSentences(new_doc, s);
		}
	}


	/**
	 * Splits the instance text into sentences, persists each one as a
	 * Sentence artifact linked into a previous/next chain, and loads its
	 * words.
	 *
	 * @param parentDoc document artifact the sentences belong to
	 * @param s         the parsed SemEval instance providing the raw text
	 * @throws IOException propagated from preprocessing
	 */
	private void loadSentences(Artifact parentDoc, SemEvalSent s) throws IOException {

		Preprocess pre_processed_sent = new Preprocess(s.getContent());

		HashMap<Integer, String> sentences = pre_processed_sent.getSentenceIndexMap();

		// Iterate in ascending sentence order: HashMap key order is
		// unspecified, and the previous/next links built below must follow
		// document order to be meaningful.
		List<Integer> sentIndices = new ArrayList<Integer>(sentences.keySet());
		Collections.sort(sentIndices);

		Artifact previous_sentence = null;

		for (Integer sent_index : sentIndices)
		{
			Artifact new_sentence = Artifact.getInstance(Artifact.Type.Sentence,
					parentDoc.getAssociatedFilePath(), sent_index);

			new_sentence.setParentArtifact(parentDoc);
			new_sentence.setLineIndex(sent_index);
			new_sentence.setContent(sentences.get(sent_index));

			// Re-save the previous sentence so its freshly-set "next"
			// pointer is persisted alongside the new sentence.
			if (previous_sentence != null) {
				new_sentence.setPreviousArtifact(previous_sentence);
				previous_sentence.setNextArtifact(new_sentence);
				HibernateUtil.save(previous_sentence);
			}

			HibernateUtil.save(new_sentence);

			loadWords(new_sentence, sent_index, pre_processed_sent);

			previous_sentence = new_sentence;
			// Clear the loader session per sentence to keep it small.
			HibernateUtil.clearLoaderSession();
		}
	}

	/**
	 * Persists one Word artifact per token of the given sentence, chained
	 * with previous/next links in token order.
	 *
	 * @param parentSentence     sentence artifact the words belong to
	 * @param sentIndex          index of the sentence within the instance
	 * @param pre_processed_sent preprocessing result holding the token map
	 *                           and character offsets
	 */
	private void loadWords(Artifact parentSentence, Integer sentIndex, Preprocess pre_processed_sent) {

		List<HasWord> tokens = pre_processed_sent.getSentTokensMap().get(sentIndex);

		Artifact previous_word = null;

		for (int token_index = 0; token_index < tokens.size(); token_index++) {

			String textContent = tokens.get(token_index).toString();
			int start_char = pre_processed_sent.getTokenStartCharIndex(sentIndex, token_index);

			Artifact new_word = Artifact.getInstance(
					Artifact.Type.Word, parentSentence.getAssociatedFilePath(), start_char);
			new_word.setContent(textContent);
			new_word.setParentArtifact(parentSentence);
			new_word.setLineIndex(sentIndex);
			new_word.setEndIndex(pre_processed_sent.getTokenEndCharIndex(sentIndex, token_index));
			new_word.setWordIndex(token_index);

			// Re-save the previous word so its "next" pointer is persisted.
			if (previous_word != null) {
				new_word.setPreviousArtifact(previous_word);
				previous_word.setNextArtifact(new_word);
				HibernateUtil.save(previous_word);
			}

			HibernateUtil.save(new_word);

			previous_word = new_word;
		}
	}

}
