package websem.handlers;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.w3c.dom.Document;
import org.w3c.dom.NodeList;

import websem.database.DatabaseRequest;
import websem.model.InfosTerm;
import websem.model.Paragraph;
import websem.model.Term;

/**
 * Handles user queries: parses them, expands them with ontology terms,
 * weights matching paragraphs and evaluates the retrieved results.
 * @author Jissay
 *
 */
public class RequestHandler {
	
	/** Access layer used to look terms up and to store paragraph weights. */
	private DatabaseRequest db_request;
	
	/** Path of the XML file that contains the user queries to evaluate. */
	private static final String QUERIES_FILE = "res/Queries/queries.xml";
	
	/**
	 * Construct with creating a new databaseRequest
	 */
	public RequestHandler() {
		this.db_request = new DatabaseRequest();
	}
	
	/**
	 * Parses every {@code <query>} of {@link #QUERIES_FILE}, expands its words
	 * with associated ontology terms, pushes the resulting paragraph weights to
	 * the database and evaluates the results against the qrel files.
	 */
	public void treatRequests() {
		try {
			DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
			// Harden the parser against XXE: the local queries file needs no DTD,
			// so forbid doctype declarations and entity expansion outright.
			dbFactory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
			dbFactory.setExpandEntityReferences(false);
			DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
			Document doc = dBuilder.parse(new File(QUERIES_FILE));
			doc.getDocumentElement().normalize();
			
			// Get requests: query nodes carry the id, text nodes carry the wording.
			NodeList list = doc.getElementsByTagName("query");
			NodeList textList = doc.getElementsByTagName("text");
			for (int i = 0; i < list.getLength(); i++) {
				
				// Query id without its leading letter (e.g. "q12" -> "12"),
				// used to locate the matching qrel file.
				String queryId = list.item(i).getAttributes().getNamedItem("id").getNodeValue().substring(1);
				String requestText = textList.item(i).getTextContent();
				
				System.out.println("--- Request :: " + requestText);
				
				// Expand each significant word of the request with the words the
				// ontology associates to it (subsumed terms and synonyms), keeping
				// track of each word's origin so it can be weighted differently.
				String[] requestWords = requestText.replaceAll("\\p{Punct}", " ").toLowerCase().split(" ");
				ArrayList<String> requestTerms = new ArrayList<String>();
				ArrayList<String> subsumedTerms = new ArrayList<String>();
				ArrayList<String> synonymesTerms = new ArrayList<String>();
				ArrayList<String> fullRequest = new ArrayList<String>();
				for (String w : requestWords) {
					String word = w.replace(" ", "");
					if (word.length() > 1 && !TermHandler.isInStopList(w)) {
						
						// Get ontology associated words for THIS word only.
						// (The previous version re-added the whole accumulated
						// lists on every iteration; deduplication hid the bug
						// but the work done was quadratic.)
						ArrayList<String> wordSubsumed = new ArrayList<String>();
						wordSubsumed.addAll(OntologyHandler.generateSubsumedWordsFromOntology(word));
						ArrayList<String> wordSynonymes = new ArrayList<String>();
						wordSynonymes.addAll(OntologyHandler.generateSynonymesWordsFromOntology(word));
						
						subsumedTerms.addAll(wordSubsumed);
						synonymesTerms.addAll(wordSynonymes);
						requestTerms.add(word);
						
						// Prepare the full request in the same order as before:
						// subsumed words, then synonyms, then the word itself.
						fullRequest.addAll(wordSubsumed);
						fullRequest.addAll(wordSynonymes);
						fullRequest.add(word);
					}
				}
				
				// Truncate, lowercase and deduplicate once the lists are complete.
				subsumedTerms = this.formatArrayList(subsumedTerms);
				synonymesTerms = this.formatArrayList(synonymesTerms);
				fullRequest = this.formatArrayList(fullRequest);
				
				System.out.println("--- Words found :: " + fullRequest.toString());
				
				// SQL Request format :: SELECT ___ FROM ___ WHERE (contains(Syn1.1) || contains(Syn1.2)) OR (contains(Syn2.1) || contains(Syn.2.2));

				// Add term weights to paragraphs.
				// TODO: request words should weigh more than associated words
				// (except for synonyms).
				for (String string : fullRequest) {
					// Indexed terms are stored truncated to 6 characters.
					if (string.length() > 6) {
						string = string.substring(0, 6);
					}
					Term term = this.db_request.getTerm(string);
					if (term != null) {
						HashMap<Paragraph, InfosTerm> infos = term.getInfos();
						if (infos != null && !infos.isEmpty()) {
							for (Paragraph p : infos.keySet()) {
								// ADD TERM WEIGHT:
								// request term : x20
								// subsumed term : x5
								// synonyme term : x10
								double termWeight = infos.get(p).getWeight();
								if (requestTerms.contains(term.getLabel())) {
									termWeight *= 20;
								} else if (synonymesTerms.contains(term.getLabel())) {
									termWeight *= 10;
								} else if (subsumedTerms.contains(term.getLabel())) {
									termWeight *= 5;
								}
								p.addWeight(termWeight);
							}
						}
						this.db_request.addParagraphsWeights(term);
					}
				}
				
				this.db_request.commit();
				
				// Execute request
				this.handleResults(queryId);
				
				// Reset weights so the next query starts from a clean slate.
				this.db_request.clearWeights();
			}
			
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
	
	/**
	 * Truncates every term to 6 characters, lowercases it and removes
	 * duplicates, preserving the order of first occurrence.
	 * @param list the term list
	 * @return a new list of normalized, distinct terms
	 */
	private ArrayList<String> formatArrayList(ArrayList<String> list) {
		ArrayList<String> resultList = new ArrayList<String>();
		for (String label : list) {
			if (label.length() > 6) {
				label = label.substring(0, 6);
			}
			if (!resultList.contains(label.toLowerCase())) {
				resultList.add(label.toLowerCase());
			}
		}
		
		return resultList;
	}

	/**
	 * Ranks the weighted paragraphs, prints the best ones and computes
	 * recall / precision@5 / precision@10 / precision@100 against the
	 * qrel file matching {@code queryId}.
	 * @param queryId the query id (suffix of the qrel file name)
	 * @throws IOException if the qrel file cannot be read
	 */
	public void handleResults(String queryId) throws IOException {
		
		ArrayList<Paragraph> list = this.db_request.getAllParagraphs();
		// Sort by weight, highest first.
		Collections.sort(list);
		Collections.reverse(list);
		
		// Print the top 5 results (or fewer if less are available —
		// the previous version threw IndexOutOfBoundsException here).
		for (int i = 0; i < Math.min(5, list.size()); i++) {
			System.out.println("Document #" + i + " :: " + list.get(i).getDocumentName() + " --- " + list.get(i).getxPath());
		}
		
		int evaluationValue = 0;
		int evaluationCount = 0;
		double precision5 = 0;
		double precision10 = 0;
		// NOTE: this bucket counts ranks < 100, hence "precision 100".
		double precision100 = 0;
		
		// Open the corresponding qrel file; try-with-resources guarantees the
		// reader is closed even when a malformed line throws mid-loop.
		try (BufferedReader br = new BufferedReader(new FileReader("res/qrels/qrel" + queryId + ".txt"))) {
			String line;
			// Compare paragraphs values for the request
			while ((line = br.readLine()) != null) {
				
				evaluationCount++;
				
				// Get qrel data: document name (prefix of 11 chars stripped),
				// xpath and relevance flag ('0' or '1').
				String[] tabLine = line.split("\t");
				String qrelDocName = tabLine[0].substring(11);
				String qrelXPath = tabLine[1];
				String qrelValue = String.valueOf(tabLine[2].charAt(0));
				Paragraph qrelParagraph = new Paragraph(Paragraph.TYPE_PARAGRAPH, qrelXPath, qrelDocName);
				
				// Evaluate the paragraphs: a hit is a relevant paragraph we
				// weighted, or an irrelevant one we left at zero.
				for (Paragraph p : list) {
					if (p.getDocumentName().equals(qrelParagraph.getDocumentName()) && p.getxPath().equals(qrelParagraph.getxPath())) {
						
						if (p.getWeight() > 0 && qrelValue.equals("1")) {
							evaluationValue++;
							if (list.indexOf(p) < 5) {
								precision5++;
							} else if (list.indexOf(p) < 10) {
								precision10++;
							} else if (list.indexOf(p) < 100) {
								precision100++;
							}
						} else if (p.getWeight() == 0 && qrelValue.equals("0")) {
							evaluationValue++;
						}
					}
				}
			}
		}
		
		// Number of same value / Total number of paragraphs.
		// -2 for 2 empty lines in the qrel files; guard against a qrel file
		// too short to evaluate (previously a division by zero).
		int usableLines = evaluationCount - 2;
		int rappel = (usableLines > 0) ? evaluationValue * 100 / usableLines : 0;
		precision5 = precision5 * 100 / 5;
		precision10 = precision10 * 100 / 10;
		precision100 = precision100 * 100 / 100;
		
		System.out.println("--- Request #" + queryId +
						   " --- Rappel : " + String.valueOf(rappel) + "%" +
						   " --- Precision 5 doc : " + String.valueOf(precision5) + "%" +
						   " --- Precision 10 doc : " + String.valueOf(precision10) + "%" +
						   " --- Precision 100 doc : " + String.valueOf(precision100) + "%");
	}

	public DatabaseRequest getDb_request() {
		return db_request;
	}

	public void setDb_request(DatabaseRequest db_request) {
		this.db_request = db_request;
	}
}
