package servlets;

import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathExpressionException;

import mx.bigdata.jcalais.CalaisClient;
import mx.bigdata.jcalais.CalaisConfig;
import mx.bigdata.jcalais.CalaisConfig.ConnParam;
import mx.bigdata.jcalais.CalaisObject;
import mx.bigdata.jcalais.CalaisResponse;
import mx.bigdata.jcalais.rest.CalaisRestClient;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.xml.sax.SAXException;

import pos.domain.Category;
import pos.domain.CategoryImpl;
import pos.domain.CategoryStore;
import pos.domain.Discipline;
import pos.domain.DisciplineImpl;
import pos.domain.DisciplineStore;
import pos.domain.Entity;
import pos.domain.EntityImpl;
import pos.domain.EntityStore;
import pos.domain.Event;
import pos.domain.EventImpl;
import pos.domain.EventStore;
import pos.domain.Relation;
import pos.domain.RelationImpl;
import pos.domain.RelationStore;
import pos.domain.SubType;
import pos.domain.SubTypeImpl;
import pos.domain.SubTypeStore;
import pos.domain.Type;
import pos.domain.TypeImpl;
import pos.domain.TypeStore;
import util.ArticleData;
import util.ArticleDataImpl;

import com.alchemyapi.api.AlchemyAPI;

/**
 * Servlet implementation class ParseProcess
 */
public class ServletParseProcess extends HttpServlet {
	private static final long serialVersionUID = 1L; // required: HttpServlet is Serializable


	/**
	 * Default constructor; the servlet keeps no per-request state to initialize here.
	 *
	 * @see HttpServlet#HttpServlet()
	 */
	public ServletParseProcess() {
		super();
	}

	/**
	 * Delegates GET requests to {@link #doPost}, so both HTTP verbs trigger the
	 * same parse workflow.
	 *
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse
	 *      response)
	 */
	protected void doGet(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		doPost(request, response);
	}

	/**
	 * Entry point for the parse form. Two modes:
	 * <ul>
	 *   <li>"url" parameter present: parse that single article.</li>
	 *   <li>otherwise: massive parse of "pages" archive pages of the chosen
	 *       "section" (e.g. http://www.rtbf.be/culture/cinema).</li>
	 * </ul>
	 * The resulting events are stored in the session ("listEvents", "numEvents")
	 * and massiveLoadSucces.jsp is included in the response.
	 *
	 * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse
	 *      response)
	 */
	protected void doPost(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException {
		HttpSession session = request.getSession();

		String url = request.getParameter("url");
		// Bug fix: the original tested url != "" which compares object identity,
		// not content, and would NPE-risk on a missing parameter. Check null/empty.
		if (url != null && !url.isEmpty()) {
			// Single-article mode.
			try {
				List<String> singleArticle = new ArrayList<String>();
				singleArticle.add(url);
				ArticleData articles = getRelevantTextOfArticles(singleArticle);
				List<Event> listEventsParsed = buildEvents(articles, request);
				session.setAttribute("listEvents", listEventsParsed);
				session.setAttribute("numEvents", listEventsParsed.size());
				request.getRequestDispatcher("massiveLoadSucces.jsp").include(request, response);
			} catch (Exception e) {
				// Bug fix: the original swallowed every exception silently,
				// leaving the user with a blank response and no diagnostics.
				e.printStackTrace();
			}
		} else {
			// Massive-parse mode.
			// NOTE(review): Integer.valueOf throws if "pages" is absent or not
			// numeric — presumably the form always supplies it; confirm.
			String auxNumPages = request.getParameter("pages");
			int numPages = Integer.valueOf(auxNumPages);
			session.setAttribute("numPages", numPages);

			String sectionHeaderUrl = request.getParameter("section"); // e.g.: http://www.rtbf.be/culture/cinema
			session.setAttribute("section", sectionHeaderUrl);

			String patternEndPageOfArchive = "/archives?page=";
			try {
				ArticleData articles = massiveParseOfSection(sectionHeaderUrl,
						patternEndPageOfArchive, numPages, request);
				List<Event> listEventsParsed = buildEvents(articles, request);
				session.setAttribute("listEvents", listEventsParsed);
				session.setAttribute("numEvents", listEventsParsed.size());
				request.getRequestDispatcher("massiveLoadSucces.jsp").include(request, response);
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Builds one Event per parsed article via {@link #createEvent}, skipping null
	 * results. Extracted because both doPost branches duplicated this loop.
	 *
	 * Bug fixes folded in: the massive-parse branch iterated to
	 * urlList.size()-1, silently dropping the last article, and never
	 * null-checked the created event; the bound is also clamped to the shortest
	 * list so a partially failed parse cannot cause IndexOutOfBoundsException.
	 *
	 * @param articles the aligned lists produced by getRelevantTextOfArticles
	 * @param request  used to derive the Discipline name from the section
	 * @return the non-null events created, in article order
	 * @throws Exception propagated from createEvent / the NLP tools
	 */
	private List<Event> buildEvents(ArticleData articles, HttpServletRequest request)
			throws Exception {
		List<String> translatedTexts = articles.getTranslatedTexts();
		List<String> originalsTitles = articles.getOriginalsTitles();
		List<String> originalsTexts = articles.getOriginalsTexts();
		List<String> urlList = articles.getUrlList();

		int count = Math.min(urlList.size(),
				Math.min(translatedTexts.size(),
						Math.min(originalsTitles.size(), originalsTexts.size())));

		List<Event> events = new ArrayList<Event>();
		for (int i = 0; i < count; i++) {
			Discipline discipline = new DisciplineImpl();
			discipline.setName(util.Functions.getSectionParsed(request));
			Event event = createEvent(translatedTexts.get(i), originalsTitles.get(i),
					originalsTexts.get(i), urlList.get(i), discipline, request);
			if (event != null) {
				events.add(event);
			}
		}
		return events;
	}

	/**
	 * Massively parses a section of RTBF by walking its archive pages
	 * (http://www.rtbf.be/culture/"SECTION"/archives?page=XX) and collecting
	 * every article link, then fetching/translating the relevant text.
	 *
	 * @param sectionHeaderUrl the header of a valid URL section
	 * @param patternPage      the URL suffix pattern for archive pages
	 * @param numPages         how many archive pages the user wants to scan
	 * @param request          the current request (kept for interface compatibility)
	 * @return ArticleData containing the aligned lists of original titles,
	 *         original texts, translated texts and article URLs
	 * @throws Exception propagated from getRelevantTextOfArticles
	 */
	public ArticleData massiveParseOfSection(String sectionHeaderUrl,
			String patternPage, int numPages, HttpServletRequest request) throws Exception {

		int initPage = 1; // initial valid url page is "...archives?page=1"

		// All distinct article URLs found across the scanned archive pages.
		List<String> listAllArticlesSection = new ArrayList<String>();

		// Each archive page holds roughly 7-9 article links.
		for (int i = initPage; i <= numPages; i++) {

			String archivePageUrl = sectionHeaderUrl + patternPage + String.valueOf(i);

			Document doc;
			try {
				doc = Jsoup.connect(archivePageUrl).get();
			} catch (IOException e) {
				// Bug fix: the original only printed the stack trace and then
				// dereferenced the null doc, throwing NullPointerException.
				// Skip the unreachable page instead.
				e.printStackTrace();
				continue;
			}

			// Links matching the typical article pattern (".../detail_...").
			// Rare highlight articles use ".../#rnf_..." instead and are not
			// handled; supporting them would require visiting each one to find
			// the canonical "detail_" URL inside.
			Elements links = doc.select("a[href^=" + sectionHeaderUrl
					+ "/detail_" + "]");

			for (Element link : links) {
				String linkHref = link.absUrl("href");
				// Keep real posts (..?id=..), drop video anchors and duplicates.
				if (linkHref.contains("?id=") && !linkHref.contains("#video")
						&& !listAllArticlesSection.contains(linkHref)) {
					listAllArticlesSection.add(linkHref);
				}
			}

			// Stop early if the user asked for more pages than the archive has:
			// RTBF renders this placeholder paragraph on empty pages.
			Elements finishLink = doc.select("p:contains(Aucune info pour l'instant...)");
			if (finishLink.size() != 0) {
				System.out.println("out!"); // if there are no items in the page
				break;
			}
		}

		System.out.println("You going to parse: " + listAllArticlesSection.size() + " articles");

		return getRelevantTextOfArticles(listAllArticlesSection);
	}
	
	/**
	 * Extracts only the relevant text of each article for the NLP tools and
	 * translates it. The relevant parts of the HTML are: h1 = title,
	 * header = description, and the main textual content.
	 *
	 * @param listAllArticlesSection URLs of the articles to parse
	 * @return ArticleData whose four lists are index-aligned:
	 *         original titles (French), original texts (French),
	 *         translated texts (English), and the URLs successfully parsed
	 * @throws Exception propagated from the translation service
	 */
	public ArticleData getRelevantTextOfArticles(List<String> listAllArticlesSection) throws Exception {

		List<String> listOriginalsTitles = new ArrayList<String>();
		List<String> listTranslatedTexts = new ArrayList<String>();
		List<String> listOriginalsTexts = new ArrayList<String>();
		// Bug fix: the original returned the *input* URL list as urlList, so any
		// failed article left the four lists with different lengths and callers
		// indexing by urlList.size() went out of bounds. Track successes instead.
		List<String> listParsedUrls = new ArrayList<String>();

		ArticleData listArticlesData = new ArticleDataImpl();

		for (String url : listAllArticlesSection) {
			try {
				Document doc = Jsoup.parse(new URL(url), 5000);

				Elements h1List = doc.getElementsByTag("h1"); // all h1 (one per article)
				Element mainContent = doc.getElementById("mainContent");
				if (mainContent == null) {
					// Bug fix: pages without a #mainContent element made the
					// original throw an uncaught NullPointerException.
					continue;
				}
				Elements headerList = mainContent.getElementsByTag("header"); // description (one per article)
				Elements pElementsList = mainContent.getElementsByClass("textualContent");

				String originalTitle = "";   // the original title in French
				String h1AndDesc = "";       // h1 merged with the bold description
				// Elements is a list, but in practice there is one h1 per article.
				for (int i = 0; i < h1List.size(); i++) {
					h1AndDesc = h1List.get(i).text();
					originalTitle = h1AndDesc;
					String desc = headerList.get(i).text();
					h1AndDesc = h1AndDesc + ".\n" + desc;
				}

				// Main paragraphs, newline-separated.
				StringBuilder body = new StringBuilder();
				for (Element e : pElementsList) {
					body.append("\n").append(e.text());
				}

				String originalRelevantText = h1AndDesc + body;
				String translatedRelevantText =
						util.Functions.translateFrenchToEnglish(originalRelevantText);

				listTranslatedTexts.add(translatedRelevantText);
				listOriginalsTitles.add(originalTitle);
				listOriginalsTexts.add(originalRelevantText);
				listParsedUrls.add(url);

			} catch (IOException e) {
				// Best-effort: skip unreachable articles, keep the lists aligned.
				e.printStackTrace();
			}
		}

		listArticlesData.setOriginalsTitles(listOriginalsTitles);
		listArticlesData.setTranslatedTexts(listTranslatedTexts);
		listArticlesData.setOriginalsTexts(listOriginalsTexts);
		System.out.println("\n=== lista URL: " + listParsedUrls);
		listArticlesData.setUrlList(listParsedUrls);

		return listArticlesData;
	}
	

	/**
	 * Open URL with HTMLEditorKit (Java Swing library).
	 */
	/*
	 * URL url = new URL(s);
	 * 
	 * HTMLEditorKit kit = new HTMLEditorKit();
	 * HTMLDocument doc = (HTMLDocument) kit.createDefaultDocument();
	 * doc.putProperty("IgnoreCharsetDirective", Boolean.TRUE); 
	 * Reader HTMLReader = null; 
	 * try {
	 * 		HTMLReader = new InputStreamReader(url.openConnection().getInputStream()); 
	 * } catch (IOException e1) { 
	 * 		e1.printStackTrace(); 
	 * } try { 
	 * 		kit.read(HTMLReader,doc, 0);
	 * } catch (BadLocationException e) {
	 * 		 e.printStackTrace();
	 * }
	 */

	// listHref = retrieveLinks(s); // all valids links of general page
	// NOTE(review): servlet instances are shared across requests, so mutable
	// instance fields like these are not thread-safe; neither field appears to
	// be read or written anywhere in this file — confirm and consider removing.
	List<Event> listEvent = new ArrayList<Event>(); // list of events
													// created
	List<String> visited = new ArrayList<String>(); // list of post visited

	/**
	 * next page event of same page section recursively
	 */

	/**
	 * change pattern of regex by Jsoup method for obtaining the data. DONE
	 */
	/*
	 * for (String aux : listHref) { // regex to check if string is for go to
	 * next page Pattern p1 = Pattern
	 * .compile("http://www.rtbf.be/culture/cinema/page/\\.*?/"); Matcher m1 =
	 * p1.matcher(aux); if (!m1.find()) { // if is not for to go next page Event
	 * e = null; if (!visited.contains(aux)) { // if not visited yet e =
	 * this.createEvent(aux); // create a event of post // if(e==null) // throw
	 * new NullPointerException("Error al crear evento"); // listEvent.add(e);
	 * // add event to list visited.add(aux); // add page to visited
	 * 
	 * System.out.println("Web visited: " + aux); //
	 * System.out.println("Event created: "+e.toString()); }
	 * meter el evento en la BD.
	 * 
	 * } else { System.out.println("\nlink to next page " + aux);
	 * this.parseMethod(aux); // call method if link is for go to next // page
	 * 
	 * }
	 * 
	 * }
	 */

	// System.out.println("llega parseMethod");
	// return listHref;
	// }


	/**
	 * Open URL with HTMLEditorKit (Java Swing library).
	 */

	// try{
	//
	// // Finding "a" elements of HTML
	// HTMLDocument.Iterator it = d.getIterator(HTML.Tag.A);
	// while(it.isValid()){
	// SimpleAttributeSet sa = (SimpleAttributeSet)it.getAttributes();
	// String link = (String)sa.getAttribute(HTML.Attribute.HREF);
	// Pattern p =
	// Pattern.compile("http://www.rtbf.be/culture/cinema/\\.*");
	// Matcher m = p.matcher(link);
	// // System.out.println("Links encontrados: "+link);
	//
	// if(m.find()){
	// if(!link.contains("#comment") && !link.contains("/attachment/")){
	// //for a rare href that is true in Pattern.
	// }
	//
	// if(!listLinks.contains(link)){
	// listLinks.add(link);
	// System.out.println("link list: "+listLinks);
	// }
	// }
	//
	//
	// }
	// it.next(); // inc iterator
	//
	//
	// }
	//
	// } catch(Exception e){
	// e.printStackTrace();
	// }
	// return listLinks;
	// }

	/**
	 * CREATE EVENT public Event createEvent(String s) throws
	 * MalformedURLException, IOException {
	 * 
	 * Document d = Jsoup.connect(s).get(); String title = d.title();
	 * 
	 * Event e = new EventImpl(); e.setUrl(s); e.setTitle(title);
	 * 
	 * 
	 * /** Open URL with HTMLEditorKit of Java Swing library. For modify with
	 * Jsoup
	 */

	/*
	 * // Cambiar locura Swing por Jsoup URL url = new URL(s); DONE
	 * 
	 * HTMLEditorKit kit = new HTMLEditorKit(); HTMLDocument doc =
	 * (HTMLDocument) kit.createDefaultDocument();
	 * doc.putProperty("IgnoreCharsetDirective", Boolean.TRUE); Reader
	 * HTMLReader = null; try { HTMLReader = new
	 * InputStreamReader(url.openConnection() .getInputStream()); } catch
	 * (IOException e1) { e1.printStackTrace(); } try { kit.read(HTMLReader,
	 * doc, 0); } catch (BadLocationException ex) { ex.printStackTrace(); }
	 */


	/**
	 * This method will create the Objects Event for each article read from RTBF using the
	 * Natural Language Processing tools (OpenCalais and AlchemyAPI) for his recognition 
	 * and will insert the Event and the other objects into the database.
	 * 
	 * J-Calais and Alchemy API: Two Java Interfaces for the NLP's tools
	 * 
	 * @param translatedText
	 * 			String with the text to analyze
	 * @param originalTitle
	 * 			String with the original title in French of the article
	 * @param originalText
	 * 			String with the original text in French of the article
	 * @param url
	 * 			Url of the article
	 * @param discipline
	 * 			pre-object Discipline from the form of the website
	 * @param request
	 * 			HttpServletRequest object for setting parameters from the html
	 * @throws MalformedURLException
	 * @throws IOException
	 * @throws ParserConfigurationException 
	 * @throws SAXException 
	 * @throws XPathExpressionException 
	 */
	public Event createEvent (String translatedText, String originalTitle, String originalText, String url, 
			Discipline discipline, HttpServletRequest request) throws MalformedURLException, IOException, XPathExpressionException, SAXException, ParserConfigurationException{
		
		System.out.println("Translated Text in create event: "+translatedText);
		System.out.println("Original title in create event: "+originalTitle);
		System.out.println("Original text in create event: "+originalText);
		System.out.println("Url in create event: "+url);
		
		// Singleton DAO stores for each persisted domain object.
		EventStore eventStore = EventStore.getInstance();
		EntityStore entityStore = EntityStore.getInstance();
		TypeStore typeStore = TypeStore.getInstance();
		SubTypeStore subTypeStore = SubTypeStore.getInstance();
		RelationStore relationStore = RelationStore.getInstance();
		CategoryStore categoryStore = CategoryStore.getInstance();
		DisciplineStore disciplineStore = DisciplineStore.getInstance();
		
		Event evRes = new EventImpl(); // Event to return
		evRes.setId(util.UIDGenerator.getInstance().getKey()); // Add key for the DB

		
		/**
		 * API Keys for OpenCalais and Alchemy API
		 */
		// NOTE(review): API keys are hard-coded in source and therefore exposed
		// in version control — move them to configuration and rotate them.
		AlchemyAPI alchemyObj = AlchemyAPI.GetInstanceFromString("5a1d49a816518b5e99d9a63d49257fb8ed97a108");
		CalaisClient client = new CalaisRestClient("vrwmtgfzzw4k9e69ncqcaxwh");

		/**
		 * Configuring response in OpenCalais
		 */
	
		CalaisConfig config = new CalaisConfig();
		config.set(CalaisConfig.ProcessingParam.CALCULATE_RELEVANCE_SCORE,
				"true");
		config.set(CalaisConfig.ProcessingParam.CONTENT_TYPE, "TEXT/RAW");
		config.set(ConnParam.CONNECT_TIMEOUT, 5000);
		config.set(ConnParam.READ_TIMEOUT, 5000);
	
		/**
		 * Calling response for OpenCalais
		 */
		
		// For to analyze a text

		// to extract a complete web. Not efficient in RTBF nor French. 
		CalaisResponse response;
		try {
			response = client.analyze(translatedText, config);
	
			// Creation of Objects for each Event
			List<Entity> listHighEntities = new ArrayList<Entity>();
			List<Entity> listAllEntities = new ArrayList<Entity>();
			List<Relation> listRelations = new ArrayList<Relation>();
			
			Iterable<CalaisObject> openCalaisEntitiesOutput = response.getEntities();
			Iterable<CalaisObject> listRelationsOutput = response.getRelations();
			
			// Call for Entities in AlchemyAPI
			org.w3c.dom.Document doc = alchemyObj.TextGetRankedNamedEntities(translatedText);
			String alchemyEntitiesOutput = util.Functions.getStringFromDocument(doc);
//			System.out.println(alchemyEntitiesOutput);
						
			// Changing XML results from AlchemyAPI to Elements of Jsoup
			Document doc2 = Jsoup.parseBodyFragment(alchemyEntitiesOutput);
			Elements alchemyEntitiesElements = doc2.select("entity");
								
			/**
			 * "Topic" from OpenCalais API is our "Category" in DB
			 * 
			 *  Display recognized topics:
			 *  Fields:
			 *  		_typeGroup, category, classifierName, categoryName, score, _uri
			 */
			
			List<Category> listCategories = new ArrayList<Category>();
			Category cRes = new CategoryImpl(); // will have the id after to insert
			
			// After this loop cRes mirrors listCategories.get(0): the first topic
			// found by OpenCalais, or the Alchemy fallback category.
			// NOTE(review): if response.getTopics() is empty, cRes keeps only the
			// generated defaults and listCategories stays empty — confirm the
			// stores tolerate a nameless Category in that case.
			for (CalaisObject topic : response.getTopics()) {
				String nameCategoryOc = "";
				nameCategoryOc = topic.getField("categoryName");
				String score = topic.getField("score");

				System.out.println("\n\nTopics: \n"+ "Category Name: "+nameCategoryOc + "Score: " + score);

				if (nameCategoryOc != null){
					Category c = new CategoryImpl();
					c.setId(util.UIDGenerator.getInstance().getKey());
					c.setName(nameCategoryOc);
					c.setScore(score);
					listCategories.add(c);

				}
				else if (listCategories.isEmpty()){// Extracting Category from Alchemy in case that doesn't exist in OpenCalais
					
					// Calling AlchemyAPI for Categorization
					doc = alchemyObj.TextGetCategory(translatedText);
					String alchCategory = util.Functions.getStringFromDocument(doc);
					
					// Changing XML results from AlchemyAPI to Elements of Jsoup
					Document doc3 = Jsoup.parseBodyFragment(alchCategory);
					Elements alchemyCategoryElements = doc3.select("category");
					
					Category c = new CategoryImpl();
					c.setId(util.UIDGenerator.getInstance().getKey());
					String catAlch = alchemyCategoryElements.first().text();
					c.setName(catAlch);			
					System.out.println("\n\nCategory by Alchemy="+c.getName());
					listCategories.add(c);
	
				}
				cRes.setId(listCategories.get(0).getId());
				cRes.setName(listCategories.get(0).getName());
				cRes.setScore(listCategories.get(0).getScore());
//				System.out.println("\n\n\n ANTES DE INSERTARRRR: cRes: " + cRes.getId()+", " + cRes.getName());

			}
			/**
			 *  Add Category to DB if not exist
			 */

			if(!categoryStore.existCategory(cRes)){ // Exist category with that name?
				cRes = categoryStore.insertCategory(cRes);
			} else{
				cRes = categoryStore.getCategoryByName(cRes.getName());
			}
			
			// Add Discipline to DB
			Discipline dRes = null;
//			System.out.println("\n\n====== CATEGORY cRes: " +"," + cRes.getName());
			discipline.setId(util.UIDGenerator.getInstance().getKey()); // Add key for DB
			if(!disciplineStore.existDiscipline(discipline)){
				dRes = disciplineStore.insertDiscipline(discipline,cRes);
			} else {
				dRes = disciplineStore.getDisciplineByName(discipline.getName());
			}
			cRes.setDiscipline(dRes);
			evRes.setDiscipline(dRes); // Add Discipline
			evRes.setCategory(cRes); // Add Category
			evRes.setUrlEvent(url); // Add URL of the article
			evRes.setTitle(originalTitle); // Add originalTitle ot the article
			evRes.setOriginalText(originalText); // Add originalText (clean) of the article 
			evRes.setTranslatedText(translatedText); // Add translatedText
	
			/**
			 * Relation from OpenCalais is our relation in DB
			 * 
			 * Recognized relations of OpenCalais:
			 */
			for(CalaisObject relOpenCalais : listRelationsOutput){
				String relSubject = relOpenCalais.getField("relationsubject");
				String relVerb = relOpenCalais.getField("verb");
				String relObject = relOpenCalais.getField("relationobject");	
				String uri = relOpenCalais.getField("_uri");
								
//				System.out.println("\nRelation: "+
//						"\nRelation Subject="+relSubject+", verb="+relVerb+
//						", relation Object="+relObject+
//						"\nuri: "+relUri+"\n\n");

				/**
				 *  We'll take only good Relations provided (without nulls in the subject, verb or object).		
				 */
				if(relSubject!=null && relVerb!=null && relObject!=null){
					Relation relation = new RelationImpl();
					relation.setSubject(relSubject);
					relation.setVerb(relVerb);
					relation.setObject(relObject);
					relation.setUri(uri);
					listRelations.add(relation);
								
					}
//					System.out.println("\n\n SIN NULOS: Relation: "+
//						"\nRelation Subject="+relSubject+", verb="+relVerb+
//						", relation Object="+relObject+
//						"\nuri: "+relUri+"\n\n==========");
							
				}
		
			// Add relations to DB (if there are)
			if(!listRelations.isEmpty()){
				for(Relation r: listRelations){
					if(!relationStore.existRelation(r, evRes)){
						relationStore.insertRelation(r, evRes);
					}
				}
				evRes.setListRelations(listRelations);
			}
		
					/**
					 * The data of the Entities are from OpenCalais. Can be selected from Alchemy
					 * but has been chosen OpenCalais in reason to realiable results and ease to use.
					 * 
					 * Only will be taken the Entities with the score relevance desired on the form.
					 * Except the list used for the description 
					 * Fields: 
					 * 			_typeGroup, _type, name, _typeReference, instances, relevance, _uri
					 * 
					 * Another Objects added in each Entity:
					 * 			Type, SubType (if exist disambiguation)
					 */
			
			// Moving through the entities of OpenCalais API and getting fields
			for (CalaisObject calaisEntity: openCalaisEntitiesOutput) {
				String typeEntityOc = calaisEntity.getField("_type");
				String nameEntityOc = calaisEntity.getField("name");
				String relevanceOc = calaisEntity.getField("relevance");
				String uriOc = calaisEntity.getField("_uri");
				
				/**
				 * Getting all entities (only "uri") with any score to using it in the description of an Event.
				 * Low score entities won't be inserted into the database except if achieve the score selected
				 * on the form.
				 */
				Entity e = new EntityImpl();
				e.setUri(uriOc);
				e.setName(nameEntityOc);
				listAllEntities.add(e);
					
				// Moving through entities from Alchemy API and getting fields
				for(Element alchEntity: alchemyEntitiesElements){ 
				
					/**
					 *  Here, We're checking if an Entity got from both NLP tools have Score >= 0.5 
					 *  and if there is disambiguation from AlchemyAPI.
					 *  If there is disambiguation (correct iff there are RDF Links):
					 *  - Create the list with the links of LinkedData iff there is no error in
					 *    the results provided by the NLP tools.
					 *  - Create the List<SubType> for the Entity.
					 */
					String relevanceAlch = alchEntity.getElementsByTag("relevance").get(0).text();
					String nameEntityAlch = alchEntity.getElementsByTag("text").get(0).text();
					
					// Entities in AlchemyAPI and OpenCalais have a score >= than chosen?
					// NOTE(review): request.getParameter("score") is re-read and
					// parsed on every iteration and Double.valueOf(null) would
					// throw — presumably the form always supplies "score"; confirm.
					if(Double.valueOf(relevanceOc)>= Double.valueOf(request.getParameter("score"))
							&& Double.valueOf(relevanceAlch)>=Double.valueOf(request.getParameter("score"))
							&& nameEntityAlch.equals(nameEntityOc)){ // We are in the same Entity in each NLP tool?
						
						Entity entRes = new EntityImpl(); // let's put the ID already
						String idEntRes = util.UIDGenerator.getInstance().getKey();
						entRes.setId(idEntRes);
						
						Type tRes = new TypeImpl();
						String idTRes = util.UIDGenerator.getInstance().getKey();
						tRes.setId(idTRes);
						
						
						entRes.setName(nameEntityOc); // Add name for Entity
						entRes.setUri(uriOc); 		// Add URI for Entity
						
						tRes.setName(typeEntityOc); // Add name for Type

						
						/**
						 * Looking for SubType from AlchemyAPI
						 * 
						 * Exist a correct SubType (no with "charset-error" iff there is RDF links too)
						 * 
						 */
						
						if(alchEntity.getElementsByTag("subType").first()!=null){
							
							// Linked Data sources provided by AlchemyAPI iff exist a correct disambiguation
							List<String> typeLinkedDataSource = new ArrayList<String>();
							typeLinkedDataSource.add("website");
							typeLinkedDataSource.add("dbpedia");
							typeLinkedDataSource.add("yago");
							typeLinkedDataSource.add("opencyc");
							typeLinkedDataSource.add("umbel");
							typeLinkedDataSource.add("freebase");
							typeLinkedDataSource.add("ciaFactbook");
							typeLinkedDataSource.add("census");
							typeLinkedDataSource.add("geonames");
							typeLinkedDataSource.add("musicBrainz");
							typeLinkedDataSource.add("crunchbase");
							typeLinkedDataSource.add("semanticCrunchbase");
							
							List<String> listRdfResources = new ArrayList<String>(); // List with the possible RDF Resources
	
							for(String s: typeLinkedDataSource){
								if(alchEntity.getElementsByTag(s).first()!=null){
									listRdfResources.add(alchEntity.getElementsByTag(s).first().text());
								}
							}// If there is some RDF Link
							if(!listRdfResources.isEmpty()){
//								System.out.println("\nTHERE IS RDF: " + listRdfResources.toString());
								entRes.setListRDFLinks(listRdfResources);
							
							// There exist RDF links => There exist subTypes from Alchemy
								List<SubType> listSubTypes = new ArrayList<SubType>();
		
								for(Element subtype: alchEntity.getElementsByTag("subType")){ 
									SubType s = new SubTypeImpl();	
									s.setName(subtype.text());
									listSubTypes.add(s);
								}
								if(!listSubTypes.isEmpty()){
//									entRes.setListSubType(listSubTypes); // Add list SubType (if exist) to the Entity
//									tRes.setListSubTypes(listSubTypes); // Add list SubType (if exist) to the Type
								
									// Add SubType to DB (if exist)
									List<SubType> listStRes = new ArrayList<SubType>();
									for(SubType s: listSubTypes){
										SubType stRes = null;
										String idSubType = util.UIDGenerator.getInstance().getKey();
										
										if(s.getId()==null || !subTypeStore.existSubType(s, tRes, entRes)){
											s.setId(idSubType);
											stRes = subTypeStore.insertSubType(s, tRes,entRes);
										}
										else {
											stRes = subTypeStore.getSubTypeByName(s.getName());
										}
										listStRes.add(stRes);
										entRes.setListSubType(listStRes);
										tRes.setListSubTypes(listStRes);
								}
							}		
							
						}
					
					}
					/**
					 *  Add Type to DB (mandatory name, idEntity)
					 */
					// NOTE(review): the brace indentation above is misleading; this
					// block is still inside the score-matching if, so Type/Entity
					// are only persisted for matched, high-score entities.
					if(!typeStore.existType(tRes, entRes)){
						tRes = typeStore.insertType(tRes,entRes);
					}

					entRes.setType(tRes);
					
					/**
					 *  Add Entity to DB (mandatory idEvent, name) if not exist
					 *  RDF Links and henceforth SubTypes neither
					 *  
					 */
					if(!entityStore.existEntity(entRes,evRes)){
						entRes = entityStore.insertEntity(entRes,evRes);
					} else {
						entRes = entityStore.getEntityById(entRes.getId());

					}
					// Finally, add the valid Entity to the list of High score Entities
					listHighEntities.add(entRes); 
	
					}
			
				}
			
//				if (typeEntityOc != null && nameEntityOc != null) {
//					System.out.println("\n\nEntity: "+typeEntityOc + ": " + nameEntityOc +
//							"\nRelevance: "+relevanceOc+
////							"\nuri: "+uriOc);
//				}
				
			}			
			evRes.setListEntities(listHighEntities); // Relate the list of Entities to the Event
			
			/**
			 *  Add description of the event combining Relations gotten and high score Entities
			 */
			String descAux = "";
			// list Relations related with High score entities
			List<Relation> lrelHigh = new ArrayList<Relation>(); 
			for(Relation r: listRelations){
				String subject = r.getSubject();
				String object = r.getObject();
				for(Entity entity: listHighEntities){ // High score Entities
					if(subject.contains(entity.getUri()) || object.contains(entity.getUri())){
						lrelHigh.add(r);
					}
				}

			}
			
			// Concatenate "subject verb object." sentences as the raw description.
			for(Relation r: lrelHigh){
				String subject = r.getSubject();
				String verb = r.getVerb();
				String object = r.getObject();
				descAux += "\n"+subject + " " + verb + " " + object + ".";

			}
//			System.out.println("\n\n== Primitive Description: "+ descAux);
//			System.out.println("\n\n== Num entities : "+ listAllEntities.size());
//			System.out.println("\n\n== Entities : "+ listAllEntities.toString());

			// Replace entity URIs in the description with their readable names.
			// NOTE(review): replaceAll treats uri as a regex; URIs contain regex
			// metacharacters ('.', '?', ...) so this can match or fail
			// unexpectedly — Pattern.quote(uri) / String.replace would be safer.
			for(Entity entity: listAllEntities){
				String name = entity.getName();
				String uri = entity.getUri();
				System.out.println("\n\n== Name entity: "+ name);
				System.out.println("\n\n== Uri entity: "+ uri);

				descAux = descAux.replaceAll(uri, name);

			}
							
//			System.out.println("\n\n== Description modified: "+ descAux);

			String description = descAux;


//				System.out.println("\nRelation: "+
//						"\nRelation Subject="+r.getSubject()+", verb="+r.getVerb()+
//						", relation Object="+r.getObject()+"\n\n");
	
			

		// Add description to the Event
		evRes.setDescription(description); 
		
		// Add the Event to the DB (mandatory title, idCategory, url, originalText)
		if(!eventStore.existEvent(evRes)){
			evRes = eventStore.insertEvent(evRes,cRes,dRes);
		} else{
			evRes = eventStore.getEventByTitle(evRes.getTitle());
		}	
		
		
//			System.out.println("\n=========\nLista de Entidades: "+ 
//			listEntities.toString() + "\nSubList: "
//				//	listEntities.get(0).getListSubType().isEmpty()
//			+ "\nType: "+listEntities.get(0).getType()+"\nName: "+listEntities.get(0).getName());				

		
		/**
		 *  Display recognized Social Tags: Not used because the triviality of the results
		 *  Fields: 
		 *  		_typeGroup, id, socialTag, name, importance, originalValue, _uri
		 *  
		 *  Has been discarded for no providing good results with high score, yet.
		 */

		/*
		for (CalaisObject tags : response.getSocialTags()) {
			String nameGroup = tags.getField("name");
			String id = tags.getField("id");
			String socialTag = tags.getField("socialTag");
			String importance = tags.getField("importance");
			String originalValue = tags.getField("originalValue");
			String uri = tags.getField("_uri");
			System.out.println("\nName group: " + nameGroup+
					"\nId: "+id+"\nCampo Social Tag: "+socialTag+"\nImportance: "+importance+
					"\nOriginal Value: "+originalValue+"\nUri: "+uri);
		}
		
		*/
					
			
		} catch (Exception e1) {
			// Broad catch: any NLP/API failure is logged and a partially
			// populated Event is still returned to the caller.
			System.out.println("Backend-server busy, try again later");
			e1.printStackTrace();
		}
				
		return evRes;
		
	}
}