/*
	This file is part of "Ants on the web".

    "Ants on the web" is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    "Ants on the web" is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with "Ants on the web".  If not, see <http://www.gnu.org/licenses/>.
*/

package org.utc.ia04.antsontheweb.knowledgeBase;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;

import org.codehaus.jackson.map.ObjectMapper;
import org.utc.ia04.antsontheweb.main.DefaultClient;
import org.utc.ia04.antsontheweb.utilities.Requete;


import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;

import jade.core.Agent;
import jade.domain.DFService;
import jade.domain.FIPAException;
import jade.domain.FIPAAgentManagement.DFAgentDescription;
import jade.domain.FIPAAgentManagement.ServiceDescription;
import jade.lang.acl.ACLMessage;

/**
 * @author Ronan
 * @category Agent
 * 
 * Requests :
 *    utilities.Requete
 *        supported type :
 *           -> KB_LEARN : learn keywords for the given website type
 *           -> KB_GIVE_WEBSITE_TYPE : indicates whether keywords correspond to a particular website type
 *                                     return "true" or "false"
 *        supported content
 *        	 -> Serialized KnowledgeBaseRequest :
 *        			website type supported :
 *        				-> BUYING_SITE
 *        			keywords supported
 *        				-> any
 *        
 * Answers :
 * 		-> UNFORMATTED (FAILURE)
 *      -> UNKNOWN_WEBSITE_TYPE (FAILURE)
 *      -> UNEXCEPTED_ERROR (FAILURE)
 *      -> TRUE or FALSE (INFORM)
 *      -> DONE (INFORM)
 *      
 * DF Information :
 * 		Type : dfType
 * 		Name : dfName
 */
public class KnowledgeBaseAgent extends Agent
{
	//serialVersionUID
	private static final long serialVersionUID = -3302573697369358944L;
	
	//DF Information
	public static final String dfType = "knowledgeBaseServices";
	
	//Known website types
	public static final int BUYING_SITE = 0;
	public static final int OTHER = 1;
	public static final int LAST = 2;
	
	public static final String BUYING_SITE_NAME = "BUYING_SITE";
	public static final String OTHER_NAME = "OTHER";
	
	//Error messages
	public static final String UNFORMATTED = "Unformatted message...";
	public static final String UNKNOWN_WEBSITE_TYPE = "Unknown website type...";
	public static final String UNEXCEPTED_ERROR = "Unexcepted error...";
	
	//Inform messages
	public static final String DONE = "Done...";
	public static final String TRUE = Boolean.toString(true);
	public static final String FALSE = Boolean.toString(false);

	//Known website list
	private static final String OTHER_LIST = "OTHER_SITE_LIST";
	private static final String BUYING_SITE_LIST = "BUYING_SITE_LIST";

	//Files
	public static final String USED_KEYWORD_FILE = "usedKeywords.txt";
	private static final String ONTOLOGY_FILE = "kbOntology.n3";
	
	//Ontology
	private Model ontology;
	private String prefix = "http://www.utc.fr/ia04#";
	
	private Property member;
	private Property quantite;
	private Property concerne;
	private Property a;
	private RDFNode keywordNode;
	
	//Give website type
	private HashMap<String, UsedKeyword> keywordToAttribute;
	
	//METHODES
	public void setup()
	{
		/* --- DEBUG ---
		System.out.println("KBAgent cree !");
		//-------------*/	
		
		//Behaviour
		addBehaviour(new ReceiveMessage());
		
		//DF Registration
		register();
		
		//Load Ontology
		loadOntology();
		
		//Prepare to give website
		prepareToGiveType();
		
		System.out.println("I'm ready !");
	}

	private void register()
	{
		DFAgentDescription dfd = new DFAgentDescription();
		dfd.setName(getAID());
		ServiceDescription sd = new ServiceDescription();
		sd.setType(KnowledgeBaseAgent.dfType); sd.setName(getName());
		dfd.addServices(sd);
		try { DFService.register(this, dfd); }
		catch (Exception fe) { System.out.println("Error during registration..."); }
	}
	
	private void loadOntology()
	{
		try
		{
			//Create Model
			ontology = ModelFactory.createDefaultModel();
			FileInputStream fis= new FileInputStream(DefaultClient.LOCAL_FOLDER + KnowledgeBaseAgent.ONTOLOGY_FILE);
			ontology.read(fis, "http://utc/","N3");
			
			//Initialisations diverses
			member = ontology.createProperty(prefix + "member");
			quantite= ontology.createProperty(prefix + "quantite");
			a= ontology.createProperty("http://www.w3.org/1999/02/22-rdf-syntax-ns#type");
			concerne = ontology.createProperty(prefix + "concerne");
			keywordNode = ontology.getResource(prefix + "keyword");
		}
		catch(Exception ex) { System.out.println("Loading fails : " + ex.toString()); }
	}
	
	private void addKeyWordToOntology(String keyword, String liste)
	{
		try
		{
			//Create or retrieve resource
			Resource ressource = ontology.createResource(prefix + keyword);
			
			//Get
			String sarqlRequete = "PREFIX ia04: <" + prefix + "> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?s WHERE { ia04:" + keyword +  " ia04:member ?s. ?s ia04:concerne ia04:" + liste + ". }";
			
			//Execution de la requête...
			Query query = QueryFactory.create(sarqlRequete);
			QueryExecution queryExecution = QueryExecutionFactory.create(query, ontology);
			ResultSet res = queryExecution.execSelect();
			
			//Mise à jour...
			Iterator<QuerySolution> sol = ResultSetFormatter.toList(res).iterator();
			if(!sol.hasNext())
			{
				Resource anonymousResource = ontology.createResource();
				ressource.addProperty(a, keywordNode)
						 .addProperty(member, anonymousResource);
				anonymousResource.addProperty(concerne, ontology.createResource(prefix + liste))
								 .addProperty(quantite, "1");
			}
			else
			{
				Statement s = sol.next().getResource("s").getProperty(quantite);
				s.changeObject(String.valueOf(Integer.valueOf(s.getString()) + 1));
			}
		}
		catch(Exception e)
		{
			//System.out.println("Erreur adding a keyword...");
		}
	}
	
	public void learn(int websiteType, HashSet<String> keywords, ACLMessage message)
	{
		switch(websiteType)
		{
			case KnowledgeBaseAgent.BUYING_SITE :
				learn(KnowledgeBaseAgent.BUYING_SITE_NAME, KnowledgeBaseAgent.BUYING_SITE_LIST, keywords, message);
				break;
			case KnowledgeBaseAgent.OTHER :
				learn(KnowledgeBaseAgent.OTHER_NAME, KnowledgeBaseAgent.OTHER_LIST, keywords, message);
				break;
		}
	}
	
	private void learn(String websiteType, String websiteTypeList, HashSet<String> keywords, ACLMessage message)
	{
		//Learning
		Iterator<String> it = keywords.iterator();
		while(it.hasNext())
			addKeyWordToOntology(it.next(), websiteTypeList);
		
		try
		{
			Statement s = ontology.getResource(prefix + websiteType).getProperty(quantite);
			s.changeObject(String.valueOf(Integer.valueOf(s.getString()) + 1));
		
			saveOntology(message);
	        updateUsedKeywordsFile();
		}
		catch(Exception e)
		{
			System.out.println("Erreur innatendu dans learn : " + e.toString());
			answer(KnowledgeBaseAgent.UNEXCEPTED_ERROR, message, true);
		}
	}
	
	public void giveWebsiteType(int websiteType, HashSet<String> keywords, ACLMessage message)
	{
		double seuilDecison = 0;
		Iterator<String> it = keywords.iterator();
		
		while(it.hasNext())
		{
			UsedKeyword current = keywordToAttribute.get(it.next());
			if(current != null)
				seuilDecison += current.getWj(websiteType);
		}

		seuilDecison += UsedKeyword.getW0(websiteType);
		answer(seuilDecison >= 0 ? KnowledgeBaseAgent.FALSE : KnowledgeBaseAgent.TRUE, message, false);
	}
	
	private void prepareToGiveType()
	{	
		//Create HashMap
		keywordToAttribute = new HashMap<String, UsedKeyword>();
		
		//Get keywords
		HashSet<String> keywords = getUsedKeyWord();
		Iterator<String> iter = keywords.iterator();
		
		//Get the number of time we have learnt for this kind of website
		int[] nbApprentissageWebsiteType = new int[KnowledgeBaseAgent.LAST];
		nbApprentissageWebsiteType[KnowledgeBaseAgent.BUYING_SITE] = getQuantity(KnowledgeBaseAgent.BUYING_SITE_NAME);
		nbApprentissageWebsiteType[KnowledgeBaseAgent.OTHER] = getQuantity(KnowledgeBaseAgent.OTHER_NAME);
		
		//Nombre total apprentissage
		int nbTotalApprentissage = 0;
		for(int i = 0; i < KnowledgeBaseAgent.LAST; ++i)
			nbTotalApprentissage += nbApprentissageWebsiteType[i];
		
		//Calcul des PI
		UsedKeyword.computePi(nbApprentissageWebsiteType, nbTotalApprentissage);
		
		//Contruct the UsedKeyword objects
		while(iter.hasNext())
		{
			String keyword = iter.next();
			
			//Nb occurence par type de site
			int[] nbOccurence =  new int[KnowledgeBaseAgent.LAST];
			nbOccurence[KnowledgeBaseAgent.BUYING_SITE] = getQuantity(keyword, BUYING_SITE_LIST);
			nbOccurence[KnowledgeBaseAgent.OTHER] = getQuantity(keyword, OTHER_LIST);
			
			//Nombre total d'occurence du mot
			int nbTotalOccurence = 0;
			for(int i = 0; i < KnowledgeBaseAgent.LAST; ++i)
				nbTotalOccurence += nbOccurence[i];
			
			//Probabilite -- faire des tests ou un try / catch pour les divisions par 0
			double[][] probabilite = new double[KnowledgeBaseAgent.LAST][2];
			probabilite[KnowledgeBaseAgent.BUYING_SITE][0] = (double)(nbTotalOccurence - nbOccurence[KnowledgeBaseAgent.BUYING_SITE]) / (double)(nbTotalApprentissage - nbApprentissageWebsiteType[KnowledgeBaseAgent.BUYING_SITE]);
			probabilite[KnowledgeBaseAgent.BUYING_SITE][1] = (double)(nbOccurence[KnowledgeBaseAgent.BUYING_SITE]) / (double)(nbApprentissageWebsiteType[KnowledgeBaseAgent.BUYING_SITE]);
			probabilite[KnowledgeBaseAgent.OTHER][0] = (double)(nbTotalOccurence - nbOccurence[KnowledgeBaseAgent.OTHER]) / (double)(nbTotalApprentissage - nbApprentissageWebsiteType[KnowledgeBaseAgent.OTHER]);
			probabilite[KnowledgeBaseAgent.OTHER][1] = (double)(nbOccurence[KnowledgeBaseAgent.OTHER]) / (double)(nbApprentissageWebsiteType[KnowledgeBaseAgent.OTHER]);

			//Ajout
			keywordToAttribute.put(keyword, new UsedKeyword(keyword, probabilite));
		}
		
		//Calcul des W0
		UsedKeyword.computeW0(keywordToAttribute);
	}
	
	private void  answer(String content, ACLMessage message, boolean isAnError)
	{
		ACLMessage rep = message.createReply();
		rep.setPerformative(isAnError ? ACLMessage.FAILURE : ACLMessage.INFORM);
		
		Requete r = new Requete(isAnError ? Requete.KB_ERROR : Requete.KB_ANSWER, content);
		rep.setContent(r.serialize());
		//DEBUG
		//System.out.println(this.getName()+" send the message: "+ r);
		send(rep);
	}
	
	private void updateUsedKeywordsFile()
	{
		try{
			//A) Get keywords
			HashSet<String> keywords = getUsedKeyWord();

			//B) Sérialisation
			ObjectMapper m = new ObjectMapper();
			StringWriter sw = new StringWriter();
			m.writeValue(sw, keywords);
			
			//C) Enregistrement dans le fichier
			FileOutputStream file = new FileOutputStream(DefaultClient.LOCAL_FOLDER + KnowledgeBaseAgent.USED_KEYWORD_FILE);
			file.write(sw.toString().getBytes());
		}
		catch(Exception ex)
		{
			System.out.println("Erreur dans updateUsedKeywordsFile : " + ex.toString());
		}
	}
	
	private void saveOntology(ACLMessage message)
	{
		try
        {
        	FileOutputStream fileStream = new FileOutputStream(DefaultClient.LOCAL_FOLDER + KnowledgeBaseAgent.ONTOLOGY_FILE);
            ontology.write(fileStream, "N3");
            answer(KnowledgeBaseAgent.DONE, message, false);
        }
        catch(Exception e)
        {
        	answer(KnowledgeBaseAgent.UNEXCEPTED_ERROR, message, true);
            System.out.println(e.toString());
        }
	}
	
	private HashSet<String> getUsedKeyWord()
	{
		//Resultat
		HashSet<String> resultat = new HashSet<String>();
		
		//Requete
		try
		{
			//Requête select...
			String sparqlQuery = "PREFIX ia04: <" + prefix + "> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?x WHERE { ?x rdf:type ia04:keyword. }";
				
			//Execution de la requête...
			Query query = QueryFactory.create(sparqlQuery);
			QueryExecution queryExecution = QueryExecutionFactory.create(query, ontology);
			ResultSet r = queryExecution.execSelect();
				
			//Formattage du résultat...
			Iterator<QuerySolution> sol = ResultSetFormatter.toList(r).listIterator();
			while(sol.hasNext())
				resultat.add(sol.next().getResource("x").getLocalName());
		}
		catch(Exception e)
		{
			System.out.println("Erreur innatendu dans getUsedKeyWord : " + e.toString());
		}
		
		return resultat;
	}
	
	private int getQuantity(String keyword, String liste)
	{
		try
		{
			//Requête select...
			String sparqlQuery = "PREFIX ia04: <" + prefix + "> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?q WHERE { ia04:" + keyword + " ia04:member ?a. ?a ia04:concerne ia04:" + liste + ". ?a ia04:quantite ?q. }";
	
			//Execution de la requête...
			Query query = QueryFactory.create(sparqlQuery);
			QueryExecution queryExecution = QueryExecutionFactory.create(query, ontology);
			ResultSet r = queryExecution.execSelect();
				
			//Formattage du résultat...
			Iterator<QuerySolution> sol = ResultSetFormatter.toList(r).listIterator();
			return sol.hasNext() ? sol.next().getLiteral("q").getInt() : 0;
		}
		catch(Exception e)
		{
			System.out.println("Erreur innatendu dans getQuantity : " + e.toString());
			return 0;
		}
	}
	
	private int getQuantity(String websiteType)
	{
		try
		{
			//Requête select...
			String sparqlQuery = "PREFIX ia04: <" + prefix + "> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> SELECT ?q WHERE { ia04:" + websiteType + " ia04:quantite ?q. }";
	
			//Execution de la requête...
			Query query = QueryFactory.create(sparqlQuery);
			QueryExecution queryExecution = QueryExecutionFactory.create(query, ontology);
			ResultSet r = queryExecution.execSelect();
				
			//Formattage du résultat...
			Iterator<QuerySolution> sol = ResultSetFormatter.toList(r).listIterator();
			return sol.hasNext() ? sol.next().getLiteral("q").getInt() : 0;
		}
		catch(Exception e)
		{
			System.out.println("Erreur innatendu dans getQuantity : " + e.toString());
			return 0;
		}
	}
	
	public void takeDown()
	{
		DFAgentDescription dfd = new DFAgentDescription();
		dfd.setName(getAID());

		try { DFService.deregister(this, dfd); }
		catch (FIPAException e) { }
	}
}
