package org.NooLab.itexx.retrieval;

import java.util.ArrayList;

import org.NooLab.itexx.ITexxWindowCommons;
import org.NooLab.itexx.ITexxWindowIntf;
import org.NooLab.itexx.app.controller.ItexxModelController;
import org.NooLab.itexx.app.controller.analyzer.PoSTaggerWrapper;
import org.NooLab.itexx.app.controller.parser.HTMLParser;
import org.NooLab.itexx.app.gui.definition.ITexxWindowGuiIntf;
import org.NooLab.itexx.app.processes.LinguisticsPort;
import org.NooLab.itexx.comm.tcp.box.TcpBox;
import org.NooLab.itexx.stuctures.GuiPersistenceData;
import org.NooLab.itexx.stuctures.RequestFocusContext;
import org.NooLab.itexx.stuctures.RequestHistory;

import org.NooLab.structures.AppCommBox;
import org.NooLab.utilities.files.DFutils;
import org.NooLab.utilities.logging.PrintLog;
import org.NooLab.utilities.strings.StringsUtil;





/**
 * 
 * 
 * TODO: ALL requests should be sent to this object for proper queuing!!!
 * 
 * 
 * This object provides the functionality to drop in arbitrary data for building
 * a request and to deliver it to the search engine.
 * 
 * NOTE(review): the following paragraph appears to have been copied from
 * "RequestDispatcher" (see the commented-out class name below) — it describes
 * "RequestGenerator" as a different object, yet this class IS RequestGenerator.
 * Please verify: the queue is provided by RequestDispatcher, while the handling
 * of the input and the creation of the RequestItem are done here, in
 * RequestGenerator. As such, RequestGenerator also serves as the observer for
 * the LinguisticsPort.
 * 
 * 
 * 
 */
//public class RequestDispatcher {

public class RequestGenerator implements Runnable {


	// main application window and its shared facilities
	ITexxWindowIntf app;
	private ITexxWindowCommons appCommons;
	private ITexxWindowGuiIntf windowGui;

	RequestFocusContext requestFocus;
	ItexxModelController itexxModelCtrl;
	RequestPreProcessingIntf requestPreProc;
	RequestHistory rqHistory;

	// identifies the request group this generator belongs to
	private String groupGuidStr;
	private String requestStr;
	// the request text after splitting long input into several parts
	ArrayList<String> requestMultiStr = new ArrayList<String>();
	
	// one of the SearchRequestIntf._REQUEST_SRC_TYPE_* codes
	int srcType = 0;

	GuiPersistenceData guiData;
	// raw input as dropped in, and its textual form after type-specific handling
	String amorphousRequestData, textualRequestData;

	LinguisticsPort linguisticsPort ;
	
	private LanguageDetector ld;
	
	
	StringsUtil strgutil = new StringsUtil();
	PrintLog out;
	
	RequestDispatcher rqDispatcher;

	
	// ==============================================================
	/**
	 * Creates a generator for a single request and immediately starts a worker
	 * thread that prepares the data and hands it over to the dispatcher.
	 *
	 * @param app         the application window interface; supplies commons, GUI
	 *                    and the model controller
	 * @param dispatcher  the dispatcher that queues prepared queries
	 * @param guid        GUID string identifying the request group
	 * @param requestData raw, not-yet-typed request data (URL, file path,
	 *                    HTML/XML snippet, or plain text)
	 */
	public RequestGenerator(ITexxWindowIntf app, RequestDispatcher dispatcher, String guid, String requestData) {
		//
		this.app = app;

		rqDispatcher = dispatcher ;
		
		windowGui = app.getWindowGui();
		appCommons = app.getAppCommons();

		requestFocus = appCommons.getModelController().getRequestFocusContext() ; 
			// new RequestFocusContext(app);
		itexxModelCtrl = appCommons.getModelController();

		// CommBoxArrivalIntf
		linguisticsPort = appCommons.getRequestDispatcher().getLinguisticsPort();
			
		Thread rqgenThrd ;
		out = appCommons.out;

		groupGuidStr = guid ;
		
		// so far, the request data are not specified
		amorphousRequestData = requestData;

		// NOTE(review): starting the thread from within the constructor leaks a
		// reference to the not-yet-fully-constructed object; callers rely on this
		// auto-start behavior, so it is kept as-is.
		rqgenThrd = new Thread(this,"rqgenThrd");
		rqgenThrd.start();
	}	
	
	
	
	@Override
	public void run() {
	
		perform();
	}
	
	/**
	 * Worker entry point: resolves the raw input to text, prepares it, and
	 * either registers it directly with the dispatcher (r==1) or leaves it to
	 * be delivered asynchronously once the NLP results return (r==2).
	 */
	private void perform(){
		int r;
		
		try {
		
			String initialText = initialHandling();

								out.print(2, "RequestGenerator :: perform()... preparing text and registering as prepared query...\n"+
										     "            maybe also sending it to the linguistic pre-processing (mainly PoS-tagging.");
			
			r = prepare(initialText);
			// if r==2, it will be sent to NLP, from where it returns to RequestDispatcher
			if (r==1){
				rqDispatcher.registerPreparedQueries( groupGuidStr, initialText);
			} else if (r!=2){
				// r==2 is the asynchronous success path (delivery happens upon the
				// results returned from the linguistic analysis), so only any other
				// code is an actual failure
				out.print(2, "initial preparing of text in RequestGenerator failed, code="+r+".");
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		
	}

	/**
	 * note that each request creates a RequestGenerator, along with its internal processes.
	 * thus, if the requests are all delivered to the WebBrowser, we have to close 
	 */
	public void close(){
		// TODO: not implemented yet — release per-request resources here
	}
	
	/**
	 * Determines the type of the raw input (URL, file, HTML, XML, ini, plain
	 * text) and converts it into textual request data.
	 *
	 * @return the textual form of the request; an empty string if a file source
	 *         yielded no content
	 */
	private String  initialHandling() {

		
		textualRequestData = "";
		
		// this we could also get from itexxModelCtrl ... if there is something
		// expensive to load
		// but we also need the possibility for concurrent processes
		requestPreProc = new RequestPreProcessing(app);
		
		// default for "textualRequestData"=unprocessed, if we recognize some type, we overwrite it
		textualRequestData = amorphousRequestData ; 
		
		srcType = determineTypeOfStringInfo(amorphousRequestData);

		if (srcType == SearchRequestIntf._REQUEST_SRC_TYPE_URL) {
			// download it and treat it as text
			// save the peculiarity of such a request in the request item !!!
			// we have to issue a intermittent request and wait for it
			// this can NOT be used for addressing a search engine... complete search-URLs are directly used for "navigate()"
			String url = amorphousRequestData;
			textualRequestData = (new HttpUrlPage(app, 1)).get(url);
			srcType = SearchRequestIntf._REQUEST_SRC_TYPE_REMOTE_TEXT;
		}
		
		if (srcType == SearchRequestIntf._REQUEST_SRC_TYPE_FILE) {
			// check for size !!! 
			// if large, we need options, whether to take everything all together, or 
			// whether we split it into sections/paragraphs (which requires "simple" clustering to identify section)
			
			// TODO: get textual content, via tika — extraction is not implemented
			// yet, so "filecontent" stays empty and we return ""
			String filecontent = "";
			
			if (filecontent.length()>0){
				
				textualRequestData = filecontent;
				
				// NOTE(review): the first assignment is immediately overwritten;
				// presumably only TEXTGROUP is intended — please confirm
				srcType = SearchRequestIntf._REQUEST_SRC_TYPE_TEXT;
				srcType = SearchRequestIntf._REQUEST_SRC_TYPE_TEXTGROUP;
			}else{
				return "";
			}
		}

		// the user may copy the content of a browser page
		if (srcType == SearchRequestIntf._REQUEST_SRC_TYPE_TAGSOUP_HTML) {
			// we should recognize whether it is a google result page...
			// in this case we set it to the WebBrowser as html content, then starting to work
			// on this as if there would have been a search engine request
			
			// organizing the parsing
			HtmlContentProvider hcp = new HtmlContentProvider(textualRequestData);
			
			if (hcp.getSearchEngineSignature()>0){
				// TODO: do sth different for recognized search engine result pages
				return textualRequestData;
			}
			
			textualRequestData = hcp.getHtmlContentStr();
		}
		
		
		return textualRequestData;
	}

	/**
	 * Prepares the textual request: splits long text into parts, detects the
	 * language, and — if the language is available — sends the text to the NLP
	 * satellite application via TCP.
	 *
	 * @param targettext the textual request data
	 * @return 2 if the text was handed to NLP (delivery continues
	 *         asynchronously), 1 if it should be delivered directly
	 * @throws Exception on preparation errors
	 */
	private int prepare( String targettext ) throws Exception {
		// 
		int state=-1;
		boolean textIsLong = false;
		
		TcpBox tcp;
		AppCommBox cbox=null;
		String langCode="" ;
		
		
		// long texts must be split; if "long", then first we apply a PoSTagger
		if (textualIsText(textualRequestData)){
			
			requestMultiStr = buildRequestMultiPart(textualRequestData);
		}else{
			requestMultiStr.add( textualRequestData);
		}

		// normalize whitespace, then measure word count and length
		targettext = strgutil.replaceAll(targettext, "  ", " ").trim();
		int f = strgutil.frequencyOfStr(targettext, " ");
		int n =  targettext.length();
			
		// heuristic: language detection only works reliably on longer input
		textIsLong = (n>80) || (f>=11);
		ld = appCommons.getLanguageDetector() ;
		
		if ((appCommons.getLanguageDetector()!=null) && (textIsLong )){
			langCode = ld.getCode( targettext );
		}
		// e.g. = "de" , en pt, nl, 
		
		if ((langCode.length()==0) && (textIsLong)){
			//throw(new Exception("Unable to recognize language."));
			out.printErr(1, "Unable to recognize language, input can't be prepared. "+
							"Please restrict the input to at most 11 words");
		}else{
			// select respective tcp box, which is connected to the satellite application 

			// TODO: is this language allowed for our application?
			if ((ld!=null) && (ld.getAvailableLanguages()!=null)){
				
				if ( ld.getAvailableLanguages().indexOf(langCode)>=0 ){
					cbox = appCommons.getProcessAdmin().getTcpBox(langCode);
				}else{
					if (textIsLong){	
						out.printErr(1, "Language has been recognized ("+langCode+"), but it is not available, hence your input can't be prepared. "+
								    	"Please restrict the input to at most 11 words");
					}
				}	
			}else{
				out.printErr(1,"Language detector is not available.");
			}
			
		}
		if (cbox!=null){
											out.print(2, "provided textual input has been long enough to detect a language, sending data to NLP processing.." );
			tcp = cbox.tcpBox ;
			tcp.getTcpSender().send( targettext );
			// we need to observe LinguisticsPort
			state = 2; // delivery to request engine will be called asynchronous
		}
		else{
											out.print(2, "provided textual input has not been long enough to detect a language, proceeding the standard way...");
			state = 1; // will be called directly
		}
		return state;
	}

	// ==============================================================
	
	
		

	/**
	 * Splits a long textual request into multiple parts.
	 * TODO: stub — currently always returns an empty list; the suspected
	 * relevant terms and the PoS-tagger result are not used yet.
	 */
	private ArrayList<String> buildRequestMultiPart(String textualRequest) {
		// 
		ArrayList<String> rvt, rmp = new ArrayList<String>();
		
		
		rvt = determineSupectedRelevantterms( textualRequest );
		
		// NOTE(review): the wrapper is constructed but its result is not read;
		// kept in case the constructor has side effects — please confirm
		PoSTaggerWrapper posT = new PoSTaggerWrapper(textualRequest);
		
		
		return rmp;
	}

	/**
	 * Determines terms suspected to be relevant in the request text.
	 * TODO: stub — no terms are extracted yet, the list is always empty.
	 */
	private ArrayList<String> determineSupectedRelevantterms(String textualRequest) {
		// 
		
		ArrayList<String> rvt = new ArrayList<String>();
		
		// return the (empty) list instead of null so callers never have to
		// null-check the result
		return rvt;
	}

	/**
	 * Decides whether the request data is running text (as opposed to a short
	 * keyword-style query).
	 * NOTE(review): the heuristic is unfinished — rB is never set to true, so
	 * this currently always returns false; fsc and len are computed as inputs
	 * to the intended heuristic but not evaluated yet. Behavior kept as-is.
	 */
	private boolean textualIsText(String textualRequest) {
		// 
		boolean rB = false;
		
		String str = textualRequest.trim() ;
		
		str = strgutil.replaceAll(str, "  ", " ");
		int fsc = strgutil.frequencyOfStrings(str, new String[]{".",":",",","!","?"});
		int fb  = strgutil.frequencyOfStrings(str, new String[]{" "});
		int len = str.length() ;
		
		if (fb<=6){
			rB=false;
		}
		
		return rB;
	}

	/**
	 * Classifies the raw input string as URL, existing file, HTML, XML, ini
	 * file content, or plain text (the fallback).
	 *
	 * @param inStr the raw request data
	 * @return one of the SearchRequestIntf._REQUEST_SRC_TYPE_* codes
	 */
	private int determineTypeOfStringInfo(String inStr) {

		int _srcType = -1;

		String str = inStr.trim();

		if (strgutil.isUrl(str, 0)) {
			_srcType = SearchRequestIntf._REQUEST_SRC_TYPE_URL;
			return _srcType;
		}

		// NOTE(review): checks the untrimmed string here, unlike the other
		// branches — confirm whether this is intentional
		if (DFutils.fileExists(inStr)) {
			_srcType = SearchRequestIntf._REQUEST_SRC_TYPE_FILE;
			return _srcType;
		}

		if (strgutil.isHtmlDoc(str)) { 
			_srcType = SearchRequestIntf._REQUEST_SRC_TYPE_TAGSOUP_HTML;
			return _srcType;
		}
		if (strgutil.isXmlDoc(str)) {
			_srcType = SearchRequestIntf._REQUEST_SRC_TYPE_TAGSOUP_XML;
			return _srcType;
		}
		if (strgutil.isIniFileContent(str)) {
			_srcType = SearchRequestIntf._REQUEST_SRC_TYPE_INIFILE;
			return _srcType;
		}
		
		// nothing special recognized: treat as plain text
		_srcType = SearchRequestIntf._REQUEST_SRC_TYPE_TEXT;

		return _srcType;
	}

}


/**
 * Wraps a raw HTML string: extracts its plain-text content via the project's
 * HTMLParser and offers a hook for recognizing search-engine result pages.
 */
class HtmlContentProvider{
	String inText="",contentStr="";
	private HTMLParser hParser;
	PageIntf page;
	
	/**
	 * Immediately parses the given HTML so that the textual content is
	 * available right after construction.
	 *
	 * @param inText the raw HTML source
	 */
	public HtmlContentProvider(String inText) {
		this.inText = inText;
		contentStr = extractText(inText);
	}

	/** Runs the HTML through Page + HTMLParser and returns the plain text. */
	private String extractText(String rawHtml) {
		page = new Page();
		page.setHTML(rawHtml) ;
		
		hParser = new HTMLParser(page) ;
		hParser.parse();
		
		return hParser.getText();
	}

	/** @return the plain-text content extracted from the HTML */
	public String getHtmlContentStr() {
		return contentStr;
	}

	/**
	 * Checks the raw HTML for search-engine signatures.
	 * TODO: stub — signatures should come from a signature file and map to a
	 * code like SearchRequestIntf._SEARCHENGINE_SIGNATURE_CODE_GOOGLE;
	 * currently always returns 0 (no engine recognized).
	 */
	public int getSearchEngineSignature() {
		return 0;
	}
	
	
	
	
	
}



