package org.NooLab.itexx.app.controller.analyzer;

import java.util.ArrayList;

import org.NooLab.itexx.ITexxWindowCommons;
import org.NooLab.itexx.app.controller.SessionManager;
import org.NooLab.itexx.app.controller.results.SearchEngineResultItem;
import org.NooLab.itexx.stuctures.RequestFocusSpec;
import org.NooLab.utilities.clipboard.ClipBoardListener;
import org.NooLab.utilities.datatypes.IndexedDistances;
import org.NooLab.utilities.logging.PrintLog;
import org.NooLab.utilities.strings.StringsUtil;
import org.NooLab.utilities.strings.XMap;


import org.apache.commons.codec.language.Metaphone;
// import org.apache.commons.lang.builder.EqualsBuilder;
// import org.apache.commons.lang.builder.HashCodeBuilder;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;








/**
 * Scores a single search-engine result document against the query and the
 * request focus held by a {@link SearchEngineResultItem}.
 * <p>
 * The scoring runs immediately in the constructor: the document text is
 * normalized and split into sentences, header sections are detected in the
 * HTML, and a {@link TermScore} instance writes the resulting score value
 * into the result item (retrievable via {@link #getSer()}).
 */
public class SerDocumentScoring {

	ITexxWindowCommons appCommons;
	SessionManager sessionMgr;
	SearchEngineResultItem serItem;
	
	TermScore termScore;
	
	private RequestFocusSpec requestFocus;
	
	private AnalyticsPort analyticsPort; // hosted by appCommons
	
	// each XMap is a bean with index, position, and string of one sentence
	ArrayList<XMap> sentenceMap ;
	
	PrintLog out;
	StringsUtil strgutil = new StringsUtil();
	
	
	
	// ========================================================================
	/**
	 * Creates the scorer and immediately calculates the score for the given
	 * result item.
	 *
	 * @param commons  application-wide services (logging, analytics port)
	 * @param smgr     the current session manager, passed through to TermScore
	 * @param serItem  the result item to score; its score field is updated
	 *                 as a side effect of {@link #calculate()}
	 */
	public SerDocumentScoring( ITexxWindowCommons commons, SessionManager smgr,
							   SearchEngineResultItem serItem) {
		//
		appCommons = commons;
		out = appCommons.out;
		
		analyticsPort = appCommons.getAnalyticsPort();
		
		sessionMgr = smgr ;
		this.serItem = serItem;
		
		requestFocus = serItem.getRequestFocus() ;
		
		// if plural, add singular, remove genitive
		// create rich soundex = stringsimplifier.superphone
		 
		// note that we need a background process, which sifts through "all available" documents
		// it is important to select the appropriate reference set !!
		
		calculate();
	}


	/**
	 * Runs the term scoring for the current document: prepares the plain
	 * text and the sentence map, feeds query terms, context and global term
	 * frequencies into a {@link TermScore}, and lets it determine the score,
	 * which is stored inside the serItem.
	 */
	public void calculate() {
		// 
											out.printErr(2, ">>>>>  now calculating scoring for : "+
															serItem.getTargetDocUrl() +" <<<<<") ;
		
		// is a global SER list available?
		
		// prepare text: normalize whitespace, derive sentence map, scan headers
		prepareDocumentText();
		
		// ............................
		
		// this document
		termScore = new TermScore(appCommons, sessionMgr ) ;
		
		termScore.setQueryTerms( serItem.getSeQuery() ) ;
		
		termScore.addContext( serItem.getRequestFocus()) ;
		
		// so far not used by determine(), supplied for future relevance weighting
		termScore.provideGlobalFrequencies( analyticsPort.getGlobaleTermFrequencies() ) ;
		
		termScore.provideSentenceMap( sentenceMap ); 
		
		termScore.determine(serItem);
		// now the serItem contains a score value
		
		// ............................
		// termScore.simplifyStrings(); for relaxed matching

		// ............................
		
		// update the global frequency list (will be "partially" reset in a new session);
		// called for its side effect — the returned IndexedDistances is not needed here
		termScore.buildFrequencyList();

				// in SessionManager : sub-process 
				out.printErr(2, "\n  >>>>>  calculations of scoring have been completed.\n");
	}


	/**
	 * Normalizes the document's plain text (trim, collapse double blanks),
	 * writes it back to the serItem, splits it into the sentence map, and
	 * scans the document HTML for header (h1..h5) sections.
	 */
	private void prepareDocumentText() {
		
		String text = serItem.getTargetDocContentText() ;
		if (text == null) {
			text = ""; // guard: some result items arrive without extracted text
		}
		
		text = text.trim();
		text = strgutil.replaceAll(text, "  ", " ");
		
		serItem.setTargetDocContentText(text) ;
		
		// XMap is a bean with index, position, and string
		sentenceMap = strgutil.splitStringby(text, new String[]{".","!",":","?"}, new String[]{".","!",":","?"}, false, false);
		
		// now sections
		int[] availableHeaderLevels = checkForAvailableHeaderLevels();
		
		String html = serItem.getTargetDocHtml() ;
		
		Document soupdoc = Jsoup.parse(html) ;
		
		for (int i = 1; i < availableHeaderLevels.length; i++) {
			// frequencies are 0 for absent levels, never negative;
			// the original tested "<0" which could never skip anything
			if (availableHeaderLevels[i] <= 0) {
				continue;
			}
			ArrayList<String> headertext = new ArrayList<String>();
			String headersMarker = "h" + i;
			Elements hElements = soupdoc.select( headersMarker ) ;
			
			for (int h = 0; h < hElements.size(); h++) {
				Element headelement = hElements.get(h);
				String htext = headelement.text();
				headertext.add( htext );
				// get outer html = tag + text + endtag 
				if ((htext != null) && (htext.length() > 0)) {
					// all sibling elements following this header;
					// the original passed the literal string "headersMarker"
					// as the tag name, which could never match
					// NOTE(review): the selected section is not yet consumed — TODO wire into scoring
					Elements section = soupdoc.select(headersMarker + ":contains(" + htext + ") ~ *");
				}
			}
			
		}// i-> all levels
		
	}


	/**
	 * this checks just whether a level is available, and by that also the frequency of it, 
	 * it DOES NOT determine positions or content
	 * <p>
	 * As a side effect, upper-case header tags in the html are lower-cased
	 * ("&lt;H" -&gt; "&lt;h") and the html is written back to the serItem.
	 * 
	 * @return an int[6]; index 1..5 holds the frequency of header level h1..h5
	 *         (index 0 is unused)
	 */
	private int[] checkForAvailableHeaderLevels() {
		
		int[] hLevels = new int[6] ;  // -> L1 .. L5, index 0 unused
		
		String html = serItem.getTargetDocHtml() ;
		
		// normalize upper-case header tags so a single lower-case search suffices
		html = strgutil.replaceAll(html, "<H", "<h") ;
		
		for (int i = 1; i <= 5; i++) {
			String hsig = "<h" + i;
			int p = html.indexOf(hsig);
			// ">= 0": a header tag at position 0 must count as well
			// (the dead re-check for "<H" was removed — all "<H" were lower-cased above)
			if (p >= 0) {
				hLevels[i] = Math.max(1, strgutil.frequencyOfStr(html, hsig));
			}
			
		}// i_>
		
		serItem.setTargetDocHtml(html) ;
		return hLevels;
	}


	/**
	 * @return the result item, now carrying the calculated score value
	 */
	public SearchEngineResultItem getSer() {
		// 
		
		return serItem;
	}
	
	

}
