package com.globalwave.filemanage.text.analyzer;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.text.BreakIterator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Locale;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.snowball.SnowballAnalyzer;
import org.apache.lucene.util.Version;

import com.globalwave.filemanage.FMCodeHelper;
import com.globalwave.filemanage.searchengine.Indexer;
import com.globalwave.util.LocaleUtil;




/**
 * Adapts locale-specific text analysis for the file-manage search engine:
 * breaks a text into words, stems them, filters stop words loaded from
 * classpath resources, and provides a matching Lucene {@link Analyzer}.
 *
 * @author Michael Scholz
 * @author Alessandro Gasparini
 */
public class AnalyzerAdapter extends WordRanker {

	protected static Log log = LogFactory.getLog(AnalyzerAdapter.class);

	// Locale code (e.g. "en") selecting stop words and the Lucene analyzer.
	private String locale;

	// Cache of stop-word sets keyed by locale code, shared across instances.
	// NOTE(review): plain HashMap mutated from instance methods — assumes
	// single-threaded initialization; confirm before using concurrently.
	private static Map<String, Set<String>> stopwordMap = new HashMap<String, Set<String>>();

	// Lazily-built Lucene analyzer for this adapter's locale.
	private Analyzer analyzer;

	/**
	 * Creates a new instance of Analyzer.
	 *
	 * @param locale locale code whose stop words should be used
	 */
	public AnalyzerAdapter(String locale) {
		this.locale = locale;
		stoptable = AnalyzerAdapter.getAsMap(getStopwords(locale));
	}

	/**
	 * Creates a new instance of Analyzer.
	 *
	 * @param locale locale code whose stop words should be used
	 * @param len minimum length of words which should be analyzed
	 */
	AnalyzerAdapter(final String locale, int len) {
		this.locale = locale;
		minlen = len;
		// Bug fix: previously read stopwordMap.get(locale) directly, which is
		// null for a never-loaded locale and made getAsMap throw an NPE.
		// getStopwords() loads and caches the set on first use.
		stoptable = AnalyzerAdapter.getAsMap(getStopwords(locale));
	}

	/**
	 * Transforms a set of stop words into a lookup map whose keys and values
	 * are both the stop word itself.
	 *
	 * @param stopwords set of stop words; tolerated as {@code null}
	 * @return the stop-word lookup table, never {@code null}
	 */
	final static Map<String, String> getAsMap(Set<String> stopwords) {
		Hashtable<String, String> stoptable = new Hashtable<String, String>();

		// Robustness: guard against a null set instead of NPE-ing in the loop.
		if (stopwords != null) {
			for (String stopword : stopwords) {
				stoptable.put(stopword, stopword);
			}
		}

		return stoptable;
	}

	/**
	 * Analyzes the given text and stores the result in the inherited
	 * {@code wordcount} / {@code wordtable} fields (see WordRanker).
	 *
	 * @param text text which should be analyzed
	 * @throws Exception in case of exception during Stemmer instantiation
	 */
	public void analyze(String text) throws Exception {
		BreakIterator boundary = BreakIterator.getWordInstance();
		boundary.setText(text);

		Stemmer stemmer = null;
		try {
			stemmer = new Stemmer(LocaleUtil.toLocale(this.locale));
		} catch (Exception e) {
			log.error("Unable to instantiate a Stemmer for language " + this.locale, e);
			throw e;
		}
		AnalyzeResult result = performAnalysis(boundary, new StringBuffer(text), stoptable, minlen, stemmer);
		wordcount = result.getWordCount();
		wordtable = result.getWordTable();
	}

	/**
	 * Analyses a text and builds a table mapping each unique word stem to the
	 * number of occurrences of that stem and a representative original word.
	 *
	 * @param boundary word-boundary iterator already positioned on the text
	 * @param source the text being analyzed
	 * @param stopwords stop-word lookup table (checked for both word and stem)
	 * @param minlen words of this length or shorter are ignored
	 * @param stemmer stemmer matching the text's locale
	 * @return word count and stem table wrapped in an AnalyzeResult
	 */
	AnalyzeResult performAnalysis(
			BreakIterator boundary, 
			StringBuffer source, 
			Map<String, String> stopwords,
			int minlen, Stemmer stemmer) 
	throws IllegalArgumentException, IllegalAccessException, InvocationTargetException {

		int start = boundary.first();
		long wordcount = 0;
		AnalyzeResult result = new AnalyzeResult();
		// Pre-size assuming an average word length of roughly six characters.
		Hashtable<String, WordEntry> wordtable = new Hashtable<String, WordEntry>(source.length() / 6);

		for (int end = boundary.next(); end != BreakIterator.DONE; start = end, end = boundary.next()) {

			String word = source.substring(start, end).trim();
			// Character following the word, used below to detect hyphenated
			// compounds; a space when the word ends the text. (Replaces the
			// former empty catch around charAt with an explicit bounds check.)
			char next = (end < source.length()) ? source.charAt(end) : ' ';

			if (word.length() > minlen) {
				String stem = stemmer.stem(word);

				// Skip stop words, matching both the original and stemmed form.
				// (The former redundant ">= minlen" re-check was removed: the
				// enclosing "> minlen" test already implies it.)
				if (!stopwords.containsKey(word) && !stopwords.containsKey(stem)) {
					wordcount++;

					WordEntry entry;
					if (wordtable.containsKey(stem)) {
						entry = (WordEntry) wordtable.get(stem);
						entry.incValue();

						// Keep the shortest origin word seen for this stem,
						// unless the word is the head of a hyphenated compound.
						if ((word.length() < entry.getOriginWord().length()) && (next != '-')) {
							entry.setOriginWord(word);
						}
					} else {
						entry = new WordEntry();
						entry.incValue();
						entry.setOriginWord(word);
					}
					wordtable.put(stem, entry);
				}
			}
		}

		result.setWordCount(wordcount);
		result.setWordTable(wordtable);

		return result;
	}

	/**
	 * Returns the stop words for the given locale, loading them on first use
	 * from the classpath resource /stopwords/stopwords_&lt;locale&gt;.txt and
	 * caching the result. Everything after a '|' on a line is treated as a
	 * comment; blank lines are skipped.
	 *
	 * @param locale locale code
	 * @return the stop words; empty (never {@code null}) when the resource is
	 *         missing or unreadable
	 */
	private Set<String> getStopwords(final String locale) {

		// Bug fix: the cache was consulted under this.locale but populated
		// under the parameter; use the parameter consistently.
		Set<String> stopWords = stopwordMap.get(locale);

		if (stopWords != null) {
			return stopWords;
		}

		stopWords = new HashSet<String>();
		String stopwordsResource = "/stopwords/stopwords_" + locale + ".txt";

		log.debug("Loading stopwords from: " + stopwordsResource);

		InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(stopwordsResource);
		if (is == null)
			is = getClass().getResourceAsStream(stopwordsResource);

		if (is == null) {
			// Bug fix: the original passed the null stream to
			// InputStreamReader and relied on catch(Throwable) to recover.
			// Cache the empty set so the lookup is not repeated on every call.
			log.warn("Stopwords resource not found: " + stopwordsResource);
			stopwordMap.put(locale, stopWords);
			return stopWords;
		}

		BufferedReader br = null;
		try {
			br = new BufferedReader(new InputStreamReader(is, "UTF-8"));
			String line;
			while ((line = br.readLine()) != null) {
				line = line.trim();
				int comment = line.indexOf('|');
				if (comment != -1) {
					line = line.substring(0, comment).trim();
				}
				// Set.add already ignores duplicates; no contains() pre-check.
				if (line.length() > 0) {
					stopWords.add(line);
				}
			}
		} catch (Throwable e) {
			// Best effort: keep whatever was read so far, as before.
			log.warn(e.getMessage(), e);
		} finally {
			// Bug fix: the reader/stream was never closed (resource leak).
			try {
				if (br != null) {
					br.close();
				} else {
					is.close();
				}
			} catch (Exception ignored) {
				// Closing is best effort; nothing sensible to do here.
			}
		}

		stopwordMap.put(locale, stopWords);
		return stopWords;
	}

	/**
	 * Returns (and caches) the Lucene analyzer configured for this locale.
	 * The analyzer class name is looked up via FMCodeHelper and instantiated
	 * reflectively, trying in order: (Version, Set) constructor, (Version)
	 * constructor, no-arg constructor. Falls back to a SnowballAnalyzer with
	 * the English stemmer when no configured analyzer can be built.
	 *
	 * @return a non-null Lucene Analyzer
	 */
	@SuppressWarnings("unchecked")
	public Analyzer getLuceneAnalyzer() {
		
		String analyzerClass = (String)FMCodeHelper.getAnalyzerMap().get(this.locale) ;
		
		Set<String> stopWords = getStopwords(this.locale) ;
		if (analyzer == null && !StringUtils.isEmpty(analyzerClass)) {

			// Try to instantiate the specified analyzer (Using default
			// constructor)
			Class aClass = null;
			try {
				aClass = Class.forName(analyzerClass);
			} catch (Throwable t) {
				// Bug fix: keep the cause instead of dropping it.
				log.error(analyzerClass + " not found", t);
			}

			// Bug fix: the original continued with a null aClass and relied on
			// NPEs being swallowed by the catch blocks below.
			if (aClass != null) {

				// Try to use constructor (Version matchVersion, Set<?> stopwords)
				if (stopWords != null && (!stopWords.isEmpty())) {
					try {
						Constructor constructor = aClass.getConstructor(new Class[] {Version.class,
								Set.class });
						if (constructor != null)
							analyzer = (Analyzer) constructor.newInstance(Version.LUCENE_30, stopWords);
					} catch (Throwable e) {
						log.debug("constructor (Version matchVersion, Set<?> stopwords)  not found");
					}
				}

				// Try to use constructor (Version matchVersion)
				if (analyzer == null) {
					try {
						Constructor constructor = aClass
								.getConstructor(new Class[] { Version.class });
						if (constructor != null)
							analyzer = (Analyzer) constructor.newInstance(Version.LUCENE_30);
					} catch (Throwable t) {
						log.debug("constructor (Version matchVersion) not found");
					}
				}

				// Try with default constructor
				if (analyzer == null) {
					try {
						analyzer = (Analyzer) aClass.newInstance();
					} catch (Throwable e) {
						log.debug("constructor without arguments not found");
					}
				}
			}
		}

		if (analyzer == null) {
			// Bug fix: getDisplayName() renders in the JVM default locale
			// (e.g. "anglais" on a French JVM), which Snowball's stemmer
			// lookup does not understand; pin the English display name.
			analyzer = new SnowballAnalyzer(
					Indexer.LUCENE_VERSION, 
					Locale.ENGLISH.getDisplayName(Locale.ENGLISH),
					stopWords);
			log.debug("Using default snowball analyzer");
		}
 
		return analyzer;
	}

}