/*******************************************************************************
 * Copyright 2011 Alexandre Zglav and Morphiastic
 * 
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * 
 *   http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package com.morphiastic.analysis.tokenizers;

import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.common.settings.Settings;
import com.morphiastic.annotations.Parameter;
import com.morphiastic.annotations.TokenizerDefinition;

/**
 * Factory for {@link ElasticTokenizer} instances. Tokenizers can be built either from a
 * {@link TokenizerDefinition} annotation or from the {@link Settings} returned by an
 * Elasticsearch server.
 */
public class ElasticTokenizerFactory {


	/**
	 * Builds an {@link ElasticTokenizer} from a {@link TokenizerDefinition} annotation.
	 *
	 * @param tokenizerDefAnnotation the annotation describing the tokenizer; may be null
	 * @return the tokenizer described by the annotation, or null if the annotation is null
	 */
	public static ElasticTokenizer getTokenizerFromAnnotation(TokenizerDefinition tokenizerDefAnnotation) {
		if(tokenizerDefAnnotation == null ) return null;
		String defName = tokenizerDefAnnotation.defName();
		// Only materialize a parameter map when the annotation actually carries parameters;
		// buildTokenizer() treats a null map as "no custom parameters".
		HashMap<String,String> params = null;
		if(tokenizerDefAnnotation.parameters().length>0){
			params = new HashMap<String, String>();
			for (Parameter parameter : tokenizerDefAnnotation.parameters()) {
				params.put(parameter.key(),parameter.value());
			}
		}
		ElasticTokenizerType type  = tokenizerDefAnnotation.type();
		return buildTokenizer(defName, type, params);
	}


	/**
	 * Builds an {@link ElasticTokenizer} of the given type.
	 *
	 * @param defName the definition name; if empty and no custom params are given,
	 *                the type name is used as the definition name
	 * @param type    the kind of tokenizer to build
	 * @param params  custom parameters, or null for a default (non-customized) tokenizer
	 * @return the built tokenizer, or null if the type is not handled
	 * @throws IllegalArgumentException if defName is empty while custom params are
	 *                                  provided (a customized tokenizer needs a proper
	 *                                  name), or if defName is empty and type is null
	 */
	public static ElasticTokenizer buildTokenizer(String defName, ElasticTokenizerType type , Map<String,String> params){
		if (defName.isEmpty()){
			if(params!=null){
				// Custom params mean this is a customized tokenizer: it must be given a
				// proper name, so an empty one is a caller error.
				throw new IllegalArgumentException("No definition provided for custom ElasticTokenizer");
			}else if(type == null){
				// Guard: type.toString() below would otherwise throw a bare NPE.
				throw new IllegalArgumentException("Cannot derive a definition name: tokenizer type is null");
			}else{
				defName = type.toString();
			}
		}

		switch (type) {
			case edgeNGram:
				return new EdgeNGramTokenizer(params, defName);
			case keyword:
				return new KeywordTokenizer(params, defName);
			case letter:
				return new LetterTokenizer(params, defName);
			case lowercase:
				return new LowercaseTokenizer(params, defName);
			case nGram:
				return new NGramTokenizer(params, defName);
			case path_hierarchy:
				return new PathHierarchyTokenizer(params, defName);
			case pattern:
				return new PatternTokenizer(params, defName);
			case standard:
				return new StandardTokenizer(params, defName);
			case uax_url_email:
				return new UAXURLEmailTokenizer(params, defName);
			case whitespace:
				return new WhitespaceTokenizer(params, defName);
			default:
				// Kept for backward compatibility with existing callers that check for null.
				return null;
		}
	}


	/**
	 * Builds an {@link ElasticTokenizer} from the Settings returned by the ES server.
	 *
	 * @param tokenizerName     the name of the tokenizer definition
	 * @param tokenizerSettings the settings for this tokenizer as returned by the server;
	 *                          must contain a "type" entry naming an {@link ElasticTokenizerType}
	 * @return the built tokenizer, with its parameters populated from the settings
	 * @throws IllegalArgumentException if the settings have no "type" entry or name an
	 *                                  unknown tokenizer type
	 * @throws IllegalStateException    if no tokenizer implementation exists for the type
	 */
	public static ElasticTokenizer getTokenizerFromSettings(String tokenizerName, Settings tokenizerSettings){

		String typeVal = tokenizerSettings.get("type");
		if (typeVal == null) {
			// Enum.valueOf(null) would throw a bare NPE; fail with a descriptive message instead.
			throw new IllegalArgumentException(
					"Settings for tokenizer '" + tokenizerName + "' do not contain a 'type' entry");
		}

		ElasticTokenizerType type;
		try {
			type = ElasticTokenizerType.valueOf(typeVal);
		} catch (IllegalArgumentException e) {
			// Re-throw with context (and the original cause) so the failing tokenizer is identifiable.
			throw new IllegalArgumentException(
					"Unknown tokenizer type '" + typeVal + "' for tokenizer '" + tokenizerName + "'", e);
		}

		ElasticTokenizer tokenizer = buildTokenizer(tokenizerName, type, null);
		if (tokenizer == null) {
			// buildTokenizer's default arm returns null; calling setParamsFromSettings on it
			// would throw an uninformative NPE.
			throw new IllegalStateException("No tokenizer implementation for type " + type);
		}
		tokenizer.setParamsFromSettings(tokenizerSettings);
		return tokenizer;
	}

}
