/**
 * Copyright 2006 The Apache Software Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ocean.main.fields;

import java.io.IOException;
import java.io.Reader;
import java.text.ParseException;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang.builder.ReflectionToStringBuilder;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.Term;
import org.apache.ocean.analysis.SolrAnalyzer;

/**
 * A named field in the index schema: pairs a field name with its
 * {@link FieldType} and a set of per-field {@link SchemaField.Attribute} flags
 * (indexed, stored, multiValued, ...), and builds the Lucene {@link Fieldable}
 * and analysis chain for values of that field.
 */
public final class SchemaField {
	/** Field name as declared in the schema. */
	private final String name;
	/** Type controlling analysis and internal/external value conversion. */
	private final FieldType fieldType;

	/** Per-field flags that may be declared on a schema field. */
	public enum Attribute {
		SORTMISSINGLAST, INDEXED, TOKENIZED, STORED, BINARY, OMITNORMS, TERMVECTOR, TERMPOSITIONS, TERMOFFSETS, MULTIVALUED, PAYLOAD
	}

	/** Attributes enabled on this field; starts empty, populated via {@link #addAttribute}. */
	private final EnumSet<Attribute> attributes = EnumSet.noneOf(Attribute.class);

	/**
	 * Maps schema attribute names (e.g. "termVectors") to their {@link Attribute}
	 * constants. NOTE(review): left public and mutable for backward compatibility
	 * with existing callers, but it should be treated as read-only.
	 */
	public static Map<String, Attribute> attributeMap = new HashMap<String, Attribute>(Attribute.values().length);
	static {
		attributeMap.put("sortMissingLast", Attribute.SORTMISSINGLAST);
		attributeMap.put("indexed", Attribute.INDEXED);
		attributeMap.put("tokenized", Attribute.TOKENIZED);
		attributeMap.put("stored", Attribute.STORED);
		attributeMap.put("binary", Attribute.BINARY);
		attributeMap.put("omitNorms", Attribute.OMITNORMS);
		attributeMap.put("termVectors", Attribute.TERMVECTOR);
		attributeMap.put("termPositions", Attribute.TERMPOSITIONS);
		attributeMap.put("termOffsets", Attribute.TERMOFFSETS);
		attributeMap.put("multiValued", Attribute.MULTIVALUED);
		attributeMap.put("payload", Attribute.PAYLOAD);
	}

	/**
	 * Creates a schema field.
	 *
	 * @param name      the field's name in the schema
	 * @param fieldType the type governing this field's analysis and storage
	 */
	public SchemaField(String name, FieldType fieldType) {
		this.name = name;
		this.fieldType = fieldType;
	}

	/**
	 * Returns true when this field is both marked {@link Attribute#INDEXED} and
	 * has a field type that supports indexing (string, double, or long).
	 */
	public boolean isFieldIndexable() {
		if (!attributes.contains(Attribute.INDEXED)) {
			return false;
		}
		// Only these concrete field types are currently indexable.
		return fieldType instanceof StringFieldType
				|| fieldType instanceof DoubleFieldType
				|| fieldType instanceof LongFieldType;
	}

	/**
	 * Returns the synthetic term used to mark this field in the field cache.
	 * The term's field is "&lt;name&gt;.fieldcache" and its text is always "value".
	 */
	public Term getCachedTerm() {
		String field = name + ".fieldcache";
		String text = "value";
		return new Term(field, text);
	}

	/** Enables the given attribute on this field. */
	public void addAttribute(Attribute attribute) {
		attributes.add(attribute);
	}

	/** Returns true if the given attribute is enabled on this field. */
	public boolean has(Attribute attribute) {
		return attributes.contains(attribute);
	}

	/**
	 * Default analyzer for types that only produce 1 verbatim token... A maximum
	 * size of chars to be read must be specified.
	 *
	 * <p>Non-static inner class by necessity: it reads the enclosing field's
	 * {@code fieldType} and {@code attributes}.
	 */
	public class DefaultAnalyzer extends SolrAnalyzer {
		/** Maximum number of characters read from the input for the single token. */
		final int maxChars;

		DefaultAnalyzer(int maxChars) {
			this.maxChars = maxChars;
		}

		public TokenStream tokenStream(String fieldName, Reader reader) {
			return new Tokenizer(reader) {
				char[] chars = new char[maxChars];

				public Token next() throws IOException {
					try {
						int n = input.read(chars, 0, maxChars);
						if (n <= 0)
							return null; // empty input: no token at all
						String string = new String(chars, 0, n);
						// Convert the raw external value to the type's internal form.
						String internalString = fieldType.toInternal(string);
						Token token = new Token(internalString, 0, n);
						if (attributes.contains(Attribute.PAYLOAD)) {
							// Attach the type's binary encoding of the value as a payload.
							Object object = fieldType.stringToObject(string);
							byte[] payload = fieldType.objectToBytes(object);
							if (payload != null) {
								token.setPayload(new Payload(payload));
							}
						}
						return token;
					} catch (ParseException parseException) {
						// Preserve the cause; values that fail type parsing are programming/schema errors.
						throw new RuntimeException(parseException);
					}
				}
			};
		}
	}

	/**
	 * Returns the analyzer for this field.
	 *
	 * <p>NOTE(review): the original method body had no return statement at all,
	 * which does not compile; restored the commented-out {@code return null} so
	 * the class builds. TODO: wire up a real analyzer (e.g. a
	 * {@link DefaultAnalyzer} or one supplied by the field type).
	 */
	public Analyzer getAnalyzer() {
		// TODO: needs to return analyzer
		return null;
	}

	@Override
	public String toString() {
		return ReflectionToStringBuilder.toString(this);
	}

	/** Returns the field's schema name. */
	public String getName() {
		return name;
	}

	/** Returns the field's type. */
	public FieldType getFieldType() {
		return fieldType;
	}

	/**
	 * Creates the Lucene field for the given value by delegating to the field type.
	 *
	 * @param val   the external (string) value
	 * @param boost the index-time boost to apply
	 */
	public Fieldable createField(String val, float boost) {
		return fieldType.createField(this, val, boost);
	}
}
