package cn.edu.dutir.model.vsm;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import com.aliasi.corpus.TextHandler;
import com.aliasi.symbol.MapSymbolTable;
import com.aliasi.symbol.SymbolTable;
import com.aliasi.tokenizer.Tokenizer;
import com.aliasi.tokenizer.TokenizerFactory;
import com.aliasi.util.AbstractExternalizable;
import com.aliasi.util.Compilable;
import com.aliasi.util.ObjectToCounterMap;
import com.aliasi.util.Strings;

/**
 * Trains inverse-document-frequency (IDF) statistics over a stream of
 * documents.  Each handled document increments the document count; every
 * distinct token in the document is registered in a symbol table and has its
 * document-frequency count incremented.  IDF values for terms may then be
 * queried via {@link #idf(String)}.
 *
 * <p>Instances may be compiled to an {@code ObjectOutput} via
 * {@link #compileTo(ObjectOutput)} and reconstituted by LingPipe's
 * {@code AbstractExternalizable} machinery.
 *
 * <p>This class is not thread safe: training mutates shared counters without
 * synchronization.
 */
public class IdfTrainer implements TextHandler, Compilable, Serializable {

	private static final long serialVersionUID = -8282088176122178306L;

	/** Number of documents trained so far. */
	private int mDocCount;
	/** Maps each term to the number of documents containing it. */
	private ObjectToCounterMap<String> mDocFrequencyTable;
	/** Assigns a stable integer id to each distinct term. */
	private SymbolTable mTokenSymbolTable;
	/** Factory used to tokenize raw document text. */
	private final TokenizerFactory mTokenizerFactory;

	/**
	 * Constructs a trainer with the specified tokenizer factory and a fresh
	 * symbol table whose ids start at 0.
	 *
	 * @param tokenizerFactory factory used to tokenize documents
	 */
	public IdfTrainer(TokenizerFactory tokenizerFactory) {
		this(tokenizerFactory, 0);
	}

	/**
	 * Constructs a trainer with the specified tokenizer factory and a fresh
	 * symbol table whose ids start at {@code startId}.
	 *
	 * @param tokenizerFactory factory used to tokenize documents
	 * @param startId first id assigned by the new symbol table
	 */
	public IdfTrainer(TokenizerFactory tokenizerFactory, int startId) {
		this(tokenizerFactory, new MapSymbolTable(startId),
				new ObjectToCounterMap<String>(), 0);
	}

	/**
	 * Constructs a trainer from pre-existing state.  Used directly when
	 * deserializing a compiled trainer.
	 *
	 * @param tokenizerFactory factory used to tokenize documents
	 * @param tokenSymbolTable symbol table mapping terms to ids
	 * @param docFrequencyTable per-term document-frequency counts
	 * @param docCount number of documents already trained
	 */
	public IdfTrainer(TokenizerFactory tokenizerFactory,
			SymbolTable tokenSymbolTable,
			ObjectToCounterMap<String> docFrequencyTable, int docCount) {
		mTokenizerFactory = tokenizerFactory;
		mTokenSymbolTable = tokenSymbolTable;
		mDocFrequencyTable = docFrequencyTable;
		mDocCount = docCount;
	}

	/**
	 * Trains this IDF model on a single document supplied as a character
	 * sequence.  Equivalent to {@link #handle(char[], int, int)} over the
	 * whole sequence.
	 *
	 * @param doc document text
	 */
	public void trainIdf(CharSequence doc) {
		char[] cs = Strings.toCharArray(doc);
		handle(cs, 0, cs.length);
	}

	/**
	 * Trains this IDF model on a document already reduced to a
	 * term-frequency map.  Each distinct term's document frequency is
	 * incremented exactly once regardless of its within-document count.
	 *
	 * @param termFrequency term-frequency counts for one document
	 */
	public void trainDocument(ObjectToCounterMap<String> termFrequency) {
		for (String term : termFrequency.keySet()) {
			mTokenSymbolTable.getOrAddSymbol(term);
			mDocFrequencyTable.increment(term);
		}
		mDocCount++;
	}

	/**
	 * Trains this IDF model on the document in the specified character
	 * slice.  Each distinct token contributes one document-frequency count.
	 *
	 * @param cs underlying character array
	 * @param start first character of the document
	 * @param length number of characters in the document
	 */
	@Override
	public void handle(char[] cs, int start, int length) {
		for (String token : tokenSet(cs, start, length)) {
			mTokenSymbolTable.getOrAddSymbol(token);
			mDocFrequencyTable.increment(token);
		}
		mDocCount++;
	}

	/**
	 * Returns an unmodifiable view of the set of terms seen so far.
	 *
	 * @return unmodifiable set of trained terms
	 */
	public Set<String> symbolSet() {
		return Collections.unmodifiableSet(mDocFrequencyTable.keySet());
	}

	/**
	 * Returns the set of term ids in the symbol table.
	 *
	 * @return set of term ids
	 * @throws ClassCastException if a symbol table other than a
	 *         {@link MapSymbolTable} was injected via
	 *         {@link #tokenSymbolTable(SymbolTable)}
	 */
	public Set<Integer> idSet() {
		// NOTE: idSet() is only available on the concrete MapSymbolTable,
		// not on the SymbolTable interface, hence the downcast.
		return ((MapSymbolTable) mTokenSymbolTable).idSet();
	}

	/**
	 * Returns the set of distinct tokens in the specified character
	 * sequence.
	 *
	 * @param cSeq text to tokenize
	 * @return set of distinct tokens
	 */
	public Set<String> tokenSet(CharSequence cSeq) {
		char[] cs = Strings.toCharArray(cSeq);
		return tokenSet(cs, 0, cs.length);
	}

	/**
	 * Returns the set of distinct tokens in the specified character slice.
	 *
	 * @param cs underlying character array
	 * @param start first character of the slice
	 * @param length number of characters in the slice
	 * @return set of distinct tokens
	 */
	public Set<String> tokenSet(char[] cs, int start, int length) {
		Tokenizer tokenizer = mTokenizerFactory.tokenizer(cs, start, length);
		Set<String> tokenSet = new HashSet<String>();
		String token;
		while ((token = tokenizer.nextToken()) != null) {
			tokenSet.add(token);
		}
		return tokenSet;
	}

	/**
	 * Returns the term-frequency vector for the specified character slice.
	 *
	 * @param cs underlying character array
	 * @param start first character of the slice
	 * @param length number of characters in the slice
	 * @return map from token to its frequency within the slice
	 */
	public ObjectToCounterMap<String> termFrequencyVector(char[] cs, int start,
			int length) {
		ObjectToCounterMap<String> termFrequency = new ObjectToCounterMap<String>();
		// Fix: previously tokenized (cs, 0, cs.length), silently ignoring
		// the start/length arguments; now honors the requested slice.
		Tokenizer tokenizer = mTokenizerFactory.tokenizer(cs, start, length);
		String token;
		while ((token = tokenizer.nextToken()) != null) {
			termFrequency.increment(token);
		}
		return termFrequency;
	}

	/**
	 * Returns the term-frequency vector for the specified character
	 * sequence.
	 *
	 * @param cSeq text to tokenize
	 * @return map from token to its frequency within the sequence
	 */
	public ObjectToCounterMap<String> termFrequencyVector(CharSequence cSeq) {
		char[] cs = Strings.toCharArray(cSeq);
		return termFrequencyVector(cs, 0, cs.length);
	}

	/** @return the tokenizer factory used by this trainer */
	public TokenizerFactory tokenizerFactory() {
		return mTokenizerFactory;
	}

	/** @return the mutable per-term document-frequency table */
	public ObjectToCounterMap<String> docFrequencyTable() {
		return mDocFrequencyTable;
	}

	/** @return the symbol table mapping terms to ids */
	public SymbolTable tokenSymbolTable() {
		return mTokenSymbolTable;
	}

	/**
	 * Replaces the symbol table used by this trainer.
	 *
	 * @param tokenSymbolTable new symbol table
	 */
	public void tokenSymbolTable(SymbolTable tokenSymbolTable) {
		mTokenSymbolTable = tokenSymbolTable;
	}

	/**
	 * Returns the number of documents containing the specified term.
	 *
	 * @param term term to look up
	 * @return document frequency of the term (0 if unseen)
	 */
	public int docFrequency(String term) {
		return mDocFrequencyTable.getCount(term);
	}

	/**
	 * Returns the inverse document frequency of the specified term.
	 *
	 * @param term term to look up
	 * @return IDF value, or 0.0 for unseen terms
	 */
	public double idf(String term) {
		return idf(docFrequency(term));
	}

	/**
	 * Returns the inverse document frequency for the specified document
	 * frequency: {@code log(N/df + 0.01)}, where N is the number of trained
	 * documents.  The 0.01 additive smoothing keeps the IDF strictly
	 * positive when df == N.
	 *
	 * @param df document frequency
	 * @return IDF value, or 0.0 when {@code df} is 0
	 */
	public double idf(int df) {
		if (df == 0) return 0.0;
		return Math.log((double) mDocCount / (double) df + 0.01);
	}

	/** @return number of documents trained so far */
	public int numDocuments() {
		return mDocCount;
	}

	/** @return number of distinct terms seen so far */
	public int numTerms() {
		return mDocFrequencyTable.size();
	}

	/** Resets all training state: document count, symbol table, counts. */
	public void clear() {
		mDocCount = 0;
		mTokenSymbolTable.clear();
		mDocFrequencyTable.clear();
	}

	/**
	 * Compiles this trainer's state to the specified object output.
	 *
	 * @param objOut output stream to write to
	 * @throws IOException if an I/O error occurs while writing
	 */
	@Override
	public void compileTo(ObjectOutput objOut) throws IOException {
		objOut.writeObject(new Externalizer(this));
	}

	/**
	 * Serialization proxy for {@link IdfTrainer}.  Writes the document
	 * count, the document-frequency table, the symbol table, and the
	 * tokenizer factory, in that order; {@link #read(ObjectInput)} reads
	 * them back in the same order.
	 */
	static final class Externalizer extends AbstractExternalizable {
		final IdfTrainer mIdfTrainer;

		/** No-arg constructor required by the externalization framework. */
		public Externalizer() {
			this(null);
		}

		public Externalizer(IdfTrainer idfTrainer) {
			mIdfTrainer = idfTrainer;
		}

		@Override
		protected Object read(ObjectInput in) throws ClassNotFoundException,
				IOException {
			int numDocs = in.readInt();
			// Unchecked cast is unavoidable: readObject() erases generics.
			@SuppressWarnings("unchecked")
			ObjectToCounterMap<String> docFrequencyTable =
					(ObjectToCounterMap<String>) in.readObject();
			SymbolTable tokenSymbolTable = (SymbolTable) in.readObject();
			TokenizerFactory tokenizerFactory = (TokenizerFactory) in.readObject();
			return new IdfTrainer(tokenizerFactory, tokenSymbolTable,
					docFrequencyTable, numDocs);
		}

		@Override
		public void writeExternal(ObjectOutput out) throws IOException {
			out.writeInt(mIdfTrainer.numDocuments());
			out.writeObject(mIdfTrainer.docFrequencyTable());
			out.writeObject(mIdfTrainer.tokenSymbolTable());
			out.writeObject(mIdfTrainer.tokenizerFactory());
		}
	}
}
