package dp.sgd;

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.util.Collection;
import java.util.Random;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.util.Version;
import org.apache.mahout.common.RandomUtils;
import org.apache.mahout.math.RandomAccessSparseVector;
import org.apache.mahout.math.Vector;
import org.apache.mahout.vectorizer.encoders.ConstantValueEncoder;
import org.apache.mahout.vectorizer.encoders.FeatureVectorEncoder;
import org.apache.mahout.vectorizer.encoders.StaticWordValueEncoder;

import com.google.common.collect.ConcurrentHashMultiset;
import com.google.common.collect.Multiset;
import com.google.common.io.Closeables;

public class Helper {

	private static final Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_31);
	private static final FeatureVectorEncoder encoder = new StaticWordValueEncoder("text");
	private static final FeatureVectorEncoder bias = new ConstantValueEncoder("Intercept");
	private final Random rand = RandomUtils.getRandom();

	/** Dimensionality of the hashed (sparse) feature space. */
	public static final int FEATURES = 10000;

	FeatureVectorEncoder getEncoder() {
		return encoder;
	}

	/**
	 * Encodes a raw text string into a sparse feature vector of size {@link #FEATURES}.
	 *
	 * @param string        the text to tokenize and encode
	 * @param overallCounts accumulator updated with every token seen (side effect)
	 * @return a {@link RandomAccessSparseVector} with a bias term plus log-scaled term counts
	 */
	public Vector encodeFeatureVectorRaw(String string, Multiset<String> overallCounts) {
		return encodeText(string, overallCounts);
	}

	/**
	 * Encodes a record's text into a sparse feature vector of size {@link #FEATURES}.
	 *
	 * @param record        source of the text to encode (only {@code getText()} is used)
	 * @param actual        the record's label; currently unused, kept for caller compatibility
	 * @param overallCounts accumulator updated with every token seen (side effect)
	 * @return a {@link RandomAccessSparseVector} with a bias term plus log-scaled term counts
	 */
	public Vector encodeFeatureVector(Record record, int actual, Multiset<String> overallCounts) {
		return encodeText(record.getText(), overallCounts);
	}

	/**
	 * Shared implementation for both public encoders: tokenizes {@code text},
	 * then hash-encodes each distinct term with a log-damped weight plus a
	 * constant intercept feature.
	 */
	private Vector encodeText(String text, Multiset<String> overallCounts) {
		Multiset<String> words = ConcurrentHashMultiset.create();
		Reader reader = new StringReader(text);
		try {
			countWords(words, reader, overallCounts);
		} finally {
			Closeables.closeQuietly(reader);
		}

		Vector v = new RandomAccessSparseVector(FEATURES);
		// Constant intercept feature; NOTE(review): original comment questioned
		// whether the bias is required — confirm against the trained model before removing.
		bias.addToVector("", 1, v);
		for (String word : words.elementSet()) {
			// log1p dampens the influence of very frequent terms
			encoder.addToVector(word, Math.log1p(words.count(word)), v);
		}
		return v;
	}

	/**
	 * Tokenizes {@code reader} with the shared analyzer, adding every token to
	 * {@code words} and (once fully read) to {@code overallCounts}.
	 *
	 * @throws IllegalStateException if the token stream fails; in practice the
	 *         callers pass a {@link StringReader}, which never throws {@link IOException}
	 */
	private void countWords(Collection<String> words, Reader reader, Multiset<String> overallCounts) {
		try {
			TokenStream ts = analyzer.reusableTokenStream("text", reader);
			ts.addAttribute(CharTermAttribute.class);
			ts.reset();
			while (ts.incrementToken()) {
				words.add(ts.getAttribute(CharTermAttribute.class).toString());
			}
			overallCounts.addAll(words);
		} catch (IOException e) {
			// Fail loudly instead of swallowing: silently returning an empty/partial
			// word set would corrupt the feature vector without any visible error.
			throw new IllegalStateException("Tokenization failed", e);
		}
	}

	public Analyzer getAnalyzer() {
		return analyzer;
	}

	public FeatureVectorEncoder getBias() {
		return bias;
	}

	Random getRandom() {
		return rand;
	}

}
