package com.hollycrm.hollysqm.analyzer.ansj;

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.util.TokenizerFactory;
import org.apache.lucene.util.AttributeFactory;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Factory that builds {@link AnsjTokenizer} instances for a Lucene/Solr
 * analysis chain.
 *
 * <p>Recognized factory arguments:
 * <ul>
 *   <li>{@code isQuery}   - tokenize in query mode (default {@code true})</li>
 *   <li>{@code pstemming} - enable stemming (default {@code false})</li>
 *   <li>{@code words}     - path to a UTF-8 stopword file, one word per line
 *                           (optional)</li>
 * </ul>
 * Any unrecognized argument causes an {@link IllegalArgumentException}.
 */
public class AnsjTokenizerFactory extends TokenizerFactory {

	// Stemming flag forwarded to AnsjTokenizer.
	boolean pstemming;
	// Query-mode flag forwarded to AnsjTokenizer.
	boolean isQuery;
	// Path of the stopword file ("words" argument); may be null.
	private String stopwordsDir;
	// Stopwords loaded from stopwordsDir; stays null when no file is configured.
	public Set<String> filter;

	/**
	 * Creates a new AnsjTokenizerFactory from the analyzer configuration.
	 *
	 * @param args factory arguments; consumed keys are removed, leftovers are rejected
	 * @throws IllegalArgumentException if {@code args} contains unknown parameters
	 */
	public AnsjTokenizerFactory(Map<String, String> args) {
		super(args);

		isQuery = getBoolean(args, "isQuery", true);
		pstemming = getBoolean(args, "pstemming", false);
		stopwordsDir = get(args, "words");
		addStopwords(stopwordsDir);

		// getBoolean/get remove consumed keys; anything left is a config typo.
		if (!args.isEmpty()) {
			throw new IllegalArgumentException("Unknown parameters:" + args);
		}
	}

	/**
	 * Loads the stopword list (one word per line, UTF-8) into {@link #filter}.
	 * A missing configuration or unreadable file is reported and ignored so the
	 * factory still works without stopword filtering.
	 *
	 * @param dir path to the stopword file, or {@code null} when not configured
	 */
	private void addStopwords(String dir) {
		if (dir == null) {
			System.out.println("no stopwords dir");
			return;
		}
		System.out.println("stopwords: " + dir);
		filter = new HashSet<String>();
		// try-with-resources closes the reader on every path (the original
		// leaked it); StandardCharsets.UTF_8 avoids the checked charset lookup.
		try (BufferedReader br = new BufferedReader(
				new InputStreamReader(new FileInputStream(dir), StandardCharsets.UTF_8))) {
			String word;
			while ((word = br.readLine()) != null) {
				filter.add(word);
			}
		} catch (FileNotFoundException e) {
			System.out.println("No stopword file found");
		} catch (IOException e) {
			System.out.println("stopword file io exception");
		}
	}

	/**
	 * Builds a tokenizer configured with the query mode, stopword set, and
	 * stemming flag captured at construction time.
	 */
	@Override
	public Tokenizer create(AttributeFactory factory) {
		return new AnsjTokenizer(isQuery, filter, pstemming);
	}
}