package ie.ucc.cs.anggao.finalproject.server.AI;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Stack;

import opennlp.maxent.MaxentModel;
import opennlp.maxent.io.SuffixSensitiveGISModelReader;
import opennlp.tools.chunker.ChunkerME;
import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.postag.DefaultPOSContextGenerator;
import opennlp.tools.postag.POSTaggerME;
import opennlp.tools.tokenize.TokenizerME;

public class chunker {

	// Model file locations, relative to the process working directory.
	private static final String TOKENS = "models/tokenize/EnglishTok.bin.gz";
	private static final String DICT = "models/parser/dict.bin.gz";
	private static final String TAGDICT = "models/parser/tag.bin.gz";
	private static final String ChunkDICT = "models/chunker/EnglishChunk.bin.gz";

	// FIX: initialized at declaration so getNPs() cannot NPE when model
	// loading fails part-way through the constructor.
	// NOTE(review): one shared stack makes this class non-thread-safe and
	// means a later getNPs() call invalidates an earlier result — callers
	// should copy the returned stack if they need to keep it.
	private Stack<String> stack = new Stack<String>();
	TokenizerME tokenizer;
	POSTaggerME postagger;
	ChunkerME chunker;

	/** Manual smoke test: prints NP-only and NP+VP extractions for a sample sentence. */
	public static void main(String[] args) {
		chunker chunkerme = new chunker();
		System.out
				.println(chunkerme
						.getNP("This isn't the greatest example sentence in the world because I've seen better. "));
		System.out
				.println(chunkerme
						.getNPVP("This isn't the greatest example sentence in the world because I've seen better. "));
	}

	/**
	 * Loads the tokenizer, POS-tagger and chunker models from disk.
	 * On failure the exception is logged and the corresponding fields stay
	 * null (original best-effort behavior preserved).
	 */
	public chunker() {
		FileInputStream dictStream = null;
		try {
			tokenizer = new TokenizerME((new SuffixSensitiveGISModelReader(
					new File(TOKENS))).getModel());

			// FIX: keep a handle on the dictionary stream so it can be
			// closed in the finally block (previously leaked).
			dictStream = new FileInputStream(DICT);
			postagger = new POSTaggerME(getModel(TAGDICT),
					new DefaultPOSContextGenerator(new Dictionary(dictStream)));

			chunker = new ChunkerME(getModel(ChunkDICT));

			// FIX: moved inside the try block — this used to run even when
			// model loading threw, causing a guaranteed NullPointerException.
			tokenizer.setAlphaNumericOptimization(true);
		} catch (Exception e) {
			// TODO(review): consider rethrowing instead of leaving a
			// half-initialized instance behind.
			e.printStackTrace();
		} finally {
			if (dictStream != null) {
				try {
					dictStream.close();
				} catch (IOException ignored) {
					// best-effort close; nothing useful can be done here
				}
			}
		}
	}

	/**
	 * Returns every token belonging to a noun-phrase chunk (B-NP or I-NP)
	 * in {@code text}, joined by spaces. The result intentionally keeps the
	 * original leading space so existing callers see an identical format.
	 */
	public String getNP(String text) {
		String[] tokens = tokenizer.tokenize(text);
		String[] tags = postagger.tag(tokens);
		String[] chunkers = chunker.chunk(tokens, tags);
		// FIX: StringBuilder instead of repeated String concatenation.
		StringBuilder np = new StringBuilder();
		System.out.println("\n"); // preserved: original debug output
		for (int i = 0; i < chunkers.length; i++) {
			// "-NP" matches both B-NP (phrase start) and I-NP (continuation).
			if (chunkers[i].contains("-NP")) {
				np.append(' ').append(tokens[i]);
			}
		}
		return np.toString();
	}

	/**
	 * Splits {@code text} into individual noun phrases: a B-NP tag starts a
	 * new phrase, I-NP extends the current one. One stack entry per phrase.
	 *
	 * FIX: the original compared strings with {@code !=} (reference identity,
	 * not content) and unconditionally pushed the accumulator at the end,
	 * which added an empty string when the text contained no noun phrase.
	 */
	public Stack<String> getNPs(String text) {

		stack.clear();
		String[] tokens = tokenizer.tokenize(text);
		String[] tags = postagger.tag(tokens);
		String[] chunkers = chunker.chunk(tokens, tags);
		String current = "";
		for (int i = 0; i < chunkers.length; i++) {
			if (chunkers[i].contains("-NP")) {
				if (chunkers[i].equals("B-NP")) {
					if (current.length() > 0) {
						stack.add(current);
					}
					current = tokens[i];
				} else {
					current = current + " " + tokens[i];
				}
			}
		}
		if (current.length() > 0) {
			stack.add(current);
		}

		return stack;
	}

	/**
	 * Returns every token belonging to a noun-phrase or verb-phrase chunk in
	 * {@code text}, joined by spaces (leading space preserved, as in getNP).
	 */
	public String getNPVP(String text) {

		String[] tokens = tokenizer.tokenize(text);
		String[] tags = postagger.tag(tokens);
		String[] chunkers = chunker.chunk(tokens, tags);
		// FIX: StringBuilder instead of repeated String concatenation.
		StringBuilder nvp = new StringBuilder();
		System.out.println("\n"); // preserved: original debug output
		for (int i = 0; i < chunkers.length; i++) {
			if (chunkers[i].contains("-NP") || chunkers[i].contains("-VP")) {
				nvp.append(' ').append(tokens[i]);
			}
		}

		return nvp.toString();

	}

	/**
	 * Loads a maxent model from {@code name}.
	 *
	 * NOTE(review): returns null on I/O failure, which surfaces later as an
	 * NPE in the constructor — kept for compatibility, but propagating the
	 * IOException would fail faster.
	 */
	private static MaxentModel getModel(String name) {
		try {
			return new SuffixSensitiveGISModelReader(new File(name)).getModel();
		} catch (IOException e) {
			e.printStackTrace();
			return null;
		}
	}
}
