package il.ac.technion.wiki.impl.queries;

import il.ac.technion.wiki.api.search.AndSearchExpression;
import il.ac.technion.wiki.api.search.ISearchExpression;
import il.ac.technion.wiki.api.search.NotSearchExpression;
import il.ac.technion.wiki.api.search.OrSearchExpression;
import il.ac.technion.wiki.api.search.TextSearchExpression;

import java.util.ArrayList;
import java.util.EmptyStackException;
import java.util.List;
import java.util.Stack;
import java.util.Vector;


/**
 * Generates {@link ISearchExpression} query trees from string queries.
 *
 * <p>A query consists of whitespace-separated terms, the case-insensitive
 * operators {@code AND} / {@code OR} / {@code NOT}, double-quoted phrases and
 * parenthesized sub-expressions. Adjacent expressions that are not explicitly
 * combined by an operator are implicitly ANDed together.
 */
public class QueryParser {
	/** Thrown when a query string cannot be tokenized or parsed. */
	public static class IllegalQueryException extends Exception {
		private static final long serialVersionUID = -8100716775600254408L;
		public IllegalQueryException() { super(); }
		public IllegalQueryException(String message, Throwable cause) { super(message, cause); }
		public IllegalQueryException(String message) { super(message); }
		public IllegalQueryException(Throwable cause) { super(cause); }
	}
	
	/**
	 * Parses a query string into an {@link ISearchExpression} tree.
	 *
	 * @param query the raw query string
	 * @return the root of the expression tree, or {@code null} for an empty query
	 * @throws IllegalQueryException if the query is syntactically invalid
	 */
	public static ISearchExpression parse(String query) throws IllegalQueryException {
		return processTokenScope(tokenize(query));
	}
	
	/** Internal signal: an operator token's operand(s) have not been parsed yet. */
	private static class NeedOperands extends Exception {
		private static final long serialVersionUID = -9052653932013731315L;
	}
	
	/**
	 * Converts a single token into an expression node.
	 *
	 * <p>Quoted tokens (the tokenizer keeps the surrounding double-quotes) and
	 * plain words become {@link TextSearchExpression}s; AND/OR/NOT (case-
	 * insensitive) become the corresponding operator nodes, consuming the
	 * already-parsed neighbor expressions passed in.
	 *
	 * @param t the token to convert
	 * @param left the already-parsed expression to the token's left, or {@code null}
	 * @param right the already-parsed expression to the token's right, or {@code null}
	 * @throws NeedOperands if the token is an operator whose operand(s) are not
	 *         available yet; the caller should retry on a later sweep
	 */
	private static ISearchExpression processToken(Token t, ISearchExpression left, ISearchExpression right)
										throws NeedOperands {
		if (t.text.charAt(0) == '"') {
			// strip the surrounding double-quotes the tokenizer kept; a quoted
			// token always has a matching closing quote, so length >= 2
			String tokenText = t.text.substring(1, t.text.length() - 1);
			return new TextSearchExpression(tokenText);
		} else if (t.text.equalsIgnoreCase("AND")) {
			if (left == null || right == null) throw new NeedOperands();
			return new AndSearchExpression(left, right);
		} else if (t.text.equalsIgnoreCase("OR")) {
			if (left == null || right == null) throw new NeedOperands();
			return new OrSearchExpression(left, right);
		} else if (t.text.equalsIgnoreCase("NOT")) {
			if (right == null) throw new NeedOperands();
			return new NotSearchExpression(right);
		} else {
			return new TextSearchExpression(t.text);
		}
	}
	
	/**
	 * Reduces a token scope to a single expression tree.
	 *
	 * <p>Strategy: repeatedly sweep the item list, turning tokens and nested
	 * scopes into expressions. Operator tokens consume neighboring expressions
	 * once those are available; if a whole sweep makes no progress while
	 * unparsed items remain, the query cannot be parsed. Any expressions left
	 * over at the end are implicitly ANDed together.
	 *
	 * @param ts the scope to reduce
	 * @return the root expression, or {@code null} if the scope is empty
	 * @throws IllegalQueryException if the scope cannot be reduced
	 */
	private static ISearchExpression processTokenScope(TokenScope ts) throws IllegalQueryException {
		List<Object> items = new ArrayList<Object>(ts.subTokens);
		boolean notDoneYet = true;
		
		while (notDoneYet) {
			notDoneYet = false;	// presume done until we find otherwise
			boolean progress = false;
			
			for (int i = 0; i < items.size(); ++i) {
				Object cur = items.get(i);
				
				if (cur instanceof ISearchExpression)
					continue;	// we already successfully parsed this one
				
				notDoneYet = true;
				
				// exact class check on purpose: Token appears to be a TokenScope
				// subtype (see the compaction in tokenize), so instanceof would
				// wrongly match plain tokens here
				if (cur.getClass() == TokenScope.class) {
					items.set(i, processTokenScope((TokenScope) cur));
					progress = true;
					continue;
				}
				
				// a plain token: gather already-parsed neighbors as candidate operands
				try {
					ISearchExpression left = null, right = null;
					if (i > 0 && items.get(i - 1) instanceof ISearchExpression)
						left = (ISearchExpression) items.get(i - 1);
					if (i + 1 < items.size() && items.get(i + 1) instanceof ISearchExpression)
						right = (ISearchExpression) items.get(i + 1);
					
					ISearchExpression expr = processToken((Token) cur, left, right);
					items.set(i, expr);
					
					// binary operators consumed both neighbors, unary ones only the right
					switch (expr.numberOfOperands()) {
						case 1:
							items.remove(i + 1);
							break;
						case 2:
							items.remove(i - 1);
							i--;	// the new expression shifted one slot left
							items.remove(i + 1);
							break;
					}
					progress = true;
				} catch (NeedOperands e) {
					// operands not available yet - retry on a later sweep
				}
			}
			
			if (notDoneYet && !progress) {
				throw new IllegalQueryException("query cannot be parsed");
			}
		}
		
		// now just AND all the expressions that weren't explicitly ANDed
		ISearchExpression root = null;
		for (Object item : items) {
			root = AndSearchExpression.append(root, (ISearchExpression) item);
		}
		return root;
	}
	
	/**
	 * Closes the token that started at {@code tokenStartPos} (if one is in
	 * progress), ending just before {@code end}, and adds it to the innermost
	 * open scope.
	 *
	 * @return the "no token in progress" marker, always -1
	 */
	private static int flushToken(String query, int tokenStartPos, int end, Stack<TokenScope> openTokenScopes) {
		if (tokenStartPos >= 0) {
			openTokenScopes.peek().subTokens.add(new Token(query.substring(tokenStartPos, end)));
		}
		return -1;
	}
	
	/**
	 * Splits a query string into a tree of token scopes.
	 *
	 * <p>Whitespace separates tokens; backslash escapes the next character
	 * (the backslash itself stays in the token text); double-quotes delimit
	 * phrase tokens (the quotes are kept so {@code processToken} can recognize
	 * them); parentheses open/close nested scopes, which are compacted when
	 * they contain a single child.
	 *
	 * <p>This is public for testing purposes only. <b>Do not use!</b>
	 *
	 * @param query the raw query string
	 * @return the root token scope
	 * @throws IllegalQueryException on unbalanced parentheses, an unclosed
	 *         quote, or a trailing backslash
	 */
	public static TokenScope tokenize(String query) throws IllegalQueryException {
		Stack<TokenScope> openTokenScopes = new Stack<TokenScope>();
		boolean escaped = false;
		boolean quoted = false;
		int tokenStartPos = -1;	// -1 means no token currently in progress
		
		openTokenScopes.push(new TokenScope());	// create the root scope
		
		for (int i = 0; i < query.length(); ++i) {
			char c = query.charAt(i);
			
			if (escaped) {
				// previous char was '\' - take this one verbatim.
				// NOTE(review): if no token is in progress the escaped character
				// is dropped (e.g. a query starting with "\a") - confirm intended.
				escaped = false;
				continue;
			} else if (quoted && c != '"') {
				// inside a quoted phrase everything but the closing quote is literal
				continue;
			}
			
			switch (c) {
				case ' ':
				case '\t':
				case '\r':
				case '\n':
					// whitespace marks the end of a non-quoted token (no-op otherwise)
					tokenStartPos = flushToken(query, tokenStartPos, i, openTokenScopes);
					break;
				case '\\':
					escaped = true;
					break;
				case '"':
					if (!quoted) {
						// a quote ends any non-quoted token in progress, then opens a phrase
						tokenStartPos = flushToken(query, tokenStartPos, i, openTokenScopes);
						quoted = true;
						tokenStartPos = i;
					} else {
						// closing quote: include both quotes in the saved token
						tokenStartPos = flushToken(query, tokenStartPos, i + 1, openTokenScopes);
						quoted = false;
					}
					break;
				case '(':
					tokenStartPos = flushToken(query, tokenStartPos, i, openTokenScopes);
					openTokenScopes.push(new TokenScope());
					break;
				case ')':
					tokenStartPos = flushToken(query, tokenStartPos, i, openTokenScopes);
					try {
						TokenScope currentScope = openTokenScopes.pop();
						if (currentScope.subTokens.size() == 1) {
							// compact, so we don't end up with deeply nested scopes
							currentScope = currentScope.subTokens.get(0);
						}
						openTokenScopes.peek().subTokens.add(currentScope);
					} catch (EmptyStackException e) {
						throw new IllegalQueryException(Integer.toString(i) + ": no matching open parenthesis", e);
					}
					break;
				default:
					// all other characters are assumed to be part of a token
					if (tokenStartPos < 0)
						tokenStartPos = i;
					break;
			}
		}
		
		if (openTokenScopes.size() != 1)
			throw new IllegalQueryException("unmatched number of opening and closing parentheses");
		if (quoted)
			throw new IllegalQueryException("unclosed double-quote");
		if (escaped)
			throw new IllegalQueryException("backslash illegal at end of query");
		
		// close the trailing token, if any
		flushToken(query, tokenStartPos, query.length(), openTokenScopes);
		
		return openTokenScopes.pop();
	}
}