/*
*   Meaningful Sentence Realizer
*
*   @author: Sampat Biswas
*   Copyright 2011 Sampat Biswas
*
*   This file is part of Sentence-Realizer.
*
*   Sentence-Realizer is free software: you can redistribute it and/or modify
*   it under the terms of the GNU General Public License as published by
*   the Free Software Foundation, either version 3 of the License, or
*   (at your option) any later version.
*
*   Sentence-Realizer is distributed in the hope that it will be useful,
*   but WITHOUT ANY WARRANTY; without even the implied warranty of
*   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
*   GNU General Public License for more details.
*
*   You should have received a copy of the GNU General Public License
*   along with Sentence-Realizer.  If not, see <http://www.gnu.org/licenses/>.
*	
*
*/

import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Stack;

/** Root of the ontology type hierarchy. */
class TOP {
	// Lexical label; used as the key into the lexicon maps when realizing.
	String label;
}

/** Ontology category marker: objects (no state of its own). */
class OBJECT extends TOP {
	
}

/** Ontology category marker: events (no state of its own). */
class EVENT extends TOP {
	
}

/** Ontology category: qualities; concrete qualities (SMALL, WILD, ...) extend this. */
class QUALITY extends TOP {
	
}

/** Ontology category: states; concrete states (e.g. LIKE) extend this. */
class STATE extends TOP {
	
}

/** Ontology category marker: state changes (no state of its own). */
class STATE_CHANGE extends TOP {
	
}

/** Ontology category marker: speech acts (no state of its own). */
class SPEECHACT extends TOP {
	
}

/** Ontology category marker: relations (no state of its own). */
class RELATION extends TOP {
	
}

/** Ontology category: entities; the ANIMAL hierarchy extends this. */
class ENTITY extends TOP {
	
}

/** Ontology concept for places; its label is overwritten with a location id at realization time. */
class LOCATION extends TOP {
	public LOCATION() {
		this.label = "location";
	}
}
/////////////////////////////////////////////////////////

/** STATE concept "like"; its label keys into the verb lexicon. */
class LIKE extends STATE {
	public LIKE() {
		this.label = "like";
	}
}

/** Animate entity; may carry up to two qualities attached during NP expansion. */
class ANIMAL extends ENTITY {
	QUALITY quality1;
	QUALITY quality2;
}

/** Taxonomy node between ANIMAL and concrete species (e.g. WHALE). */
class MAMMAL extends ANIMAL {
	
}

/** Concrete mammal concept realized as the noun "whale". */
class WHALE extends MAMMAL {
	public WHALE() {
		this.label = "whale";
	}
}

/** Taxonomy node for humans; PEOPLE extends this. Mapped from id "Entity1" by the realizer. */
class HUMAN extends ANIMAL {
	
}

/** Concrete human concept realized as the noun "people". */
class PEOPLE extends HUMAN {
	public PEOPLE() {
		this.label = "people";
	}
}

/** Concrete animal concept realized as the noun "kangaroo". */
class KANGAROO extends ANIMAL {
	public KANGAROO() {
		this.label = "kangaroo";
	}
}

/** Quality concept realized as the adjective "small". */
class SMALL extends QUALITY {
	public SMALL() {
		this.label = "small";
	}
}

/** Quality concept realized as the adjective "friendly". */
class FRIENDLY extends QUALITY {
	public FRIENDLY() {
		this.label = "friendly";
	}
}

/** Quality concept realized as the adjective "wild". */
class WILD extends QUALITY {
	public WILD() {
		this.label = "wild";
	}
}

/** Quality concept realized as the adjective "dangerous". */
class DANGEROUS extends QUALITY {
	public DANGEROUS() {
		this.label = "dangerous";
	}
}
/////////////////////////////////////////////////////////

/**
 * Base phrase category (sentence, "S"). Carries the raw semantic head
 * string being realized, plus the keys used to look up grammar rules
 * and lexicon entries for this phrase.
 */
class S {
	protected String head;
	protected String lexiconKey;
	protected String grammarKey;

	public S() {
		this.head = "";
		this.lexiconKey = "";
		this.grammarKey = "S";
	}

	/** @return the semantic head string bound to this phrase */
	public String getHead() {
		return this.head;
	}

	/** Binds the semantic head string for this phrase. */
	public void setHead(String head) {
		this.head = head;
	}

	/** @return key into the lexicon table ("" for non-lexical phrases) */
	public String getLexiconKey() {
		return this.lexiconKey;
	}

	/** @return key into the grammar rule table */
	public String getGrammarKey() {
		return this.grammarKey;
	}
}

/**
 * Noun phrase. The fields below are semantic-role slots filled from the
 * input frame while the sentence rule is being expanded.
 */
class NP extends S {
	String id;
	String experiencer;
	String _agent;
	String attribute;
	String patient;
	String quality;
	String contract;

	public NP() {
		this.grammarKey = "NP";
	}
}

/**
 * Verb phrase. Carries the state/event id, the ontology concept realized
 * by the verb, and optional modifier slots taken from the input frame.
 */
class VP extends S {
	String id;
	TOP type;
	String attribute;
	String location;
	String time;
	String degree;
	String patient;
	String chance;

	public VP() {
		this.grammarKey = "VP";
	}
}

/** Prepositional phrase; {@code link} holds the raw ":link" sub-frame text. */
class PP extends S {
	String link;

	public PP() {
		this.grammarKey = "PP";
	}
}

/**
 * Auxiliary verb terminal, looked up in the "Aux" lexicon.
 * (The class name keeps its established misspelling of "Auxiliary";
 * renaming it would break existing references.)
 */
class Auxuliary extends S {
	public static Object label_id;

	// Auxiliary marker (e.g. a CHANCE_* constant) chosen during expansion.
	private String aux;

	public Auxuliary() {
		this.lexiconKey = "Aux";
	}

	/** @return the auxiliary marker set via {@link #setAux} */
	public String getAux() {
		return this.aux;
	}

	/** Records the auxiliary marker for this terminal. */
	public void setAux(String aux) {
		this.aux = aux;
	}
}

/** Adjective terminal, looked up in the "Aj" lexicon by label_id. */
class Adjective extends S {
	// NOTE(review): static — a single id is shared by ALL Adjective
	// instances (V and P use instance fields instead); confirm intended.
	public static Object label_id;
	public Adjective() {
		lexiconKey = "Aj";
	}
}

/** Adverb terminal, looked up in the "Adv" lexicon by label_id. */
class Adverb extends S {
	// NOTE(review): static — shared by all Adverb instances; confirm intended.
	public static Object label_id;
	public Adverb() {
		lexiconKey = "Adv";
	}
}

/** Pronoun terminal, looked up in the "Pro" lexicon by label_id. */
class Pronoun extends S {
	// NOTE(review): static — shared by all Pronoun instances; confirm intended.
	public static Object label_id;
	public Pronoun() {
		lexiconKey = "Pro";
	}
}

/** Quantifier terminal, looked up in the "Quant" lexicon by label_id. */
class Quant extends S {
	// NOTE(review): static — shared by all Quant instances; confirm intended.
	public static Object label_id;
	public Quant() {
		lexiconKey = "Quant";
	}
}

/** Noun terminal; realized via the "N" lexicon keyed by type.label. */
class N extends NP {
	public N() {
		lexiconKey = "N";
	}
	// Ontology concept whose label keys into the noun lexicon.
	TOP type;
}

/** Verb terminal; realized via the "V" lexicon keyed by type.label. */
class V extends VP {
	public V() {
		lexiconKey = "V";
	}
	// NOTE(review): appears unused — the realizer keys verbs by the
	// inherited type's label, not by this id.
	String label_id;
}

/** Preposition terminal; realized via the "P" lexicon keyed by label_id (e.g. "L1" -> "in"). */
class P extends PP {
	public P() {
		lexiconKey = "P";
	}
	String label_id;
}


/**
 * Meaningful Sentence Realizer.
 *
 * Reads one semantic frame (a one-line, s-expression-like string) from the
 * file named by {@code args[0]} and realizes it as an English sentence on
 * standard output, using a hand-built lexicon, a small phrase-structure
 * grammar, and a stack-based top-down generator with backtracking.
 *
 * NOTE(review): the class name keeps its historical (misspelled) form;
 * renaming it would also require renaming the source file and any callers.
 */
public class MeanigfulSentenceRealizer {
	
	// Per-category lexicons: internal label or id -> surface word.
	static HashMap<String, String> _Noun = new HashMap<String, String>(); 
	static HashMap<String, String> _Verb = new HashMap<String, String>(); 
	static HashMap<String, String> _Adjective = new HashMap<String, String>();
	static HashMap<String, String> _Determiner = new HashMap<String, String>();
	static HashMap<String, String> _Auxuliary = new HashMap<String, String>();
	static HashMap<String, String> _Preposition = new HashMap<String, String>();
	static HashMap<String, String> _Adverb = new HashMap<String, String>();
	static HashMap<String, String> _Pronoun = new HashMap<String, String>();
	static HashMap<String, String> _Quantifier = new HashMap<String, String>();
	
	// Master lexicon: part-of-speech key ("N", "V", ...) -> category lexicon.
	static HashMap<String, HashMap<String, String>> _Lexicon = new HashMap<String, HashMap<String, String>>();
	
	// Grammar: non-terminal ("S", "NP", ...) -> ordered list of expansion rules.
	static HashMap<String, ArrayList<String>> _Grammar = new HashMap<String, ArrayList<String>>();
	
	// Punctuation name -> printable symbol.
	static HashMap<String, String> _Punctuation = new HashMap<String, String>();
	
	static final String PERIOD = "Period";
	static final String PERIOD_SYMBOL = ".";
	static final String QUESTION = "Question";
	static final String QUESTION_SYMBOL = "?";
	static final String COMMA = "Comma";
	static final String COMMA_SYMBOL = ",";
	static final String EXCLAMATION = "Exclamation";
	static final String EXCLAMATION_SYMBOL = "!";
	static final String PRESENT = "Present";
	static final String PAST = "Past";
	static final String FUTURE = "Future";
	static final String CHANCE_HIGH = "HIGH";
	static final String CHANCE_MID = "MID";
	static final String CHANCE_LOW = "LOW";
	
	// Work list of phrases still to be realized (top = leftmost pending phrase).
	static Stack<Object> sentenceStack = new Stack<Object>();
	
	static int number = -1;                    // NOTE(review): currently unused
	static String sentenceEndType = PERIOD;    // punctuation printed at the end
	static boolean isPunctuationSet=false;     // NOTE(review): currently unused
	static String tense = PRESENT;             // set from the frame's :time token
	static boolean isFirstPrint=true;          // capitalize only the first printed word
	
	// Location surface name -> noun-lexicon id (e.g. "Australia" -> "Au").
	static HashMap<String, String> _location_ids = new HashMap<String, String>();
	
	/**
	 * Entry point: builds the lexicon, grammar and punctuation tables, reads
	 * the input frame from the file named by args[0], seeds the stack with a
	 * sentence whose head is that frame, and runs the generator.
	 *
	 * @param args args[0] is the path of the file holding the semantic frame;
	 *             only the last line of the file is used
	 */
	public static void main(String[] args) {
		
		/**
		 *  Construct the Lexicon
		 */
		
		// Populate Noun, Verb & Adjective
		
		
		_location_ids.put("Australia", "Au");
		
		_Noun.put("Au", "Australia");
		_Noun.put("kangaroo", "kangaroo");
		_Noun.put("kangaroos", "kangaroos");
		_Noun.put("people", "people");
		_Noun.put("whale", "whale");
		_Noun.put("whales", "whales");
		_Noun.put("away", "away");
		
		_Verb.put("like", "like");
		_Verb.put("S2", "careful");
		_Verb.put("S3", "stay");
		_Verb.put("S4", "safe");
		_Verb.put("kill", "kill");
		
		_Preposition.put("L1", "in");
		_Preposition.put("L2", "but");
		_Preposition.put("L3", "however");
		_Preposition.put("L4", "then");
		_Preposition.put("L5", "will");
		
		_Adjective.put("Q1", "dangerous");
		_Adjective.put("Q2", "small");
		_Adjective.put("Q3", "friendly");
		_Adjective.put("Q4", "wild");
		
		_Pronoun.put("P1", "you");
		_Pronoun.put("P2", "they");
		_Pronoun.put("P3", "them");
		
		_Quantifier.put("some", "some");
		_Quantifier.put("vm", "very much");
		
		_Auxuliary.put("can", "can");
		_Auxuliary.put("may", "may");
		_Auxuliary.put("will", "will");
		_Auxuliary.put("be", "be");
		
		_Adverb.put("easily", "easily");
		
		// Add Noun, Verb & Adjective list to the Lexicon
		
		_Lexicon.put("N", _Noun);
		_Lexicon.put("V", _Verb);
		_Lexicon.put("Aj", _Adjective);
		_Lexicon.put("D", _Determiner);
		_Lexicon.put("Aux", _Auxuliary);
		_Lexicon.put("P", _Preposition);
		_Lexicon.put("Adv", _Adverb);
		_Lexicon.put("Pro", _Pronoun);
		_Lexicon.put("Quant", _Quantifier);
		
		
		
		/**
		 *  Construct the Grammar
		 */
		
		ArrayList<String> S_rule = new ArrayList<String>();
		S_rule.add("NP[head:agent] VP[head]");
		S_rule.add("NP[head:agent] VP[head] PP[head:link]");
		S_rule.add("PP[head:link] NP[head:agent] Aux[head:aux] VP[head]");
		S_rule.add("PP[head:link] NP[head:agent] VP[head]");
		S_rule.add("NP[head:agent] VP[head] Conj[head] NP[head:agent]");//!! "Conj[head]" has no handler in Generate()
		S_rule.add("NP[head:agent] Aux[head:aux] VP[head]");
		S_rule.add("VP[head]");
		S_rule.add("VP[head] NP[head:agent]");
		S_rule.add("VP[head] PP[head:link] NP[head:agent]");
		
		ArrayList<String> NP_rule = new ArrayList<String>();
		NP_rule.add("N");
		NP_rule.add("Pro");
		NP_rule.add("Aj N");
		NP_rule.add("Aj Aj N");
		NP_rule.add("Aj P Pro");
		NP_rule.add("N PP");
		NP_rule.add("Quant N");
		
		
		ArrayList<String> VP_rule = new ArrayList<String>();
		VP_rule.add("V");
		VP_rule.add("V NP");
		VP_rule.add("V Adv");
		VP_rule.add("V NP Adv");
		
		ArrayList<String> PP_rule = new ArrayList<String>();
		PP_rule.add("P NP");
		
		_Grammar.put("S", S_rule);
		_Grammar.put("NP", NP_rule);
		_Grammar.put("VP", VP_rule);
		_Grammar.put("PP", PP_rule);
		
		
		// Populate punctuation symbols
		_Punctuation.put(PERIOD, PERIOD_SYMBOL);
		_Punctuation.put(QUESTION, QUESTION_SYMBOL);
		_Punctuation.put(COMMA, COMMA_SYMBOL);
		_Punctuation.put(EXCLAMATION, EXCLAMATION_SYMBOL);
		
		
		////////////////////////////////////////////////////////////////////////
		
		// Fail fast with a usage hint rather than an ArrayIndexOutOfBoundsException.
		if(args.length < 1) {
			System.err.println("Usage: java MeanigfulSentenceRealizer <input-file>");
			return;
		}
		
		String filename = args[0];
		File file = new File(filename);
		String inputLine="", str;
		BufferedReader br = null;
		try {
			// The redundant DataInputStream wrapper was dropped; the reader
			// chain behaves identically without it.
			br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
			
			// Only the last line survives the loop: the frame is a single line.
			while((str=br.readLine())!=null) {
				inputLine = str;
			}
			
		} catch(Exception e) {
			e.printStackTrace();
		} finally {
			// Always release the file handle (the original version leaked it).
			if(br != null) {
				try {
					br.close();
				} catch(Exception ignored) {
					// best effort: nothing sensible to do on close failure
				}
			}
		}
		
		
		// Bind the head of sentence to the head of the input
		S s= new S();
		s.setHead(inputLine);
		// Push it into the stack
		sentenceStack.push(s);
		
		Generate();
		
	}
	
	/**
	 * Stack-driven top-down realizer.
	 *
	 * Pops phrase objects off {@code sentenceStack}. Lexical terminals are
	 * printed immediately; non-terminals are expanded by trying grammar
	 * rules in order, pushing the rule's constituents right-to-left (so
	 * they pop in left-to-right surface order) and backtracking when a
	 * rule fails to consume every semantic token of the input frame.
	 * Ends by printing the sentence-final punctuation.
	 */
	private static void Generate() {
		while(sentenceStack.size() != 0) {
			// Pop the top of stack
			S item = (S)sentenceStack.pop();
			
			// If ITEM is a lexical term, Print it
			if(!item.getLexiconKey().equals("") && _Lexicon.containsKey(item.getLexiconKey())) {
				HashMap<String, String> lexicon_map = _Lexicon.get(item.getLexiconKey());
				String wordToPrint="";
				// Nouns and verbs are keyed by their ontology type's label;
				// the other terminals are keyed by an explicit label id.
				// A null type or unset id yields no word (guard below).
				if(item instanceof N) {
					TOP n_type = ((N)item).type;
					wordToPrint = (n_type == null) ? null : lexicon_map.get(n_type.label);
				}
				else if(item instanceof V) {
					TOP v_type = ((V)item).type;
					wordToPrint = (v_type == null) ? null : lexicon_map.get(v_type.label);
				}
				else if(item instanceof P) {
					wordToPrint = lexicon_map.get(((P)item).label_id);
				}
				else if(item instanceof Auxuliary) {
					wordToPrint = lexicon_map.get(((Auxuliary)item).label_id);
				}
				else if(item instanceof Adjective) {
					wordToPrint = lexicon_map.get(((Adjective)item).label_id);
				}
				else if(item instanceof Pronoun) {
					wordToPrint = lexicon_map.get(((Pronoun)item).label_id);
				}
				else if(item instanceof Adverb) {
					wordToPrint = lexicon_map.get(((Adverb)item).label_id);
				}
				else if(item instanceof Quant) {
					wordToPrint = lexicon_map.get(((Quant)item).label_id);
				}
				// Skip lexicon misses instead of crashing on substring(0,1)
				// (empty word) or printing the literal word "null".
				if(wordToPrint == null || wordToPrint.equals("")) {
					// nothing realizable for this terminal
				}
				else if(isFirstPrint) {
					// Capitalize the first word of the sentence.
					System.out.print(wordToPrint.substring(0,1).toUpperCase() + wordToPrint.substring(1) + " ");
					isFirstPrint=false;
				}
				else {
					System.out.print(wordToPrint+" ");
				}
			}
			
			// Otherwise
			else {
				// Find an appropriate expanded rule for the ITEM from the grammar
				// (if there are alternatives, execute some selection function)
				if(item instanceof NP) {
					// Expected types: :experiencer, :agent, :contract, ENTITY
					// [NP->N]; [NP->Pro]; [NP->Aj N]; [NP->Aj Aj N]; [NP->Aj P Pro]; [NP->N PP]; [NP->Quant N];
					
					ArrayList<String> list = _Grammar.get(item.getGrammarKey());
					
					for(String rule:list) {
						// Roles this NP must realize for a rule to be accepted.
						//id, experiencer, _agent, attribute, patient, quality, contract
						ArrayList<String> tokens_to_be_consumed = new ArrayList<String>();
						if(((NP)item).experiencer != null && !((NP)item).experiencer.equals("")) {
							tokens_to_be_consumed.add(":experiencer");
						}
						if(((NP)item)._agent != null && !((NP)item)._agent.equals("")) {
							tokens_to_be_consumed.add(":agent");
						}
						if(((NP)item).contract != null && !((NP)item).contract.equals("")) {
							tokens_to_be_consumed.add(":contract");
						}
						int pushCount=0;
						String rule_tokens[] = rule.split(" ");
						// Push constituents right-to-left so they pop in order.
						for(int i=rule_tokens.length-1; i>=0; i--) {
							// NOTE(review): an N object is created and pushed for
							// EVERY token of the rule, even "Pro"/"Aj"/"Quant"
							// (whose branches are empty) — preserved as-is.
							N n_obj = new N();
							if(rule_tokens[i].equals("N")) {
								if((tokens_to_be_consumed.contains(":experiencer")) || tokens_to_be_consumed.contains(":agent")) {
									
									// Map entity ids onto ontology concepts.
									if(((NP)item).id != null && ((NP)item).id.startsWith("Entity")) {
										
										if(((NP)item).id.equals("Entity1")) {
											n_obj.type = new HUMAN();
										}
										else if(((NP)item).id.equals("Entity2")) {
											n_obj.type = new PEOPLE();
										}
										else if(((NP)item).id.equals("Entity3")) {
											n_obj.type = new WHALE();
										}
										else if(((NP)item).id.equals("Entity4")) {
											n_obj.type = new KANGAROO();
										}
										
										if(((NP)item).quality != null) {
											// Fetch qualities from ((NP)item).quality
											// TODO(review): quality_id is hard-coded to "Q1";
											// it should be parsed from the frame.
											String quality_id = "Q1";
											
											if(quality_id.equals("Q1")) {
												((ANIMAL)n_obj.type).quality1 = new QUALITY();
											}
										}
									}
									if(tokens_to_be_consumed.contains(":experiencer")) {
										tokens_to_be_consumed.remove(":experiencer");
									}
									else if(tokens_to_be_consumed.contains(":agent")) {
										tokens_to_be_consumed.remove(":agent");
									}
								}
							}
							
							else if(rule_tokens[i].equals("Pro")) {
								// not yet implemented
							}
							
							else if(rule_tokens[i].equals("Aj")) {
								// not yet implemented
							}
							
							else if(rule_tokens[i].equals("Quant")) {
								// not yet implemented
							}
							
							//n_obj.setAgent(((NP)item).getAgent());
							sentenceStack.push(n_obj);
							pushCount++;
						}
						
						if(tokens_to_be_consumed.isEmpty()) {
							// Rule accepted: every role was consumed.
							break;
						}
						else {
							// Backtrack: undo this rule's pushes and try the next.
							for(int i=0; i<pushCount; i++) {
								sentenceStack.pop();
							}
						}
					}
					
				}
				else if(item instanceof VP) {
					// [VP->V], [VP->V NP], [VP->V Adv], [VP->V NP Adv]
					// Expected: attribute, location, time, degree, patient, chance
					
					ArrayList<String> list = _Grammar.get(item.getGrammarKey());
					
					for(String rule:list) {
						// Roles this VP must realize for a rule to be accepted.
						ArrayList<String> tokens_to_be_consumed = new ArrayList<String>();
						if(((VP)item).attribute != null && !((VP)item).attribute.equals("")) {
							tokens_to_be_consumed.add(":attribute");
						}
						if(((VP)item).location != null && !((VP)item).location.equals("")) {
							tokens_to_be_consumed.add(":location");
						}
						if(((VP)item).time != null && !((VP)item).time.equals("")) {
							tokens_to_be_consumed.add(":time");
						}
						if(((VP)item).degree != null && !((VP)item).degree.equals("")) {
							tokens_to_be_consumed.add(":degree");
						}
						if(((VP)item).patient != null && !((VP)item).patient.equals("")) {
							tokens_to_be_consumed.add(":patient");
						}
						if(((VP)item).chance != null && !((VP)item).chance.equals("")) {
							tokens_to_be_consumed.add(":chance");
						}
						
						int pushCount=0;
						String rule_tokens[] = rule.split(" ");
						for(int i=rule_tokens.length-1; i>=0; i--) {
							if(rule_tokens[i].equals("V")) {
								V v_obj = new V();
								// NOTE(review): only state id "S1" ("like") is mapped
								// to a concept; other ids push no verb at all.
								if(((VP)item).id.equals("S1")){
									v_obj.type = new LIKE();
									sentenceStack.push(v_obj);
									pushCount++;
								}
								if(tokens_to_be_consumed.contains(":time") ) {
									// ":time X)" -> X; records the global tense.
									String time = ((VP)item).time.substring(((VP)item).time.indexOf(":time")+6,((VP)item).time.indexOf(")"));
									if(time.equals(PAST)) {
										tense = PAST;
									}
									else if(time.equals(FUTURE)) {
										tense = FUTURE;
									}
									tokens_to_be_consumed.remove(":time");
								}
							}
							else if(rule_tokens[i].equals("NP")) {
								if(tokens_to_be_consumed.contains(":location") ) {
									// Location NPs are consumed but not realized
									// here — the realization code was disabled.
									/*N n_obj = new N();
									n_obj.type = new LOCATION();
									n_obj.type.label = ((VP)item).location.substring(((VP)item).location.indexOf(":location")+10, ((VP)item).location.indexOf(")"));
									sentenceStack.push(n_obj);
									pushCount++;*/
									tokens_to_be_consumed.remove(":location");
								}
								if(tokens_to_be_consumed.contains(":attribute")) {
									N n_obj = new N();
									// "(:id EntityN ...)" -> EntityN
									String id = ((VP)item).attribute.substring(((VP)item).attribute.indexOf(":id")+4, ((VP)item).attribute.indexOf(")"));
									if(id.equals("Entity1")) {
										n_obj.type = new HUMAN();
									}
									else if(id.equals("Entity2")) {
										n_obj.type = new PEOPLE();
									}
									else if(id.equals("Entity3")) {
										n_obj.type = new WHALE();
									}
									else if(id.equals("Entity4")) {
										n_obj.type = new KANGAROO();
									}
									
									sentenceStack.push(n_obj);
									pushCount++;
									tokens_to_be_consumed.remove(":attribute");
								}
							}
						}
						
						if(tokens_to_be_consumed.isEmpty()) {
							break;
						}
						else {
							// Backtrack: undo this rule's pushes and try the next.
							for(int i=0; i<pushCount; i++) {
								sentenceStack.pop();
							}
						}
					}
				}
				else if(item instanceof PP) {
					if(_Grammar.containsKey(item.getGrammarKey())) {
						ArrayList<String> list = _Grammar.get(item.getGrammarKey());
						
						// Parse "(:id Lx ...)" and ":location Y)" out of the link text.
						// NOTE(review): assumes both ":id" and ":location" occur in the
						// link; a frame missing either would throw on substring().
						String link_id = ((PP)item).link.substring(((PP)item).link.indexOf(":id")+4, ((PP)item).link.indexOf(")"));
						String loc_str = ((PP)item).link.substring(((PP)item).link.indexOf(":location")-1);
						String link_location = loc_str.substring(loc_str.indexOf(":location")+10, loc_str.indexOf(")"));
						
						// PP has a single rule ("P NP"); no backtracking needed.
						String rule = list.get(0);
						String rule_tokens[] = rule.split(" ");
						for(int i=rule_tokens.length-1; i>=0; i--) {
							if(rule_tokens[i].equals("P")) {
								P p_obj = new P();
								p_obj.label_id = link_id;
								sentenceStack.push(p_obj);
							}
							else if(rule_tokens[i].equals("NP")) {
								// Realize the location as a noun keyed via its
								// id (e.g. "Australia" -> "Au" -> "Australia").
								N n_obj = new N();
								n_obj.type = new LOCATION();
								if(_location_ids.containsKey(link_location)) {
									n_obj.type.label = _location_ids.get(link_location);
								}
								sentenceStack.push(n_obj);
							}
							
						}
						
					}
				}
				else if(item instanceof S) {
					if(_Grammar.containsKey(item.getGrammarKey())) {
						ArrayList<String> list = _Grammar.get(item.getGrammarKey());
						
						for(String rule:list) {
							int pushCount=0;
							String rule_tokens[] = rule.split(" ");
							// for each portion of this rule, find the approp. portion of the input, as specified
							// in the portion (this may be a word) and bind it into the rule
							// perform any side effects (number agreement) also
							// push the list of bound portions back onto the stack
							String head_string = item.getHead();
							// Split on a space with any leading colons; every piece
							// containing an embedded ":role" becomes a token the
							// chosen rule must consume.
							String toks[] = head_string.split(":* ");
							ArrayList<String> tokens_to_be_consumed = new ArrayList<String>();
							for(String ss:toks) {
								if(ss.indexOf(":") > 0) {
									tokens_to_be_consumed.add(ss.substring(ss.indexOf(":")));
								}
							}
							for(int i=rule_tokens.length-1; i>=0; i--) {
								if(rule_tokens[i].equals("NP[head:agent]")) {
									
									if(head_string.substring(head_string.indexOf(":id")+4,head_string.indexOf(")")).startsWith("Entity")) {
										// ENTITY
										// Consume :quality
										NP _np = new NP();
										if(tokens_to_be_consumed.contains(":quality")) {
											_np.quality = head_string.substring(head_string.indexOf(":quality")-1);
											tokens_to_be_consumed.remove(":quality");
											tokens_to_be_consumed.remove(":id");
											tokens_to_be_consumed.remove(":type");
										}
										_np.id = head_string.substring(head_string.indexOf(":id")+4,head_string.indexOf(")"));
										tokens_to_be_consumed.remove(":id");
										tokens_to_be_consumed.remove(":type");
										sentenceStack.push(_np);
										pushCount++;
									}
									else {
										// Non-entity heads bind through :experiencer or :agent.
										if(tokens_to_be_consumed.contains(":experiencer")) {
											NP _np = new NP();
											_np.experiencer = head_string.substring(head_string.indexOf(":experiencer")-1);
											_np.id = _np.experiencer.substring(_np.experiencer.indexOf(":id")+4,_np.experiencer.indexOf(")"));
											tokens_to_be_consumed.remove(":experiencer");
											tokens_to_be_consumed.remove(":id");
											tokens_to_be_consumed.remove(":type");
											sentenceStack.push(_np);
											pushCount++;
										}
										else if(tokens_to_be_consumed.contains(":agent")) {
											NP _np = new NP();
											_np._agent = head_string.substring(head_string.indexOf(":agent")-1);
											_np.id = _np._agent.substring(_np._agent.indexOf(":id")+4,_np._agent.indexOf(")"));
											tokens_to_be_consumed.remove(":agent");
											tokens_to_be_consumed.remove(":id");
											tokens_to_be_consumed.remove(":type");
											sentenceStack.push(_np);
											pushCount++;
										}
										
									}
								}
								
								else if(rule_tokens[i].equals("VP[head]")) {
									
									// (The original copied head_string via "new String";
									// the copy was pointless — substring never mutates.)
									String action = head_string;
									
									String id = action.substring(action.indexOf(":id")+4,action.indexOf(")"));
									tokens_to_be_consumed.remove(":id");
									
									action = action.substring(action.indexOf(":type")-1);
									String type = action.substring(action.indexOf(":type")+6,action.indexOf(")"));
									tokens_to_be_consumed.remove(":type");
									
									VP vp = new VP();
									vp.id = id;
									
									if(id.startsWith("S")) {
										// STATE
										// Consume attribute, location, time, degree
										if(tokens_to_be_consumed.contains(":attribute")) {
											vp.attribute = head_string.substring(head_string.indexOf(":attribute")-1);
											tokens_to_be_consumed.remove(":attribute");
											tokens_to_be_consumed.remove(":id");
											tokens_to_be_consumed.remove(":type");
										}
										if(tokens_to_be_consumed.contains(":location")) {
											vp.location = head_string.substring(head_string.indexOf(":location")-1);
											tokens_to_be_consumed.remove(":location");
											tokens_to_be_consumed.remove(":id");
										}
										if(tokens_to_be_consumed.contains(":time")) {
											vp.time = head_string.substring(head_string.indexOf(":time")-1);
											tokens_to_be_consumed.remove(":time");
										}
										if(tokens_to_be_consumed.contains(":degree")) {
											vp.degree = head_string.substring(head_string.indexOf(":degree")-1);
											tokens_to_be_consumed.remove(":degree");
										}
										
									}
									else if(id.startsWith("Event")) {
										// EVENT
										// Consume patient, chance, time
										if(tokens_to_be_consumed.contains(":patient")) {
											vp.patient = head_string.substring(head_string.indexOf(":patient")-1);
											tokens_to_be_consumed.remove(":patient");
											tokens_to_be_consumed.remove(":id");
											tokens_to_be_consumed.remove(":type");
										}
										if(tokens_to_be_consumed.contains(":chance")) {
											vp.chance = head_string.substring(head_string.indexOf(":chance")-1);
											tokens_to_be_consumed.remove(":chance");
										}
										if(tokens_to_be_consumed.contains(":time")) {
											vp.time = head_string.substring(head_string.indexOf(":time")-1);
											tokens_to_be_consumed.remove(":time");
										}
									}
									
									sentenceStack.push(vp);
									pushCount++;
								}
								else if(rule_tokens[i].equals("PP[head:link]")) {
									PP pp = new PP();
									pp.link = head_string.substring(head_string.indexOf(":link")-1);
									sentenceStack.push(pp);
									pushCount++;
									tokens_to_be_consumed.remove(":link");
								}
								else if(rule_tokens[i].equals("Aux[head:aux]")) {
									// TODO(review): the :chance branch is an unfinished
									// stub; every auxiliary gets CHANCE_MID.
									Auxuliary aux = new Auxuliary();
									aux.setAux(CHANCE_MID);
									if(head_string.indexOf(":chance") > 0) {
										
									}
									sentenceStack.push(aux);
									pushCount++;
								}
							}
							
							// If all parts of the input frame has been consumed, break from the loop
							if(tokens_to_be_consumed.isEmpty()) {
								break;
							}
							else {
								// Backtrack: Remove all the elements pushed into the stack
								for(int i=0; i<pushCount; i++) {
									sentenceStack.pop();
								}
							}
						}
					}
				}
				
			}
		}
		
		// Sentence-final punctuation (defaults to a period).
		System.out.println(_Punctuation.get(sentenceEndType));
	}
	
}