package warthog.parsegen;

import java.util.*;
import java.util.Map.Entry;
import java.io.*;

import warthog.cradle.JavaClass;

public class Generator {
	
	// A parser generator must first be able to track a bunch of symbols.
	// Symbols are interned: one Sym object per distinct name (including null).
	final Map<String, Sym> symbols = new HashMap<String, Sym>();
	// The end-of-input sentinel is the symbol interned under the null name.
	final Sym endSymbol = sym(null);
	private Sym startSymbol = null;
	public void setStart(String start) { this.startSymbol = sym(start); }
	public Sym getStart() { return startSymbol; }
	/** Intern a symbol: return the Sym for this name, creating it on first use. */
	public Sym sym(String name) {
		if (!symbols.containsKey(name)) symbols.put(name, new Sym(name)); 
		return symbols.get(name);
	}
	
	
	// It must also track the production rules that have been entered. 
	final List<Rule> rules = new ArrayList<Rule>();
	// Reduce actions are stored in the action table as negative codes; this
	// inverts that encoding back to a rule. NOTE(review): assumes Rule.number()
	// yields -(index+1) for the rule at `index` — verify against Rule.
	private Rule ruleForStep(int i) {
		return rules.get(-1-i);
	}
	// There are N ways to get a production rule into the system.
	// They all delegate to this first routine.
	public Rule rule(Sym head, Sym[] rhs) {
		// The first rule given conventionally names the start symbol.
		if (rules.isEmpty() && startSymbol==null) startSymbol=head;
		// The precedence of a rule will be the highest level of any symbol in its RHS.
		int precedence = 0;
		for (Sym s:rhs) if (s.precedence>precedence) precedence = s.precedence;
		
		Rule aRule = new Rule(rules.size(), head, rhs, precedence);
		rules.add(aRule);
		return aRule;
	}
	/** Convenience overload: intern the head and each RHS element by name. */
	public Rule rule(String lhs, Collection<String> rhs) {
		Sym L = sym(lhs);
		Sym[] R = new Sym[rhs.size()];
		int i=0;
		for (String t:rhs) R[i++] = sym(t);
		return this.rule(L, R);
	}
	public Rule rule(String lhs, String[] rhs) {
		Sym L = sym(lhs);
		Sym[] R = new Sym[rhs.length];
		for (int i=0; i<rhs.length; i++) R[i] = sym(rhs[i]);
		return this.rule(L, R);
	}
	/** Convenience overload: the RHS is given as one whitespace-separated string. */
	public Rule rule(String lhs, String rhs) {
		return this.rule(lhs, split(rhs));
	}
	public Rule rule(String lhs, List<String> rhs) {
		return this.rule(lhs, rhs.toArray(array_of_strings));
	}
	
	/////  A section on dealing with precedence levels sensibly for precedence parsing.
	List<Assoc> precLevels = new ArrayList<Assoc>();
	{
		precLevels.add(Assoc.RIGHT); // This makes the default (empty) prec level prefer shifts.
	}
	/**
	 * Declare a new precedence level (higher than all previously declared ones)
	 * with the given associativity, covering the named tokens.
	 */
	public void assoc(Assoc assoc, Collection<String> tokenNames) {
		int pLevel = precLevels.size();
		precLevels.add(assoc);
		for (String s: tokenNames) {
			sym(s).setPrecedence(pLevel);
		}
	}
	/**
	 * Resolve a shift/reduce conflict by precedence:
	 * LEFT means reduce, RIGHT means shift, NON means error.
	 * Equal levels fall back on the level's declared associativity;
	 * level 0 (undeclared) additionally reports the conflict.
	 */
	Assoc decideShiftReduce(Rule rule, Sym look) {
		int ruleLevel = rule.precedence;
		int shiftLevel = look.precedence;
		if (ruleLevel > shiftLevel) return Assoc.LEFT;
		if (ruleLevel < shiftLevel) return Assoc.RIGHT;
		if (ruleLevel == 0) {
			System.err.println("Shift-reduce conflict:");
			System.err.println(rule);
			System.err.println("with lookahead "+look.toString());
		}
		return precLevels.get(ruleLevel);
	}
		
	/// Reduce/Reduce conflicts are not presently dealt with. They are
	/// fairly rare in useful grammars anyway, so we treat them as
	/// unrecoverable errors.
	private static void RR_Conflict(Rule a, Rule b, Sym look) {
		System.err.printf("Reduce-reduce conflict:%n");
		System.err.println(a);
		System.err.println(b);
		System.err.println("with lookahead "+look.toString());
		System.exit(1);
	}
	
	// At some point, we'll have to work out which symbols produce
	// the empty string. This iterates to a fixpoint.
	private void findLambda() {
		boolean change;
		do {
			change = false;
			// FIX: was `change = change || s.check_lambda()`, whose short-circuit
			// skipped the remaining symbols as soon as one changed, costing extra
			// passes. Every symbol must be checked on every pass.
			for (Sym s:symbols.values()) if (s.check_lambda()) change = true;
		} while (change);
	}
	
	/// Here begins the main algorithm.
	Map<Set<Dot>, State> stateMap = new HashMap<Set<Dot>, State>();
	List<State> stateList = new ArrayList<State>();
	/**
	 * Build the LR automaton: explore the state space from the start symbol,
	 * wire up the FIRST/FOLLOW dataflow, pump it to a fixpoint, and then
	 * translate each state's items into parse actions.
	 * Precondition: at least one rule has been entered (so startSymbol != null).
	 */
	public void build() {
		inferTypes();
		findLambda();
		// Having lambda, the (naive) first sets for symbols are
		// fairly easy to calculate. That being said, I am not convinced
		// it would save any work over doing it the present way.
		final Collection<Dot> core = startSymbol.front();
		final State qI = findState(new HashSet<Dot>(core));
		// FIX: state IDs are 1-based (the constructor assigns _id AFTER adding to
		// stateList), and action code 0 is reserved for "error" — the generated
		// runtime starts in state 1 (yyqs[0]=yyq=1). The old `==0` assert was wrong.
		assert qI.getID()==1;
		for (Dot d:core) qI.items.get(d).FOLLOW.add_terminal(endSymbol);
		for (State s: stateList) s.checkUnit();
		for (State s: stateList) s.buildLinks();
		State qF = qI.dig(startSymbol);
		qF.edge.put(endSymbol, qF);	// accepting state loops on end-of-input
		Relay.pump();
		for (State s: stateList) s.prepareActions();
	}
	private void inferTypes() {
		// Infer semantic classes of non-terminals from rules, where possible.
		boolean change;
		do {
			change = false;
			// FIX: was `change = change || r.inferType()` — same accidental
			// short-circuit as in findLambda(); every rule is now visited per pass.
			for (Rule r:rules) if (r.inferType()) change = true;
		} while (change);
	}
	/** Return the (memoized) state for this core set of dots, exploring it if new. */
	private State findState(Set<Dot> core) {
		if (!stateMap.containsKey(core)) {
			State q = new State(core);
			stateMap.put(core, q);
			q.buildShifts();	// Thus, this is a depth-first exploration of the state space.
		}
		return stateMap.get(core);
	}
	/** One state of the LR automaton: a closed set of items plus outgoing edges. */
	class State {
		final Set<Dot> core;
		private Map<Sym, State> edge = new HashMap<Sym, State>();
		private Map<Dot, Item> items = new HashMap<Dot, Item>();
		// Parse action per symbol: positive = shift to that state ID,
		// negative = reduce by the encoded rule, 0 = error, absent = error.
		Map<Sym, Integer> action = new HashMap<Sym, Integer>();
		// Non-null when every item in a leaf state reduces a unit rule with the
		// same LHS; such "silly" states can be skipped over during table building.
		Sym silliness = null;
		private int _id;
		public State(Set<Dot> core) {
			stateList.add(this);
			_id = stateList.size();	// NB: IDs are therefore 1-based; 0 means "error".
			this.core = core;
			// In theory, I could build the vocabulary (mapping shifts to cores)
			// right here in the constructor.
		}
		// Follow (or create) the edge for this symbol. NOTE(review): the freshly
		// created State has a null core and is never explored; this path appears
		// to exist only to manufacture the post-accept state in build() — confirm.
		public State dig(Sym label) {
			if (!edge.containsKey(label)) edge.put(label, new State(null));
			return edge.get(label);
		}
		/** Mark this state "silly" if it is a leaf whose items all reduce unit rules with one LHS. */
		public void checkUnit() {
			if (!edge.isEmpty()) return;	// Only leaf nodes can be "silly". 
			if (items.isEmpty()) return;	// FIX: guard against NoSuchElementException on an item-less leaf.
			Iterator<Dot> dots = items.keySet().iterator();
			Dot d = dots.next();
			Sym LHS = d.rule.unitness();
			if (LHS==null) return;
			while (dots.hasNext()) {
				d = dots.next();
				if (LHS != d.rule.unitness()) return;
			}
			silliness = LHS;
		}
		/** Close this state's core and create successor states for each shiftable symbol. */
		void buildShifts() {
			Map<Sym, Set<Dot>> vocab = new TreeMap<Sym, Set<Dot>>();
			// Use a TreeMap instead of a HashMap so that we get a stable DFA.
			// It follows from processing the symbols of the state's
			// "vocabulary" in the same order every time. 
			Queue<Dot> work = new ArrayDeque<Dot>(core);
			Dot d;
			while (null != (d = work.poll())) {
				if (items.containsKey(d)) continue;
				Item anItem = new Item();
				items.put(d, anItem);

				Sym head = d.head();
				if (null == head) continue;	// dot at end of rule: nothing to shift

				if (head.term) anItem.FIRST.add_terminal(head);
				else work.addAll(head.front());	// closure: pull in the nonterminal's productions

				if (!vocab.containsKey(head)) vocab.put(head, new HashSet<Dot>());
				vocab.get(head).add(d.tail());
			}
			// Convert vocabulary into states
			for (Entry<Sym, Set<Dot>> e:vocab.entrySet()) {
				edge.put(e.getKey(), findState(e.getValue()));
			}
		}
		/** Wire the FIRST/FOLLOW relay network among this state's items and its successors. */
		void buildLinks() {
			for (Entry<Dot, Item> e:items.entrySet()) {
				Dot dot = e.getKey();
				Item item = e.getValue();
				// recognizing dots have a null head.
				if (dot.atEnd()) {
					// This is an accepting item, and the first/follow sets must
					// be as one.
					item.FOLLOW.informs(item.FIRST);
				} else {
					Sym label = dot.head();
					State target = edge.get(label);
					// in any event, the FOLLOW sets must flow:
					Item tail = target.items.get(dot.tail());
					item.FOLLOW.informs(tail.FOLLOW);
					if (label.term) {
						item.FIRST.add_terminal(label);
					} else {
						// Nonterminal, now.
						if (label.lambda) tail.FIRST.informs(item.FIRST);
						for (Dot dEpsilon: label.front()) {
							Item iEpsilon = items.get(dEpsilon);
							iEpsilon.FIRST.informs(item.FIRST);
							tail.FIRST.informs(iEpsilon.FOLLOW);
						}
					}
				}
			}
		}
		/** Translate items and edges into this state's action table entries. */
		void prepareActions() {
			// Create reduce instructions:
			for (Entry<Dot, Item> e:items.entrySet()) {
				Dot dot = e.getKey();
				Item item = e.getValue();
				if (dot.atEnd()) {
					// We have a reducing dot
					for (Sym look:item.FOLLOW.tokens) addReduce(look, dot.rule);
				}
			}
			
			// Create shift instructions:
			for (Entry<Sym, State> e:edge.entrySet()) {
				Sym label = e.getKey();
				State target = e.getValue();
				Sym unit;
				// Skip past "silly" unit-reduction states by following the LHS edge.
				while ((unit = target.silliness) != null) target = edge.get(unit); // Cycles can't exist.
				if (action.containsKey(label)) {
					// A reduce is already registered here: shift/reduce conflict.
					Rule rule = ruleForStep(action.get(label));
					switch(decideShiftReduce(rule, label)) {
					case LEFT: break; // Reduce
					case RIGHT: action.put(label, target.getID()); break; // Shift
					case NON: action.put(label, 0); break; // Error
					}
				} else action.put(label, target.getID());
			}
		}
		int getID() {
			return _id;
		}
		/** Register a reduce action; a second rule for the same lookahead is a fatal R/R conflict. */
		private void addReduce(Sym look, Rule rule) {
			Integer s = action.get(look);
			if (s==null) action.put(look, rule.number());
			else RR_Conflict(ruleForStep(s), rule, look);
		}
		/** Project this state's action row onto the given symbol ordering (null = no action). */
		Integer[] actMap(Sym[] ss) {
			Integer[] a = new Integer[ss.length];
			for (int i=0;i<ss.length;i++) a[i] = action.get(ss[i]);
			return a;
		}
	}
	/**
	 * A node in the token-set dataflow network used to compute FIRST/FOLLOW sets.
	 * Tokens added to one relay flow to every relay it "informs"; the static
	 * worklist (`plumbing`) is pumped to a fixpoint by pump().
	 * NOTE(review): the worklist is static, so a JVM can only run one
	 * generation at a time — confirm this is intentional.
	 */
	private static class Relay {
		Set<Sym> tokens = new HashSet<Sym>();
		Set<Relay> _informs = new HashSet<Relay>();

		static Set<Relay> plumbing = new HashSet<Relay>();
		/** Propagate queued token additions through the network until quiescent. */
		static void pump() {
			while (!plumbing.isEmpty()) {
				List<Relay> work = new ArrayList<Relay>(plumbing);
				plumbing.clear();
				for (Relay r:work) r.pumpTokens();
			}
		}
		public void add_terminal(Sym s) {
			if (tokens.add(s)) {
				//System.out.println("-("+s.name+")-");
				plumbing.add(this);
			}
		}
		public void informs(Relay that) {
			_informs.add(that);
		}
		private void pumpTokens() {
			for (Relay r: _informs) r.add_many(tokens);
		}
		private void add_many(Set<Sym> them) {
			if (tokens.addAll(them)) plumbing.add(this);
		}
	}
	/** An LR item's dataflow endpoints: its FIRST and FOLLOW token sets. */
	private static class Item {
		public Relay FIRST = new Relay();
		public Relay FOLLOW = new Relay();
	}

	// A utility function.... splits a string on whitespace.
	static ArrayList<String> split(String str) {
		Scanner s = new Scanner(str);
		ArrayList<String> al = new ArrayList<String>();
		while (s.hasNext()) al.add(s.next());
		s.close();
		return al;
	}
	// Shared zero-length array for List.toArray(..) calls.
	static String[] array_of_strings = new String[0];

	private JavaClass cw;	// destination for all emitted code
	// Code generators
	/** Emit the compressed action/goto tables for the generated parser. */
	private void makeTables(GeneratorData data) {
		data.prepareSymbols(symbols.values());
		data.writeStaticFinalFields(cw);
		// Generate semantic check field for each terminal.
		data.makeSemanticCheckArray(cw);
		// Cook up the DFA table.
//		cw.codeln("private int yyAct(int type) { return yyACT[yyq][type]; }");
//		cw.codeln("private int yyGo(int type) { return yyGO[yyq][type]; }");
//		Integer[][] acts = new Integer[stateList.size()][];
		// Row/column 0 is unused: state IDs are 1-based and 0 means "error".
		Integer[][] gos = new Integer[stateList.size()+1][];
		CompressedTable ct = new CompressedTable(stateList.size()+1, data.nonterms.length);
		for (State q:stateList) {
//			acts[stateID] = q.actMap(data.terms);
			Integer[] act = q.actMap(data.terms);
//			cw.code("// "+q._id+":  ");  // Uncomment these lines if you
//			cw.code(act);                // want to see a Cartesian action
//			cw.codeln();                 // table commented into your parser  
			ct.addActionRow(q._id, act);
			gos[q._id]  = q.actMap(data.nonterms);
		}
		for (int i=0; i<data.nonterms.length; i++) {
			Integer[] go = new Integer[stateList.size()+1];
			for (State q:stateList) {
				go[q._id] = q.action.get(data.nonterms[i]);
			}
			ct.addGotoColumn(i, go);
		}
		ct.writeCode(cw);
//		cw.codeln("private static final int[][] yyACT = {");
//		int state=0;
//		for (Integer[] act:acts) { cw.code("\t\t"); cw.code(act); cw.codeln(", // "+(state++)); }
//		cw.codeln("};");
//		cw.codeln("private static final int[][] yyGO = {");
//		boolean bol=true;
//		for (Integer[] go:gos) {
//			if (bol) cw.code("\t\t");
//			if (allNull(go)) { cw.code("null, "); bol = false; }
//			else {
//				if (!bol) cw.code("\n\t\t");
//				cw.code(go);
//				cw.codeln(",");
//				bol=true;
//			}
//		}
//		cw.codeln("};");
	}
//	private static boolean allNull(Integer[] is) {
//		if (null == is) return true;
//		for (Integer i:is) if (null!=i) return false;
//		return true;
//	}
	/** Emit the LHS lookup table and the per-rule semantic-action array (slot 0 reserved). */
	private void makeReduceArray(List<Rule> rules) {
		cw.code("private static final int[] yyLHS = {0,");
		for (Rule r:rules) cw.code(Integer.toString(r.head.id)+", ");
		cw.codeln("};");
		
		cw.codeln("private final YYR[] yyRules = {null,");
		for (Rule r:rules) {
			cw.codeln();
			cw.codeln("// rule "+r.number()+": "+r.toString());
			cw.codeln("new YYR() { public void call(){");
			r.reduceCode(cw);
			cw.codeln("}},");
		}
		cw.codeln("};");
	}
	/** Emit the fixed parser driver: state stack, shift/reduce machinery, type checks. */
	private void writeBoilerPlate() {
		cw.codeln("	private int yyq, yysp;");
		cw.codeln("	private int[] yyqs;");
		cw.codeln("	public void reset(){ this.reset(256); }");
		cw.codeln("	private void yyShift(int qN){yyq=qN;yyqs[++yysp]=yyq;}");
		cw.codeln("	private int yyfindShift(int type) throws Error {");
		cw.codeln("		int i = yyAct(type);");
		cw.codeln("		while (i<0) { yyReduce(-i); i = yyAct(type); }");
		cw.codeln("		if (0 == i) throw new Error();");
		cw.codeln("		return i;");
		cw.codeln("	}");
		cw.codeln("	private void yyReduce(int rn) {");
		cw.codeln("		yyRules[rn].call();");
		cw.codeln("		yyShift(yyGo(yyLHS[rn]));");
		cw.codeln("	}");
		cw.codeln("	private void yy_wants(int type, int sc) {");
		cw.codeln("		if(yy_sc[type]!=sc) throw new IllegalArgumentException(\"Wrong type of semantic value for given token.\");");
		cw.codeln("	}");
		cw.codeln("	{reset();}");
	}
	/** Emit semantic-value stacks, reset(), take/finish methods — one set per semantic class. */
	private void writeSemanticParts() {
		Set<SemanticClass> classes = new HashSet<SemanticClass>();
		for (Sym s:symbols.values()) classes.add(s.semanticClass);
		for (SemanticClass sc:classes) cw.codeln("\t"+sc.yyStackDecl());
		
		cw.codeln("	public void reset(int size) {");
		cw.codeln("		yyqs=new int[size]; yyqs[0]=yyq=1; yysp=0;");
		for (SemanticClass sc:classes) cw.codeln("\t\t"+sc.yyStackReset());
		cw.codeln("	}");
		for (SemanticClass sc:classes) sc.writeTake(cw);
		startSymbol.semanticClass.writeFinish(cw);
	}
	/** Top-level entry: emit the complete generated parser into the given class. */
	public void writeAllCode(JavaClass javaClass, GeneratorData data) throws IOException {
		cw = javaClass;
		javaClass.codeln("@SuppressWarnings(\"serial\")");
		javaClass.codeln("public static class Error extends Exception {}");
		javaClass.codeln("private abstract class YYR { abstract void call(); }");
		makeTables(data);
		makeReduceArray(rules);
		writeSemanticParts();
		writeBoilerPlate();
	}
}
