package project.parser;

import project.parser.ast.Prog;
import project.parser.ast.ProgAST;
import project.parser.ast.aux.*;
import project.parser.ast.exps.*;
import project.parser.ast.exps.op.binary.*;
import project.parser.ast.exps.op.unary.Decr;
import project.parser.ast.exps.op.unary.Inc;
import project.parser.ast.exps.op.unary.Minus;
import project.parser.ast.exps.op.unary.Not;
import project.parser.ast.literals.BoolLiteral;
import project.parser.ast.literals.IntLiteral;
import project.parser.ast.stats.*;
import project.parser.ast.types.PrimType;
import project.parser.ast.types.RecordType;
import project.parser.ast.types.Type;
import project.parser.tokenizer.TokenType;
import project.parser.tokenizer.Tokenizer;

import java.util.*;

import static project.parser.tokenizer.TokenType.*;

public class Parser {

	private final Tokenizer tok;

	public Parser(Tokenizer tokenizer) {
		this.tok = tokenizer;
		this.tok.next(); // prime the tokenizer so the first token is ready
	}

	public Prog parseProg() {
		// A program is a list of function declarations followed by the
		// top-level statements, which run until end of file.
		final Funcs funcs = parseFuncs();
		return new ProgAST(funcs, parseStats(EOF));
	}

	/**
	 * Parses zero or more function declarations:
	 * 'function' IDENT '(' params ')' '{' stats 'return' exp [';'] '}'
	 * Stops at the first token that is not FUNCTION.
	 */
	private Funcs parseFuncs() {
		final List<FuncDec> funcs = new LinkedList<>();
		while (tok.tokenType() == FUNCTION) {
			consume(FUNCTION);
			final Ident id = parseIdentName();
			consume(OPEN_PAR);
			final Params params = parseParams();
			consume(CLOSED_PAR);
			consume(OPEN_CURLY);
			final Stats stats = parseStats(RETURN);
			consume(RETURN);
			final Exp retExp = parseExp();
			try {
				consume(SEMI_COL);
			} catch (ParseException ignored) {
				// The ';' after the return expression is optional by design:
				// a missing semicolon here is not an error.
			}
			consume(CLOSED_CURLY);
			funcs.add(new FuncDec(id, params, stats, retExp));
		}
		return new Funcs(funcs);
	}

	/**
	 * Accumulates statements until the given terminator token is reached.
	 * The terminator itself is left unconsumed for the caller.
	 */
	private Stats parseStats(TokenType terminatorType) {
		final List<Stat> parsed = new LinkedList<>();
		while (tok.tokenType() != terminatorType)
			parsed.add(parseStat());
		return new Stats(parsed);
	}

	/** Parses a non-empty, delimiter-separated list of expressions. */
	private List<Exp> parseExps(TokenType delimiter) {
		final List<Exp> exps = new LinkedList<>();
		exps.add(parseExp());
		// consume(...) always returns true, so the loop continues exactly
		// when a delimiter is present and has been consumed
		while (tok.tokenType() == delimiter && consume(delimiter))
			exps.add(parseExp());
		return exps;
	}

	private Stat parseStat() {
		// A statement is either a variable declaration (introduced by DEC)
		// or any other kind of statement.
		return tok.tokenType() == DEC ? parseDec() : parseNonVarStat();
	}

	/** Parses a variable declaration: DEC IDENT '=' exp ';'. */
	private Dec parseDec() {
		tok.next(); // skip the DEC keyword, already matched by the caller
		final Ident id = parseIdentName();
		consume(ASSGN);
		final Exp init = parseExp();
		consume(SEMI_COL);
		return new DecAST(id, init);
	}

	private Stat parseNonVarStat() {
		// Dispatch on the current token; anything unrecognized is treated
		// as an expression statement.
		switch (tok.tokenType()) {
			case IF:
				return parseIf();
			case WHILE:
				return parseWhile();
			case OPEN_CURLY:
				return parseBlock();
			case PRINT:
				return parsePrint();
			default:
				return parseExpStat();
		}
	}

	/**
	 * Parses a conditional: 'if' '(' exp ')' stat ['else' stat].
	 * The else branch is optional.
	 */
	private Stat parseIf() {
		consume(IF);
		consume(OPEN_PAR);
		final Exp cond = parseExp();
		consume(CLOSED_PAR);
		final Stat trueStat = parseNonVarStat();
		// Check for ELSE directly rather than catching a ParseException:
		// the absent branch is an expected case, not an error, so exceptions
		// should not be used as control flow here.
		if (tok.tokenType() == ELSE) {
			consume(ELSE);
			return new IfThenElse(cond, trueStat, parseNonVarStat());
		}
		return new IfThenElse(cond, trueStat);
	}

	/** Parses a loop: 'while' '(' exp ')' stat. */
	private Stat parseWhile() {
		consume(WHILE);
		consume(OPEN_PAR);
		final Exp cond = parseExp();
		consume(CLOSED_PAR);
		return new While(cond, parseNonVarStat());
	}

	/**
	 * Parses a possibly empty, comma-separated list of 'type ident' pairs.
	 * Stop tokens: ')' for function parameters, '}' for record types
	 * (this method is reused by parseRecordType).
	 */
	private Params parseParams() {
		final List<Param> params = new LinkedList<>();
		if (tok.tokenType() != CLOSED_PAR && tok.tokenType() != CLOSED_CURLY) {
			do {
				final Type type = parseType();
				final Ident id = parseIdentName();
				params.add(new Param(type, id));
			} while (tok.tokenType() == COMMA && consume(COMMA));
		}
		return new Params(params);
	}

	/** Parses a call argument list: '(' [exps] ')'. */
	private Args parseArgs() {
		consume(OPEN_PAR);
		if (tok.tokenType() == CLOSED_PAR) {
			// Empty argument list '()'.
			// NOTE(review): a null list is handed to Args here; presumably
			// Args treats null as "no arguments" — confirm in Args.
			consume(CLOSED_PAR);
			return new Args(null);
		}
		final Args args = new Args(parseExps(COMMA));
		consume(CLOSED_PAR);
		return args;
	}

	/** Parses a print statement: PRINT args ';'. */
	private Stat parsePrint() {
		tok.next(); // skip the PRINT keyword, already matched by the caller
		final Args args = parseArgs();
		consume(SEMI_COL);
		return new Print(args);
	}

	/** Parses a block: '{' stats '}'. */
	private Stat parseBlock() {
		tok.next(); // skip '{', already matched by the caller
		Stats body = new Stats(null);
		if (tok.tokenType() != CLOSED_CURLY)
			body = parseStats(CLOSED_CURLY);
		tok.next(); // skip '}' — parseStats stopped exactly on it
		return new Block(body);
	}

	/** Wraps an expression as a statement. */
	private Stat parseExpStat() {
		final ExpStat stat = new ExpStat(parseExp());
		// Every expression statement must end with ';' — unless the
		// expression itself was empty, in which case the ';' is optional.
		try {
			consume(SEMI_COL);
		} catch (ParseException missingSemiCol) {
			if (!stat.isNull())
				throw missingSemiCol;
		}
		return stat;
	}

	/** Parses a type: NUMBER_TYPE, BOOLEAN_TYPE, or a record type '{...}'. */
	private Type parseType() {
		final TokenType tt = tok.tokenType();
		tok.next();
		switch (tt) {
			case NUMBER_TYPE:
				return PrimType.NUMBER_TYPE;
			case BOOLEAN_TYPE:
				return PrimType.BOOLEAN_TYPE;
			case OPEN_CURLY:
				return parseRecordType();
			default:
				fatalError(); // always throws
				return null;  // unreachable
		}
	}

	/**
	 * Parses the interior of a record type; the opening '{' was consumed by
	 * parseType. Field declarations reuse the parameter grammar and are then
	 * converted into an ident-to-type map.
	 */
	private RecordType parseRecordType() {
		final Map<Ident, Type> fieldTypes = new HashMap<>();
		for (Param param : parseParams())
			fieldTypes.put(param.getId(), param.getType());
		consume(CLOSED_CURLY);
		return new RecordType(fieldTypes);
	}

	/**
	 * Parses an expression: or ('=' or)*.
	 * Assignment is right-associative, so left-hand sides are stacked and
	 * the Assign nodes are rebuilt from the innermost value outward.
	 */
	private Exp parseExp() {
		Exp result = parseOr();
		if (tok.tokenType() == ASSGN) {
			tok.next();
			// Deque used as a stack — preferred over the legacy, synchronized
			// java.util.Stack. (ArrayDeque rejects null; a null could only
			// come from an empty expression left of '=', which is malformed
			// input anyway.)
			final Deque<Exp> targets = new ArrayDeque<>();
			targets.push(result);
			Exp rhs = parseOr();
			while (tok.tokenType() == ASSGN) {
				targets.push(rhs);
				tok.next();
				rhs = parseOr();
			}
			result = rhs;
			while (!targets.isEmpty())
				result = new Assign(targets.pop(), result);
		}
		return result;
	}

	/** Parses or ::= and (OR and)* — left-associative. */
	private Exp parseOr() {
		Exp left = parseAnd();
		while (tok.tokenType() == OR) {
			tok.next();
			left = new Or(left, parseAnd());
		}
		return left;
	}

	/** Parses and ::= equal (AND equal)* — left-associative. */
	private Exp parseAnd() {
		Exp left = parseEqual();
		while (tok.tokenType() == AND) {
			tok.next();
			left = new And(left, parseEqual());
		}
		return left;
	}

	/** Maps an equality-group token to its binary-operator node. */
	private Exp buildEqualExp(TokenType tt, Exp left, Exp right) {
		if (tt == EQ)
			return new Equal(left, right);
		if (tt == UNEQ)
			return new Unequal(left, right);
		fatalError(); // always throws
		return null;  // unreachable
	}

	/** Parses equal ::= comp ((EQ|UNEQ) comp)* — left-associative. */
	private Exp parseEqual() {
		Exp left = parseComp();
		TokenType op = tok.tokenType();
		while (EQUAL_GROUP.contains(op)) {
			tok.next();
			left = buildEqualExp(op, left, parseComp());
			op = tok.tokenType();
		}
		return left;
	}

	/** Maps a comparison-group token to its binary-operator node. */
	private Exp buildCompExp(TokenType tt, Exp left, Exp right) {
		if (tt == GTH)
			return new Gth(left, right);
		if (tt == GEQ)
			return new Geq(left, right);
		if (tt == LTH)
			return new Lth(left, right);
		if (tt == LEQ)
			return new Leq(left, right);
		fatalError(); // always throws
		return null;  // unreachable
	}

	/** Parses comp ::= add ((GTH|GEQ|LTH|LEQ) add)* — left-associative. */
	private Exp parseComp() {
		Exp left = parseAdd();
		TokenType op = tok.tokenType();
		while (COMP_GROUP.contains(op)) {
			tok.next();
			left = buildCompExp(op, left, parseAdd());
			op = tok.tokenType();
		}
		return left;
	}

	/** Maps an additive-group token to its binary-operator node. */
	private Exp buildSumExp(TokenType tt, Exp left, Exp right) {
		if (tt == ADD)
			return new Add(left, right);
		if (tt == SUB)
			return new Sub(left, right);
		fatalError(); // always throws
		return null;  // unreachable
	}

	/** Parses add ::= mul ((ADD|SUB) mul)* — left-associative. */
	private Exp parseAdd() {
		Exp left = parseMul();
		TokenType op = tok.tokenType();
		while (ADD_GROUP.contains(op)) {
			tok.next();
			left = buildSumExp(op, left, parseMul());
			op = tok.tokenType();
		}
		return left;
	}

	/** Maps a multiplicative-group token to its binary-operator node. */
	private Exp buildMulExp(TokenType tt, Exp left, Exp right) {
		if (tt == MUL)
			return new Mul(left, right);
		if (tt == DIV)
			return new Div(left, right);
		if (tt == MOD)
			return new Mod(left, right);
		fatalError(); // always throws
		return null;  // unreachable
	}

	/** Parses mul ::= dot ((MUL|DIV|MOD) dot)* — left-associative. */
	private Exp parseMul() {
		Exp left = parseDot();
		TokenType op = tok.tokenType();
		while (MUL_GROUP.contains(op)) {
			tok.next();
			left = buildMulExp(op, left, parseDot());
			op = tok.tokenType();
		}
		return left;
	}

	/** Parses dot ::= atom ['.' fieldAccess] — field-access chains follow. */
	private Exp parseDot() {
		final Exp atom = parseAtom();
		if (tok.tokenType() != DOT)
			return atom;
		consume(DOT);
		return parseFieldAccess(atom);
	}

	/**
	 * Parses an atomic expression: a literal, a unary expression, an
	 * identifier, a record literal, or a parenthesized expression.
	 * Returns null (the empty expression) on EOF or a lone ';'.
	 */
	private Exp parseAtom() {
		switch (tok.tokenType()) {
			case NUM:
				return parseNum();
			case BOOL:
				return parseBool();
			case INC:
			case DECR:
			case NOT:
			case SUB:
				return parseUnaryPre();
			case IDENT:
				return parseUnaryPost();
			case OPEN_CURLY: {
				// record literal: '{' fields '}'
				tok.next();
				final Exp record = parseFields();
				consume(CLOSED_CURLY);
				return record;
			}
			case OPEN_PAR: {
				// parenthesized expression: '(' exp ')'
				tok.next();
				final Exp inner = parseExp();
				consume(CLOSED_PAR);
				return inner;
			}
			case EOF:
				return null; // end of input: empty expression
			case SEMI_COL:
				consume(SEMI_COL);
				return null; // lone ';': empty expression
			default:
				unexpectedTokenError(tok.tokenString()); // always throws
				return null; // unreachable
		}
	}

	/**
	 * Parses the fields of a record literal: (IDENT ':' exp)[','] ... up to
	 * the closing '}' (left unconsumed). Duplicate field names are rejected.
	 */
	private Exp parseFields() {
		final List<Field> fields = new LinkedList<>();
		final Set<Ident> seen = new HashSet<>(); // field names seen so far
		while (tok.tokenType() != CLOSED_CURLY) {
			if (tok.tokenType() != IDENT) // each field must start with an identifier
				unexpectedTokenError(tok.tokenString());
			final Ident name = parseIdentName();
			if (!seen.add(name)) // already declared in this record
				throw new ParseException("Field '" + name.getName() + "' already exists");
			consume(COLON);
			fields.add(new FieldAST(name, parseExp()));
			if (tok.tokenType() != COMMA) // no separator: the field list ends here
				break;
			consume(COMMA);
		}
		return new Fields(fields);
	}

	// pre-condition: initial token already consumed

	private IntLiteral parseNum() {
		// Reads the current token's numeric value, then advances.
		final IntLiteral lit = new IntLiteral(tok.intValue());
		tok.next();
		return lit;
	}

	// pre-condition: initial token already consumed

	private BoolLiteral parseBool() {
		// Reads the current token's boolean value, then advances.
		final BoolLiteral lit = new BoolLiteral(tok.boolValue());
		tok.next();
		return lit;
	}

	// pre-condition: initial token already consumed

	/** Parses a prefix unary expression: NOT, unary minus, ++x, or --x. */
	private Exp parseUnaryPre() {
		final TokenType op = tok.tokenType();
		tok.next();
		if (op == NOT)
			return new Not(parseDot());
		if (op == SUB)
			return new Minus(parseDot());
		if (op == INC)
			return new Inc(parseDot(), true);  // pre-increment
		if (op == DECR)
			return new Decr(parseDot(), true); // pre-decrement
		fatalError(); // always throws
		return null;  // unreachable
	}

	// pre-condition: initial token already consumed

	/** Parses an identifier followed by an optional x++ / x-- or a call. */
	private Exp parseUnaryPost() {
		final Exp id = parseIdentName();
		final TokenType next = tok.tokenType();
		if (next == INC) {
			consume(INC);
			return new Inc(id, false);  // post-increment
		}
		if (next == DECR) {
			consume(DECR);
			return new Decr(id, false); // post-decrement
		}
		if (next == OPEN_PAR)
			return new FuncCall((Ident) id, parseArgs());
		return id; // plain identifier, no post-operation
	}

	/**
	 * Parses a chained field access (rec.a.b.c) rooted at expRec, followed
	 * by an optional post-increment or post-decrement.
	 */
	private Exp parseFieldAccess(Exp expRec) {
		Exp access = new FieldAccess(expRec, parseIdentName());
		while (tok.tokenType() == DOT) {
			consume(DOT);
			access = new FieldAccess(access, parseIdentName());
		}
		if (tok.tokenType() == INC) {
			consume(INC);
			return new Inc(access, false);
		}
		if (tok.tokenType() == DECR) {
			consume(DECR);
			return new Decr(access, false);
		}
		return access;
	}

	/** Builds an Ident from the current token's text, then advances. */
	private Ident parseIdentName() {
		final Ident ident = new Ident(tok.tokenString());
		tok.next();
		return ident;
	}

	/** Always throws; the message quotes the offending token. */
	private void unexpectedTokenError(String token) {
		throw new ParseException("Unexpected token '" + token + "'");
	}

	/** Unconditional failure for token states that should be impossible. */
	private void fatalError() {
		throw new ParseException("Fatal error");
	}

	/** Verifies the current token type without consuming it. */
	private void match(TokenType expected) {
		final TokenType found = tok.tokenType();
		if (found != expected)
			throw new ParseException(
					"Expecting '" + expected + "', found '" + found + "'");
	}

	/**
	 * Checks and consumes the current token. Returns true so the call can be
	 * used inside boolean loop conditions (see parseParams / parseExps).
	 */
	private boolean consume(TokenType expected) {
		match(expected);
		tok.next();
		return true;
	}
}
