package parser


import . "compiler/ast"
import . "compiler/lexer"


// prefixexp ::= var | functioncall | ‘(’ exp ‘)’
// var ::=  Name | prefixexp ‘[’ exp ‘]’ | prefixexp ‘.’ Name
// functioncall ::=  prefixexp args | prefixexp ‘:’ Name args

/*
prefixexp ::= Name
	| ‘(’ exp ‘)’
	| prefixexp ‘[’ exp ‘]’
	| prefixexp ‘.’ Name
	| prefixexp [‘:’ Name] args
*/
// A prefix expression can only start with an identifier or a left
// parenthesis, so one token of lookahead is enough: parse either a Name
// or a parenthesized expression, then hand off to _finishPrefixExp() to
// consume any trailing index/field/call suffixes.
func parsePrefixExp(lexer *Lexer) Exp {

	var head Exp
	switch lexer.LookAhead() {
	case TOKEN_IDENTIFIER: // Name
		line, name := lexer.NextIdentifier()
		head = &NameExp{line, name}
	default: // '(' exp ')'
		head = parseParensExp(lexer)
	}

	return _finishPrefixExp(lexer, head)
}


// Parentheses change the semantics of vararg and function-call
// expressions (in a multiple assignment, wrapping them in parens
// truncates their results to exactly one value), so the parens must be
// preserved for those two kinds. They are also preserved around var
// expressions (NameExp / TableAccessExp), otherwise _checkVar() would
// misbehave.
// TODO ?
func parseParensExp(lexer *Lexer) Exp {

	lexer.NextTokenOfKind(TOKEN_SEP_LPAREN) // '('
	inner := parseExp(lexer)                // exp
	lexer.NextTokenOfKind(TOKEN_SEP_RPAREN) // ')'

	// Decide whether the parens are semantically significant.
	keep := false
	switch inner.(type) {
	case *VarargExp, *FuncCallExp, *NameExp, *TableAccessExp:
		keep = true
	}

	if keep {
		return &ParensExp{inner}
	}
	// no need to keep parens
	return inner
}


// _finishPrefixExp repeatedly consumes the suffixes that may follow an
// already-parsed prefix expression — indexing ('[' exp ']'), field
// access ('.' Name), and (method) call arguments — wrapping exp on each
// iteration, and returns once the next token starts no suffix.
//
// Fix: the original had a second `return exp` after the infinite for
// loop; it was unreachable dead code (flagged by `go vet`) and has been
// removed.
func _finishPrefixExp(lexer *Lexer, exp Exp) Exp {

	for {
		switch lexer.LookAhead() {
		case TOKEN_SEP_LBRACK: // prefixexp '[' exp ']'
			lexer.NextToken()                       // '['
			keyExp := parseExp(lexer)               // exp
			lexer.NextTokenOfKind(TOKEN_SEP_RBRACK) // ']'
			exp = &TableAccessExp{lexer.Line(), exp, keyExp}
		case TOKEN_SEP_DOT: // prefixexp '.' Name
			lexer.NextToken()                    // '.'
			line, name := lexer.NextIdentifier() // Name
			keyExp := &StringExp{line, name}
			exp = &TableAccessExp{line, exp, keyExp}
		case TOKEN_SEP_COLON, // prefixexp ':' Name args
			// obj:func(), func"string", func{table}, func() are all legal
			TOKEN_SEP_LPAREN, TOKEN_SEP_LCURLY, TOKEN_STRING: // prefixexp args
			exp = _finishFuncCallExp(lexer, exp)
		default:
			return exp
		}
	}
}


// _finishFuncCallExp parses the call portion of a prefix expression.
// functioncall ::=  prefixexp args | prefixexp ‘:’ Name args
// Note: the four lexer-driven steps below must stay in this exact order,
// since each one advances the token stream.
func _finishFuncCallExp(lexer *Lexer, prefixExp Exp) *FuncCallExp {

	methodName := _parseNameExp(lexer) // optional ':' Name
	callLine := lexer.Line()           // TODO
	argList := _parseArgs(lexer)
	endLine := lexer.Line()

	return &FuncCallExp{callLine, endLine, prefixExp, methodName, argList}
}


// _parseNameExp parses an optional method name (':' Name) and returns it
// as a StringExp, or nil when no colon follows.
func _parseNameExp(lexer *Lexer) *StringExp {

	if lexer.LookAhead() != TOKEN_SEP_COLON {
		return nil
	}
	lexer.NextToken() // ':'
	line, name := lexer.NextIdentifier()
	return &StringExp{line, name}
}


// _parseArgs parses the argument part of a function call.
// args ::=  ‘(’ [explist] ‘)’ | tableconstructor | LiteralString
func _parseArgs(lexer *Lexer) []Exp {
	switch lexer.LookAhead() {
	case TOKEN_SEP_LCURLY: // '{' [fieldlist] '}'
		return []Exp{parseTableConstructorExp(lexer)}

	case TOKEN_SEP_LPAREN: // '(' [explist] ')'
		lexer.NextToken() // TOKEN_SEP_LPAREN
		var list []Exp
		if lexer.LookAhead() != TOKEN_SEP_RPAREN {
			list = parseExpList(lexer)
		}
		lexer.NextTokenOfKind(TOKEN_SEP_RPAREN)
		return list

	default: // LiteralString
		line, str := lexer.NextTokenOfKind(TOKEN_STRING)
		return []Exp{&StringExp{line, str}}
	}
}