package parser


import . "compiler/ast"
import . "compiler/lexer"


var _statEmpty = &EmptyStat{}		// shared singleton for all empty statements, saving one allocation per ';'


/*
stat ::=  ‘;’
	| break
	| ‘::’ Name ‘::’
	| goto Name
	| do block end
	| while exp do block end
	| repeat block until exp
	| if exp then block {elseif exp then block} [else block] end
	| for Name ‘=’ exp ‘,’ exp [‘,’ exp] do block end
	| for namelist in explist do block end
	| function funcname funcbody
	| local function Name funcbody
	| local namelist [‘=’ explist]
	| varlist ‘=’ explist
	| functioncall
*/

// parseStat dispatches on one token of lookahead to the statement parsers.
// A single lookahead token identifies the first 13 statement forms.
// Distinguishing a local variable declaration from a local function
// definition needs 2 tokens, and a numeric vs. generic for loop needs 3;
// those cases are resolved inside their dedicated parsers. Assignments and
// function calls share an arbitrarily long prefix-expression structure, so
// no fixed lookahead separates them — see parseAssignOrFuncCallStat.
func parseStat(lexer *Lexer) Stat {
	switch lexer.LookAhead() {
	case TOKEN_SEP_SEMI:
		return parseEmptyStat(lexer)
	case TOKEN_KW_BREAK:
		return parseBreakStat(lexer)
	case TOKEN_SEP_LABEL:
		return parseLabelStat(lexer)
	case TOKEN_KW_GOTO:
		return parseGotoStat(lexer)
	case TOKEN_KW_DO:
		return parseDoStat(lexer)
	case TOKEN_KW_WHILE:
		return parseWhileStat(lexer)
	case TOKEN_KW_REPEAT:
		return parseRepeatStat(lexer)
	case TOKEN_KW_IF:
		return parseIfStat(lexer)
	case TOKEN_KW_FOR:
		return parseForStat(lexer)
	case TOKEN_KW_FUNCTION:
		return parseFuncDefStat(lexer)
	case TOKEN_KW_LOCAL:
		return parseLocalAssignOrFuncDefStat(lexer)
	default:
		return parseAssignOrFuncCallStat(lexer)
	}
}


// parseEmptyStat consumes a lone semicolon. Every empty statement shares
// the single _statEmpty instance.
// ;
func parseEmptyStat(lexer *Lexer) *EmptyStat {
	lexer.NextTokenOfKind(TOKEN_SEP_SEMI) // ';'
	return _statEmpty
}


// parseBreakStat consumes the 'break' keyword, recording only the line
// number for later code generation and error reporting.
// break
func parseBreakStat(lexer *Lexer) *BreakStat {
	lexer.NextTokenOfKind(TOKEN_KW_BREAK) // 'break'
	return &BreakStat{lexer.Line()}
}


// parseLabelStat parses a label statement; only the label name is kept.
// '::' Name '::'
func parseLabelStat(lexer *Lexer) *LabelStat {
	lexer.NextTokenOfKind(TOKEN_SEP_LABEL) // '::'
	_, name := lexer.NextIdentifier()      // Name
	lexer.NextTokenOfKind(TOKEN_SEP_LABEL) // '::'
	return &LabelStat{name}
}

// parseGotoStat parses a goto statement; only the target label name is kept.
// goto Name
func parseGotoStat(lexer *Lexer) *GotoStat {
	lexer.NextTokenOfKind(TOKEN_KW_GOTO) // 'goto'
	_, label := lexer.NextIdentifier()   // Name
	return &GotoStat{label}
}


// parseDoStat parses a do-block: skip 'do', parse the enclosed block,
// then skip the closing 'end'.
// do block end
func parseDoStat(lexer *Lexer) *DoStat {
	lexer.NextTokenOfKind(TOKEN_KW_DO)  // 'do'
	body := parseBlock(lexer)           // block
	lexer.NextTokenOfKind(TOKEN_KW_END) // 'end'
	return &DoStat{body}
}


// parseWhileStat parses a while loop: the condition expression first,
// then the loop body between 'do' and 'end'.
// while exp do block end
func parseWhileStat(lexer *Lexer) *WhileStat {
	lexer.NextTokenOfKind(TOKEN_KW_WHILE) // 'while'
	cond := parseExp(lexer)               // exp
	lexer.NextTokenOfKind(TOKEN_KW_DO)    // 'do'
	body := parseBlock(lexer)             // block
	lexer.NextTokenOfKind(TOKEN_KW_END)   // 'end'
	return &WhileStat{cond, body}
}


// parseRepeatStat parses a repeat loop: the body comes first, then the
// terminating condition after 'until'.
// repeat block until exp
func parseRepeatStat(lexer *Lexer) *RepeatStat {
	lexer.NextTokenOfKind(TOKEN_KW_REPEAT) // 'repeat'
	body := parseBlock(lexer)              // block
	lexer.NextTokenOfKind(TOKEN_KW_UNTIL)  // 'until'
	cond := parseExp(lexer)                // exp
	return &RepeatStat{body, cond}
}


// parseIfStat parses an if statement into parallel slices where
// conds[i] guards bodies[i]. A trailing 'else' clause is normalized to
// "elseif true then", so later stages only deal with one shape.
// if exp then block {elseif exp then block} [else block] end
func parseIfStat(lexer *Lexer) *IfStat {
	conds := make([]Exp, 0, 4)
	bodies := make([]*Block, 0, 4)

	lexer.NextTokenOfKind(TOKEN_KW_IF) // 'if'
	conds = append(conds, parseExp(lexer))
	lexer.NextTokenOfKind(TOKEN_KW_THEN) // 'then'
	bodies = append(bodies, parseBlock(lexer))

	for lexer.LookAhead() == TOKEN_KW_ELSEIF {
		lexer.NextToken() // 'elseif'
		conds = append(conds, parseExp(lexer))
		lexer.NextTokenOfKind(TOKEN_KW_THEN) // 'then'
		bodies = append(bodies, parseBlock(lexer))
	}

	// else block  =>  elseif true then block
	if lexer.LookAhead() == TOKEN_KW_ELSE {
		lexer.NextToken() // 'else'
		conds = append(conds, &TrueExp{lexer.Line()})
		bodies = append(bodies, parseBlock(lexer))
	}

	lexer.NextTokenOfKind(TOKEN_KW_END) // 'end'
	return &IfStat{conds, bodies}
}


// Both for-loop forms start with "for Name", so after consuming those two
// tokens a single lookahead decides the kind (the lexer only supports one
// token of lookahead; repeated LookAhead calls return the same token):
// '=' means a numeric for loop, anything else is parsed as a generic one.
// for Name '=' exp ',' exp [',' exp] do block end
// for namelist in explist do block end
func parseForStat(lexer *Lexer) Stat {
	lineOfFor, _ := lexer.NextTokenOfKind(TOKEN_KW_FOR) // 'for'
	_, name := lexer.NextIdentifier()                   // Name
	if lexer.LookAhead() == TOKEN_OP_ASSIGN {
		return _finishForNumStat(lexer, lineOfFor, name)
	}
	return _finishForInStat(lexer, name)
}


// _finishForNumStat parses the remainder of a numeric for loop; 'for' and
// the control variable have already been consumed. A missing step
// expression defaults to the integer constant 1.
// for Name '=' exp ',' exp [',' exp] do block end
func _finishForNumStat(lexer *Lexer, lineOfFor int, varName string) *ForNumStat {
	lexer.NextTokenOfKind(TOKEN_OP_ASSIGN) // '='
	start := parseExp(lexer)               // exp
	lexer.NextTokenOfKind(TOKEN_SEP_COMMA) // ','
	limit := parseExp(lexer)               // exp

	var step Exp
	if lexer.LookAhead() == TOKEN_SEP_COMMA {
		lexer.NextToken()      // ','
		step = parseExp(lexer) // exp
	} else {
		step = &IntegerExp{lexer.Line(), 1} // implicit step of 1
	}

	lineOfDo, _ := lexer.NextTokenOfKind(TOKEN_KW_DO) // 'do'
	body := parseBlock(lexer)                         // block
	lexer.NextTokenOfKind(TOKEN_KW_END)               // 'end'

	return &ForNumStat{lineOfFor, lineOfDo, varName, start, limit, step, body}
}


// _finishForInStat parses the remainder of a generic for loop; 'for' and
// the first name have already been consumed.
// for namelist in explist do block end
// namelist ::= Name {',' Name}
// explist ::= exp {',' exp}
func _finishForInStat(lexer *Lexer, name0 string) *ForInStat {
	nameList := _finishNameList(lexer, name0)         // namelist
	lexer.NextTokenOfKind(TOKEN_KW_IN)                // 'in'
	expList := parseExpList(lexer)                    // explist
	lineOfDo, _ := lexer.NextTokenOfKind(TOKEN_KW_DO) // 'do'
	body := parseBlock(lexer)                         // block
	lexer.NextTokenOfKind(TOKEN_KW_END)               // 'end'
	return &ForInStat{lineOfDo, nameList, expList, body}
}


// _finishNameList collects a comma-separated identifier list whose first
// name has already been read by the caller.
// namelist ::= Name {',' Name}
func _finishNameList(lexer *Lexer, name0 string) []string {
	names := []string{name0} // Name
	for lexer.LookAhead() == TOKEN_SEP_COMMA {
		lexer.NextToken()                 // ','
		_, next := lexer.NextIdentifier() // Name
		names = append(names, next)
	}
	return names
}


// After 'local', one token of lookahead tells a local function definition
// apart from a local variable declaration.
// local function Name funcbody
// local namelist ['=' explist]
func parseLocalAssignOrFuncDefStat(lexer *Lexer) Stat {
	lexer.NextTokenOfKind(TOKEN_KW_LOCAL) // 'local'
	if lexer.LookAhead() == TOKEN_KW_FUNCTION {
		return _finishLocalFuncDefStat(lexer)
	}
	return _finishLocalVarDeclStat(lexer)
}


/*
http://www.lua.org/manual/5.3/manual.html#3.4.11

function f() end          =>  f = function() end
function t.a.b.c.f() end  =>  t.a.b.c.f = function() end
function t.a.b.c:f() end  =>  t.a.b.c.f = function(self) end
local function f() end    =>  local f; f = function() end

The statement `local function f () body end`
translates to `local f; f = function () body end`
not to `local f = function () body end`
(This only makes a difference when the body of the function
 contains references to f.)
*/
// _finishLocalFuncDefStat parses the remainder of a local function
// definition; 'local' has already been consumed by the caller.
// local function Name funcbody
func _finishLocalFuncDefStat(lexer *Lexer) *LocalFuncDefStat {
	lexer.NextTokenOfKind(TOKEN_KW_FUNCTION) // 'function'
	_, name := lexer.NextIdentifier()        // Name
	fdExp := parseFuncDefExp(lexer)          // funcbody
	return &LocalFuncDefStat{name, fdExp}
}


// _finishLocalVarDeclStat parses the remainder of a local variable
// declaration; 'local' has already been consumed. The initializer list is
// optional and remains nil when absent.
// local namelist ['=' explist]
func _finishLocalVarDeclStat(lexer *Lexer) *LocalVarDeclStat {
	_, name0 := lexer.NextIdentifier()        // Name
	nameList := _finishNameList(lexer, name0) // {',' Name}

	var expList []Exp
	if lexer.LookAhead() == TOKEN_OP_ASSIGN {
		lexer.NextToken()             // '='
		expList = parseExpList(lexer) // explist
	}

	return &LocalVarDeclStat{lexer.Line(), nameList, expList}
}


// Assignments and function calls both begin with a prefix expression of
// unbounded length, so no fixed amount of lookahead can separate them.
// Instead a prefix expression is parsed first: a function call may be a
// statement or a prefix expression but never a var expression, so if the
// result is a call it already is a complete statement; otherwise it must
// be the first var of an assignment, whose remainder is parsed next.
// varlist '=' explist
// functioncall
func parseAssignOrFuncCallStat(lexer *Lexer) Stat {
	prefixExp := parsePrefixExp(lexer)
	if call, ok := prefixExp.(*FuncCallExp); ok {
		return call
	}
	return parseAssignStat(lexer, prefixExp)
}


// parseAssignStat parses the rest of an assignment whose first var has
// already been consumed by the caller.
// varlist '=' explist
func parseAssignStat(lexer *Lexer, var0 Exp) *AssignStat {
	varList := _finishVarList(lexer, var0) // varlist
	lexer.NextTokenOfKind(TOKEN_OP_ASSIGN) // '='
	expList := parseExpList(lexer)         // explist
	return &AssignStat{lexer.Line(), varList, expList}
}


// _finishVarList collects the vars of a varlist, validating each entry
// with _checkVar; the first var has already been parsed by the caller.
// varlist ::= var {',' var}
func _finishVarList(lexer *Lexer, var0 Exp) []Exp {
	vars := []Exp{_checkVar(lexer, var0)} // var
	for lexer.LookAhead() == TOKEN_SEP_COMMA {
		lexer.NextToken()             // ','
		next := parsePrefixExp(lexer) // var
		vars = append(vars, _checkVar(lexer, next))
	}
	return vars
}


// _checkVar ensures exp is a valid assignment target: a name or a table
// access, never a function call. Anything else is reported as a syntax
// error by requesting an impossible token kind from the lexer.
// var ::= Name | prefixexp '[' exp ']' | prefixexp '.' Name
func _checkVar(lexer *Lexer, exp Exp) Exp {
	switch exp.(type) {
	case *NameExp:
		return exp
	case *TableAccessExp:
		return exp
	}
	lexer.NextTokenOfKind(-1) // no token has kind -1, so this raises the error
	panic("unreachable!")
}


// parseFuncDefStat desugars a function definition statement into an
// assignment (e.g. `function t.a:f() end` => `t.a.f = function(self) end`).
// A colon in the funcname prepends an implicit 'self' parameter.
// function funcname funcbody
// funcname ::= Name {'.' Name} [':' Name]
// funcbody ::= '(' [parlist] ')' block end
// parlist ::= namelist [',' '...'] | '...'
// namelist ::= Name {',' Name}
func parseFuncDefStat(lexer *Lexer) *AssignStat {
	lexer.NextTokenOfKind(TOKEN_KW_FUNCTION) // 'function'
	fnExp, hasColon := _parseFuncName(lexer) // funcname
	fdExp := parseFuncDefExp(lexer)          // funcbody
	if hasColon { // v:name(args)  =>  v.name(self, args)
		fdExp.ParList = append([]string{"self"}, fdExp.ParList...)
	}

	return &AssignStat{
		LastLine: fdExp.Line,
		VarList:  []Exp{fnExp},
		ExpList:  []Exp{fdExp},
	}
}


// _parseFuncName parses a function name such as t.a.b:f, building a chain
// of table-access expressions over a root NameExp. hasColon reports
// whether the last segment used ':', in which case the caller must inject
// an implicit 'self' parameter.
// funcname ::= Name {'.' Name} [':' Name]
func _parseFuncName(lexer *Lexer) (exp Exp, hasColon bool) {
	line, name := lexer.NextIdentifier()
	exp = &NameExp{line, name}

	for lexer.LookAhead() == TOKEN_SEP_DOT {
		lexer.NextToken() // '.'
		line, name = lexer.NextIdentifier()
		exp = &TableAccessExp{line, exp, &StringExp{line, name}}
	}

	if lexer.LookAhead() == TOKEN_SEP_COLON {
		lexer.NextToken() // ':'
		line, name = lexer.NextIdentifier()
		exp = &TableAccessExp{line, exp, &StringExp{line, name}}
		hasColon = true
	}

	return
}