package main

import (
	"errors"
	"fmt"
	"strings"
)

// Token is a single lexical unit produced by the scanner.
type Token struct {
	Type   int    // one of the TOKEN_* constants
	Value  string // lexeme text; for string literals, the content between the quotes with \" unescaped
	Line   uint   // 1-based line number of the token's first character
	Offset uint   // rune offset of the token's first character within its line (reset at each newline)
}

// Parser consumes a scanned token stream and builds an expression tree
// by recursive descent.
type Parser struct {
	tokens  []Token // the full token stream being parsed
	current int     // index of the next token to consume
}

// Token types recognized by the scanner. Within a const block iota
// increments automatically for each ConstSpec, so a single `= iota` on
// the first name yields consecutive values starting at zero — repeating
// `= iota` on every line (as before) is redundant. Only identity
// matters; the numeric values are never relied upon.
const (
	// Single-character tokens.
	TOKEN_LEFT_PAREN = iota
	TOKEN_RIGHT_PAREN
	TOKEN_LEFT_BRACE
	TOKEN_RIGHT_BRACE
	TOKEN_COMMA
	TOKEN_DOT
	TOKEN_MINUS
	TOKEN_PLUS
	TOKEN_SEMICOLON
	TOKEN_SLASH
	TOKEN_STAR

	// One or two character tokens.
	TOKEN_BANG
	TOKEN_BANG_EQUAL
	TOKEN_EQUAL
	TOKEN_EQUAL_EQUAL
	TOKEN_GREATER
	TOKEN_GREATER_EQUAL
	TOKEN_LESS
	TOKEN_LESS_EQUAL

	// Literals.
	TOKEN_IDENTIFIER
	TOKEN_STRING
	TOKEN_NUMBER

	// Keywords.
	TOKEN_AND
	TOKEN_CLASS
	TOKEN_ELSE
	TOKEN_FALSE
	TOKEN_FUN
	TOKEN_FOR
	TOKEN_IF
	TOKEN_NIL
	TOKEN_OR
	TOKEN_PRINT
	TOKEN_RETURN
	TOKEN_SUPER
	TOKEN_THIS
	TOKEN_TRUE
	TOKEN_VAR
	TOKEN_WHILE

	// End-of-input marker.
	TOKEN_EOF
)

// scan tokenizes the Lox-style source in `in`, appending the tokens it
// finds to `tokens`.
//
// Every Token records the line (1-based) and the rune offset of its
// first character within that line (1-based). Lexical errors do not
// stop the scan: they are collected and returned together as a single
// error after the whole input has been consumed; a nil return means the
// input scanned cleanly.
func scan(tokens *[]Token, in *strings.Reader) error {
	var scanErrs strings.Builder
	line := uint(1)
	offset := uint(0)

	// read consumes the next rune, keeping the column counter in sync.
	read := func() (rune, error) {
		ch, _, err := in.ReadRune()
		if err != nil {
			return 0, err
		}
		offset++
		return ch, nil
	}
	// unread pushes the last rune back. UnreadRune (not UnreadByte, which
	// the previous version used) is required for multi-byte UTF-8 runes.
	unread := func() {
		if err := in.UnreadRune(); err == nil {
			offset--
		}
	}
	// addToken records a token whose first character sits at column start.
	addToken := func(tokenType int, value string, start uint) {
		*tokens = append(*tokens, Token{tokenType, value, line, start})
	}
	reportError := func(msg string) {
		fmt.Fprintf(&scanErrs, "Error: Line %v Offset %v, %q\n", line, offset, msg)
	}

	isDigit := func(c rune) bool { return '0' <= c && c <= '9' }
	isAlpha := func(c rune) bool {
		return c == '_' || ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z')
	}

	// Unambiguous single-character tokens.
	singleChar := map[rune]int{
		'(': TOKEN_LEFT_PAREN, ')': TOKEN_RIGHT_PAREN,
		'{': TOKEN_LEFT_BRACE, '}': TOKEN_RIGHT_BRACE,
		',': TOKEN_COMMA, '.': TOKEN_DOT,
		'-': TOKEN_MINUS, '+': TOKEN_PLUS,
		';': TOKEN_SEMICOLON, '/': TOKEN_SLASH, '*': TOKEN_STAR,
	}
	// Characters that pair with a trailing '=': {token with '=', token
	// without}. Fixes the old bug where "<=" produced TOKEN_BANG_EQUAL
	// instead of TOKEN_LESS_EQUAL.
	withEqual := map[rune][2]int{
		'!': {TOKEN_BANG_EQUAL, TOKEN_BANG},
		'=': {TOKEN_EQUAL_EQUAL, TOKEN_EQUAL},
		'<': {TOKEN_LESS_EQUAL, TOKEN_LESS},
		'>': {TOKEN_GREATER_EQUAL, TOKEN_GREATER},
	}
	keywords := map[string]int{
		"and": TOKEN_AND, "class": TOKEN_CLASS, "else": TOKEN_ELSE,
		"false": TOKEN_FALSE, "fun": TOKEN_FUN, "for": TOKEN_FOR,
		"if": TOKEN_IF, "nil": TOKEN_NIL, "or": TOKEN_OR,
		"print": TOKEN_PRINT, "return": TOKEN_RETURN, "super": TOKEN_SUPER,
		"this": TOKEN_THIS, "true": TOKEN_TRUE, "var": TOKEN_VAR,
		"while": TOKEN_WHILE,
	}

	for {
		ch, err := read()
		if err != nil {
			break // end of input
		}
		start := offset

		if tokenType, ok := singleChar[ch]; ok {
			addToken(tokenType, string(ch), start)
			continue
		}
		if pair, ok := withEqual[ch]; ok {
			next, nextErr := read()
			if nextErr == nil && next == '=' {
				addToken(pair[0], string(ch)+"=", start)
				continue
			}
			if nextErr == nil {
				unread()
			}
			// A lone '!', '=', '<' or '>' — including at end of input,
			// which the previous version silently dropped.
			addToken(pair[1], string(ch), start)
			continue
		}

		switch {
		case ch == ' ' || ch == '\t' || ch == '\r':
			// Insignificant whitespace.

		case ch == '\n':
			line++
			offset = 0

		case ch == '"':
			// String literal. The token value is the text between the
			// quotes with `\"` unescaped; any other escape sequence is
			// kept verbatim (the previous version dropped the character
			// after the backslash). Line breaks are not allowed inside
			// string literals.
			var content strings.Builder
			for {
				c, strErr := read()
				if strErr != nil {
					reportError("No closure '\"' found.")
					break
				}
				if c == '"' {
					addToken(TOKEN_STRING, content.String(), start)
					break
				}
				if c == '\n' {
					reportError("Can't allow line break in a string literal.")
					unread()
					break
				}
				if c != '\\' {
					content.WriteRune(c)
					continue
				}
				escaped, escErr := read()
				if escErr != nil {
					reportError("No closure '\"' found.")
					break
				}
				if escaped == '"' {
					content.WriteRune('"')
				} else {
					content.WriteRune('\\')
					content.WriteRune(escaped)
				}
			}

		case isDigit(ch):
			// Number literal: digits with at most one decimal point. The
			// previous version never set its dot flag, so malformed
			// numbers like 1.2.3 were silently accepted.
			var number strings.Builder
			number.WriteRune(ch)
			sawDot := false
			for {
				c, numErr := read()
				if numErr != nil {
					// Number runs to the end of input: still a token
					// (previously it was dropped).
					addToken(TOKEN_NUMBER, number.String(), start)
					break
				}
				if c == '.' && sawDot {
					reportError("Wrong number form.")
					unread()
					break
				}
				if c == '.' || isDigit(c) {
					sawDot = sawDot || c == '.'
					number.WriteRune(c)
					continue
				}
				unread()
				addToken(TOKEN_NUMBER, number.String(), start)
				break
			}

		case isAlpha(ch):
			// Identifier or keyword: [A-Za-z_][A-Za-z0-9_]*.
			var word strings.Builder
			word.WriteRune(ch)
			classify := func() {
				text := word.String()
				tokenType, isKeyword := keywords[text]
				if !isKeyword {
					tokenType = TOKEN_IDENTIFIER
				}
				addToken(tokenType, text, start)
			}
			for {
				c, idErr := read()
				if idErr != nil {
					// Identifier runs to the end of input: still a token
					// (previously it was dropped).
					classify()
					break
				}
				if isAlpha(c) || isDigit(c) {
					word.WriteRune(c)
					continue
				}
				unread()
				classify()
				break
			}

		default:
			// The previous version skipped unknown characters silently;
			// report them so callers learn about the problem.
			reportError(fmt.Sprintf("Unexpected character %q.", ch))
		}
	}

	if scanErrs.Len() > 0 {
		return errors.New(scanErrs.String())
	}
	return nil
}

// match consumes the next token and reports true when its type is any
// of the given candidates; otherwise the parser does not advance.
func (parser *Parser) match(types ...int) bool {
	for _, candidate := range types {
		if !parser.check(candidate) {
			continue
		}
		parser.advance()
		return true
	}
	return false
}

// check reports whether the next token has type t, without consuming it.
// Always false once the stream is exhausted.
func (parser *Parser) check(t int) bool {
	return !parser.isAtEnd() && parser.peek().Type == t
}

// advance consumes the next token when any remain and returns the most
// recently consumed token.
func (parser *Parser) advance() Token {
	if parser.isAtEnd() {
		return parser.previous()
	}
	parser.current++
	return parser.previous()
}

// isAtEnd reports whether every token has been consumed.
// The boolean condition is returned directly instead of the previous
// `if cond { return true }; return false` anti-idiom.
func (parser *Parser) isAtEnd() bool {
	return parser.current >= len(parser.tokens)
}

// peek returns the next token to be consumed without advancing.
// Callers must ensure the parser is not at the end of the stream.
func (parser *Parser) peek() Token {
	return parser.tokens[parser.current]
}

// previous returns the most recently consumed token.
// Callers must ensure at least one token has been consumed.
func (parser *Parser) previous() Token {
	return parser.tokens[parser.current-1]
}

// expression → equality ;
// expression parses the lowest-precedence rule of the grammar.
func (parser *Parser) expression() Expr {
	return parser.equality()
}

// consume expects the next token to be of type t and advances past it.
// When the expectation fails it prints explain and parsing continues.
func (parser *Parser) consume(t int, explain string) {
	if parser.match(t) {
		return
	}
	fmt.Println(explain)
}

// equality → comparison ( ( "!=" | "==" ) comparison )*
// Successive (in)equality operators fold left-associatively.
func (parser *Parser) equality() Expr {
	left := parser.comparison()
	for parser.match(TOKEN_BANG_EQUAL, TOKEN_EQUAL_EQUAL) {
		op := parser.previous()
		left = &Binary{left, op, parser.comparison()}
	}
	return left
}

// comparison → term ( ( ">" | ">=" | "<" | "<=" ) term )* ;
// Successive comparison operators fold left-associatively.
func (parser *Parser) comparison() Expr {
	left := parser.term()
	for parser.match(TOKEN_GREATER, TOKEN_GREATER_EQUAL, TOKEN_LESS, TOKEN_LESS_EQUAL) {
		op := parser.previous()
		left = &Binary{left, op, parser.term()}
	}
	return left
}

// term → factor ( ( "-" | "+" ) factor )* ;
// Additive operators fold left-associatively.
func (parser *Parser) term() Expr {
	left := parser.factor()
	for parser.match(TOKEN_MINUS, TOKEN_PLUS) {
		op := parser.previous()
		left = &Binary{left, op, parser.factor()}
	}
	return left
}

// factor → unary ( ( "/" | "*" ) unary )* ;
// Multiplicative operators fold left-associatively.
func (parser *Parser) factor() Expr {
	left := parser.unary()
	for parser.match(TOKEN_SLASH, TOKEN_STAR) {
		op := parser.previous()
		left = &Binary{left, op, parser.unary()}
	}
	return left
}

// unary → ( "!" | "-" ) unary
//       | primary ;
// unary parses a prefix '!' or '-' (right-associatively) or falls
// through to primary.
func (parser *Parser) unary() Expr {
	if parser.match(TOKEN_BANG, TOKEN_MINUS) {
		operator := parser.previous()
		operand := parser.unary()
		return &Unary{operator, operand}
	}
	// Bug fix: the previous version called parser.primary() but discarded
	// its result, so every non-unary expression parsed to a nil Expr.
	return parser.primary()
}

// primary → NUMBER | STRING | "true" | "false" | "nil"
//         | "(" expression ")" ;
// primary parses a literal or a parenthesized grouping.
func (parser *Parser) primary() Expr {
	switch {
	case parser.match(TOKEN_TRUE):
		return &Literal{true}
	case parser.match(TOKEN_FALSE):
		return &Literal{false}
	case parser.match(TOKEN_NIL):
		return &Literal{nil}
	case parser.match(TOKEN_NUMBER, TOKEN_STRING):
		return &Literal{parser.previous().Value}
	case parser.match(TOKEN_LEFT_PAREN):
		grouped := parser.expression()
		parser.consume(TOKEN_RIGHT_PAREN, fmt.Sprintf("Expect ')' after expression. At Line %v Offset %v.", parser.previous().Line, parser.previous().Offset))
		return grouped
	}
	// NOTE(review): when no token matches, the caller receives a nil
	// Expr with no diagnostic — consider reporting "Expect expression."
	return nil
}

func main() {
	// Scan a small sample program and dump the token stream plus any
	// scan error.
	var scanned []Token
	scanErr := scan(&scanned, strings.NewReader(`
		fun abc() {
			var i = 1234;
			print "hello,world";
		}
	`))
	fmt.Println(scanned)
	fmt.Println(scanErr)

	// Pretty-print a hand-built AST for the expression (-123) * 45.78.
	var tree Expr = &Binary{
		&Unary{
			Token{TOKEN_MINUS, "-", 1, 1},
			&Literal{123}},
		Token{TOKEN_STAR, "*", 1, 1},
		&Literal{45.78}}
	printer := new(AstPrinter)
	fmt.Println(printer.print(&tree))
}
