module main

// -------------------------- lexer start ---------------------------------
// TokenKind enumerates every lexical token category the lexer can emit.
pub enum TokenKind {
	// special
	illegal
	eof
	// operators
	assign
	plus
	minus
	asterisk
	slash
	// delimiters
	comma
	semicolon
	colon
	// comparison / prefix
	equal
	bang
	bang_equal
	greater
	less
	greater_equal
	less_equal
	// brackets
	lparen
	rparen
	lbrace
	rbrace
	lbracket
	rbracket
	// literals and identifiers
	integer
	string
	identifier
	// keywords
	let
	function
}

// Token is a single lexical unit: its category plus the source text.
pub struct Token {
pub:
	kind    TokenKind // category of the token
	literal string    // source text ('!=' etc. is synthesized for two-char operators)
}

// Lexer walks the raw input bytes and produces tokens on demand.
pub struct Lexer {
	input []u8 // raw source bytes
mut:
	position      int    // index of the byte currently held in `ch`
	read_position int    // index of the next byte to read
	ch            string // current character as a one-byte string; '\0' at end of input
}

// Lexer.new creates a lexer over `input` and primes it so that the
// first character is already loaded into `ch`.
pub fn Lexer.new(input string) Lexer {
	// position/read_position/ch start at their zero values (0, 0, ''),
	// exactly as an explicit initialization would set them.
	mut lexer := Lexer{
		input: input.bytes()
	}
	lexer.read_char()
	return lexer
}

// skip_whitespace consumes all insignificant whitespace so next_token
// always starts on a meaningful character.
// Fix: the original only skipped ' ', so tabs and newlines in the input
// were tokenized as illegal tokens; skip the full ASCII whitespace set.
fn (mut lexer Lexer) skip_whitespace() {
	for lexer.ch in [' ', '\t', '\n', '\r'] {
		lexer.read_char()
	}
}

// new_token packs a kind and its literal text into a Token value.
fn (lexer Lexer) new_token(kind TokenKind, literal string) Token {
	return Token{
		kind: kind
		literal: literal
	}
}

// next_token scans and returns the next token from the input.
// Single- and two-character tokens fall through to the trailing
// read_char call; the identifier/number branches in `else` return
// early because their read_* helpers have already advanced the lexer
// past the token.
fn (mut lexer Lexer) next_token() Token {
	lexer.skip_whitespace()

	token := match lexer.ch {
		'!' {
			// two-character '!=' vs plain '!'
			if lexer.peek_char() == '=' {
				lexer.read_char()
				lexer.new_token(TokenKind.bang_equal, '!=')
			} else {
				lexer.new_token(TokenKind.bang, lexer.ch)
			}
		}
		'=' {
			// two-character '==' vs assignment '='
			if lexer.peek_char() == '=' {
				lexer.read_char()
				lexer.new_token(TokenKind.equal, '==')
			} else {
				lexer.new_token(TokenKind.assign, lexer.ch)
			}
		}
		'>' {
			if lexer.peek_char() == '=' {
				lexer.read_char()
				lexer.new_token(TokenKind.greater_equal, '>=')
			} else {
				lexer.new_token(TokenKind.greater, lexer.ch)
			}
		}
		'<' {
			if lexer.peek_char() == '=' {
				lexer.read_char()
				lexer.new_token(TokenKind.less_equal, '<=')
			} else {
				lexer.new_token(TokenKind.less, lexer.ch)
			}
		}
		':' {
			lexer.new_token(TokenKind.colon, lexer.ch)
		}
		',' {
			lexer.new_token(TokenKind.comma, lexer.ch)
		}
		';' {
			lexer.new_token(TokenKind.semicolon, lexer.ch)
		}
		'+' {
			lexer.new_token(TokenKind.plus, lexer.ch)
		}
		'-' {
			lexer.new_token(TokenKind.minus, lexer.ch)
		}
		'*' {
			lexer.new_token(TokenKind.asterisk, lexer.ch)
		}
		'/' {
			lexer.new_token(TokenKind.slash, lexer.ch)
		}
		'(' {
			lexer.new_token(TokenKind.lparen, lexer.ch)
		}
		')' {
			lexer.new_token(TokenKind.rparen, lexer.ch)
		}
		'{' {
			lexer.new_token(TokenKind.lbrace, lexer.ch)
		}
		'}' {
			lexer.new_token(TokenKind.rbrace, lexer.ch)
		}
		'[' {
			lexer.new_token(TokenKind.lbracket, lexer.ch)
		}
		']' {
			lexer.new_token(TokenKind.rbracket, lexer.ch)
		}
		'\0' {
			// end of input (sentinel set by read_char)
			lexer.new_token(TokenKind.eof, ' ')
		}
		'"' {
			// read_string returns the contents without the quotes
			lexer.new_token(TokenKind.string, lexer.read_string())
		}
		else {
			tok := if lexer.is_letter(lexer.ch) {
				literal := lexer.read_identifier()
				kind := lexer.lookup_identifier_or_keywords(literal)
				lexer.new_token(kind, literal)
			} else if lexer.is_digit(lexer.ch) {
				lexer.new_token(TokenKind.integer, lexer.read_number())
			} else {
				lexer.new_token(TokenKind.illegal, lexer.ch)
			}
			// early return: read_identifier/read_number already advanced
			// past the token, so the read_char below must not run
			return tok
		}
	}

	lexer.read_char()

	return token
}

// lookup_identifier_or_keywords decides whether a scanned word is a
// reserved keyword or a plain identifier.
fn (lexer Lexer) lookup_identifier_or_keywords(identifier string) TokenKind {
	return match identifier {
		'fn' { TokenKind.function }
		'let' { TokenKind.let }
		else { TokenKind.identifier }
	}
}

// is_digit reports whether `ch` is a single ASCII decimal digit.
fn (lexer Lexer) is_digit(ch string) bool {
	return '0' <= ch && ch <= '9'
}

// is_letter reports whether `ch` can start or continue an identifier:
// an ASCII letter or an underscore.
fn (lexer Lexer) is_letter(ch string) bool {
	if ch == '_' {
		return true
	}
	return ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')
}

// read_identifier consumes a run of identifier characters starting at
// the current position and returns it as a string. On exit `ch` holds
// the first non-identifier character.
// Improvement: slice the underlying input once (as read_string does)
// instead of concatenating one character at a time, which reallocated
// the string on every iteration (accidental O(n^2)).
fn (mut lexer Lexer) read_identifier() string {
	start := lexer.position
	for lexer.is_letter(lexer.ch) {
		lexer.read_char()
	}
	// `position` now indexes the first byte after the identifier.
	return lexer.input[start..lexer.position].bytestr()
}

// read_number consumes a run of decimal digits starting at the current
// position and returns it as a string. On exit `ch` holds the first
// non-digit character.
// Improvement: slice the underlying input once (as read_string does)
// instead of per-character string concatenation (accidental O(n^2)).
fn (mut lexer Lexer) read_number() string {
	start := lexer.position
	for lexer.is_digit(lexer.ch) {
		lexer.read_char()
	}
	// `position` now indexes the first byte after the number.
	return lexer.input[start..lexer.position].bytestr()
}

// read_string consumes a double-quoted string literal and returns its
// contents without the surrounding quotes. On entry `ch` is the
// opening '"'; on exit `ch` is the closing '"' (the caller's trailing
// read_char in next_token steps past it).
fn (mut lexer Lexer) read_string() string {
	// First content byte sits just after the opening quote.
	position := lexer.position + 1
	lexer.read_char()

	// Scan until the closing quote or end of input. An unterminated
	// string silently yields everything up to EOF.
	for lexer.ch != '"' && lexer.ch != '\0' {
		lexer.read_char()
	}

	string_slice := lexer.input[position..lexer.position]

	return string_slice.bytestr()
}

// read_char advances the lexer by one byte, loading it into `ch`.
// Past the end of input `ch` becomes the sentinel '\0'.
fn (mut lexer Lexer) read_char() {
	if lexer.read_position >= lexer.input.len {
		lexer.ch = '\0'
	} else {
		lexer.ch = lexer.input[lexer.read_position].ascii_str()
	}
	// Invariant: `position` always indexes the byte currently in `ch`.
	lexer.position = lexer.read_position
	lexer.read_position += 1
}

// peek_char returns the next character without consuming it, or the
// '\0' sentinel when the input is exhausted.
fn (lexer Lexer) peek_char() string {
	if lexer.read_position < lexer.input.len {
		return lexer.input[lexer.read_position].ascii_str()
	}
	return '\0'
}

// ------------------------ lexer end --------------------------------

// ------------------------ ast start --------------------------------
// Node is the behavior shared by every AST node.
pub interface Node {
	token_literal() string // literal of the token the node was built from
	print_string() string  // source-like rendering of the node
}

// Statement is a Node that appears in statement position.
pub interface Statement {
	Node
	statement_node() // marker method only
}

// Expression is a Node that produces a value.
pub interface Expression {
	Node
	expression_node() // marker method only
}

// Program is the root AST node: the ordered list of parsed statements.
pub struct Program {
pub mut:
	statements []Statement
}

// token_literal reports the literal of the first statement, or an
// empty string for an empty program.
fn (p Program) token_literal() string {
	if p.statements.len == 0 {
		return ''
	}
	return p.statements[0].token_literal()
}

// LetStatement represents `let <name> = <value>;`.
pub struct LetStatement {
	token Token      // the 'let' token
	value Expression // right-hand side; currently left nil by parse_let_statement
mut:
	name Identifier // the bound identifier
}

pub fn (let_stmt LetStatement) statement_node() {}

// token_literal returns the literal of the 'let' token.
pub fn (let_stmt LetStatement) token_literal() string {
	return let_stmt.token.literal
}

// print_string renders the statement as source text.
// NOTE(review): stub — always returns an empty string for now.
pub fn (let_stmt LetStatement) print_string() string {
	return ''
}

// Identifier is an expression node holding a variable name.
// NOTE(review): no print_string method is defined, so Identifier does
// not satisfy the Node/Expression interfaces as written — confirm intent.
pub struct Identifier {
	token Token  // the identifier token
	value string // the name itself
}

pub fn (ident Identifier) expression_node() {}

// token_literal returns the literal of the identifier token.
pub fn (ident Identifier) token_literal() string {
	return ident.token.literal
}

// ------------------------ ast end --------------------------------

// ------------------------ parser start --------------------------------
// Parser consumes tokens from the lexer with one token of lookahead.
pub struct Parser {
mut:
	l          Lexer // token source
	peek_token Token // lookahead token
	cur_token  Token // token currently under examination
}

// Parser.new builds a parser over the given lexer. next_token is
// called twice so that both cur_token and peek_token are populated
// before parsing starts.
pub fn Parser.new(l Lexer) Parser {
	mut p := Parser{
		l: l
	}

	p.next_token()
	p.next_token()

	return p
}

// next_token slides the one-token window forward: the lookahead
// becomes current, and a fresh token is pulled from the lexer.
fn (mut p Parser) next_token() {
	p.cur_token = p.peek_token
	p.peek_token = p.l.next_token()
}

// parse_program builds the AST root by parsing statements until EOF.
// Statements that parse_statement could not handle come back as nil
// and are skipped.
// Fix: the original used `statements.prepend(stmt)`, which stored the
// statements in REVERSE source order; append (`<<`) preserves order.
fn (mut p Parser) parse_program() Program {
	mut program := Program{}
	program.statements = []Statement{}

	for p.cur_token.kind != TokenKind.eof {
		stmt := p.parse_statement()
		if !isnil(stmt) {
			program.statements << stmt
		}
		p.next_token()
	}

	return program
}

// parse_statement dispatches on the current token kind.
// For unsupported kinds it returns an unsafe nil Statement; callers
// must isnil-check the result before using it (see parse_program).
fn (mut p Parser) parse_statement() Statement {
	return match p.cur_token.kind {
		.let {
			p.parse_let_statement()
		}
		else {
			// nil sentinel for "no statement parsed"
			unsafe { nil }
		}
	}
}

// parse_let_statement parses `let <identifier> = <expression>;`.
// The expression itself is not parsed yet: tokens are consumed up to
// the terminating semicolon and `value` stays nil.
fn (mut p Parser) parse_let_statement() LetStatement {
	mut stmt := LetStatement{
		token: p.cur_token
		value: unsafe { nil }
	}

	if !p.expect_peek(TokenKind.identifier) {
		// NOTE(review): this `unsafe { nil }` is a no-op expression, not
		// a return — parsing silently continues when the identifier is
		// missing. Consider returning an error/option instead.
		unsafe { nil }
	}

	stmt.name = Identifier{
		token: p.cur_token
		value: p.cur_token.literal
	}

	if !p.expect_peek(TokenKind.assign) {
		// NOTE(review): no-op as above; a missing '=' is not reported.
		unsafe { nil }
	}

	// Skip the (unparsed) expression until the statement terminator.
	// NOTE(review): loops forever if the semicolon is missing — the
	// loop should probably also stop at TokenKind.eof.
	for !p.cur_token_is(TokenKind.semicolon) {
		p.next_token()
	}

	return stmt
}

// cur_token_is reports whether the current token has the given kind.
fn (p Parser) cur_token_is(kind TokenKind) bool {
	return p.cur_token.kind == kind
}

// peek_token_is reports whether the lookahead token has the given kind.
fn (p Parser) peek_token_is(kind TokenKind) bool {
	return p.peek_token.kind == kind
}

// expect_peek advances the parser only when the lookahead token
// matches `kind`, and reports whether it did.
fn (mut p Parser) expect_peek(kind TokenKind) bool {
	if !p.peek_token_is(kind) {
		return false
	}
	p.next_token()
	return true
}

// ------------------------ parser end --------------------------------

/*
pub enum StatementNode {
	let
}

fn (node StatementNode) token_literal() string {
	return match node {
		.let { '' }
	}
}

fn (node StatementNode) print_string() string {
	return match node {
		.let { '' }
	}
}

pub enum ExpressionNode {
	identifier
}

fn (node ExpressionNode) token_literal() string {
	return match node {
		.identifier { '' }
	}
}

fn (node ExpressionNode) print_string() string {
	return match node {
		.identifier { '' }
	}
}

pub struct Program {
	statements []StatementNode
}

pub fn (p Program) token_literal() string {
	return if p.statements.len > 0 {
		match p.statements[0] {
			.let { '' }
		}
	} else {
		return ''
	}
}
*/

// Entry point: tokenize a sample input and print every token until EOF.
fn main() {
	input := 'fn let'
	mut lexer := Lexer.new(input)

	for {
		token := lexer.next_token()
		println(token)
		if token.kind == TokenKind.eof {
			break
		}
	}
}

// v init .
// v init xxx
