package core

import (
	"reflect"
	"testing"
)

// TestLexer001 exercises the pull-style API: it creates a Lexer per input
// and calls NextToken repeatedly, comparing each produced token (type,
// literal, line, column) against the expected sequence, including the
// terminating EOF token.
func TestLexer001(t *testing.T) {
	tests := []struct {
		input string
		want  []Token
	}{
		{
			"()",
			[]Token{
				{LPAREN, "(", 1, 1},
				{RPAREN, ")", 1, 2},
				{EOF, "", 0, 0},
			},
		},
		// Test symbols
		{
			"hello",
			[]Token{
				{SYMBOL, "hello", 1, 5},
				{EOF, "", 0, 0},
			},
		},
		{
			"+ ",
			[]Token{
				{SYMBOL, "+", 1, 1},
				{EOF, "", 0, 0},
			},
		},
		// Test symbols with special characters
		{
			"+ - * / = <>",
			[]Token{
				{SYMBOL, "+", 1, 1},
				{SYMBOL, "-", 1, 3},
				{SYMBOL, "*", 1, 5},
				{SYMBOL, "/", 1, 7},
				{SYMBOL, "=", 1, 9},
				{SYMBOL, "<>", 1, 12},
				{EOF, "", 0, 0},
			},
		},
		// Test nested expressions
		{
			"(hello world)",
			[]Token{
				{LPAREN, "(", 1, 1},
				{SYMBOL, "hello", 1, 6},
				{SYMBOL, "world", 1, 12},
				{RPAREN, ")", 1, 13},
				{EOF, "", 0, 0},
			},
		},
		// Test boolean literals
		{
			"#t #f",
			[]Token{
				{BOOLEAN, "#t", 1, 2},
				{BOOLEAN, "#f", 1, 5}, // Note: There seems to be a bug in lexer implementation
				{EOF, "", 0, 0},
			},
		},
		// Test with whitespace
		{
			"( hello )",
			[]Token{
				{LPAREN, "(", 1, 1},
				{SYMBOL, "hello", 1, 7},
				{RPAREN, ")", 1, 9},
				{EOF, "", 0, 0},
			},
		},
		// Test newlines and positions
		{
			"(hello\n world)",
			[]Token{
				{LPAREN, "(", 1, 1},
				{SYMBOL, "hello", 1, 6},
				{SYMBOL, "world", 2, 6},
				{RPAREN, ")", 2, 7},
				{EOF, "", 0, 0},
			},
		},
		{
			"#",
			[]Token{
				{ILLEGAL, "#", 1, 1},
				{EOF, "", 0, 0},
			},
		},
		{
			"##",
			[]Token{
				{ILLEGAL, "#", 1, 1},
				{ILLEGAL, "#", 1, 2},
				{EOF, "", 0, 0},
			},
		},
		{
			"#A",
			[]Token{
				{ILLEGAL, "#", 1, 1},
				{SYMBOL, "A", 1, 2},
				{EOF, "", 0, 0},
			},
		},
	}
	for _, tt := range tests {
		l := NewLexer(tt.input)
		for _, want := range tt.want {
			got := l.NextToken()
			// Idiomatic boolean check instead of comparing against `true`
			// (flagged by staticcheck S1002).
			if !reflect.DeepEqual(got, want) {
				t.Errorf("input %q: got: %v, want: %v", tt.input, got, want)
			}
		}
	}
}

// TestLexer002 exercises the batch API: ReadTokens must return the full
// token slice (including the trailing EOF token) for each input, matching
// the expected sequence element by element.
func TestLexer002(t *testing.T) {
	tests := []struct {
		input string
		wants []Token
	}{
		{
			"()",
			[]Token{
				{LPAREN, "(", 1, 1},
				{RPAREN, ")", 1, 2},
				{EOF, "", 0, 0},
			},
		},
		// Test symbols
		{
			"hello",
			[]Token{
				{SYMBOL, "hello", 1, 5},
				{EOF, "", 0, 0},
			},
		},
		{
			"+ ",
			[]Token{
				{SYMBOL, "+", 1, 1},
				{EOF, "", 0, 0},
			},
		},
		// Test symbols with special characters
		{
			"+ - * / = <>",
			[]Token{
				{SYMBOL, "+", 1, 1},
				{SYMBOL, "-", 1, 3},
				{SYMBOL, "*", 1, 5},
				{SYMBOL, "/", 1, 7},
				{SYMBOL, "=", 1, 9},
				{SYMBOL, "<>", 1, 12},
				{EOF, "", 0, 0},
			},
		},
		// Test nested expressions
		{
			"(hello world)",
			[]Token{
				{LPAREN, "(", 1, 1},
				{SYMBOL, "hello", 1, 6},
				{SYMBOL, "world", 1, 12},
				{RPAREN, ")", 1, 13},
				{EOF, "", 0, 0},
			},
		},
		// Test boolean literals
		{
			"#t #f",
			[]Token{
				{BOOLEAN, "#t", 1, 2},
				{BOOLEAN, "#f", 1, 5}, // Note: There seems to be a bug in lexer implementation
				{EOF, "", 0, 0},
			},
		},
		// Test with whitespace
		{
			"( hello )",
			[]Token{
				{LPAREN, "(", 1, 1},
				{SYMBOL, "hello", 1, 7},
				{RPAREN, ")", 1, 9},
				{EOF, "", 0, 0},
			},
		},
		// Test newlines and positions
		{
			"(hello\n world)",
			[]Token{
				{LPAREN, "(", 1, 1},
				{SYMBOL, "hello", 1, 6},
				{SYMBOL, "world", 2, 6},
				{RPAREN, ")", 2, 7},
				{EOF, "", 0, 0},
			},
		},
		{
			"#",
			[]Token{
				{ILLEGAL, "#", 1, 1},
				{EOF, "", 0, 0},
			},
		},
		{
			"##",
			[]Token{
				{ILLEGAL, "#", 1, 1},
				{ILLEGAL, "#", 1, 2},
				{EOF, "", 0, 0},
			},
		},
		{
			"#A",
			[]Token{
				{ILLEGAL, "#", 1, 1},
				{SYMBOL, "A", 1, 2},
				{EOF, "", 0, 0},
			},
		},
	}
	for _, tt := range tests {
		l := NewLexer(tt.input)
		gots := l.ReadTokens()
		if len(gots) != len(tt.wants) {
			t.Errorf("input %q: token count mismatch: got %d (%v), want %d (%v)",
				tt.input, len(gots), gots, len(tt.wants), tt.wants)
			// Skip the element-wise comparison: indexing gots[i] below
			// would panic with index-out-of-range if gots is shorter
			// than wants.
			continue
		}
		for i, want := range tt.wants {
			// Idiomatic boolean check instead of comparing against `true`
			// (flagged by staticcheck S1002).
			if !reflect.DeepEqual(gots[i], want) {
				t.Errorf("input %q: got: %v, want: %v", tt.input, gots[i], want)
			}
		}
	}
}
