package lox

import (
	"errors"
	"reflect"
	"testing"
)

// TestScan001 drives the scanner through a table of valid inputs and
// compares the full token stream (type, lexeme, literal, and line number)
// against the expected tokens, including the trailing EOF token.
func TestScan001(t *testing.T) {
	tests := []struct {
		input string
		want  []*Token
	}{
		{"+", []*Token{{tokentype: PLUS, lexeme: "+", line: 1}, {tokentype: EOF, line: 1}}},
		{`//this is a comment`, []*Token{{tokentype: EOF, line: 1}}},
		{"//1. this is a test", []*Token{{tokentype: EOF, line: 1}}},
		{"//2. this is a test\n", []*Token{{tokentype: EOF, line: 2}}},
		{"//3. this is a test\n+", []*Token{{tokentype: PLUS, lexeme: "+", line: 2}, {tokentype: EOF, line: 2}}},
		{"//4. this is a test\n+//this is a test", []*Token{{tokentype: PLUS, lexeme: "+", line: 2}, {tokentype: EOF, line: 2}}},
		{"//5. this is a test\n+//this is a test\n", []*Token{{tokentype: PLUS, lexeme: "+", line: 2}, {tokentype: EOF, line: 3}}},
		{"//6. this is a test\n+//this is a test\n+", []*Token{{tokentype: PLUS, lexeme: "+", line: 2}, {tokentype: PLUS, lexeme: "+", line: 3}, {tokentype: EOF, line: 3}}},

		// Single character tokens
		{"(", []*Token{{tokentype: LEFT_PAREN, lexeme: "(", line: 1}, {tokentype: EOF, line: 1}}},
		{")", []*Token{{tokentype: RIGHT_PAREN, lexeme: ")", line: 1}, {tokentype: EOF, line: 1}}},
		{"{", []*Token{{tokentype: LEFT_BRACE, lexeme: "{", line: 1}, {tokentype: EOF, line: 1}}},
		{"}", []*Token{{tokentype: RIGHT_BRACE, lexeme: "}", line: 1}, {tokentype: EOF, line: 1}}},
		{",", []*Token{{tokentype: COMMA, lexeme: ",", line: 1}, {tokentype: EOF, line: 1}}},
		{".", []*Token{{tokentype: DOT, lexeme: ".", line: 1}, {tokentype: EOF, line: 1}}},
		{"-", []*Token{{tokentype: MINUS, lexeme: "-", line: 1}, {tokentype: EOF, line: 1}}},
		{";", []*Token{{tokentype: SEMICOLON, lexeme: ";", line: 1}, {tokentype: EOF, line: 1}}},
		{"/", []*Token{{tokentype: SLASH, lexeme: "/", line: 1}, {tokentype: EOF, line: 1}}},
		{"*", []*Token{{tokentype: STAR, lexeme: "*", line: 1}, {tokentype: EOF, line: 1}}},

		// One or two character tokens
		{"!", []*Token{{tokentype: BANG, lexeme: "!", line: 1}, {tokentype: EOF, line: 1}}},
		{"!=", []*Token{{tokentype: BANG_EQUAL, lexeme: "!=", line: 1}, {tokentype: EOF, line: 1}}},
		{"=", []*Token{{tokentype: EQUAL, lexeme: "=", line: 1}, {tokentype: EOF, line: 1}}},
		{"==", []*Token{{tokentype: EQUAL_EQUAL, lexeme: "==", line: 1}, {tokentype: EOF, line: 1}}},
		{"<", []*Token{{tokentype: LESS, lexeme: "<", line: 1}, {tokentype: EOF, line: 1}}},
		{"<=", []*Token{{tokentype: LESS_EQUAL, lexeme: "<=", line: 1}, {tokentype: EOF, line: 1}}},
		{">", []*Token{{tokentype: GREATER, lexeme: ">", line: 1}, {tokentype: EOF, line: 1}}},
		{">=", []*Token{{tokentype: GREATER_EQUAL, lexeme: ">=", line: 1}, {tokentype: EOF, line: 1}}},

		// Whitespace handling
		{"   \t\r\n", []*Token{{tokentype: EOF, line: 2}}},

		// Mixed tokens with whitespace
		{"( )", []*Token{{tokentype: LEFT_PAREN, lexeme: "(", line: 1}, {tokentype: RIGHT_PAREN, lexeme: ")", line: 1}, {tokentype: EOF, line: 1}}},
		{"! =", []*Token{{tokentype: BANG, lexeme: "!", line: 1}, {tokentype: EQUAL, lexeme: "=", line: 1}, {tokentype: EOF, line: 1}}},

		// Line counting
		{"\n\n+", []*Token{{tokentype: PLUS, lexeme: "+", line: 3}, {tokentype: EOF, line: 3}}},
		{`"hello"`, []*Token{{tokentype: STRING, lexeme: "hello", literal: "hello", line: 1}, {tokentype: EOF, line: 1}}},

		// number
		{"123", []*Token{{tokentype: NUMBER, lexeme: "123", literal: float64(123), line: 1}, {tokentype: EOF, line: 1}}},
		{"123.899", []*Token{{tokentype: NUMBER, lexeme: "123.899", literal: float64(123.899), line: 1}, {tokentype: EOF, line: 1}}},
		{"0.899", []*Token{{tokentype: NUMBER, lexeme: "0.899", literal: float64(0.899), line: 1}, {tokentype: EOF, line: 1}}},
		{"0.0", []*Token{{tokentype: NUMBER, lexeme: "0.0", literal: float64(0.0), line: 1}, {tokentype: EOF, line: 1}}},
		{"10.0", []*Token{{tokentype: NUMBER, lexeme: "10.0", literal: float64(10.0), line: 1}, {tokentype: EOF, line: 1}}},

		// Identifiers
		{"foo", []*Token{{tokentype: IDENTIFIER, lexeme: "foo", literal: "foo", line: 1}, {tokentype: EOF, line: 1}}},
		{"_", []*Token{{tokentype: IDENTIFIER, lexeme: "_", literal: "_", line: 1}, {tokentype: EOF, line: 1}}},
		{"_bar", []*Token{{tokentype: IDENTIFIER, lexeme: "_bar", literal: "_bar", line: 1}, {tokentype: EOF, line: 1}}},
		{"FooBar", []*Token{{tokentype: IDENTIFIER, lexeme: "FooBar", literal: "FooBar", line: 1}, {tokentype: EOF, line: 1}}},
		{"foo123", []*Token{{tokentype: IDENTIFIER, lexeme: "foo123", literal: "foo123", line: 1}, {tokentype: EOF, line: 1}}},
		{"foo_123_bar", []*Token{{tokentype: IDENTIFIER, lexeme: "foo_123_bar", literal: "foo_123_bar", line: 1}, {tokentype: EOF, line: 1}}},

		// Keywords
		{"and", []*Token{{tokentype: AND, lexeme: "and", literal: "and", line: 1}, {tokentype: EOF, line: 1}}},
		{"class", []*Token{{tokentype: CLASS, lexeme: "class", literal: "class", line: 1}, {tokentype: EOF, line: 1}}},
		{"else", []*Token{{tokentype: ELSE, lexeme: "else", literal: "else", line: 1}, {tokentype: EOF, line: 1}}},
		{"false", []*Token{{tokentype: FALSE, lexeme: "false", literal: "false", line: 1}, {tokentype: EOF, line: 1}}},
		{"for", []*Token{{tokentype: FOR, lexeme: "for", literal: "for", line: 1}, {tokentype: EOF, line: 1}}},
		{"fun", []*Token{{tokentype: FUN, lexeme: "fun", literal: "fun", line: 1}, {tokentype: EOF, line: 1}}},
		{"if", []*Token{{tokentype: IF, lexeme: "if", literal: "if", line: 1}, {tokentype: EOF, line: 1}}},
		{"nil", []*Token{{tokentype: NIL, lexeme: "nil", literal: "nil", line: 1}, {tokentype: EOF, line: 1}}},
		{"or", []*Token{{tokentype: OR, lexeme: "or", literal: "or", line: 1}, {tokentype: EOF, line: 1}}},
		{"print", []*Token{{tokentype: PRINT, lexeme: "print", literal: "print", line: 1}, {tokentype: EOF, line: 1}}},
		{"return", []*Token{{tokentype: RETURN, lexeme: "return", literal: "return", line: 1}, {tokentype: EOF, line: 1}}},
		{"super", []*Token{{tokentype: SUPER, lexeme: "super", literal: "super", line: 1}, {tokentype: EOF, line: 1}}},
		{"this", []*Token{{tokentype: THIS, lexeme: "this", literal: "this", line: 1}, {tokentype: EOF, line: 1}}},
		{"true", []*Token{{tokentype: TRUE, lexeme: "true", literal: "true", line: 1}, {tokentype: EOF, line: 1}}},
		{"var", []*Token{{tokentype: VAR, lexeme: "var", literal: "var", line: 1}, {tokentype: EOF, line: 1}}},
		{"while", []*Token{{tokentype: WHILE, lexeme: "while", literal: "while", line: 1}, {tokentype: EOF, line: 1}}},

		// Mixed identifiers, keywords and other tokens
		{"foo bar", []*Token{{tokentype: IDENTIFIER, lexeme: "foo", literal: "foo", line: 1}, {tokentype: IDENTIFIER, lexeme: "bar", literal: "bar", line: 1}, {tokentype: EOF, line: 1}}},
		{"if foo else", []*Token{{tokentype: IF, lexeme: "if", literal: "if", line: 1}, {tokentype: IDENTIFIER, lexeme: "foo", literal: "foo", line: 1}, {tokentype: ELSE, lexeme: "else", literal: "else", line: 1}, {tokentype: EOF, line: 1}}},
	}

	for _, tt := range tests {
		scanner := NewScanner(tt.input)
		tokens, err := scanner.ScanTokens()
		// Report an unexpected error separately from a token mismatch so
		// the failure message shows the actual cause; %q makes embedded
		// newlines and whitespace in the input visible.
		if err != nil {
			t.Errorf("scan %q: unexpected error: %v", tt.input, err)
			continue
		}
		if !reflect.DeepEqual(tokens, tt.want) {
			t.Errorf("scan %q got %v, want %v", tt.input, tokens, tt.want)
		}
	}
}

// TestScan002 verifies that malformed inputs produce the expected scan
// error and no token slice.
func TestScan002(t *testing.T) {
	tests := []struct {
		input string
		want  error
	}{
		{`"hello`, errors.New("unterminated string in line 1")},
	}

	for _, tt := range tests {
		scanner := NewScanner(tt.input)
		tokens, err := scanner.ScanTokens()
		if tokens != nil {
			t.Errorf("scan %q: expected nil tokens, got %v", tt.input, tokens)
		}
		// Guard against a nil error before calling err.Error(): the
		// original check would panic with a nil-pointer dereference if
		// the scanner ever stopped returning an error here.
		if err == nil || err.Error() != tt.want.Error() {
			t.Errorf("scan %q got error `%v`, want `%v`", tt.input, err, tt.want)
		}
	}
}

// TestScan003 checks a single number input token-by-token: exactly one
// NUMBER token (with a float64 literal) followed by EOF.
func TestScan003(t *testing.T) {
	input := "123"
	scanner := NewScanner(input)
	tokens, err := scanner.ScanTokens()
	if err != nil {
		t.Fatalf("scan %q: unexpected error: %v", input, err)
	}
	// Expect the NUMBER token plus the trailing EOF token.
	if len(tokens) != 2 {
		t.Fatalf("scan %q: got %d tokens (%v), want 2", input, len(tokens), tokens)
	}
	got := tokens[0]
	want := &Token{tokentype: NUMBER, lexeme: "123", literal: float64(123.0), line: 1}
	if !reflect.DeepEqual(got, want) {
		// Fixed typo in the failure message ("wnat" -> "want").
		t.Errorf("scan %q got `%v` want `%v`", input, got, want)
	}
}
