package lexer

import (
	"interpreter/token"
	"testing"
)

// TestReadChar001 verifies that the lexer advances rune-by-rune (not
// byte-by-byte) over multibyte UTF-8 input: "编123译器" is six runes,
// so readChar should fire six times before ch reaches the 0 sentinel.
func TestReadChar001(t *testing.T) {
	lex := NewLexer([]byte("编123译器"))
	count := 0
	for ; lex.ch != 0; lex.readChar() {
		count++
	}
	if count != 6 {
		t.Errorf("want %v, got %v", 6, count)
	}
}

// TestInteger001 checks that a lone integer literal is tokenized as a
// single INTEGER token and that the stream then ends with EOF.
func TestInteger001(t *testing.T) {
	lex := NewLexer([]byte("123"))

	got := lex.NextToken()
	if got.Type != token.INTEGER {
		t.Errorf("want INTEGER, got %v", got.Type)
	}
	if got.Literal != "123" {
		t.Errorf("want 123, got %v", got.Literal)
	}

	got = lex.NextToken()
	if got.Type != token.EOF {
		t.Errorf("want EOF %v, got %v", token.EOF, got.Type)
	}
}

// TestInteger002 checks that whitespace separates two integer literals
// into two INTEGER tokens, and that the stream terminates with EOF.
func TestInteger002(t *testing.T) {
	input := "123 45"
	l := NewLexer([]byte(input))

	tok := l.NextToken()
	if tok.Type != token.INTEGER {
		t.Errorf("want INTEGER, got %v", tok.Type)
	}
	if tok.Literal != "123" {
		t.Errorf("want 123, got %v", tok.Literal)
	}

	tok = l.NextToken()
	if tok.Type != token.INTEGER {
		t.Errorf("want INTEGER, got %v", tok.Type)
	}
	if tok.Literal != "45" {
		t.Errorf("want 45, got %v", tok.Literal)
	}

	// Consistency with TestInteger001: ensure no spurious tokens remain
	// after the final literal.
	tok = l.NextToken()
	if tok.Type != token.EOF {
		t.Errorf("want EOF %v, got %v", token.EOF, tok.Type)
	}
}

func TestOperator001(t *testing.T) {
	input := `+-*/`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.PLUS, "+"},
		{token.MINUS, "-"},
		{token.ASTERISK, "*"},
		{token.SLASH, "/"},
		{token.EOF, ""},
	}

	l := NewLexer([]byte(input))

	for i, tt := range tests {
		tok := l.NextToken()

		if tok.Type != tt.expectedType {
			t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q",
				i, tt.expectedType, tok.Type)
		}

		if tok.Literal != tt.expectedLiteral {
			t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q",
				i, tt.expectedLiteral, tok.Literal)
		}
	}
}

// TestParenthesis001 verifies that "(" and ")" lex to LPAREN and RPAREN
// respectively, with EOF closing the stream.
func TestParenthesis001(t *testing.T) {
	expected := []struct {
		typ     token.TokenType
		literal string
	}{
		{token.LPAREN, "("},
		{token.RPAREN, ")"},
		{token.EOF, ""},
	}

	lex := NewLexer([]byte(`()`))

	for i, want := range expected {
		got := lex.NextToken()

		if got.Type != want.typ {
			t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q",
				i, want.typ, got.Type)
		}

		if got.Literal != want.literal {
			t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q",
				i, want.literal, got.Literal)
		}
	}
}

func TestNextToken001(t *testing.T) {
	input := `(+ 1 345) (- 123 4)`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.LPAREN, "("},
		{token.PLUS, "+"},
		{token.INTEGER, "1"},
		{token.INTEGER, "345"},
		{token.RPAREN, ")"},
		{token.LPAREN, "("},
		{token.MINUS, "-"},
		{token.INTEGER, "123"},
		{token.INTEGER, "4"},
		{token.RPAREN, ")"},
		{token.EOF, ""},
	}

	l := NewLexer([]byte(input))

	for i, tt := range tests {
		tok := l.NextToken()

		if tok.Type != tt.expectedType {
			t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q",
				i, tt.expectedType, tok.Type)
		}

		if tok.Literal != tt.expectedLiteral {
			t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q",
				i, tt.expectedLiteral, tok.Literal)
		}
	}
}

func TestNextToken002(t *testing.T) {
	input := `（＋ 1 3４5） （- １２３ ４）`

	tests := []struct {
		expectedType    token.TokenType
		expectedLiteral string
	}{
		{token.LPAREN, "("},
		{token.PLUS, "+"},
		{token.INTEGER, "1"},
		{token.INTEGER, "345"},
		{token.RPAREN, ")"},
		{token.LPAREN, "("},
		{token.MINUS, "-"},
		{token.INTEGER, "123"},
		{token.INTEGER, "4"},
		{token.RPAREN, ")"},
		{token.EOF, ""},
	}

	l := NewLexer([]byte(input))

	for i, tt := range tests {
		tok := l.NextToken()

		if tok.Type != tt.expectedType {
			t.Fatalf("tests[%d] - tokentype wrong. expected=%q, got=%q",
				i, tt.expectedType, tok.Type)
		}

		if tok.Literal != tt.expectedLiteral {
			t.Fatalf("tests[%d] - literal wrong. expected=%q, got=%q",
				i, tt.expectedLiteral, tok.Literal)
		}
	}
}
