package funcparser

import (
	"strings"
)

// Character classes definition. Each string enumerates every byte that
// belongs to the class; membership is tested with strings.Contains on a
// one-byte slice of the input, so the lexer is byte-oriented (ASCII only).
const (
	ignore       = " \t\n"  // whitespace: skipped between tokens
	numeric      = "0123456789." // digits and the decimal point; NOTE: a lone "." or "1.2.3" lexes as one numeric token — presumably rejected later by the parser, verify
	alpha        = "_abcdefghijklmnopqrstuvwxyz" // identifier start characters (lowercase only)
	expops       = "+-"     // expression-level (additive) operators
	termops      = "*/%"    // term-level (multiplicative) operators
	parenthesis  = "()"     // grouping
	ops          = expops + termops + parenthesis // every single-char operator token
	alphanumeric = alpha + numeric // identifier continuation characters
)

// tokenType classifies a lexeme produced by the tokenizer.
type tokenType int

// Token kinds. Anchoring iota to tokenType makes the constants typed
// tokenType values instead of untyped ints, so they cannot be silently
// mixed with unrelated integers. (The names are kept as-is for
// compatibility: `err` and the exported `EOF` are referenced elsewhere
// in the package.)
const (
	err     tokenType = iota // unrecognized character
	id                       // identifier: alpha followed by alphanumerics
	integer                  // numeric lexeme without a decimal point
	float                    // numeric lexeme containing a decimal point
	op                       // single-character operator or parenthesis
	EOF                      // end of input; emitted exactly once, after the last lexeme
)

// Token is a single lexeme produced by the tokenizer: the matched text,
// where it starts in the input, and its classification.
type Token struct {
	text  string    // the lexeme exactly as it appears in the input
	pos   int       // byte offset of the lexeme's first character
	_type tokenType // classification (err, id, integer, float, op, EOF)
}

// TypeIs reports whether the token's classification equals typ.
func (t Token) TypeIs(typ tokenType) bool {
	return typ == t._type
}

// TextIs reports whether the token's text equals txt.
func (t Token) TextIs(txt string) bool {
	return txt == t.text
}

// Equals reports whether the token matches both the given classification
// and the given text.
func (t Token) Equals(typ tokenType, txt string) bool {
	return t._type == typ && t.text == txt
}

// Tokenize lexes text in a background goroutine and returns a receive-only
// channel of the resulting tokens. The stream always ends with an EOF
// token; the channel itself is never closed, so the consumer must read
// until it sees EOF or the goroutine will block forever on its next send.
func Tokenize(text string) <-chan Token {
	out := make(chan Token)
	go tokenize(text, out)
	return out
}

// tokenize scans text from left to right and sends one Token per lexeme
// on c, terminating the stream with an EOF token positioned at len(text).
// Unrecognized characters are emitted as single-character err tokens
// rather than stopping the scan. Intended to run as a goroutine; c is
// never closed, so the receiver must consume through EOF.
func tokenize(text string, c chan<- Token) {
	// span returns the index of the last byte of the maximal run,
	// starting at from, whose bytes all belong to the accepted class.
	// The byte at from is assumed to be in the class already.
	span := func(s string, from int, accepted string) int {
		end := from
		for end < len(s) && strings.Contains(accepted, s[end:end+1]) {
			end++
		}
		return end - 1
	}

	for i := 0; i < len(text); i++ {
		ch := text[i : i+1]
		switch {
		case strings.Contains(ignore, ch):
			// Consume a whole whitespace run in one step.
			i = span(text, i, ignore)
		case strings.Contains(ops, ch):
			// Every operator is a single character.
			c <- Token{ch, i, op}
		case strings.Contains(numeric, ch):
			end := span(text, i, numeric)
			lexeme := text[i : end+1]
			// A decimal point anywhere in the run makes it a float.
			if strings.Contains(lexeme, ".") {
				c <- Token{lexeme, i, float}
			} else {
				c <- Token{lexeme, i, integer}
			}
			i = end
		case strings.Contains(alpha, ch):
			// Identifiers start alphabetic and may continue alphanumeric.
			end := span(text, i, alphanumeric)
			c <- Token{text[i : end+1], i, id}
			i = end
		default:
			c <- Token{ch, i, err}
		}
	}
	c <- Token{"", len(text), EOF}
}
