package token

// Tokenizer is the token context of parser. It walks the input as a
// rune slice, tracking both the absolute cursor offset and the
// human-readable row/column position for diagnostics.
type Tokenizer struct {
	text   []rune // full input, decoded to runes once up front
	cursor int    // index into text of the next unread rune
	pos    struct {
		row int // zero-based line number of the cursor
		col int // zero-based column (in runes) within the current line
	}
}

// NewTokenizer creates a parser context positioned at the start of text.
func NewTokenizer(text string) *Tokenizer {
	t := new(Tokenizer)
	t.text = []rune(text)
	return t
}

// Copy returns an independent snapshot of the current tokenizer state.
// The underlying text slice is shared, which is safe because it is
// never mutated; only cursor and position are per-copy.
func (c *Tokenizer) Copy() *Tokenizer {
	dup := *c
	return &dup
}

// HasNext reports whether any unread text remains.
func (c *Tokenizer) HasNext() bool {
	return len(c.text) > c.cursor
}

// Next advances the cursor by one rune, updating the row/column
// position. Calling Next at the end of input is a no-op. The receiver
// is returned to allow chaining.
func (c *Tokenizer) Next() *Tokenizer {
	if c.cursor >= len(c.text) {
		return c
	}
	ch := c.text[c.cursor]
	c.cursor++
	if ch == '\n' {
		// A newline moves us to the start of the following row.
		c.pos.row++
		c.pos.col = 0
		return c
	}
	c.pos.col++
	return c
}

// NextN consumes up to n characters and returns them as a string. The
// boolean result is true when the input ran out before all n characters
// could be read (i.e. the returned string is shorter than requested).
func (c *Tokenizer) NextN(n int) (string, bool) {
	// Guard non-positive n: the original make([]rune, n) would panic
	// for n < 0, and n == 0 trivially yields an empty result.
	if n <= 0 {
		return "", false
	}
	// BUG FIX: make([]rune, n) allocated n zero-valued runes and then
	// appended after them, so the result carried n leading NUL runes.
	// Allocate length 0 with capacity n instead.
	ret := make([]rune, 0, n)
	for n > 0 && c.HasNext() {
		ret = append(ret, c.Peek())
		c.Next()
		n--
	}
	return string(ret), n > 0
}

// NextIf consumes characters for as long as cond accepts them and
// returns the consumed run. The boolean is true when the scan stopped
// on a non-matching character (input remains), false when the input
// was exhausted.
func (c *Tokenizer) NextIf(cond func(ch rune) bool) (string, bool) {
	var taken []rune
	for c.HasNext() && cond(c.Peek()) {
		taken = append(taken, c.Peek())
		c.Next()
	}
	// If anything is left unread, we stopped because cond rejected it.
	return string(taken), c.HasNext()
}

// AssertNext consumes text if it appears verbatim at the cursor,
// reporting whether the match (and the consumption) happened.
func (c *Tokenizer) AssertNext(text string) bool {
	// BUG FIX: len(text) counts bytes, but PeekN/NextN count runes
	// (they index the []rune buffer). For multi-byte UTF-8 input the
	// byte count over-peeks and the match falsely fails. Count runes.
	textLen := len([]rune(text))
	if c.PeekN(textLen) != text {
		return false
	}
	c.NextN(textLen)
	return true
}

// SkipIf discards characters until one fails the given condition or
// the input is exhausted.
func (c *Tokenizer) SkipIf(cond func(ch rune) bool) {
	for c.HasNext() && cond(c.Peek()) {
		c.Next()
	}
}

// Peek returns the rune under the cursor without consuming it, or 0
// when the input is exhausted.
func (c *Tokenizer) Peek() rune {
	if !c.HasNext() {
		return 0
	}
	return c.text[c.cursor]
}

// PeekN returns up to n characters starting at the cursor, without
// consuming them. A negative n peeks backwards, returning up to |n|
// characters immediately preceding the cursor. Portions of the window
// that fall outside the text are clipped.
func (c *Tokenizer) PeekN(n int) string {
	start := c.cursor
	length := n
	if n < 0 {
		// BUG FIX: the look-behind window is [cursor+n, cursor), but
		// the original computed start = cursor - n, which for negative
		// n moved the window *forward* past the cursor.
		start += n
		length = -length
		if start < 0 {
			// Clip the window at the beginning of the text; the
			// original never clamped and would panic on slicing.
			length += start
			start = 0
		}
	}
	textLen := len(c.text)
	if start >= textLen || length <= 0 {
		return ""
	}
	if start+length > textLen {
		length = textLen - start
	}
	return string(c.text[start : start+length])
}

// ParseFunc is the generic signature of a parsing step: it consumes
// input from the tokenizer and produces a value of type E or an error.
type ParseFunc[E any] func(ctx *Tokenizer) (E, error)

// TryParse attempts each parse function in order against a copy of the
// tokenizer state (so a failed attempt consumes nothing). The first
// success commits its state back into ctx and is returned; if every
// attempt fails, the last error is returned and ctx is untouched.
func TryParse[E any](ctx *Tokenizer, parseFuncs ...ParseFunc[E]) (expr E, err error) {
	for _, parse := range parseFuncs {
		trial := ctx.Copy()
		if expr, err = parse(trial); err == nil {
			// Commit the speculative advance only on success.
			*ctx = *trial
			return
		}
	}
	return
}
