package template

import (
	"errors"
	"go-quick/interface/itemplate"
)

type (
	// Template represents a single parsed template: it holds the raw
	// source, the token stream produced by the lexer, and the parsed
	// document tree ready for execution.
	Template struct {
		// set is the owning template set; provides shared configuration/lookup.
		set itemplate.ISet

		// isTplString is true when the template came from an in-memory
		// string rather than a named file (see newTemplateString).
		isTplString bool
		// name is the display name used in error messages ("<string>" for string templates).
		name string
		// tpl is the raw template source text.
		tpl string
		// size is len(tpl) in bytes.
		size int

		// tokens is the lexer output for tpl.
		tokens []*Token
		// parser is the parser instance created by Parse.
		parser *Parser

		// level/parent/child track template nesting — presumably for
		// inheritance/extends chains; NOTE(review): confirm against parser usage.
		level  int
		parent *Template
		child  *Template
		//blocks         map[string]*NodeWrapper
		//exportedMacros map[string]*tagMacroNode
		//
		// root is the parsed document tree (set by Parse).
		root *NodeDocument
		//
		//Options *Options
		// Vars holds values exposed to the template during execution.
		Vars []any
	}
)

// newTemplateString constructs a Template from an in-memory template
// source, using "<string>" as its display name in error messages.
func newTemplateString(ts itemplate.ISet, tpl []byte) (*Template, *Error) {
	const stringTemplateName = "<string>"
	return newTemplate(ts, stringTemplateName, true, tpl)
}

// newTemplate builds a Template from raw source bytes: it stores the
// source text, lexes it into a token stream, then parses the tokens
// into the document tree. Any lexer or parser error aborts construction.
func newTemplate(ts itemplate.ISet, name string, isTplString bool, tpl []byte) (*Template, *Error) {
	source := string(tpl)

	tmpl := &Template{
		set:         ts,
		isTplString: isTplString,
		name:        name,
		tpl:         source,
		size:        len(source),
		//blocks:         make(map[string]*NodeWrapper),
		//exportedMacros: make(map[string]*tagMacroNode),
		//Options:        newOptions(),
	}

	// Copy all settings from another Options.
	//t.Options.Update(set.Options)

	// Lexing pass: convert the template source into a token sequence.
	toks, lexErr := tmpl.lex(name, source)
	if lexErr != nil {
		return nil, lexErr
	}
	tmpl.tokens = toks

	// Parsing pass: build the document tree from the tokens.
	if parseErr := tmpl.Parse(); parseErr != nil {
		return nil, parseErr
	}
	return tmpl, nil
}

// lex tokenizes input using the package Lexer. On a lexing failure it
// converts the error token emitted by the lexer into a *Error carrying
// filename, line, and column context.
func (t *Template) lex(name, input string) ([]*Token, *Error) {
	lexer := NewLexer(name, input)
	lexer.run()
	if lexer.errored {
		// The lexer reports its error as the final token. Guard against
		// an empty token slice so a misbehaving lexer cannot cause an
		// index-out-of-range panic here.
		if len(lexer.tokens) == 0 {
			return nil, &Error{
				Filename:  name,
				Sender:    "lexer",
				OrigError: errors.New("lexer reported an error but produced no tokens"),
			}
		}
		errToken := lexer.tokens[len(lexer.tokens)-1]
		return nil, &Error{
			Filename:  name,
			Line:      errToken.Line,
			Column:    errToken.Col,
			Sender:    "lexer",
			OrigError: errors.New(errToken.Val),
		}
	}
	return lexer.tokens, nil
}

// Parse runs the parser over the token stream produced by lex and, on
// success, stores the resulting document tree in t.root.
func (t *Template) Parse() *Error {
	t.parser = NewParser(t.name, t.tokens, t)
	root, parseErr := t.parser.parseDocument()
	if parseErr != nil {
		return parseErr
	}
	t.root = root
	return nil
}
