package boolexpr

import (
	"fmt"
	"regexp"
	"strings"
	"unicode"
)

// ---- Tokenizer ----

// TokenType identifies the lexical class of a Token.
type TokenType int

const (
	TokenLiteral TokenType = iota // bare word, matched by substring containment
	TokenAnd                      // keyword AND (case-insensitive)
	TokenOr                       // keyword OR (case-insensitive)
	TokenNot                      // keyword NOT (case-insensitive)
	TokenLParen                   // "("
	TokenRParen                   // ")"
	TokenEOF                      // end-of-input sentinel
)

// Token is a single lexical unit of a boolean expression.
type Token struct {
	Type  TokenType
	Value string
}

// matchKeyword reports whether input starting at i begins with keyword kw
// (case-insensitive) followed by a word boundary: end of input, whitespace,
// or a parenthesis. The boundary check prevents literals such as "android",
// "organic", or "nothing" from being misread as operators.
func matchKeyword(input string, i int, kw string) bool {
	end := i + len(kw)
	if end > len(input) || !strings.EqualFold(input[i:end], kw) {
		return false
	}
	if end == len(input) {
		return true
	}
	next := input[end]
	return unicode.IsSpace(rune(next)) || next == '(' || next == ')'
}

// tokenize splits input into operator, parenthesis, and literal tokens,
// appending a trailing TokenEOF. Operator keywords are matched
// case-insensitively and only at word boundaries. The error result is
// currently always nil; it is kept for interface stability.
//
// NOTE(review): scanning is byte-wise, so non-ASCII whitespace inside a
// multi-byte UTF-8 sequence is not recognized — assumed acceptable for the
// expected ASCII-ish inputs; confirm with callers.
func tokenize(input string) ([]Token, error) {
	var tokens []Token
	i := 0
	for i < len(input) {
		switch {
		case unicode.IsSpace(rune(input[i])):
			i++
		case matchKeyword(input, i, "and"):
			tokens = append(tokens, Token{Type: TokenAnd, Value: "AND"})
			i += 3
		case matchKeyword(input, i, "or"):
			tokens = append(tokens, Token{Type: TokenOr, Value: "OR"})
			i += 2
		case matchKeyword(input, i, "not"):
			tokens = append(tokens, Token{Type: TokenNot, Value: "NOT"})
			i += 3
		case input[i] == '(':
			tokens = append(tokens, Token{Type: TokenLParen, Value: "("})
			i++
		case input[i] == ')':
			tokens = append(tokens, Token{Type: TokenRParen, Value: ")"})
			i++
		default:
			// Literal: consume up to the next whitespace or parenthesis.
			start := i
			for i < len(input) && !unicode.IsSpace(rune(input[i])) && input[i] != '(' && input[i] != ')' {
				i++
			}
			tokens = append(tokens, Token{Type: TokenLiteral, Value: input[start:i]})
		}
	}
	tokens = append(tokens, Token{Type: TokenEOF})
	return tokens, nil
}

// ---- AST & Eval ----

// Expr is a node of a compiled boolean expression that can be evaluated
// against an input string.
type Expr interface {
	Eval(input string) bool
}

// Literal is a leaf expression that matches by substring containment.
type Literal struct {
	Value string
}

// Eval reports whether input contains the literal's value as a substring.
func (l *Literal) Eval(input string) bool {
	return strings.Index(input, l.Value) >= 0
}

// Not inverts the result of its child expression.
type Not struct {
	Expr Expr
}

// Eval reports whether the wrapped expression does NOT match input.
func (n *Not) Eval(input string) bool {
	if n.Expr.Eval(input) {
		return false
	}
	return true
}

// And matches only when both operands match.
type And struct {
	Left, Right Expr
}

// Eval reports whether both children match input. Right is evaluated
// only when Left matches, preserving short-circuit behavior.
func (a *And) Eval(input string) bool {
	if !a.Left.Eval(input) {
		return false
	}
	return a.Right.Eval(input)
}

// Or matches when either operand matches.
type Or struct {
	Left, Right Expr
}

// Eval reports whether at least one child matches input. Right is
// evaluated only when Left does not match, preserving short-circuit
// behavior.
func (o *Or) Eval(input string) bool {
	if o.Left.Eval(input) {
		return true
	}
	return o.Right.Eval(input)
}

// ---- Parser ----

// Parser is a recursive-descent parser over the token stream produced by
// tokenize. pos indexes the next unconsumed token; the stream always ends
// with TokenEOF, so peek/next at the end are safe.
type Parser struct {
	tokens []Token
	pos    int
}

// peek returns the next token without consuming it.
func (p *Parser) peek() Token {
	return p.tokens[p.pos]
}

// next consumes the current token and returns it, advancing the cursor.
func (p *Parser) next() Token {
	cur := p.pos
	p.pos = cur + 1
	return p.tokens[cur]
}

// parse parses a complete expression. Precedence, lowest to highest:
// OR, AND (explicit keyword or implicit adjacency), NOT, primary.
func (p *Parser) parse() Expr {
	return p.parseOr()
}

// parseOr parses a left-associative chain of OR-separated AND expressions.
func (p *Parser) parseOr() Expr {
	expr := p.parseAnd()
	for {
		if p.peek().Type != TokenOr {
			return expr
		}
		p.next() // consume OR
		expr = &Or{Left: expr, Right: p.parseAnd()}
	}
}

// parseAnd parses a left-associative chain of unary expressions joined by
// AND. The keyword is optional: two adjacent operands (a literal, "(",
// or NOT) are treated as an implicit AND.
func (p *Parser) parseAnd() Expr {
	expr := p.parseUnary()
	for {
		switch p.peek().Type {
		case TokenAnd:
			p.next() // explicit AND keyword
		case TokenLiteral, TokenLParen, TokenNot:
			// adjacent operand: implicit AND, nothing to consume
		default:
			return expr
		}
		expr = &And{Left: expr, Right: p.parseUnary()}
	}
}

// parseUnary parses zero or more NOT prefixes followed by a primary.
func (p *Parser) parseUnary() Expr {
	if p.peek().Type != TokenNot {
		return p.parsePrimary()
	}
	p.next() // consume NOT
	return &Not{Expr: p.parseUnary()}
}

// parsePrimary parses a literal or a parenthesized sub-expression.
// It panics on any other token (including EOF), matching the parser's
// panic-based error reporting.
func (p *Parser) parsePrimary() Expr {
	tok := p.next()
	if tok.Type == TokenLiteral {
		return &Literal{Value: tok.Value}
	}
	if tok.Type != TokenLParen {
		panic(fmt.Sprintf("unexpected token: %+v", tok))
	}
	inner := p.parse()
	if p.next().Type != TokenRParen {
		panic("missing closing parenthesis")
	}
	return inner
}

func CompileMatcher(expr string) func(string) bool {
	upperExpr := strings.ToUpper(expr)
	// 简单启发式判断是否为布尔表达式
	isBoolExpr := strings.Contains(upperExpr, "AND ") ||
		strings.Contains(upperExpr, "OR ") ||
		strings.Contains(upperExpr, "NOT ")

	if isBoolExpr {
		tokens, err := tokenize(expr)
		if err != nil {
			panic("invalid boolean expression")
		}
		parser := &Parser{tokens: tokens}
		ast := parser.parse()
		return func(s string) bool {
			return ast.Eval(s)
		}
	} else {
		// 作为正则处理
		re, err := regexp.Compile(expr)
		if err != nil {
			panic("invalid regex: " + err.Error())
		}
		return func(s string) bool {
			return re.MatchString(s)
		}
	}
}
