package exploits

/*
	exploits.go

	This file is used to define routing rules that blocks common exploits.

*/

import (
	_ "embed"
	"net/http"
	"regexp"

	agents "github.com/monperrus/crawler-user-agents"
)

// Detector inspects incoming HTTP requests for common exploit
// signatures and known bot/crawler user agents. It carries no state,
// so a single value can be shared freely.
type Detector struct {
}

// NewExploitDetector returns a ready-to-use Detector.
func NewExploitDetector() *Detector {
	return new(Detector)
}

// Exploit patterns are compiled once at package init rather than on
// every request: the previous code recompiled each pattern per call
// via regexp.MatchString (wasteful on this hot path) and silently
// discarded the compile error. The (?i) prefix makes every pattern
// case-insensitive; case-sensitive matching let trivially uppercased
// payloads such as "UNION SELECT" or "VIAGRA" pass undetected.
var (
	// Query-string patterns: SQL injection, file injection, common
	// script/PHP exploits, and spam keywords. A request matching any
	// one of them is blocked, so they are kept in a single list.
	queryExploitRegexps = compileExploitPatterns([]string{
		// SQL injections
		`union.*select.*\(`,
		`union.*all.*select.*`,
		`concat.*\(`,
		// File injections
		`[a-zA-Z0-9_]=http://`,
		`[a-zA-Z0-9_]=(\.\.//?)+`,
		`[a-zA-Z0-9_]=/([a-z0-9_.]//?)+`,
		// Common exploits
		`(<|%3C).*script.*(>|%3E)`,
		`GLOBALS(=|\[|\%[0-9A-Z]{0,2})`,
		`_REQUEST(=|\[|\%[0-9A-Z]{0,2})`,
		`proc/self/environ`,
		`mosConfig_[a-zA-Z_]{1,21}(=|\%3D)`,
		`base64_(en|de)code\(.*\)`,
		// Spam keywords
		`\b(ultram|unicauca|valium|viagra|vicodin|xanax|ypxaieo)\b`,
		`\b(erections|hoodia|huronriveracres|impotence|levitra|libido)\b`,
		`\b(ambien|blue\spill|cialis|cocaine|ejaculation|erectile)\b`,
		`\b(lipitor|phentermin|pro[sz]ac|sandyauer|tramadol|troyhamby)\b`,
	})

	// User-agent substrings of known abusive download/grab tools.
	userAgentExploitRegexps = compileExploitPatterns([]string{
		`Indy Library`,
		`libwww-perl`,
		`GetRight`,
		`GetWeb!`,
		`Go!Zilla`,
		`Download Demon`,
		`Go-Ahead-Got-It`,
		`TurnitinBot`,
		`GrabNet`,
	})
)

// compileExploitPatterns compiles each pattern case-insensitively.
// MustCompile panics at startup on an invalid pattern instead of the
// old behavior of silently ignoring the error at request time.
func compileExploitPatterns(patterns []string) []*regexp.Regexp {
	compiled := make([]*regexp.Regexp, 0, len(patterns))
	for _, p := range patterns {
		compiled = append(compiled, regexp.MustCompile(`(?i)`+p))
	}
	return compiled
}

// RequestContainCommonExploits reports whether the request's raw query
// string or User-Agent header matches a known attack pattern: SQL
// injection, file injection, script/PHP exploits, spam keywords, or an
// abusive download tool's user agent. Matching is case-insensitive.
func (d *Detector) RequestContainCommonExploits(r *http.Request) bool {
	query := r.URL.RawQuery
	for _, re := range queryExploitRegexps {
		if re.MatchString(query) {
			return true
		}
	}

	userAgent := r.UserAgent()
	for _, re := range userAgentExploitRegexps {
		if re.MatchString(userAgent) {
			return true
		}
	}

	return false
}

// RequestIsMadeByBots reports whether the request's User-Agent header
// is classified as a bot or crawler by the crawler-user-agents list.
func (d *Detector) RequestIsMadeByBots(r *http.Request) bool {
	return agents.IsCrawler(r.UserAgent())
}
