package util

import (
	"strings"
)

// DEFAULT_PATH_SEPARATOR is the separator used to tokenize patterns and paths.
// NOTE(review): Go style prefers MixedCaps (DefaultPathSeparator); renaming is
// deferred since these exported names may have external callers.
const DEFAULT_PATH_SEPARATOR = "/"

// CACHE_TURNOFF_THRESHOLD is the maximum number of distinct patterns kept in
// tokenizedPatternCache before the cache is reset (see tokenizePattern).
const CACHE_TURNOFF_THRESHOLD = 65536

// VARIABLE_PATTERN matches a URI template variable such as {id} within one
// path segment (no "/" allowed inside the braces).
const VARIABLE_PATTERN = "\\{[^/]+?}"

// tokenizedPatternCache memoizes pattern -> path-segment tokenizations.
// NOTE(review): this package-level map is accessed without a mutex; confirm
// callers are single-goroutine before using concurrently.
var tokenizedPatternCache = make(map[string][]string, 1024)

/* TODO(port): unfinished Go port of Spring AntPathMatcher.doMatch — the loop
below is incomplete (no final return), so it is kept commented out for reference.
func Match(pattern string, path string) bool {
	if strings.Index(path, DEFAULT_PATH_SEPARATOR) != strings.Index(pattern, DEFAULT_PATH_SEPARATOR) {
		return false
	}
	pattDirs := tokenizePattern(pattern)
	pathDirs := tokenizePath(path);
	var pattIdxStart = 0;
	var pattIdxEnd = len(pattDirs)- 1;
	var pathIdxStart = 0;
	var pathIdxEnd = len(pathDirs) - 1;
	for ;pattIdxStart <= pattIdxEnd && pathIdxStart <= pathIdxEnd; {
		pattDir := pattDirs[pattIdxStart];
		if (pattDir == "**") {
			break;
		}
		if (!matchStrings(pattDir, pathDirs[pathIdxStart])) {
			return false;
		}
		pattIdxStart++;
		pathIdxStart++;
	}
}*/
/* TODO(port): unported Java from Spring AntPathMatcher.getStringMatcher —
kept verbatim as a porting reference; not valid Go.
func matchStrings(pattern string, str string) bool {
	AntPathStringMatcher matcher = null;
	if (cachePatterns == null || cachePatterns.booleanValue()) {
		matcher = this.stringMatcherCache.get(pattern);
	}
	if (matcher == null) {
		matcher = new AntPathStringMatcher(pattern, this.caseSensitive);
		if (cachePatterns == null && this.stringMatcherCache.size() >= CACHE_TURNOFF_THRESHOLD) {
			// Try to adapt to the runtime situation that we're encountering:
			// There are obviously too many different patterns coming in here...
			// So let's turn off the cache since the patterns are unlikely to be reoccurring.
			deactivatePatternCache();
			return matcher;
		}
		if (cachePatterns == null || cachePatterns.booleanValue()) {
			this.stringMatcherCache.put(pattern, matcher);
		}
	}
}*/
// tokenizePattern splits pattern on "/" into path segments, memoizing the
// result in tokenizedPatternCache. When the cache holds at least
// CACHE_TURNOFF_THRESHOLD distinct patterns, it is reset via
// deactivatePatternCache and the fresh result is returned without being
// stored, on the assumption that patterns are not recurring.
func tokenizePattern(pattern string) []string {
	// Single comma-ok lookup: avoids relying on strings.Split never
	// returning nil and keeps the cache-hit path first.
	if cached, ok := tokenizedPatternCache[pattern]; ok {
		return cached
	}
	tokenized := tokenizePath(pattern)
	if len(tokenizedPatternCache) >= CACHE_TURNOFF_THRESHOLD {
		// Too many distinct patterns coming through: drop the cache
		// rather than let it grow without bound, and skip storing this
		// result.
		deactivatePatternCache()
		return tokenized
	}
	tokenizedPatternCache[pattern] = tokenized
	return tokenized
}
// tokenizePath splits path into its "/"-separated segments.
func tokenizePath(path string) []string {
	segments := strings.Split(path, "/")
	return segments
}
// deactivatePatternCache discards all cached tokenizations by replacing the
// package-level map with a fresh one.
// NOTE(review): despite the name, this does not permanently disable caching
// (as Spring's AntPathMatcher does); tokenizePattern will begin refilling the
// new map on its next call — confirm this is the intended behavior.
func deactivatePatternCache() {
	tokenizedPatternCache = make(map[string][]string, 1024)
}
