package main

import (
	"fmt"
	"log"
	"net/http"
	"net/url"
	"os"
	"strings"
	"time"

	"github.com/Hackmanit/Web-Cache-Vulnerability-Scanner/pkg"
)

// version is shown in the startup banner and embedded in every report.
const version = "2.0.0"

// Shared scanner state, initialized in main and used by runTests.
var (
	currentDate      string          // scan start timestamp, used in generated file names and the report
	filePath         string          // common path prefix for all generated files (report, log, completed list)
	report           pkg.Report      // aggregated scan results, serialized by pkg.GenerateReport
	completedFile    *os.File        // records finished URLs when Config.GenerateCompleted is set
	proxyURL         *url.URL        // passed to the request-smuggling scan; presumably set by flag parsing — TODO confirm
	headerList       []string        // header wordlist for the header poisoning test
	parameterList    []string        // parameter wordlist for the parameter-based tests
	noTestPreference bool            // true when neither onlytest nor skiptest was specified
	excluded         map[string]bool // URLs excluded from recursive crawling
	added            map[string]bool // URLs already queued, so the crawler doesn't re-add them
)

// main wires up configuration and reporting, loads the wordlists and then
// scans every configured URL — plus recursively crawled URLs — for web
// cache poisoning and deception issues.
func main() {
	//pkg.ReadConfigFile()
	pkg.ParseFlags(version)
	/*****************************/

	/**** SET EXPORT STRUCT ****/
	report.Name = "Web_Cache_Vulnerability_Scanner"
	report.Version = version

	report.Config = &pkg.Config

	currentDate = time.Now().Format("2006-01-02_15-04-05")
	report.Date = currentDate
	report.Duration = "Not finished yet"
	/***************************/

	// Create the generatePath directory if it doesn't exist yet.
	if pkg.Config.GeneratePath != "./" {
		if !strings.HasSuffix(pkg.Config.GeneratePath, "/") {
			pkg.Config.GeneratePath += "/"
		}
		if _, err := os.Stat(pkg.Config.GeneratePath); err != nil {
			if os.IsNotExist(err) {
				if err := os.Mkdir(pkg.Config.GeneratePath, 0755); err != nil {
					msg := fmt.Sprintf("Error while creating Directory: %s\n", err.Error())
					pkg.PrintFatal(msg)
				}
			}
		}
	}

	// Common prefix for every generated file (report, log, completed list).
	filePath = fmt.Sprintf("%sWCVS_%s_%s", pkg.Config.GeneratePath, currentDate, pkg.RandomString(8))

	/* Setting Logoutput to Log file */
	if pkg.Config.GenerateLog {
		f, err := os.OpenFile(filePath+"_Log.txt", os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
		if err != nil {
			msg := fmt.Sprintf("Error while creating/opening Log File: %s\n", err.Error())
			pkg.PrintFatal(msg)
		}
		defer f.Close()
		log.SetOutput(f)
	}

	report.Config.Intitialized = true

	// Record the invoked command line, stripping fmt.Sprint's slice brackets.
	report.Command = fmt.Sprint(os.Args)
	report.Command = strings.TrimPrefix(report.Command, "[")
	report.Command = strings.TrimSuffix(report.Command, "]")
	pkg.PrintVerbose(report.Command+"\n\n", pkg.Cyan, 2)
	/******************************************/
	if pkg.Config.Verbosity < 0 || pkg.Config.Verbosity > 2 {
		msg := fmt.Sprintf("%d is not a valid verbosity between 0 and 2!\n", pkg.Config.Verbosity)
		pkg.PrintFatal(msg)
	}

	/* print copyright etc */
	pkg.PrintVerbose("Published by Hackmanit under http://www.apache.org/licenses/LICENSE-2.0\n", pkg.NoColor, 1)
	pkg.PrintVerbose("Author: Maximilian Hildebrand\n", pkg.NoColor, 1)
	pkg.PrintVerbose("Repository: https://github.com/Hackmanit/Web-Cache-Vulnerability-Scanner\n\n", pkg.NoColor, 1)

	// Print starting time
	msg := fmt.Sprintf("WCVS v%s started at %s\n", version, currentDate)
	pkg.PrintVerbose(msg, pkg.NoColor, 1)

	start := time.Now()

	// Setting both onlytest and skiptest is contradictory and therefore fatal;
	// noTestPreference is true when neither was given (= run every test).
	if pkg.Config.OnlyTest != "" && pkg.Config.SkipTest != "" {
		msg = "You can't set both doTest and dontTest\n"
		pkg.PrintFatal(msg)
	}
	noTestPreference = pkg.Config.OnlyTest == "" && pkg.Config.SkipTest == ""

	if pkg.Config.GenerateReport {
		pkg.GenerateReport(report, filePath)
	}
	if pkg.Config.GenerateCompleted {
		completedFile = createCompletedURLs()
	}
	pkg.InitClient()
	/***************************/

	// Reading header wordlist, only if it is needed
	if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "header") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "header")) {
		if pkg.Config.HeaderWordlist != "" {
			headerList = pkg.ReadLocalFile(pkg.Config.HeaderWordlist, "header")
		} else {
			headerList = pkg.DefaultHeaders
		}
	}

	// Reading parameter wordlist, only if it is needed
	if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "parameter") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "parameter")) {
		if pkg.Config.ParameterWordlist != "" {
			parameterList = pkg.ReadLocalFile(pkg.Config.ParameterWordlist, "parameter")
		} else {
			parameterList = pkg.DefaultParameters
		}
	}

	/*******************************************************/

	// Build the set of URLs that must not be crawled recursively.
	excluded = make(map[string]bool)

	for _, u := range pkg.Config.RecExclude {
		u = strings.TrimSuffix(u, "\r")
		u = strings.TrimSpace(u)

		// check if empty or is a comment
		if u == "" || strings.HasPrefix(u, "//") {
			continue
		}
		if !strings.HasPrefix(u, "http://") && !strings.HasPrefix(u, "https://") {
			msg = fmt.Sprintf("URL %s doesn't begin with http:// or https:// and gets skipped\n", u)
			pkg.Print(msg, pkg.Yellow)
			continue
		}

		excluded[u] = true
	}

	// Build the scan queue; "added" doubles as the crawler's seen-set.
	added = make(map[string]bool)

	var testUrls []string
	for _, u := range pkg.Config.Urls {
		u = strings.TrimSuffix(u, "\r")
		u = strings.TrimSpace(u)

		// check if empty or is a comment
		if u == "" || strings.HasPrefix(u, "//") {
			continue
		}
		if !strings.HasPrefix(u, "http://") && !strings.HasPrefix(u, "https://") {
			prefix := "https"
			if pkg.Config.UseHTTP {
				prefix = "http"
			}
			msg = fmt.Sprintf("URL %s gets the prefix %s\n", u, prefix)
			pkg.PrintVerbose(msg, pkg.Yellow, 2)

			u = prefix + "://" + u
		}

		// BUGFIX: previously duplicates in the input list were queued and
		// scanned twice; skip URLs that were already added.
		if added[u] {
			continue
		}
		added[u] = true
		testUrls = append(testUrls, u)
	}

	// Scan each URL; level 0 is the URL itself, deeper levels are crawled URLs.
	for i, u := range testUrls {
		var recUrls []string

		progress := fmt.Sprintf("(%d/%d)", i+1, len(testUrls))
		runTests(0, u, progress, &recUrls, "sub_")

		for rec := 1; rec <= pkg.Config.Recursivity; rec++ {
			var urlsToAdd []string

			for ii, uu := range recUrls {
				// RecLimit == 0 disables the limit (the condition can never match).
				if ii != 0 && ii == pkg.Config.RecLimit {
					msg = "RecLimit was reached. The next URL - if available - will be tested\n"
					pkg.Print(msg, pkg.NoColor)
					break
				}
				progress = fmt.Sprintf("(%d/%d):(%d/%d)", i+1, len(testUrls), ii+1, len(recUrls))
				runTests(rec, uu, progress, &urlsToAdd, "crawl_")
			}

			recUrls = urlsToAdd
		}
	}

	/* Scan finished */
	msg = "Successfully finished the scan\n"
	pkg.PrintVerbose(msg, pkg.NoColor, 1)

	duration := time.Since(start)
	msg = fmt.Sprintf("Duration: %s\n\n", duration)
	pkg.PrintVerbose(msg, pkg.NoColor, 1)
	/****************/

	if pkg.Config.GenerateReport {
		report.Duration = duration.String()
		pkg.GenerateReport(report, filePath)
	}
}

func runTests(rec int, u string, progress string, foundUrls *[]string, stat string) {
	var repWebsite pkg.ReportWebsite
	var err error

	msg := fmt.Sprintf("\nTesting website%s: %s\n", progress, u)
	pkg.Print(msg, pkg.NoColor)
	pkg.Print("===============================================================\n", pkg.NoColor)

	if !strings.HasPrefix(u, "http://") && !strings.HasPrefix(u, "https://") {
		if pkg.Config.UseHTTP {
			u = "http://" + u
		} else {
			u = "https://" + u
		}
	}
	repWebsite.URL = u

	/* Setting up client: cookies and noredirect */
	msg = "Setting up client\n"
	pkg.PrintVerbose(msg, pkg.NoColor, 2)

	// Setting cookies, specified by setcookies
	pkg.Config.Website.Cookies = map[string]string{}
	for _, c := range pkg.Config.Cookies {
		c = strings.TrimSuffix(c, "\r")
		c = strings.TrimSpace(c)
		if c == "" {
			continue
		} else if !strings.Contains(c, "=") {
			msg = "Specified cookie %s doesn't contain a = and will be skipped\n"
			pkg.PrintVerbose(msg, pkg.NoColor, 2)
			continue
		} else {
			cSlice := strings.SplitAfterN(c, "=", 2)
			cSlice[0] = strings.TrimSuffix(cSlice[0], "=")

			pkg.Config.Website.Cookies[cSlice[0]] = cSlice[1]
		}
	}

	timeOutDuration := time.Duration(time.Duration(pkg.Config.TimeOut) * time.Second)
	clientNoRedir := &http.Client{
		CheckRedirect: func(redirRequest *http.Request, via []*http.Request) error {
			/* Commented out, because it unnecessary bloats up logs, especially for 301/302 links
			msg := fmt.Sprintf("Redirect Request denied: %s\n", redirRequest.Header)
			pkg.PrintVerbose(msg, pkg.Yellow, 2)
			*/
			return http.ErrUseLastResponse
		},
		Timeout: timeOutDuration,
	}

	http.DefaultClient = clientNoRedir

	// retrieve cookies, headers etc. Only setStatusCode if no cookies shall be accepted. Otherwise the next request with set Cookies sets the status code
	if !pkg.Config.DeclineCookies {
		pkg.Config.Website, err = pkg.GetWebsite(u, false, false)
	} else {
		pkg.Config.Website, err = pkg.GetWebsite(u, true, false)
	}
	if err != nil {
		repWebsite.HasError = true
		repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
		report.Websites = append(report.Websites, repWebsite)

		msg := fmt.Sprintf("Couldn't test url: %s\n", err.Error())
		pkg.Print(msg, pkg.Red)
		return
	}

	if strings.HasPrefix(pkg.Config.Website.Body, "<html><head><title>Burp Suite") {
		msg := fmt.Sprintf("Couldn't connect to given url: \n%s\n", pkg.Config.Website.Body)
		pkg.Print(msg, pkg.Red)
		return
	}

	if !pkg.Config.DeclineCookies {
		// retrieve response with all cookies set
		pkg.Config.Website, err = pkg.GetWebsite(u, true, false)
		if err != nil {
			repWebsite.HasError = true
			repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
			report.Websites = append(report.Websites, repWebsite)

			msg := fmt.Sprintf("Couldn't test url: %s\n", err.Error())
			pkg.Print(msg, pkg.Red)
			return
		}
	}

	// check if there's a cache and the cachebuster works
	var errSlice []error
	var alwaysMiss bool
	pkg.Config.Website.Cache, alwaysMiss, errSlice = pkg.CheckCache(parameterList, headerList)
	for _, err := range errSlice {
		if err != nil {
			repWebsite.HasError = true
			repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
		}
	}
	/* dont return when there's an error. Because the crawler shall be run anyways. Also there might was a cachebuster found and an error for an other cachebuster which doesnt matter
	if len(errSlice) > 0 && !pkg.Config.Force {
		return
	}
	*/

	// retrieve response with cachebuster if cachebuster was found
	if pkg.Config.Website.Cache.CBwasFound {
		pkg.Config.Website, err = pkg.GetWebsite(u, true, true)
		if err != nil {
			repWebsite.HasError = true
			repWebsite.ErrorMessages = append(repWebsite.ErrorMessages, err.Error())
			report.Websites = append(report.Websites, repWebsite)

			msg := fmt.Sprintf("Couldn't test url: %s\n", err.Error())
			pkg.Print(msg, pkg.Red)
			return
		}
	}

	if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "decep") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "decep")) {
		msg = addSeparator("Web Cache Deception")
		pkg.PrintVerbose(msg, pkg.NoColor, 1)

		if alwaysMiss || pkg.Config.Website.Cache.Indicator == "" {
			repWebsite.Results = append(repWebsite.Results, pkg.TestWebCacheDeception())
		} else {
			msg = "The response already gets cached!"
			pkg.Print(msg+"\n", pkg.Yellow)
		}
	} else {
		msg = addSeparator("Skipping Web Cache Deception")
		pkg.PrintVerbose(msg, pkg.NoColor, 1)
	}

	/*******************************************/
	if pkg.Config.Website.Cache.CBwasFound || pkg.Config.Force {
		repWebsite.CacheIndicator = pkg.Config.Website.Cache.Indicator
		repWebsite.CBName = pkg.Config.Website.Cache.CBName
		repWebsite.CBwasFound = pkg.Config.Website.Cache.CBwasFound

		if !pkg.Config.Website.Cache.CBwasFound && pkg.Config.Force {
			fmt.Println()
			pkg.Print("No Cachebuster was found. Forcing the parameter "+pkg.Config.CacheBuster+" as Cachebuster.\n", pkg.Cyan)
			pkg.Config.Website.Cache.CBwasFound = true
			pkg.Config.Website.Cache.CBisParameter = true
			pkg.Config.Website.Cache.CBName = pkg.Config.CacheBuster
		}

		/* Testing for cookie poisoning */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "cookie") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "cookie")) {
			msg = addSeparator("Cookie Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			repWebsite.Results = append(repWebsite.Results, pkg.ScanCookies())

			if len(pkg.Config.Website.Cookies) == 0 {
				msg = "There were no cookies to test!"
				pkg.Print(msg+"\n", pkg.Yellow)

				repWebsite.Results[len(repWebsite.Results)-1].ErrorMessages = append(repWebsite.Results[len(repWebsite.Results)-1].ErrorMessages, msg)
				repWebsite.Results[len(repWebsite.Results)-1].HasError = true
			}
		} else {
			msg = addSeparator("Skipping Cookie Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/*****************************/

		/* Testing for css poisoning */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "css") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "css")) {
			msg = addSeparator("CSS Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			repWebsite.Results = append(repWebsite.Results, pkg.ScanCSS())
		} else {
			msg = addSeparator("Skipping CSS Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/*****************************/

		/* Testing for multiple forwarding headers for poisoning */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "forward") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "forward")) {
			msg = addSeparator("Multiple Forwarding Headers Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			repWebsite.Results = append(repWebsite.Results, pkg.ScanForwardingHeaders())
		} else {
			msg = addSeparator("Skipping Multiple Forwarding Headers Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/*************************************************************/

		/* Testing for HTTP request smuggling poisoning */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "smuggl") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "smuggl")) {
			msg = addSeparator("HTTP Request Smuggling Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			repWebsite.Results = append(repWebsite.Results, pkg.ScanHTTPRequestSmuggling(proxyURL))
		} else {
			msg = addSeparator("Skipping HTTP Request Smuggling Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/*************************************************************/

		/* Testing for multiple Cache Poisoned Denial Of Service Techniques */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "dos") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "dos")) {
			msg = addSeparator("Cache Poisoned Denial Of Service")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			repWebsite.Results = append(repWebsite.Results, pkg.DOS())
		} else {
			msg = addSeparator("Skipping Cache Poisoned Denial Of Service")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/***********************************************************/

		/* Testing for header poisoning */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "header") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "header")) {
			msg = addSeparator("Header Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			repWebsite.Results = append(repWebsite.Results, pkg.ScanHeaders(headerList))
		} else {
			msg = addSeparator("Skipping Header Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/********************************/

		/* Testing for query parameter poisoning */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "parameter") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "parameter")) {
			msg = addSeparator("Query Parameter Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
			repWebsite.Results = append(repWebsite.Results, pkg.ScanParameters(parameterList))
		} else {
			msg = addSeparator("Skipping Query Parameter Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/*****************************************/

		/* Testing for Fat GET */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "fat") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "fat")) {
			msg = addSeparator("Fat GET Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			if pkg.Config.DoPost {
				msg = "Can't check for Fat GET Poisoning, because POST was specified\n"
				pkg.PrintVerbose(msg, pkg.Yellow, 1)
			} else {
				repWebsite.Results = append(repWebsite.Results, pkg.ScanFatGET())
			}
		} else {
			msg = addSeparator("Skipping Fat GET Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/**********************/

		/* Testing for Parameter Cloaking */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "cloaking") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "cloaking")) {
			msg = addSeparator("Parameter Cloaking Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)

			if pkg.Config.DoPost {
				msg = "Can't check for Parameter Cloaking Poisoning, because POST was specified\n"
				pkg.PrintVerbose(msg, pkg.Yellow, 1)
			} else {
				repWebsite.Results = append(repWebsite.Results, pkg.ScanParameterCloaking())
			}
		} else {
			msg = addSeparator("Skipping Parameter Cloaking Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/**********************************/

		/* Testing for Parameter Pollution */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "pollution") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "pollution")) {
			msg = addSeparator("Parameter Pollution Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
			repWebsite.Results = append(repWebsite.Results, pkg.ScanParameterPollution())
		} else {
			msg = addSeparator("Skipping Parameter Pollution Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/**********************************/

		/* Testing for Parameter Encoding */
		if noTestPreference || strings.Contains(pkg.Config.OnlyTest, "encoding") || (pkg.Config.SkipTest != "" && !strings.Contains(pkg.Config.SkipTest, "encoding")) {
			msg = addSeparator("Parameter Encoding Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
			repWebsite.Results = append(repWebsite.Results, pkg.ScanParameterEncoding())
		} else {
			msg = addSeparator("Skipping Parameter Encoding Poisoning")
			pkg.PrintVerbose(msg, pkg.NoColor, 1)
		}
		/**********************************/
	}

	/* Check for linked files */
	if pkg.Config.Recursivity > rec {
		msg = fmt.Sprintf("\nChecking recursively for urls (%d/%d)\n", rec+1, pkg.Config.Recursivity)
		pkg.Print(msg, pkg.NoColor)

		tempUrls := pkg.CrawlUrls(u, added, excluded)

		if len(tempUrls) > 0 {
			msg = fmt.Sprintf("Found %d url(s)\n", len(tempUrls))
			pkg.Print(msg, pkg.NoColor)

			msg = "Adding the following urls to the Queue:"
			pkg.PrintVerbose(msg+"\n", pkg.NoColor, 1)
			for _, u := range tempUrls {
				pkg.PrintVerbose(u+"\n", pkg.NoColor, 1)
			}

			*foundUrls = append(*foundUrls, tempUrls...)
		} else {
			msg = "No urls were found to add to the queue\n"
			pkg.Print(msg, pkg.NoColor)
		}
	}
	/**************************/

	if pkg.Config.GenerateCompleted {
		_, err = completedFile.WriteString(u + "\n")
		if err != nil {
			pkg.Print("Couldn't write to WCVS_Completed File: %s\n"+err.Error(), pkg.Red)
		}
	}

	if pkg.Config.GenerateReport {
		for _, r := range repWebsite.Results {
			if r.Vulnerable {
				repWebsite.Vulnerable = true
				break
			}
		}
		report.Websites = append(report.Websites, repWebsite)

		report.Vulnerable = report.Vulnerable || repWebsite.Vulnerable
		pkg.PrintNewLine()
		pkg.GenerateReport(report, filePath)
	}

	pkg.Print("===============================================================\n\n", pkg.NoColor)
}

// addSeparator returns msg framed as a boxed headline, used to visually
// separate the individual scan phases in the console output.
func addSeparator(msg string) string {
	const bar = " --------------------------------------------------------------"
	return fmt.Sprintf("\n%s\n| %s\n%s\n", bar, msg, bar)
}

// createCompletedURLs creates (or truncates) the "<filePath>_Completed.txt"
// file that records every URL whose scan finished. The returned handle stays
// open for the lifetime of the scan; the caller owns it.
//
// The process terminates via pkg.PrintFatal if the file cannot be created.
func createCompletedURLs() *os.File {
	completedPath := filePath + "_Completed.txt"

	// Warn when a previous run left a file behind; os.Create below truncates it.
	if _, err := os.Stat(completedPath); err == nil {
		msg := fmt.Sprintf("The file %s will be overwritten, as it already exists\n", completedPath)
		pkg.PrintVerbose(msg, pkg.Yellow, 1)
	}

	// BUGFIX: the previous version deferred Close on a nil handle (a no-op)
	// and reopened existing files with O_WRONLY but without O_TRUNC, which
	// left stale trailing bytes from longer earlier runs. os.Create both
	// creates missing files and truncates existing ones.
	file, err := os.Create(completedPath)
	if err != nil {
		msg := "Couldn't create WCVS_Completed file: " + err.Error() + "\n"
		pkg.PrintFatal(msg)
	}

	return file
}
