package main

import (
	"context"
	"html/template"
	"net/http"
	"strconv"
	"time"

	"gitea.buaaica.org/ASI/kingbase_exporter/internal"
	"gitea.buaaica.org/ASI/kingbase_exporter/internal/scraper"
	"github.com/julienschmidt/httprouter"
	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	"go.uber.org/zap"
)

// landingPage is the HTML document served for any path that is not the
// metrics endpoint. It is an html/template body whose sole argument
// ({{.}}) is the configured metrics path, rendered as a link.
var landingPage = `<html>
<head>
  <title>Kingbase server exporter</title>
</head>
<body>
  <h1>Kingbase server exporter</h1>
  <p><a href="{{.}}">Metrics</a></p>
</body>
</html>
`

// routerOptions holds the configuration consumed by NewRouter. It is
// populated through the SetRouterXxxx functional options, forming a
// builder-style pattern.
type routerOptions struct {
	dsn     string              // Kingbase server connection string (DSN)
	metrics string              // URL path the metrics endpoint is served on
	scraper []scraper.DBScraper // scrapers enabled by the configuration
	timeout int                 // scrape timeout cap in seconds (see newMetricsHandle)
}

// RouterOption mutates a routerOptions value; pass one or more of these
// to NewRouter to configure the router (functional-options pattern).
type RouterOption func(*routerOptions)

// SetRouterDsn returns an option that sets the Kingbase server
// connection string (DSN).
func SetRouterDsn(dsn string) RouterOption {
	return func(opts *routerOptions) { opts.dsn = dsn }
}

// SetRouterMetrics returns an option that sets the URL path the
// metrics endpoint is served on.
func SetRouterMetrics(metrics string) RouterOption {
	return func(opts *routerOptions) { opts.metrics = metrics }
}

// SetRouterScraper returns an option that installs the scrapers that
// have already been filtered by the configuration.
func SetRouterScraper(s []scraper.DBScraper) RouterOption {
	return func(opts *routerOptions) { opts.scraper = s }
}

// SetRouterTimeout returns an option that sets the request timeout cap
// in seconds.
func SetRouterTimeout(timeout int) RouterOption {
	return func(opts *routerOptions) { opts.timeout = timeout }
}

// newMetricsHandle builds the HTTP handler for the metrics endpoint.
// It honors the X-Prometheus-Scrape-Timeout-Seconds header (capped by
// the configured timeout, in seconds), optionally narrows the scraper
// set via collect[] query parameters, and serves a fresh registry that
// combines the exporter's metrics with the default gatherer.
func newMetricsHandle(dsn string, s []scraper.DBScraper, timeout int) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// Parse the scrape-timeout header Prometheus sends with each request.
		ctx := r.Context()
		if v := r.Header.Get("X-Prometheus-Scrape-Timeout-Seconds"); v != "" {
			timeoutSeconds, err := strconv.ParseFloat(v, 64)
			if err != nil {
				zap.S().With("err", err.Error()).Infof("Failed to parse timeout from Prometheus header")
			} else {
				// Use the smaller of the configured timeout and the scrape
				// timeout. A non-positive configured timeout means "no cap";
				// previously timeout==0 clamped the deadline to zero and
				// cancelled every scrape immediately.
				if timeout > 0 {
					if timeoutSeconds < float64(timeout) {
						zap.S().Warnf("Timeout should be lower than prometheus scrape timeout")
					} else {
						timeoutSeconds = float64(timeout)
					}
				}

				// Only apply a deadline for positive values; WithTimeout on a
				// zero/negative duration yields an already-expired context.
				if timeoutSeconds > 0 {
					// Avoid variable masking by not using `:=` expressions.
					var cancel context.CancelFunc
					ctx, cancel = context.WithTimeout(ctx, time.Duration(timeoutSeconds*float64(time.Second)))
					defer cancel()
					r = r.WithContext(ctx)
				}
			}
		}

		// Filter scrapers by the collect[] query parameters passed by Prometheus.
		filterScrape := filterScrapers(s, r.URL.Query()["collect[]"])

		// Collect information through the collector interface using a
		// per-request registry so each scrape is isolated.
		registry := prometheus.NewRegistry()
		registry.MustRegister(internal.NewExporter(ctx, dsn, internal.NewMetrics(), filterScrape))

		gatherers := prometheus.Gatherers{
			prometheus.DefaultGatherer,
			registry,
		}
		h := promhttp.HandlerFor(gatherers, promhttp.HandlerOpts{})
		h.ServeHTTP(w, r)
	}
}

// filterScrapers returns the scrapers whose Name() appears in names.
// An empty names slice selects every scraper.
func filterScrapers(s []scraper.DBScraper, names []string) []scraper.DBScraper {
	if len(names) == 0 {
		return s
	}
	wanted := make(map[string]bool, len(names))
	for _, n := range names {
		wanted[n] = true
	}
	var out []scraper.DBScraper
	for _, sc := range s {
		if wanted[sc.Name()] {
			out = append(out, sc)
		}
	}
	return out
}

// NewRouter defines the routing of the application. Currently only one
// Prometheus endpoint and one default landing page are specified.
func NewRouter(_ context.Context, options ...RouterOption) http.Handler {
	o := routerOptions{}
	for _, opt := range options {
		opt(&o)
	}

	// Parse the landing page once at construction time instead of on every
	// request; template.Must then panics at startup (fail fast) rather than
	// inside the NotFound handler. landingPage is already a string, so no
	// conversion is needed.
	landing := template.Must(template.New("landing").Parse(landingPage))

	router := httprouter.New()

	// Any unknown path renders the landing page linking to the metrics path.
	router.NotFound = http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
		_ = landing.Execute(w, o.metrics)
	})

	// Wrap the scrape handler so the exporter's own HTTP metrics are recorded.
	handler := promhttp.InstrumentMetricHandler(prometheus.DefaultRegisterer, newMetricsHandle(o.dsn, o.scraper, o.timeout))

	router.GET(o.metrics, func(w http.ResponseWriter, r *http.Request, _ httprouter.Params) {
		handler.ServeHTTP(w, r)
	})

	return router
}
