package internal

import (
	"context"
	"database/sql"
	"sync"
	"time"

	_ "github.com/wangjq4214/kb_driver"
	"go.uber.org/zap"

	"gitea.buaaica.org/ASI/kingbase_exporter/internal/scraper"
	"github.com/prometheus/client_golang/prometheus"
)

// scrapeDurationDesc describes the per-collector scrape duration metric,
// built from scraper.Namespace and scraper.Exporter with the suffix
// "collect_duration_seconds" and a single "collector" label. One sample
// is emitted per collector (plus one for the connection) on every scrape.
var scrapeDurationDesc = prometheus.NewDesc(
	prometheus.BuildFQName(scraper.Namespace, scraper.Exporter, "collect_duration_seconds"),
	"Collector time duration.",
	[]string{"collector"},
	nil,
)

// Metrics bundles the exporter's own self-monitoring instruments. They are
// created once by NewMetrics and updated by Exporter during each scrape.
type Metrics struct {
	// Total number of times Kingbase was scraped for metrics.
	TotalScrapes prometheus.Counter

	// Total number of times an error occurred scraping a Kingbase,
	// partitioned by the "collector" label.
	ScrapeErrors *prometheus.CounterVec

	// Whether the last scrape of metrics from Kingbase resulted in an error (1 for error, 0 for success).
	Error prometheus.Gauge

	// Whether the Kingbase server could be reached (1 for up, 0 for down),
	// as determined by the connection ping in Exporter.connect.
	KingbaseUp prometheus.Gauge
}

// NewMetrics constructs the exporter's self-monitoring instruments: a
// total-scrapes counter, a per-collector error counter, a last-scrape
// error gauge, and the server "up" gauge.
func NewMetrics() Metrics {
	ns := scraper.Namespace
	sub := scraper.Exporter

	scrapes := prometheus.NewCounter(prometheus.CounterOpts{
		Namespace: ns,
		Subsystem: sub,
		Name:      "scrapes_total",
		Help:      "Total number of times Kingbase was scraped for metrics.",
	})

	scrapeErrors := prometheus.NewCounterVec(prometheus.CounterOpts{
		Namespace: ns,
		Subsystem: sub,
		Name:      "scrape_errors_total",
		Help:      "Total number of times an error occurred scraping a Kingbase.",
	}, []string{"collector"})

	lastError := prometheus.NewGauge(prometheus.GaugeOpts{
		Namespace: ns,
		Subsystem: sub,
		Name:      "last_scrape_error",
		Help:      "Whether the last scrape of metrics from Kingbase resulted in an error (1 for error, 0 for success).",
	})

	// The "up" gauge intentionally carries no subsystem, so its exported
	// name is <namespace>_up — the common exporter convention.
	up := prometheus.NewGauge(prometheus.GaugeOpts{
		Namespace: ns,
		Name:      "up",
		Help:      "Whether the Kingbase server is up.",
	})

	return Metrics{
		TotalScrapes: scrapes,
		ScrapeErrors: scrapeErrors,
		Error:        lastError,
		KingbaseUp:   up,
	}
}

// Exporter collects Kingbase metrics and implements the
// prometheus.Collector interface (see the assertion at the bottom of
// this file).
type Exporter struct {
	// Shared context used to bound the connection ping and every scraper
	// run. NOTE(review): storing a context in a struct is discouraged by
	// Go convention — consider threading it through Collect if the
	// prometheus.Collector interface ever permits it.
	ctx context.Context

	// DB connect string (data source name passed to sql.Open).
	dsn string

	// Self-monitoring metrics updated during each scrape.
	metrics Metrics

	// The set of scrapers run in parallel against the database on every
	// collection.
	dbScraper []scraper.DBScraper
}

// NewExporter wires together an Exporter from its dependencies: the
// shared context, the database DSN, the self-monitoring metrics, and the
// scrapers to run on each collection.
func NewExporter(ctx context.Context, dsn string, metrics Metrics, dbScraper []scraper.DBScraper) *Exporter {
	e := new(Exporter)
	e.ctx = ctx
	e.dsn = dsn
	e.metrics = metrics
	e.dbScraper = dbScraper
	return e
}

// connect opens a database handle for the configured DSN and verifies it
// with a ping bounded by e.ctx. On success it marks the server as up and
// clears the error gauge; on failure it records the error state and
// returns a non-nil error. The returned *sql.DB is owned by the caller,
// which must Close it.
func (e *Exporter) connect() (*sql.DB, error) {
	db, err := sql.Open("kingbase", e.dsn)
	if err != nil {
		zap.S().With("err", err.Error()).Errorf("Error opening connection to kingbase database")
		e.metrics.Error.Set(1)
		return nil, err
	}

	// A single short-lived connection is enough for a scrape and avoids
	// holding idle connections open on the server between scrapes.
	db.SetMaxOpenConns(1)
	db.SetMaxIdleConns(1)
	db.SetConnMaxLifetime(1 * time.Minute)

	// Check if the database connection is successful.
	if err := db.PingContext(e.ctx); err != nil {
		zap.S().With("err", err.Error()).Errorf("Error pinging kingbase server")
		e.metrics.KingbaseUp.Set(0)
		e.metrics.Error.Set(1)

		// sql.Open succeeded, so the pool exists; close it here because
		// the caller only receives a nil *sql.DB and cannot release it.
		// Without this, every failed ping leaked a connection pool.
		db.Close()
		return nil, err
	}

	e.metrics.KingbaseUp.Set(1)
	e.metrics.Error.Set(0)
	return db, nil
}

// dbScrape connects to the database and fans out every registered scraper
// in its own goroutine, streaming the resulting metrics into ch. A
// per-collector duration sample (plus one for the connection itself) is
// emitted via scrapeDurationDesc. On connection failure it returns
// silently; connect has already recorded the error metrics.
func (e *Exporter) dbScrape(ch chan<- prometheus.Metric) {
	connStart := time.Now()
	db, err := e.connect()
	if err != nil {
		return
	}
	defer db.Close()

	ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, time.Since(connStart).Seconds(), "connection")

	// Deferred after db.Close, so (LIFO order) all scrapers finish
	// before the connection is released.
	var wg sync.WaitGroup
	defer wg.Wait()

	for _, s := range e.dbScraper {
		wg.Add(1)

		// Pass the scraper as an argument so each goroutine sees its own
		// copy regardless of Go version loop-variable semantics.
		go func(sc scraper.DBScraper) {
			defer wg.Done()
			name := "collect." + sc.Name()
			begin := time.Now()
			if err := sc.Scraper(e.ctx, db, ch, zap.L()); err != nil {
				zap.S().With("err", err.Error()).Errorf("Error from scraper which is %v", name)
			}

			ch <- prometheus.MustNewConstMetric(scrapeDurationDesc, prometheus.GaugeValue, time.Since(begin).Seconds(), name)
		}(s)
	}
}

// Collect runs a full scrape of the database and then emits the
// exporter's self-monitoring metrics. Implements prometheus.Collector.
func (e *Exporter) Collect(ch chan<- prometheus.Metric) {
	m := &e.metrics
	m.TotalScrapes.Inc()

	// dbScrape blocks until every scraper goroutine has finished.
	e.dbScrape(ch)

	ch <- m.TotalScrapes
	m.ScrapeErrors.Collect(ch)
	ch <- m.Error
	ch <- m.KingbaseUp
}

// Describe sends the descriptors of the exporter's self-monitoring
// metrics. Implements prometheus.Collector.
func (e *Exporter) Describe(ch chan<- *prometheus.Desc) {
	m := &e.metrics
	ch <- m.TotalScrapes.Desc()
	m.ScrapeErrors.Describe(ch)
	ch <- m.Error.Desc()
	ch <- m.KingbaseUp.Desc()
}

// Compile-time assertion that *Exporter satisfies prometheus.Collector.
var _ prometheus.Collector = (*Exporter)(nil)
