package collector

import (
	"context"
	"github.com/go-kit/kit/log"
	"github.com/go-redis/redis/v8"
	"github.com/prometheus/client_golang/prometheus"
)

// Error related metrics:
//   rejected_connections            connections rejected because of the maxclients limit
//   keyspace_misses                 failed key lookups in the main dictionary
//   master_link_down_since_seconds  seconds elapsed since the master/replica link went down
const (
	// Subsystem.
	// NOTE(review): the name `err` is shadowed by local `err error` variables
	// (e.g. inside Scrape); consider renaming to avoid confusion.
	err = "err"
	// Redis INFO field names, reused verbatim as metric names.
	rejected                   = "rejected_connections"
	keySpaceMisses             = "keyspace_misses"
	masterLinkDownSinceSeconds = "master_link_down_since_seconds"
)

// Metric descriptors.
//
// Help strings are in Chinese; English translations are given in the comments
// above each descriptor. None of the metrics carry variable labels.
var (
	// Number of connections rejected due to the maxclients limit.
	rejectedConnectionsDesc = prometheus.NewDesc(
		prometheus.BuildFQName(namespace, err, rejected),
		"由于maxclients限制而拒绝的连接数",
		[]string{}, nil,
	)
	// Number of failed key lookups in the main dictionary.
	keySpaceMissesDesc = prometheus.NewDesc(
		prometheus.BuildFQName(namespace, err, keySpaceMisses),
		"在主字典中查找key失败的次数",
		[]string{}, nil,
	)
	// Seconds elapsed since the master/replica connection went down.
	masterLinkDownSinceSecondsDesc = prometheus.NewDesc(
		prometheus.BuildFQName(namespace, err, masterLinkDownSinceSeconds),
		"主从节点连接断开以来，经过的秒数",
		[]string{}, nil,
	)
)

// ScrapeErr collects error related metrics (rejected connections, keyspace
// misses, master link down duration) from the "replication" and "stats"
// sections of the Redis INFO output.
type ScrapeErr struct{}

// Name of the Scraper. Should be unique.
// Returns the "err" subsystem name shared by all metrics of this scraper.
func (ScrapeErr) Name() string {
	return err
}

// Help describes the role of the Scraper.
//
// The previous text ("Collect the current size of all registered binlog
// files") was copy-pasted from a binlog/MySQL exporter and did not describe
// this scraper; it is replaced with an accurate description.
func (ScrapeErr) Help() string {
	return "Collect error related metrics: rejected connections, keyspace misses and master link down duration"
}

// Version of Redis from which scraper is available.
// NOTE(review): presumably all three INFO fields exist from Redis 4.0 on —
// confirm 4.0 is the intended minimum version.
func (ScrapeErr) Version() float64 {
	return 4.0
}

// Scrape collects data from redis connection and sends it over channel as prometheus metric.
//
// It reads the "replication" and "stats" sections of the INFO output and
// emits rejected_connections, keyspace_misses and
// master_link_down_since_seconds.
//
// NOTE(review): ctx and logger are currently unused; infos does not appear to
// accept a context — verify cancellation is handled inside infos.
func (ScrapeErr) Scrape(ctx context.Context, client *redis.Client, ch chan<- prometheus.Metric, logger log.Logger) error {
	data, err := infos(client, "replication", "stats")
	if err != nil {
		return err
	}

	// rejected_connections and keyspace_misses are cumulative counters in the
	// Redis INFO output, so expose them with CounterValue (they were
	// previously mislabeled as gauges, which breaks rate()-style queries).
	ch <- prometheus.MustNewConstMetric(
		rejectedConnectionsDesc, prometheus.CounterValue, valueOf(data, rejected),
	)
	ch <- prometheus.MustNewConstMetric(
		keySpaceMissesDesc, prometheus.CounterValue, valueOf(data, keySpaceMisses),
	)
	// master_link_down_since_seconds reflects a current state and resets when
	// the link recovers, so it stays a gauge.
	ch <- prometheus.MustNewConstMetric(
		masterLinkDownSinceSecondsDesc, prometheus.GaugeValue, valueOf(data, masterLinkDownSinceSeconds),
	)

	return nil
}

// Compile-time check that ScrapeErr satisfies the Scraper interface.
var _ Scraper = ScrapeErr{}
