package dao

import (
	"context"
	"fmt"
	"github.com/nsqio/go-nsq"
	"go.uber.org/zap"
	"gorm.io/gorm"
	"gsms/internal/conf"
	"gsms/pkg/mcontext"
	"gsms/pkg/orm"
	"net/http"
	"time"
)

type Dao struct {
	c         *conf.Config
	DB        *gorm.DB
	client    *http.Client
	producer  *nsq.Producer
	consumers []*nsq.Consumer
}

type nsqLogger struct{}

// Output impl
func (n *nsqLogger) Output(_ int, s string) (err error) {
	ctx := context.Background()
	log := mcontext.Logger(ctx)
	log.Info(s)
	return
}


// New init client
func New(c *conf.Config) (dao *Dao) {
	producer, err := nsq.NewProducer(c.NSQAddr.NSQAddress, c.NSQ)
	log := mcontext.Logger(context.Background())
	if err != nil {
		log.Fatal(err.Error())
		return
	}
	producer.SetLogger(&nsqLogger{}, nsq.LogLevelDebug)
	dao = &Dao{
		c:  c,
		DB: orm.NewPostgreSQL(c.ORM),
		client: &http.Client{
			Timeout: 2 * time.Second,
		},
		producer: producer,

	}
	return
}


// RunWorker 增加一个worker
func (d *Dao) StartWorker(topic string, handler nsq.Handler) {
	ctx := context.Background()
	log := mcontext.Logger(ctx)
	consumer, err := nsq.NewConsumer(topic, "channel", d.c.NSQ)
	if err != nil {
		log.Fatal(err.Error())
		return
	}
	consumer.SetLogger(&nsqLogger{}, nsq.LogLevelError)
	consumer.AddConcurrentHandlers(handler, 5)
	err = consumer.ConnectToNSQLookupd(d.c.NSQAddr.LookupAddress)
	if err != nil {
		log.Fatal(err.Error())
		return
	}
	// saving for close
	d.consumers = append(d.consumers, consumer)
}

func (d *Dao) Close(ctx context.Context) {
	l := mcontext.Logger(ctx)
	if d.DB != nil {
		sqlDB, err := d.DB.DB()
		if err != nil {
			l.Fatal("could not get sqlDB", zap.Error(err))
		}
		if err:= sqlDB.Close(); err!=nil {
			l.Fatal(fmt.Sprintf("db connections errors %s", err))
		}
		l.Info("db connections were closed")
	}
	if len(d.consumers) != 0 {
		for _, c := range d.consumers {
			c.Stop()
		}
		l.Info("nsq consumers were stopped")
	}

}