// NOTE(review): this header mentions Upsert_user, which does not appear in this file —
// it looks like a stale copy from another package; confirm and update.
// Original intent (translated): overly frequent upserts caused memory problems, so a
// local cache layer with single-reader/single-writer access was added.
package mongodb

import (
	"context"
	"errors"
	"time"

	"historyOfWikipedia/internal/model"

	log "github.com/sirupsen/logrus"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

// MongoFileHistoryRefDataBase bundles a MongoDB client with the two
// collections used for Wikipedia file-history reference data.
type MongoFileHistoryRefDataBase struct {
	ctx                 context.Context   // background context shared by all operations (NOTE(review): storing a ctx in a struct is unidiomatic Go — consider per-call contexts)
	client              *mongo.Client     // owning client; released by Close
	collection_ref      *mongo.Collection // "history_file_ref_<version>": reference documents (indexed in initIndex)
	collection_schedule *mongo.Collection // "history_file_schedule_<version>": processed-ID markers, documents keyed by _id only
}

// type LoadRevisionPageSize struct {

// NewMongoFileHistoryRefDataBase connects to the MongoDB instance at MongoUri,
// verifies the connection with a ping (10s timeout), selects the version-suffixed
// collections in the "wikipedia_ref" database, and ensures their indexes exist.
// Any connection failure is fatal (log.Fatal exits the process).
func NewMongoFileHistoryRefDataBase(MongoUri string, version string) *MongoFileHistoryRefDataBase {
	client, err := mongo.NewClient(options.Client().ApplyURI(MongoUri))
	if err != nil {
		log.Fatal(err)
	}
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()
	// Bug fix: the Connect error was previously discarded; a failed dial would
	// only surface later as a confusing Ping error.
	if err := client.Connect(ctx); err != nil {
		log.Fatal(err)
	}
	if err := client.Ping(ctx, nil); err != nil {
		log.Fatal(err)
	}
	database := client.Database("wikipedia_ref")

	// Long-lived background context for all subsequent operations.
	cctx := context.Background()
	mongodb := MongoFileHistoryRefDataBase{
		ctx:                 cctx,
		client:              client,
		collection_ref:      database.Collection("history_file_ref_" + version),
		collection_schedule: database.Collection("history_file_schedule_" + version),
	}
	mongodb.initIndex()
	return &mongodb
}

// initIndex creates the secondary indexes used by the ref-collection queries
// (hash/revIDSet lookups, DOI/title matching, and the match.mode filter used
// by Get_unlink). Index creation failure is non-fatal and only logged,
// since the indexes may already exist.
func (c *MongoFileHistoryRefDataBase) initIndex() {
	mods := []mongo.IndexModel{
		{Keys: bson.M{"hash": 1}},
		{Keys: bson.M{"revIDSet": 1}},
		{Keys: bson.M{"ref.doi": 1}},
		{Keys: bson.M{"ref.title": 1}},
		{Keys: bson.M{"match.magID": 1}},
		{Keys: bson.M{"match.mode": 1}},
	}
	_, err := c.collection_ref.Indexes().CreateMany(c.ctx, mods)
	if err != nil {
		// Bug fix: message previously said "collection_users", but these
		// indexes belong to collection_ref.
		log.Warn("collection_ref index err:", err)
	}
}

// Insert_schedule records ID as processed by inserting a marker document
// keyed by _id. Re-inserting an already-recorded ID is treated as success
// (duplicate-key errors are swallowed); any other error is returned.
func (c *MongoFileHistoryRefDataBase) Insert_schedule(ID string) error {
	marker := bson.M{"_id": ID}
	if _, err := c.collection_schedule.InsertOne(c.ctx, marker); err != nil {
		// IsDuplicateKeyError(nil) is false, so only real duplicates land here.
		if mongo.IsDuplicateKeyError(err) {
			return nil
		}
		return err
	}
	return nil
}

// Check_schedule reports whether ID has already been recorded in the
// schedule collection. Only a definite "no document" answer yields false;
// any other lookup error is logged and treated as "present" so the caller
// does not reprocess work on transient failures (preserves prior behavior).
func (c *MongoFileHistoryRefDataBase) Check_schedule(ID string) bool {
	// Use Err() instead of decoding into a throwaway bson.M — we only need
	// existence, not the document contents.
	err := c.collection_schedule.FindOne(c.ctx, bson.M{"_id": ID}).Err()
	if err == nil {
		return true
	}
	// Bug fix: use errors.Is rather than ==, since the driver may wrap
	// ErrNoDocuments.
	if errors.Is(err, mongo.ErrNoDocuments) {
		return false
	}
	// Robustness: previously any other error (e.g. network failure) was
	// silently treated as "exists"; keep that decision but make it visible.
	log.Warn("Check_schedule findone err:", err)
	return true
}

// Insert_ref stores a single file-history reference document.
// Duplicate-key errors are treated as success (the document is already
// present); every other error is returned to the caller.
func (c *MongoFileHistoryRefDataBase) Insert_ref(doc *model.WikipediaFileHistoryRefObj) error {
	_, err := c.collection_ref.InsertOne(c.ctx, doc)
	switch {
	case err == nil:
		return nil
	case mongo.IsDuplicateKeyError(err):
		// Already inserted earlier — not an error for our purposes.
		return nil
	default:
		return err
	}
}

// BulkWrite_ref applies a batch of write models to the ref collection in a
// single unordered bulk operation (unordered so one failing write does not
// abort the rest). A bulk error consisting of duplicate keys is ignored;
// anything else is returned.
func (c *MongoFileHistoryRefDataBase) BulkWrite_ref(doc []mongo.WriteModel) error {
	unordered := options.BulkWrite().SetOrdered(false)
	if _, err := c.collection_ref.BulkWrite(c.ctx, doc, unordered); err != nil && !mongo.IsDuplicateKeyError(err) {
		return err
	}
	return nil
}

// 在 pages_articles 更新后， 对比 schedule_revision_info 和 pages_articles，找出过时了的 schedule_revision_info
// Get_unlink streams every ref document whose match.mode is 0 (unmatched /
// non-redirect pages) through a buffered channel. A background goroutine
// drives the cursor and closes the channel when the result set is exhausted.
// NOTE(review): the 10M-element buffer is kept as-is but is very large —
// confirm the consumer actually needs that much decoupling.
func (c *MongoFileHistoryRefDataBase) Get_unlink() chan model.WikipediaFileHistoryRefObj {
	// 忽略重定向的 page
	cur, err := c.collection_ref.Find(c.ctx, bson.M{"match.mode": 0})
	failOnError(err, "查询失败了")

	outchan := make(chan model.WikipediaFileHistoryRefObj, 10000000)
	go func() {
		defer close(outchan)
		// Bug fix: the cursor was never closed, leaking server-side resources.
		defer cur.Close(c.ctx)
		ct := 0
		for cur.Next(c.ctx) {
			ct += 1
			var doc model.WikipediaFileHistoryRefObj
			err := cur.Decode(&doc)
			failOnError(err, "decode 失败了")
			outchan <- doc
			if ct%100000 == 0 {
				log.Info("current count:", ct)
			}
		}
		// Bug fix: Next returning false can mean an error, not end-of-stream;
		// previously such failures were indistinguishable from a clean finish.
		if err := cur.Err(); err != nil {
			log.Warn("Get_unlink cursor err:", err)
		}
	}()

	return outchan
}

// Updata_match overwrites the "match" field of the document with the given
// _id. On failure it logs a warning and sleeps one second as crude backoff;
// the update is NOT retried and no error is returned to the caller.
func (c *MongoFileHistoryRefDataBase) Updata_match(ID string, data model.RefMatch) {
	filter := bson.M{"_id": ID}
	change := bson.M{"$set": bson.M{"match": data}}
	if _, err := c.collection_ref.UpdateOne(c.ctx, filter, change); err != nil {
		log.Warn("wait 1s,updateone err:", err)
		<-time.After(time.Second)
	}
}

// Close releases the underlying MongoDB client connection.
// A disconnect failure is logged rather than returned, matching the
// fire-and-forget style of the rest of this type.
func (c *MongoFileHistoryRefDataBase) Close() {
	// Bug fix: the Disconnect error was previously discarded.
	if err := c.client.Disconnect(c.ctx); err != nil {
		log.Warn("mongo disconnect err:", err)
	}
}
