package main

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"log"
	"mag/internal/database"
	"sync"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/emirpasic/gods/sets/hashset"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

// authorItem is the author entry shape inside an ES paper document;
// only the numeric author id is decoded from "_source.authors".
type authorItem struct {
	ID int64 `json:"id"`
}

// recoreItem is one paper record as decoded from the ES "_source":
// the paper id, its authors, and the ids of the papers it references.
// NOTE(review): "recore" looks like a typo for "record"; the name is kept
// because it is referenced throughout this file.
type recoreItem struct {
	ID      int64        `json:"id"`
	Authors []authorItem `json:"authors"`
	Ref     []int64      `json:"references"`
}

// recoreMongoItem is the per-paper document stored in the
// mag2020.pageinfo_author collection.
type recoreMongoItem struct {
	ID      int64   `bson:"_id"`
	Authors []int64 `bson:"author"` // flattened non-zero author ids
	// Refin holds the ids of papers that cite this one ("in" edges);
	// it is built by esDumpToMongo from the citing papers' reference lists.
	Refin []int64 `bson:"in"`
}

// esResponse is the envelope of an ES search/scroll response: the scroll
// cursor id plus the page of hits, each carrying a recoreItem "_source".
// NOTE(review): "ScroolID" looks like a typo for "ScrollID"; kept as-is
// since getAllRecord references it. The JSON tag "_scroll_id" is correct.
type esResponse struct {
	ScroolID string `json:"_scroll_id"`
	Hits     struct {
		Hits []struct {
			Source recoreItem `json:"_source"`
		} `json:"hits"`
	} `json:"hits"`
}

// paperStore holds a paper's combined author set and incoming references.
// NOTE(review): neither paperStore nor its ALL_PAPER map appears to be
// read or written anywhere in this file — confirm whether it is dead code.
type paperStore struct {
	AuthorSet *hashset.Set
	Refin     []int64
}

// resultMongo is the per-paper self-citation result written to the
// mag2020.author_degree collection by resuleHandle.
type resultMongo struct {
	ID int64 `bson:"_id"`
	// S1 counts citing papers that share at least one author with this paper.
	S1 int32 `bson:"s1,omitempty"`
	// S2 sums |sharedAuthors| / |ownAuthors| over those citing papers.
	S2 float64 `bson:"s2,omitempty"`
}

var (
	// ALL_PAPER is allocated in init but never read or written elsewhere
	// in this file — TODO(review): confirm it can be removed.
	ALL_PAPER       map[int64]*paperStore
	// ALL_Mongo_PAPER maps paper id -> record. It is filled single-threaded
	// (by esDumpToMongo or work) and afterwards read concurrently,
	// read-only, by the calFull workers.
	ALL_Mongo_PAPER map[int64]*recoreMongoItem
)

func init() {
	ALL_PAPER = make(map[int64]*paperStore)
	ALL_Mongo_PAPER = make(map[int64]*recoreMongoItem)
}
// main runs the pipeline. Step 1 (the ES-to-Mongo dump) is commented out
// and assumed to have completed in an earlier run; step 2 computes the
// self-citation degrees from the Mongo snapshot.
func main() {
	log.Println("start")
	// Step 1: dump from ES into Mongo.
	// esDumpToMongo()
	// Step 2: compute degrees.
	work()
}

// esDumpToMongo streams every paper record out of Elasticsearch, builds
// the in-memory index ALL_Mongo_PAPER (authors per paper plus reverse
// citation edges), then bulk-inserts the papers that have both authors
// and incoming references into the mag2020.pageinfo_author collection.
func esDumpToMongo() {
	cfg := elasticsearch.Config{
		Addresses: []string{
			"http://192.168.50.3:9200",
		},
	}
	// Fix: the client-construction error was previously discarded; a nil
	// client would panic later inside getAllRecord. Fail fast instead.
	es, err := elasticsearch.NewClient(cfg)
	if err != nil {
		log.Fatalf("Error creating the client: %s", err)
	}
	chout := make(chan recoreItem, 100000)
	go getAllRecord(es, chout)

	// Drain the channel until getAllRecord closes it.
	for item := range chout {
		// Keep only non-zero author ids.
		autList := []int64{}
		for _, author := range item.Authors {
			if author.ID != 0 {
				autList = append(autList, author.ID)
			}
		}
		if obj, ok := ALL_Mongo_PAPER[item.ID]; ok {
			obj.Authors = autList
		} else {
			ALL_Mongo_PAPER[item.ID] = &recoreMongoItem{
				ID:      item.ID,
				Authors: autList,
			}
		}
		// Record the reverse edge: item cites linsout, so item.ID becomes
		// an incoming reference ("in") of linsout.
		for _, linsout := range item.Ref {
			if obj, ok := ALL_Mongo_PAPER[linsout]; ok {
				obj.Refin = append(obj.Refin, item.ID)
			} else {
				ALL_Mongo_PAPER[linsout] = &recoreMongoItem{
					ID:    linsout,
					Refin: []int64{item.ID},
				}
			}
		}
	}
	// ES export finished.
	log.Println("es 导出完成")
	client := database.InitMongo()
	collection := client.Database("mag2020").Collection("pageinfo_author")

	opts := options.BulkWrite().SetOrdered(false)
	models := []mongo.WriteModel{}
	// Filter and bulk-insert into Mongo in batches of 10000; papers that
	// lack either authors or incoming references are skipped (and do not
	// advance the batch counter, so batches stay full).
	i := 0
	for _, item := range ALL_Mongo_PAPER {
		if len(item.Authors) == 0 || len(item.Refin) == 0 {
			continue
		}
		i += 1
		models = append(models, mongo.NewInsertOneModel().SetDocument(item))
		if i%10000 == 0 {
			log.Println("insert to mongo")
			retTmp, err := collection.BulkWrite(context.Background(), models, opts)
			if err != nil {
				log.Println("bulk upsert fail", err)
				log.Printf("%+v", retTmp)
			}
			models = []mongo.WriteModel{}
		}
	}
	// Flush the final partial batch.
	if len(models) > 0 {
		retTmp, err := collection.BulkWrite(context.Background(), models, opts)
		log.Println("insert to mongo")
		if err != nil {
			log.Println("bulk upsert fail", err)
			log.Printf("%+v", retTmp)
		}
	}
}

// work loads every record from mag2020.pageinfo_author into
// ALL_Mongo_PAPER, then fans the paper ids out to 20 calFull workers
// which compute self-citation degrees; a single resuleHandle goroutine
// persists the results.
func work() {
	client := database.InitMongo()
	collection := client.Database("mag2020").Collection("pageinfo_author")

	ctx := context.Background()
	cur, err := collection.Find(ctx, bson.M{})
	if err != nil {
		log.Fatal(err)
	}
	defer cur.Close(ctx)

	for cur.Next(ctx) {
		// result is declared per iteration, so storing &result is safe.
		var result recoreMongoItem
		if err := cur.Decode(&result); err != nil {
			log.Fatal(err)
		}
		ALL_Mongo_PAPER[result.ID] = &result
	}
	// Fix: Next returns false on server/network errors as well as on
	// exhaustion; without this check we would silently continue with a
	// partially loaded data set.
	if err := cur.Err(); err != nil {
		log.Fatal(err)
	}

	log.Println("导出到 mongo 完成")

	// Fan-out / fan-in: one result writer, threadCount computing workers.
	// ALL_Mongo_PAPER is read-only from here on, so the workers can share it.
	idChan := make(chan int64, 10000)
	outchan := make(chan resultMongo, 100000)
	threadCount := 20
	wg := sync.WaitGroup{}
	wg.Add(threadCount)
	go resuleHandle(outchan)
	fmt.Println("开启 resule handle")
	for i := 0; i < threadCount; i++ {
		go calFull(idChan, outchan, &wg)
	}
	fmt.Println("开启 calFull,开始导入 id")

	// Feed every paper id to the workers; closing idChan lets them exit.
	for key := range ALL_Mongo_PAPER {
		idChan <- key
	}
	close(idChan)
	fmt.Println("id 导入完成")
	wg.Wait()
	fmt.Println("close outchan")
	close(outchan)
	// Crude grace period so resuleHandle can flush its final batch before
	// the process exits — TODO: replace with a done channel from the writer.
	<-time.After(50 * time.Second)
}

// calFull is a worker: it consumes paper ids from idChan, computes the
// self-citation degree for each paper, and emits non-zero results on
// outchan. For a paper P with author set A(P) and citing papers R(P):
//
//	S1 = number of citing papers sharing at least one author with P
//	S2 = sum over those papers of |A(P) ∩ A(citing)| / |A(P)|
//
// ALL_Mongo_PAPER is only read here; it must not be mutated while the
// workers run.
func calFull(idChan chan int64, outchan chan resultMongo, wg *sync.WaitGroup) {
	// Fix: defer Done so a panic in the loop cannot deadlock wg.Wait().
	defer wg.Done()

	for id := range idChan {
		obj := ALL_Mongo_PAPER[id]
		if obj == nil {
			continue
		}
		// Hoist the cheap emptiness checks before building the author set.
		// (The original's `objAuthorSet == nil` check was dead code:
		// hashset.New never returns nil.)
		if len(obj.Refin) == 0 || len(obj.Authors) == 0 {
			continue
		}
		objAuthorSet := hashset.New()
		for _, a := range obj.Authors {
			objAuthorSet.Add(a)
		}

		resMon := resultMongo{
			ID: id,
		}
		// Walk every citing paper ("in" edge).
		for _, linksin := range obj.Refin {
			inobj := ALL_Mongo_PAPER[linksin]
			if inobj == nil || len(inobj.Authors) == 0 {
				continue
			}
			inobjAuthorSet := hashset.New()
			for _, a := range inobj.Authors {
				inobjAuthorSet.Add(a)
			}
			// Shared authors between the paper and the citing paper.
			retSet := getInterset(objAuthorSet, inobjAuthorSet)
			if retSet.Size() > 0 {
				resMon.S1++
				// Use the set size (not len(obj.Authors)) as the
				// denominator so duplicate author ids are not counted twice.
				resMon.S2 += float64(retSet.Size()) / float64(objAuthorSet.Size())
			}
		}
		if resMon.S1 != 0 {
			outchan <- resMon
		}
	}
}

// resuleHandle drains computed results from outchan and bulk-inserts them
// into mag2020.author_degree in batches of 10000, flushing the remainder
// after the channel is closed.
//
// NOTE(review): the name looks like a typo for "resultHandle"; kept
// because work calls it by this name.
func resuleHandle(outchan chan resultMongo) {
	ctx := context.Background()
	client := database.InitMongo()
	collection := client.Database("mag2020").Collection("author_degree")
	opts := options.BulkWrite().SetOrdered(false)

	models := []mongo.WriteModel{}
	// flush writes the pending batch and resets it. Write errors are
	// logged but not fatal, matching the original best-effort behavior.
	// (Fix: the original logged before the write in one branch and after
	// it in the other; the order is now consistent.)
	flush := func() {
		log.Println("insert to mongo")
		if _, err := collection.BulkWrite(ctx, models, opts); err != nil {
			log.Println("bulk upsert fail", err)
		}
		models = []mongo.WriteModel{}
	}

	i := 0
	for item := range outchan {
		i += 1
		models = append(models, mongo.NewInsertOneModel().SetDocument(item))
		if i%10000 == 0 {
			flush()
		}
	}
	// Final partial batch after the producer closed the channel.
	if len(models) > 0 {
		flush()
	}
}

// getInterset returns a fresh set containing the values present in both
// set1 and set2 (set intersection).
func getInterset(set1, set2 *hashset.Set) *hashset.Set {
	common := hashset.New()
	for _, v := range set1.Values() {
		if !set2.Contains(v) {
			continue
		}
		common.Add(v)
	}
	return common
}

// getAllRecord streams every document out of the mag2020_des index via
// the ES scroll API and sends each hit that has at least one author into
// chout. The channel is closed when the scroll is exhausted, which is the
// consumer's stop signal. Any transport or decode error is fatal.
func getAllRecord(es *elasticsearch.Client, chout chan recoreItem) {
	var buf bytes.Buffer
	// match_all query, 10000 hits per page, fetching only the three
	// fields the pipeline needs.
	query := map[string]interface{}{
		"size": 10000,
		"query": map[string]interface{}{
			"match_all": map[string]interface{}{},
		},
		"_source": []string{"authors", "id", "references"},
	}
	if err := json.NewEncoder(&buf).Encode(query); err != nil {
		log.Fatalf("Error encoding query: %s", err)
	}
	// Perform the initial search request, opening a 1-minute scroll context.
	res, err := es.Search(
		es.Search.WithContext(context.Background()),
		es.Search.WithIndex("mag2020_des"),
		es.Search.WithBody(&buf),
		es.Search.WithScroll(time.Minute),
		es.Search.WithTrackTotalHits(true),
	)
	if err != nil {
		log.Fatalf("Error getting response: %s", err)
	}

	if res.IsError() {
		var e map[string]interface{}
		if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
			log.Fatalf("Error parsing the response body: %s", err)
		} else {
			// Print the response status and error information.
			log.Fatalf("[%s] %s: %s",
				res.Status(),
				e["error"].(map[string]interface{})["type"],
				e["error"].(map[string]interface{})["reason"],
			)
		}
	}
	var r esResponse

	if err := json.NewDecoder(res.Body).Decode(&r); err != nil {
		log.Fatalf("Error parsing the response body: %s", err)
	}
	res.Body.Close()
	// Emit the first page; hits without authors are dropped here.
	for _, item := range r.Hits.Hits {
		if len(item.Source.Authors) > 0 {
			chout <- item.Source
		}
	}
	i := 0
	ScroolID := r.ScroolID
	for {
		// i only feeds the commented-out test break at the bottom.
		i += 1
		// Fetch the next page, renewing the scroll context for one minute.
		res, err := es.Scroll(
			es.Scroll.WithContext(context.Background()),
			es.Scroll.WithScroll(time.Minute),
			es.Scroll.WithScrollID(ScroolID),
		)
		if err != nil {
			log.Fatalf("Error getting response: %s", err)
		}
		if res.IsError() {
			var e map[string]interface{}
			if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
				log.Fatalf("Error parsing the response body: %s", err)
			} else {
				// Print the response status and error information.
				log.Fatalf("[%s] %s: %s",
					res.Status(),
					e["error"].(map[string]interface{})["type"],
					e["error"].(map[string]interface{})["reason"],
				)
			}
		}
		var rt esResponse

		if err := json.NewDecoder(res.Body).Decode(&rt); err != nil {
			log.Fatalf("Error parsing the response body: %s", err)
		}
		res.Body.Close()
		// An empty page means the scroll is exhausted.
		if len(rt.Hits.Hits) == 0 {
			break
		}
		for _, item := range rt.Hits.Hits {
			if len(item.Source.Authors) > 0 {
				chout <- item.Source
			}

		}
		// The scroll id may change between pages; always use the latest.
		// NOTE(review): the scroll context is never explicitly cleared
		// with a DELETE /_search/scroll — confirm that relying on the
		// 1-minute timeout is intentional.
		ScroolID = rt.ScroolID
		// if i > 3 {
		// 	break
		// }
	}
	close(chout)
}
