/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2020-11-05 11:22:52
 * @LastEditors: ider
 * @LastEditTime: 2020-11-20 18:26:24
 * @Description:计算好结果保存到数据库
 */

package store

import (
	"bufio"
	"context"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"strconv"
	"strings"
	"sync"
	"time"

	"magGo/src/db"

	"github.com/emirpasic/gods/sets/hashset"
	log "github.com/sirupsen/logrus"
	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

// init routes logrus output to stdout for the whole package.
func init() {
	log.SetOutput(os.Stdout)
	// NOTE(review): the original comment said "set log level to warn and
	// above", but no level is actually set here — confirm whether a
	// log.SetLevel call was intended.
}

// NewChanObj is the message passed from the line-parsing goroutines to the
// MongoDB writer goroutines in DumpAllLinksChanRef and DumpAllYear.
type NewChanObj struct {
	Key  string    // page id, used as _id in mag.pageinfo
	Year int       // publication year (populated by DumpAllYear only)
	S    *[]string // outgoing link ids (populated by DumpAllLinksChanRef only)
	AC   int       // author count
}

/**
 * @description: Store all link data into MongoDB, ≈207,119,631 rows in total.
 * Each input line is tab-separated: id, year, linksout JSON array, authors
 * JSON array. Lines are fanned out to parser goroutines; parsed results are
 * bulk-upserted into mag.pageinfo by writer goroutines.
 */
func DumpAllLinksChanRef() {
	fi, err := os.Open("/home/ni/data/mag/all_refed_authors_data.txt")
	if err != nil {
		log.Printf("Error: %s\n", err)
		return
	}
	defer fi.Close()
	startTime := time.Now()
	br := bufio.NewReader(fi)

	ch := make(chan string, 100000)
	chOut := make(chan NewChanObj, 100000)
	const threadCount = 20
	var wg sync.WaitGroup
	wg.Add(threadCount)
	// Parser goroutines: split each line and decode the JSON columns.
	for i := 0; i < threadCount; i++ {
		go func() {
			defer wg.Done() // guarantees Done even if the loop exits early
			for a := range ch {
				strs := strings.Split(a, "\t")
				linksoutArray := []string{}
				var authorArray []interface{}
				// strs[3] is read below, so 4 columns are required; the old
				// bound of 3 allowed an index-out-of-range panic on 3-column lines.
				if len(strs) >= 4 {
					if err := json.Unmarshal([]byte(strs[2]), &linksoutArray); err != nil {
						log.Printf("Error: %s\n", err)
						log.Printf("%+v\n", a)
						continue // skip the bad line; returning here used to deadlock wg.Wait
					}
					if err := json.Unmarshal([]byte(strs[3]), &authorArray); err != nil {
						log.Printf("Error: %s\n", err)
						log.Printf("%+v\n", a)
						continue
					}
				} else {
					log.Println("strs", strs)
				}
				// No filtering on the number of outgoing links.
				chOut <- NewChanObj{Key: strs[0], S: &linksoutArray, AC: len(authorArray)}
			}
		}()
	}
	// Writer goroutines: record the deduplicated outgoing links on the page
	// itself and add the page as an incoming link on every page it cites.
	var wgOut sync.WaitGroup
	wgOut.Add(10)
	for i := 0; i < 10; i++ {
		go func() {
			defer wgOut.Done()
			collection := db.Client.Database("mag").Collection("pageinfo")
			opts := options.BulkWrite().SetOrdered(false)
			models := []mongo.WriteModel{}
			count := 0
			for chobj := range chOut {
				count++
				// Deduplicate the outgoing links.
				linksoutSet := hashset.New()
				for _, s := range *chobj.S {
					linksoutSet.Add(s)
				}
				// Register this page in the "in" array of each cited page.
				for _, sid := range *chobj.S {
					models = append(models, mongo.NewUpdateOneModel().SetFilter(bson.M{"_id": sid}).SetUpdate(bson.M{"$addToSet": bson.M{"in": chobj.Key}}).SetUpsert(true))
				}
				models = append(models, mongo.NewUpdateOneModel().SetFilter(bson.M{"_id": chobj.Key}).SetUpdate(bson.M{"$set": bson.M{"ac": chobj.AC, "out": linksoutSet.Values()}}).SetUpsert(true))
				if count%10000 == 0 {
					if _, err := collection.BulkWrite(context.TODO(), models, opts); err != nil {
						log.Printf("bulk write error: %s\n", err)
					}
					models = []mongo.WriteModel{}
					log.Println("write success", count)
				}
			}
			// Flush the final partial batch.
			if len(models) > 0 {
				if _, err := collection.BulkWrite(context.TODO(), models, opts); err != nil {
					log.Printf("bulk write error: %s\n", err)
				}
			}
		}()
	}

	// Feed raw lines into the parser queue, processing only the line window
	// [130000000, 163320000) — presumably a resume checkpoint from an earlier
	// run; adjust or remove these bounds for a full pass.
	count := 0
	for {
		a, c := br.ReadString('\n')
		if c == io.EOF {
			break
		}
		count++
		if count%1000000 == 0 {
			log.Println(count)
		}
		if count < 130000000 {
			continue
		}
		if count >= 163320000 {
			continue
		}
		ch <- a
	}
	close(ch)
	wg.Wait()
	close(chOut)
	wgOut.Wait()
	log.Println("read all links:", time.Since(startTime))
}

/**
 * @description: Store the publication year of every MAG paper,
 * ≈207,119,631 rows. Each input line is tab-separated with the page id in
 * column 0 and the year in column 1; rows whose year fails to parse are
 * skipped silently.
 */
func DumpAllYear() {
	fi, err := os.Open("/home/ni/data/mag/all_refed_authors_data.txt")
	if err != nil {
		log.Printf("Error: %s\n", err)
		return
	}
	defer fi.Close()
	startTime := time.Now()
	br := bufio.NewReader(fi)

	ch := make(chan string, 100000)
	chOut := make(chan NewChanObj, 100000)
	const threadCount = 20
	var wg sync.WaitGroup
	wg.Add(threadCount)
	// Parser goroutines: extract (id, year) pairs from raw lines.
	for i := 0; i < threadCount; i++ {
		go func() {
			defer wg.Done()
			for a := range ch {
				strs := strings.Split(a, "\t")
				if len(strs) >= 2 {
					// TrimSpace strips the trailing newline that survives the
					// tab split when the year is the last column; without it
					// strconv.Atoi fails and the row is silently dropped.
					year, err := strconv.Atoi(strings.TrimSpace(strs[1]))
					if err == nil {
						chOut <- NewChanObj{Key: strs[0], Year: year}
					}
				} else {
					log.Println("strs", strs)
				}
			}
		}()
	}
	// Writer goroutines: bulk-upsert the year field into mag.pageinfo.
	var wgOut sync.WaitGroup
	wgOut.Add(10)
	for i := 0; i < 10; i++ {
		go func() {
			defer wgOut.Done()
			collection := db.Client.Database("mag").Collection("pageinfo")
			opts := options.BulkWrite().SetOrdered(false)
			models := []mongo.WriteModel{}
			count := 0
			for chobj := range chOut {
				count++
				models = append(models, mongo.NewUpdateOneModel().SetFilter(bson.M{"_id": chobj.Key}).SetUpdate(bson.M{"$set": bson.M{"year": chobj.Year}}).SetUpsert(true))
				if count%10000 == 0 {
					if _, err := collection.BulkWrite(context.TODO(), models, opts); err != nil {
						log.Printf("bulk write error: %s\n", err)
					}
					models = []mongo.WriteModel{}
					log.Println("write success", count)
				}
			}
			// Flush the final partial batch.
			if len(models) > 0 {
				if _, err := collection.BulkWrite(context.TODO(), models, opts); err != nil {
					log.Printf("bulk write error: %s\n", err)
				}
			}
		}()
	}

	// Feed raw lines into the parser queue.
	count := 0
	for {
		a, c := br.ReadString('\n')
		if c == io.EOF {
			break
		}
		count++
		if count%1000000 == 0 {
			log.Println(count)
		}
		ch <- a
	}
	close(ch)
	wg.Wait()
	close(chOut)
	wgOut.Wait()
	log.Println("read all links:", time.Since(startTime))
}

// transD is the MongoDB document shape used when reading and writing
// pageinfo link data for the disruption (D) computation.
type transD struct {
	NodeId            *string  `bson:"_id"` // page id; nil when a FindOne matched no document
	Node_linksout_set []string `bson:"out"` // ids this page cites (written by DumpAllLinksChanRef)
	Node_linksin_set  []string `bson:"in"`  // ids citing this page (written by DumpAllLinksChanRef)
	AC                int      `bson:"ac"`  // author count
}

// CalPageD computes the disruption-score inputs (D) for every page that has
// both outgoing and incoming links. For each page it counts:
//   ni – citing pages that do NOT also cite any of the page's references,
//   nj – citing pages that DO also cite one of the page's references,
//   nk – pages citing the page's references but not the page itself,
// and upserts {i, j, k, ac, oc, ic} back into mag.pageinfo under "D".
func CalPageD() {

	chin := make(chan transD, 100000)
	chout := make(chan []bson.M, 100000)

	wg := sync.WaitGroup{}
	wg.Add(1)
	// Writer goroutine: batches the computed D documents into bulk upserts.
	go func() {

		collection := db.Client.Database("mag").Collection("pageinfo")
		opts := options.BulkWrite().SetOrdered(false)
		models := []mongo.WriteModel{}
		count := 0
		for bsm := range chout {
			models = append(models, mongo.NewUpdateOneModel().SetFilter(bsm[0]).SetUpdate(bson.M{"$set": bsm[1]}).SetUpsert(true))
			count += 1
			if count%10000 == 0 {
				log.Println("写入", count)
				// NOTE(review): BulkWrite errors are silently discarded here
				// and below — failed batches are lost without a trace.
				collection.BulkWrite(context.TODO(), models, opts)
				models = []mongo.WriteModel{}
			}
		}
		// Flush the final partial batch.
		if len(models) > 0 {
			collection.BulkWrite(context.TODO(), models, opts)
		}
		wg.Done()
		log.Println("DONE")
	}()
	const threadCount = 40
	wgin := sync.WaitGroup{}
	wgin.Add(threadCount)
	// Worker goroutines: compute ni/nj/nk for each page pulled from chin.
	for i := 0; i < threadCount; i++ {
		go func() {

			collection := db.Client.Database("mag").Collection("pageinfo")
			for obj := range chin {
				nodeId := obj.NodeId

				node_linksout_set := obj.Node_linksout_set
				// Incoming-link set: the pages that cite this page.
				node_linksin_set := obj.Node_linksin_set
				var (
					ni, nj, nk int64
				)
				// Skip the computation when the page has no incoming links.
				if node_linksin_set != nil {
					// Compute ni and nj.
					for _, node_linksin_nodeId := range node_linksin_set {
						// Fetch the citing node's own outgoing links.
						var result transD
						// NOTE(review): the Decode error is ignored; a failed
						// lookup just leaves result.NodeId nil.
						collection.FindOne(context.Background(), bson.M{"_id": node_linksin_nodeId}).Decode(&result)
						// Only count nodes that were actually found.
						if result.NodeId != nil {
							node_linksin_node_linksout_set := result.Node_linksout_set
							// nj when the citing node also cites one of this
							// page's references, ni otherwise.
							if Is_intersection(&node_linksin_node_linksout_set, &node_linksout_set) {
								nj += 1
							} else {
								ni += 1
							}
						}
					}
					// Compute nk: collect every node citing any of this page's
					// references (the linksin of each linksout).
					node_linksout_node_linksin_set := hashset.New()
					for _, node_linksout_nodeId := range node_linksout_set {
						var result transD
						collection.FindOne(context.Background(), bson.M{"_id": node_linksout_nodeId}).Decode(&result)
						for _, v := range result.Node_linksin_set {
							node_linksout_node_linksin_set.Add(v)
						}
					}

					// nk = citers of the references that do not cite this page
					// (set difference against the page's own linksin).
					node_linksin_hashset := hashset.New()
					for _, v := range node_linksin_set {
						node_linksin_hashset.Add(v)
					}
					nk = Cal_uniq_count(node_linksout_node_linksin_set, node_linksin_hashset)
				}
				// Emit: filter on _id plus D = {ni, nj, nk, author count,
				// linksout size, linksin size}.
				chout <- []bson.M{bson.M{"_id": *nodeId}, bson.M{"D": bson.M{"i": ni, "j": nj, "k": nk, "ac": obj.AC, "oc": len(node_linksout_set), "ic": len(node_linksin_set)}}}

			}
			wgin.Done()
		}()
	}

	// Stream source pages from the DB: documents with a subject ("S") set
	// but no D computed yet.
	ctx := context.TODO()
	collection := db.Client.Database("mag").Collection("pageinfo")
	cur, err := collection.Find(ctx, bson.M{"D": nil, "S": bson.M{"$ne": nil}})
	if err != nil {
		log.Fatal(err)
	}
	defer cur.Close(ctx)
	for cur.Next(ctx) {
		var result transD
		err := cur.Decode(&result)
		if err != nil {
			log.Fatal(err)
		}
		chin <- result
	}
	if err := cur.Err(); err != nil {
		log.Fatal(err)
	}

	close(chin)
	wgin.Wait()
	close(chout)
	wg.Wait()
}

// Is_intersection reports whether the two string slices share at least one
// element. It indexes *a into a set, then probes it with the elements of *b,
// returning on the first hit. Either slice may be nil or empty.
func Is_intersection(a, b *[]string) bool {
	// A builtin map set replaces the third-party hashset — same semantics,
	// no extra dependency, and strongly typed (no interface{} boxing).
	seen := make(map[string]struct{}, len(*a))
	for _, v := range *a {
		seen[v] = struct{}{}
	}
	for _, v := range *b {
		if _, ok := seen[v]; ok {
			return true
		}
	}
	return false
}

// Cal_uniq_count returns how many values of set a are absent from set b,
// i.e. the size of the difference a \ b.
func Cal_uniq_count(a, b *hashset.Set) (count int64) {
	for _, v := range a.Values() {
		// Pass the value through unchanged: hashset.Contains compares
		// interface{} values, so the original v.(string) assertion was
		// unnecessary and would panic on any non-string entry.
		if !b.Contains(v) {
			count += 1
		}
	}
	return
}

// DumpsSubject tags articles with their subject: for every subject file it
// collects the set of page ids listed in it and upserts the subject name
// (file name without ".txt") into each page's "S" array in mag.pageinfo.
func DumpsSubject() {
	// AllSubjects := []string{"Number theory.txt", "Logic.txt", "Genome editing.txt", "Quantum computing.txt", "Experimental physics.txt", "Deep learning.txt", "Operating system.txt", "Genetic engineering.txt", "Literature.txt", "Geometry.txt", "Industrial engineering.txt", "Applied physics.txt", "Applied mathematics.txt", "Algebra.txt", "Cognitive science.txt", "Theoretical physics.txt", "Philosophy.txt", "Linguistics.txt", "Biological engineering.txt", "Anthropology.txt", "Computer engineering.txt", "Civil engineering.txt", "History.txt", "Theoretical computer science.txt", "Discrete mathematics.txt", "Political science.txt", "Environmental engineering.txt", "Mechanical engineering.txt", "Earth science.txt", "Geography.txt", "Machine learning.txt", "Electrical engineering.txt", "Chemical engineering.txt", "Environmental science.txt", "Sociology.txt", "Neuroscience.txt", "Artificial intelligence.txt", "Geology.txt", "Economics.txt", "Mathematics.txt", "Physics.txt", "Psychology.txt", "Materials science.txt", "Computer science.txt", "Engineering disciplines.txt", "Chemistry.txt", "Biology.txt", "Medicine.txt"}
	AllSubjects := []string{"Medicine.txt"}
	wg := sync.WaitGroup{}
	wg.Add(1)
	chOut := make(chan []string, 100000)
	// Writer goroutine: batches (pageID, subject) pairs into bulk upserts.
	go func() {
		defer wg.Done()
		collection := db.Client.Database("mag").Collection("pageinfo")
		opts := options.BulkWrite().SetOrdered(false)
		models := []mongo.WriteModel{}
		count := 0
		for bsm := range chOut {
			count++
			models = append(models, mongo.NewUpdateOneModel().SetFilter(bson.M{"_id": bsm[0]}).SetUpdate(bson.M{"$addToSet": bson.M{"S": bsm[1]}}).SetUpsert(true))
			if count%100000 == 0 {
				if _, err := collection.BulkWrite(context.TODO(), models, opts); err != nil {
					log.Printf("bulk write error: %s\n", err)
				}
				models = []mongo.WriteModel{}
				log.Println("write success", count)
			}
		}
		// Flush the final partial batch.
		if len(models) > 0 {
			if _, err := collection.BulkWrite(context.TODO(), models, opts); err != nil {
				log.Printf("bulk write error: %s\n", err)
			}
		}
	}()

	for _, subjectName := range AllSubjects {
		if err := loadSubject(subjectName, chOut); err != nil {
			log.Printf("Error: %s\n", err)
			// Stop feeding but fall through to close chOut and wait, so the
			// writer goroutine terminates and queued updates are flushed
			// (the old code returned here, leaking the goroutine and
			// dropping buffered writes).
			break
		}
	}
	close(chOut)
	wg.Wait()
}

// loadSubject reads one subject file (page id in the first tab-separated
// column of each line) and queues a (pageID, subject) pair for every unique
// id onto chOut. The file is closed before returning — unlike the original
// defer-in-loop, which kept every file open until the caller finished.
func loadSubject(subjectName string, chOut chan<- []string) error {
	fi, err := os.Open("/home/ni/data/mag/reference/delete_noref_v2/" + subjectName)
	if err != nil {
		return err
	}
	defer fi.Close()

	startTime := time.Now()
	br := bufio.NewReader(fi)
	set := hashset.New()
	for {
		a, c := br.ReadString('\n')
		if c == io.EOF {
			break
		}
		strs := strings.Split(a, "\t")
		set.Add(strs[0])
	}
	fmt.Println("读学科耗时", subjectName, time.Since(startTime))
	subject := strings.Replace(subjectName, ".txt", "", 1)
	for _, v := range set.Values() {
		chOut <- []string{v.(string), subject}
	}
	return nil
}
