package main

import (
	"bufio"
	"compress/gzip"
	"context"
	"encoding/json"
	"fmt"
	"log"
	"mag/internal/database"
	"math"
	"os"
	"strconv"
	"strings"
	"sync"

	"github.com/emirpasic/gods/sets/hashset"
	"go.mongodb.org/mongo-driver/bson"
)

var (
	// YearMap maps page ID -> publication year for every cached page.
	YearMap = make(map[int64]int)
	// PageMap maps page ID -> its full record (in/out link lists).
	// Both maps are filled once by initdata and read concurrently by the
	// workers in mag_entropy; they are never written after initdata returns.
	PageMap = make(map[int64]*database.PageInfo)
)

// initdata fills the package-level caches YearMap and PageMap from the
// mag2020.pageinfo Mongo collection, skipping isolated nodes (pages with
// neither in- nor out-links). Any Mongo error aborts the process.
func initdata() {

	// localcache
	log.Println("开始导入")

	ctx := context.Background()
	client := database.InitMongo()
	collection := client.Database("mag2020").Collection("pageinfo")

	cur, err := collection.Find(ctx, bson.M{})
	if err != nil {
		log.Fatal(err)
	}
	defer cur.Close(ctx)
	for cur.Next(ctx) {

		var result database.PageInfo
		if err := cur.Decode(&result); err != nil {
			log.Fatal(err)
		}

		// filter isolated node
		if len(result.Out) == 0 && len(result.In) == 0 {
			continue
		}

		// join year map
		YearMap[result.Id] = result.Year

		// join local cache; result is declared inside the loop, so taking
		// its address yields a distinct allocation per document.
		PageMap[result.Id] = &result
	}
	// BUG FIX: cur.Next returning false may mean an iteration error
	// (network failure, cursor timeout), not just end-of-data. The
	// original ignored it, silently producing a partial cache.
	if err := cur.Err(); err != nil {
		log.Fatal(err)
	}
	log.Println("finsh local cache,size:", len(PageMap), len(YearMap))
}

// filterGraphByYear builds a snapshot of the cached citation graph as of
// the given year: only pages published in or before year are kept, and
// their In/Out lists are pruned to neighbours that also satisfy the
// cutoff (and are present in YearMap). Pages left without any surviving
// edge are dropped from the snapshot.
func filterGraphByYear(year int) map[int64]*database.PageInfo {
	snapshot := make(map[int64]*database.PageInfo)
	for _, page := range PageMap {
		if page.Year > year {
			continue
		}
		pruned := &database.PageInfo{Id: page.Id}
		for _, ref := range page.In {
			if y, known := YearMap[ref]; known && y <= year {
				pruned.In = append(pruned.In, ref)
			}
		}
		for _, ref := range page.Out {
			if y, known := YearMap[ref]; known && y <= year {
				pruned.Out = append(pruned.Out, ref)
			}
		}
		if len(pruned.In)+len(pruned.Out) > 0 {
			snapshot[pruned.Id] = pruned
		}
	}
	return snapshot
}

// uniSize reports the number of distinct IDs appearing in either of the
// two edge lists (i.e. the size of the union of in and out).
func uniSize(in, out []int64) int {
	set := hashset.New()
	for _, ids := range [][]int64{in, out} {
		for _, id := range ids {
			set.Add(id)
		}
	}
	return set.Size()
}

// calEntropy computes three normalized structural-entropy scores for the
// network, over the distributions of in-degree, out-degree, and combined
// degree (deduplicated union of in and out neighbours). Each score is
// normalized as (-2*sum(p*ln p) - E_min) / (2*ln(N) - E_min) with
// E_min = ln(4*(N-1)), where N counts nodes with a nonzero degree of
// that kind. NOTE(review): presumably this follows a specific normalized
// network-entropy formulation — confirm against the paper it implements.
// Returns (in score, out score, all score).
//
// NOTE(review): if a degree class has N <= 1 nodes, ln(4*(N-1)) is -Inf
// or NaN, propagating into the result — callers appear to only pass
// networks with many connected nodes; verify that assumption holds.
func calEntropy(network map[int64]*database.PageInfo) (float64, float64, float64) {
	// Degree totals (…Sum) and counts of nodes with nonzero degree (…N),
	// per degree class: in, out, and combined.
	var inSum, inN, outSum, outN, allSum, allN int

	// First pass: accumulate totals so the second pass can form the
	// probability p = degree/total for each node.
	for _, pageinfo := range network {
		cache := len(pageinfo.In)
		if cache != 0 {
			inN += 1
			inSum += cache
		}

		cache = len(pageinfo.Out)
		if cache != 0 {
			outN += 1
			outSum += cache
		}

		cache = uniSize(pageinfo.In, pageinfo.Out)
		if cache != 0 {
			allN += 1
			allSum += cache
		}
	}

	//  E min
	i_E_min := math.Log(float64(4 * (inN - 1)))
	o_E_min := math.Log(float64(4 * (outN - 1)))
	all_E_min := math.Log(float64(4 * (allN - 1)))

	// Second pass: accumulate sum(p * ln p) per degree class; note this
	// is the negative of the Shannon entropy.
	var inI, outI, allI float64

	for _, pageinfo := range network {
		cache := len(pageinfo.In)
		if cache != 0 {
			I := float64(cache) / float64(inSum)
			inI += I * math.Log(I)
		}

		cache = len(pageinfo.Out)
		if cache != 0 {
			I := float64(cache) / float64(outSum)
			outI += I * math.Log(I)
		}

		cache = uniSize(pageinfo.In, pageinfo.Out)
		if cache != 0 {
			I := float64(cache) / float64(allSum)
			allI += I * math.Log(I)
		}
	}
	// Normalize: -2*sum(p ln p) rescaled between E_min and E_max = 2 ln N.
	retin := (-2*inI - i_E_min) / (2*math.Log(float64(inN)) - i_E_min)
	retout := (-2*outI - o_E_min) / (2*math.Log(float64(outN)) - o_E_min)
	retall := (-2*allI - all_E_min) / (2*math.Log(float64(allN)) - all_E_min)

	// Debug output of the intermediate quantities.
	fmt.Println("N", inN, outN, allN)
	fmt.Println("max", 2*math.Log(float64(inN)), 2*math.Log(float64(outN)), 2*math.Log(float64(allN)))
	fmt.Println("min", i_E_min, o_E_min, all_E_min)
	fmt.Println("v", -2*inI, -2*outI, -2*allI)
	return retin, retout, retall
}

// readGzLinksFile parses a gzip-compressed, tab-separated link dump in
// which each line is "<pageID>\t<JSON array of out-link IDs>". It returns
// the graph keyed by page ID, with Out set from each line and In derived
// by reversing those edges. Malformed lines are logged and skipped;
// open/decompress/scan failures abort the process.
func readGzLinksFile(filePath string) map[int64]*database.PageInfo {
	retData := make(map[int64]*database.PageInfo)

	file, err := os.Open(filePath)

	if err != nil {
		log.Fatal(err)
	}

	gz, err := gzip.NewReader(file)

	if err != nil {
		log.Fatal(err)
	}

	defer file.Close()
	defer gz.Close()

	scanner := bufio.NewScanner(gz)
	// BUG FIX: bufio.Scanner's default token limit is 64K; link lists of
	// highly-linked pages exceed that, which previously made Scan stop
	// early and silently truncate the graph. Allow lines up to 64 MiB.
	scanner.Buffer(make([]byte, 1024*1024), 64*1024*1024)
	for scanner.Scan() {
		rowText := scanner.Text()
		rows := strings.Split(rowText, "\t")
		// BUG FIX: a line without a tab previously panicked on rows[1].
		if len(rows) < 2 {
			log.Println("skip malformed row:", rowText)
			continue
		}
		ID, err := strconv.ParseInt(rows[0], 10, 64)
		if err != nil {
			// BUG FIX: previously fell through with ID == 0, inserting a
			// bogus node into the graph.
			log.Println(err)
			continue
		}
		linksoutData := []int64{}
		err = json.Unmarshal([]byte(rows[1]), &linksoutData)
		if err != nil {
			// BUG FIX: previously recorded the node with a garbage/empty
			// link list on decode failure.
			log.Println(err)
			continue
		}

		// handle linksout
		if node, ok := retData[ID]; ok {
			node.Out = linksoutData
		} else {
			retData[ID] = &database.PageInfo{
				Id:  ID,
				Out: linksoutData,
			}
		}

		// handle linksin: register the reverse edge on every target.
		for _, outID := range linksoutData {
			if node, ok := retData[outID]; ok {
				node.In = append(node.In, ID)
			} else {
				retData[outID] = &database.PageInfo{
					Id: outID,
					In: []int64{ID},
				}
			}
		}
	}
	// BUG FIX: surface scan errors (including a line exceeding the buffer
	// limit) instead of returning a silently truncated graph.
	if err := scanner.Err(); err != nil {
		log.Fatal(err)
	}
	return retData
}

// 计算 wikipedai 历年的结构熵
func claWikipediaEntropy() {

	ctx := context.Background()
	collection := database.InitMongo().Database("graph").Collection("wikipedia_entropy")
	for year := 2001; year <= 2021; year++ {
		for _, month := range []int64{3, 6, 9, 12} {
			if year == 2021 && month > 3 {
				continue
			}
			Key := fmt.Sprintf("%d_%d", year, month)
			// filename := "/tmp/all.gz"
			filename := fmt.Sprintf("/home/ni/data/wiki/wdd/%s/allwiki_linksout.txt.gz", Key)
			network := readGzLinksFile(filename)
			fmt.Println("read over,start cal:", year, month)
			retin, retout, retall := calEntropy(network)
			fmt.Println("cal over", year, month)
			fmt.Println(retin, retout, retall)
			collection.InsertOne(ctx, bson.M{"_id": Key, "in": retin, "out": retout, "all": retall})

		}

	}

}

// mag_entropy computes the yearly structural entropy of the MAG citation
// graph (1900-2020) using a small worker pool, skipping years already
// stored in graph.mag_entropy.
func mag_entropy() {
	initdata()

	ctx := context.Background()
	collection := database.InitMongo().Database("graph").Collection("mag_entropy")

	// Enqueue every year that has no stored result yet. The buffer must
	// hold all candidates (121 max) because workers start only after the
	// channel is closed.
	yearChan := make(chan int, 1000)
	for year := 1900; year <= 2020; year++ {
		var result bson.M
		err := collection.FindOne(ctx, bson.M{"_id": year}).Decode(&result)
		if err == nil {
			// Already computed: print the stored document and skip.
			fmt.Println(result)
			continue
		}
		yearChan <- year
		fmt.Println("import year:", year)
	}
	close(yearChan)

	thread_count := 2
	wg := sync.WaitGroup{}
	wg.Add(thread_count)

	for i := 0; i < thread_count; i++ {
		go func() {
			// Deferred so a panic in a worker cannot deadlock wg.Wait.
			defer wg.Done()
			for year := range yearChan {

				network := filterGraphByYear(year)
				fmt.Println("network size:", len(network), year)
				retin, retout, retall := calEntropy(network)
				// BUG FIX: previously the insert error was discarded, so
				// a failed write lost that year's result silently.
				if _, err := collection.InsertOne(ctx, bson.M{"_id": year, "in": retin, "out": retout, "all": retall}); err != nil {
					log.Println("insert year", year, "failed:", err)
				}
			}
		}()
	}
	wg.Wait()
	fmt.Println("over")
}

// main runs the Wikipedia entropy pipeline; the MAG variant is kept
// commented out for manual switching.
func main() {
	// mag_entropy()
	claWikipediaEntropy()
}
