/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-09-01 12:51:28
 * @LastEditors: ider
 * @LastEditTime: 2021-09-02 12:51:07
 * @Description: Import the aggregated per-package refs from dump files and load all refs into MongoDB.
 */
package main

import (
	"bufio"
	"compress/gzip"
	"crypto/md5"
	"encoding/hex"
	"encoding/json"
	"io"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"regexp"
	"sort"
	"strings"
	"sync"
	"time"

	"github.com/emirpasic/gods/sets/hashset"
	"go.mongodb.org/mongo-driver/mongo"

	"historyOfWikipedia/internal/database/mongodb"
	"historyOfWikipedia/internal/model"
)

var (
	// NOTE(review): FILE_PATH and Key_Set appear unused in this file —
	// presumably leftovers from an earlier revision; confirm before removing.
	FILE_PATH = "/home/ni/data/wiki/tmpdata_allref_2022new"
	Key_Set   = hashset.New()
)

// getAllFile returns the full paths of every regular (non-directory) entry
// directly inside pathname. It does not recurse into subdirectories.
// On a read failure it logs the error and returns (nil, err).
func getAllFile(pathname string) ([]string, error) {
	rd, err := ioutil.ReadDir(pathname)
	if err != nil {
		log.Println("read dir fail:", err)
		return nil, err
	}

	// Pre-size: at most len(rd) files can be collected.
	files := make([]string, 0, len(rd))
	for _, fi := range rd {
		if !fi.IsDir() {
			// filepath.Join is OS-correct and cleans the result,
			// unlike the previous `pathname + "/" + name` concatenation.
			files = append(files, filepath.Join(pathname, fi.Name()))
		}
	}
	return files, nil
}

// row_data is one line of the gzip'd JSON dump: a single article revision
// together with the references extracted from its wikitext.
type row_data struct {
	ArtID     int64               `json:"artId,string" bson:"artID"`   // article (page) ID
	RevID     int64               `json:"revid,string" bson:"revID"`   // revision ID
	Ns        int32               `json:"ns,string" bson:"ns"`         // wiki namespace
	Title     string              `json:"art_title" bson:"title"`      // article title
	Redirect  string              `json:"redirectTitle" bson:"-"`      // non-empty if the page is a redirect; such rows are skipped upstream
	Timestamp time.Time           `json:"timestamps" bson:"timestamp"` // revision timestamp
	Reflist   []map[string]string `json:"reflist" bson:"ref"`          // parsed refs, one key/value map per reference
}

// filterReflist consumes row_data items from inchan, keeps only references
// that look like scholarly citations (a DOI, a journal/book-like cite type,
// or a matching refname), and emits one model.WikipediaHistoryRefObj per
// surviving reference on outchan. Each emitted object carries exactly one
// artID/dateSnap pair; deduplication and merging happen later in OutHandle.
// Calls wg_filter.Done() once inchan is closed and drained.
func filterReflist(inchan chan row_data, outchan chan model.WikipediaHistoryRefObj, wg_filter *sync.WaitGroup) {

	// Count of refs kept because of a DOI (cite-distribution statistic;
	// currently only incremented, never reported).
	ct := 0

	journal_book_re := regexp.MustCompile(`(?i)journal|book`)

	// resub strips template braces; recite matches the standalone word "cite".
	resub := regexp.MustCompile(`{|}`)
	recite := regexp.MustCompile(`(?i)\bcite\b`)

	for item := range inchan {
		var new_ref []map[string]string
	REFLOOP:
		for _, row := range item.Reflist {

			// delete empty value in ref
			for k, v := range row {
				if len(v) == 0 {
					delete(row, k)
				}
			}

			// Expand the raw "{{cite ...|k=v|...}}" wikitext in refString into
			// individual key/value fields on the row map, then drop refString.
			if refString, ok := row["refString"]; ok {
				refString = resub.ReplaceAllString(refString, " ")
				// NOTE(review): this loop variable shadows the outer row_data "item".
				for _, item := range strings.Split(refString, "|") {

					// The segment containing the word "cite" carries the cite
					// type (e.g. "cite journal" -> "journal"); first one wins.
					if recite.MatchString(item) {
						if row["cite_type"] == "" {
							cite_type := recite.ReplaceAllString(item, "")
							cite_type = strings.Trim(cite_type, " ")
							row["cite_type"] = cite_type
							continue
						}
					}

					// Any "key=value" segment becomes a field, but only if the
					// key is not already present and both sides are non-empty.
					if strings.Contains(item, "=") {
						spalitEqual := strings.Split(item, "=")
						if len(spalitEqual) == 2 {
							key := strings.Trim(spalitEqual[0], " ")
							if row[key] == "" {
								value := strings.Trim(spalitEqual[1], " ")
								if len(value) > 0 && len(key) > 0 {
									row[key] = value
								}
							}
						}
					}
				}
				delete(row, "refString")
			}

			// Keep rule 1: a plausible DOI (length > 5) is always accepted.
			if doi, ok := row["doi"]; ok && len(doi) > 5 {
				ct += 1
				new_ref = append(new_ref, row)
				continue REFLOOP
			}

			// Keep rule 2: the cite type is a known scholarly template name,
			// or merely contains "journal"/"book" (case-insensitive).
			if cite_type, ok := row["cite_type"]; ok {

				// flag_handle[cite_type] += 1
				switch cite_type {
				case "journal", "patent", "book", "thesis", "press release", "encyclopedia", "report", "magazine", "conference", "document", "arxiv", "paper", "americana", "work", "serial", "archive", "manual", "chapter", "periodical", "dissertation", "proceedings":
					new_ref = append(new_ref, row)
					continue REFLOOP
				}

				if journal_book_re.MatchString(cite_type) {
					new_ref = append(new_ref, row)
					continue REFLOOP
				}
			}

			// Keep rule 3: refname mentions journal, book, or doi.
			if refname, ok := row["refname"]; ok {
				for _, key := range []string{"journal", "book", "doi"} {
					if find := strings.Contains(refname, key); find {
						new_ref = append(new_ref, row)
						continue REFLOOP
					}
				}
			}
		}

		// Emit one object per surviving ref. The dedup ID is the MD5 of the
		// concatenated title/cite_type/year/url/doi fields (empty ones skipped).
		if len(new_ref) > 0 {
			item.Reflist = new_ref
			artID := item.ArtID
			// revID := item.RevID
			dateSnap := FormatTimestampToSnap(item.Timestamp)
			// cache_datas := []model.WikipediaRefObj{}
			for _, sub := range new_ref {
				calKey := ""

				title := sub["title"]
				if title != "" {
					calKey += title
				}

				cite_type := sub["cite_type"]
				if cite_type != "" {
					calKey += cite_type
				}

				year := sub["year"]
				if year != "" {
					calKey += year
				}

				url := sub["url"]
				if url != "" {
					calKey += url
				}

				doi := sub["doi"]
				if doi != "" {
					calKey += doi
				}

				hashKey := GetMD5Hash(calKey)

				// Single-element snapshot slices; OutHandle merges them per ID.
				dataObj := model.WikipediaHistoryRefObj{
					ID:        hashKey,
					Ref:       sub,
					ArtIDSnap: []int64{artID},
					DateSnap:  []int32{dateSnap},
				}
				outchan <- dataObj
			}
		}
	}
	wg_filter.Done()
}

func FormatTimestampToSnap(t time.Time) int32 {
	dateMonthSnap := int32(t.Year())*100 + int32(t.Month())
	return dateMonthSnap
}

// GetMD5Hash returns the lowercase hex-encoded MD5 digest of text,
// used as the deduplication key for reference objects.
func GetMD5Hash(text string) string {
	digest := md5.New()
	digest.Write([]byte(text))
	return hex.EncodeToString(digest.Sum(nil))
}

// OutHandle drains outchan, deduplicating refs by their MD5 ID and merging the
// per-ref article/date snapshot sets, then bulk-writes the merged documents
// into MongoDB in batches. Calls lastWg.Done() when finished.
//
// NOTE(review): the MongoDB URI embeds credentials in source; move them to
// configuration or the environment.
func OutHandle(outchan chan model.WikipediaHistoryRefObj, lastWg *sync.WaitGroup) {
	// One representative document per ref ID; snapshots are accumulated in
	// the set maps below and attached to the document before insertion.
	DataCache := make(map[string]model.WikipediaHistoryRefObj)
	artSnapCache := make(map[string]map[int64]bool)               // ID -> {artID}
	artDateSnapCache := make(map[string]map[int64]map[int32]bool) // ID -> artID -> {dateSnap}
	dateSnapCache := make(map[string]map[int32]bool)              // ID -> {dateSnap}

	for item := range outchan {
		// Each incoming item carries exactly one snapshot pair (see filterReflist).
		artID := item.ArtIDSnap[0]
		dateSnap := item.DateSnap[0]

		if _, exist := DataCache[item.ID]; exist {
			// Known ref: merge snapshot sets. Map writes are idempotent, so the
			// original "check then set" dance is unnecessary.
			artSnapCache[item.ID][artID] = true
			dateSnapCache[item.ID][dateSnap] = true
			if _, ok := artDateSnapCache[item.ID][artID]; !ok {
				artDateSnapCache[item.ID][artID] = make(map[int32]bool)
			}
			artDateSnapCache[item.ID][artID][dateSnap] = true
		} else {
			// First sighting: initialize every set for this ID.
			dateSnapCache[item.ID] = map[int32]bool{dateSnap: true}
			artSnapCache[item.ID] = map[int64]bool{artID: true}
			artDateSnapCache[item.ID] = map[int64]map[int32]bool{
				artID: {dateSnap: true},
			}
			// Clear the per-item snapshots; merged sets are attached below.
			item.DateSnap = []int32{}
			item.ArtIDSnap = []int64{}
			DataCache[item.ID] = item
		}
	}
	log.Println("file out handle done")

	mongo_obj := mongodb.NewMongoHistoryRefDataBase("mongodb://knogen:knogen@r730xd-2.lmd.wuzhen.ai:27017", "20220201")
	bulkInsertOperate := []mongo.WriteModel{}

	for _, item := range DataCache {
		// Attach the merged snapshot sets; DateSnap is sorted ascending.
		for dateSnap := range dateSnapCache[item.ID] {
			item.DateSnap = append(item.DateSnap, dateSnap)
		}
		for artSnap := range artSnapCache[item.ID] {
			item.ArtIDSnap = append(item.ArtIDSnap, artSnap)
		}
		sort.Slice(item.DateSnap, func(i, j int) bool {
			return item.DateSnap[i] < item.DateSnap[j]
		})

		// Per-article sorted date lists.
		artdateset := make(map[int64][]int32, len(artDateSnapCache[item.ID]))
		for artSnap, dates := range artDateSnapCache[item.ID] {
			snapList := make([]int32, 0, len(dates))
			for date := range dates {
				snapList = append(snapList, date)
			}
			sort.Slice(snapList, func(i, j int) bool {
				return snapList[i] < snapList[j]
			})
			artdateset[artSnap] = snapList
		}
		item.ArtDateSnap = artdateset

		bulkInsertOperate = append(bulkInsertOperate, mongo.NewInsertOneModel().SetDocument(item))
		// Flush in batches to bound memory (threshold kept from the original).
		if len(bulkInsertOperate) > 10000 {
			if err := mongo_obj.BulkWrite_ref(bulkInsertOperate); err != nil {
				log.Println("bulk error", err)
			}
			bulkInsertOperate = []mongo.WriteModel{}
		}
	}
	if len(bulkInsertOperate) > 0 {
		if err := mongo_obj.BulkWrite_ref(bulkInsertOperate); err != nil {
			log.Println("bulk error", err)
		}
	}
	log.Println("out handle done")
	lastWg.Done()
}

// iterate all gz file from dir

func main() {
	files_1, _ := getAllFile("/home/ni/data/wiki/tmpdata_allref_2022new_all_zh/tmpdata_allref_2022new_all_229/tmpdata_allref_2022new_all_1")
	files_2, _ := getAllFile("/home/ni/data/wiki/tmpdata_allref_2022new_all_zh/tmpdata_allref_2022new_all_229/tmpdata_allref_2022new_all")
	files_1 = append(files_1, files_2...)
	files_2, _ = getAllFile("/home/ni/data/wiki/tmpdata_allref_2022new_all_zh/tmpdata_allref_2022new_all_222/tmpdata_allref_2022new_all_1")
	files_1 = append(files_1, files_2...)
	files_2, _ = getAllFile("/home/ni/data/wiki/tmpdata_allref_2022new_all_zh/tmpdata_allref_2022new_all_222/tmpdata_allref_2022new_all")
	files_1 = append(files_1, files_2...)

	log.Println("file count", len(files_1))
	inchan := make(chan row_data, 10000)
	outchan := make(chan model.WikipediaHistoryRefObj, 10000)
	lastWg := sync.WaitGroup{}
	lastWg.Add(1)
	go OutHandle(outchan, &lastWg)
	fileChan := make(chan string, 1000)

	wg_filter := sync.WaitGroup{}
	filterThreadCount := 30
	wg_filter.Add(filterThreadCount)
	for i := 0; i < filterThreadCount; i++ {
		go filterReflist(inchan, outchan, &wg_filter)
	}

	// test one file
	for _, path := range files_1 {
		fileChan <- path
	}
	close(fileChan)

	wg := sync.WaitGroup{}
	threadCount := 20
	wg.Add(threadCount)
	for i := 0; i < threadCount; i++ {
		go func() {
			for path := range fileChan {
				f, err := os.Open(path)
				if err != nil {
					log.Println(err)
				}
				r, err := gzip.NewReader(f)
				if err != nil {
					log.Println(err)
				}

				reader := bufio.NewReader(r)
				dct := 0
				adct := 0
				for {
					adct += 1
					line, err := reader.ReadString('\n')

					if err == io.EOF || err == io.ErrUnexpectedEOF {
						break
					}
					if err != nil {
						log.Println("read error", err)
						break
					}
					var rd row_data
					err = json.Unmarshal([]byte(line), &rd)
					if err != nil {
						log.Println(err, path, string(line))
					}
					if len(rd.Reflist) == 0 || rd.Redirect != "" {
						continue
					}
					dct += 1
					inchan <- rd
				}
				r.Close()
				log.Println("fct,dct,", path, dct, adct)
			}
			wg.Done()
		}()
	}

	wg.Wait()
	close(inchan)
	log.Println("wg done")
	wg_filter.Wait()
	close(outchan)
	log.Println("wg_filter done")
	lastWg.Wait()
	// <-time.After(90000000 * time.Second)
}
