/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-09-01 12:51:28
 * @LastEditors: ider
 * @LastEditTime: 2021-09-02 12:51:07
 * @Description: Import the per-file package ref dumps and load all refs into MongoDB
 */
package main

import (
	"bufio"
	"compress/gzip"
	"crypto/md5"
	"encoding/hex"
	"encoding/json"
	"io"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"historyOfWikipedia/internal/database/mongodb"
	"historyOfWikipedia/internal/model"

	"github.com/emirpasic/gods/sets/hashset"
)

var (
	// FILE_PATH is the default input directory for the ref dump files.
	// NOTE(review): not referenced anywhere in this file — main() uses its own
	// hardcoded paths. Presumably used by a sibling file in this package, or
	// stale; verify before removing.
	FILE_PATH = "/home/ni/data/wiki/tmpdata_allref_2022new"
)

func getAllFile(pathname string) ([]string, error) {
	var s []string
	rd, err := ioutil.ReadDir(pathname)
	if err != nil {
		log.Println("read dir fail:", err)
		return s, err
	}

	for _, fi := range rd {
		if !fi.IsDir() {
			fullName := pathname + "/" + fi.Name()
			s = append(s, fullName)
		}
	}
	return s, nil
}

// row_data is one article record from the gzipped NDJSON dump files.
// JSON tags match the dump format; BSON tags match the Mongo schema.
// Note the `,string` options: ArtID/RevID/Ns arrive as quoted numbers.
type row_data struct {
	ArtID     int64               `json:"artId,string" bson:"artID"`   // article ID
	RevID     int64               `json:"revid,string" bson:"revID"`   // revision ID
	Ns        int32               `json:"ns,string" bson:"ns"`         // namespace
	Title     string              `json:"art_title" bson:"title"`      // article title
	Redirect  string              `json:"redirectTitle" bson:"-"`      // non-empty => redirect page; filtered out upstream, not persisted
	Timestamp time.Time           `json:"timestamps" bson:"timestamp"` // revision timestamp
	Reflist   []map[string]string `json:"reflist" bson:"ref"`          // raw reference key/value maps
}

// filterReflist consumes row_data items from inchan, keeps only references
// that look scholarly (have a "doi", a recognized "cite_type", or contain
// journal/book/doi markers in their refString/refname), inserts each kept
// reference into Mongo deduplicated per article, and forwards items that
// retained at least one reference to outchan. Closes outchan when inchan
// is drained.
func filterReflist(inchan chan row_data, outchan chan row_data) {

	// NOTE(review): credentials hardcoded in the connection string — move to
	// config/env before sharing this code.
	mongo := mongodb.NewMongoRefDataBase("mongodb://knogen:knogen@r730xd-2.lmd.wuzhen.ai:27017", "20220112")
	for item := range inchan {

		var new_ref []map[string]string
	REFLOOP:
		for _, row := range item.Reflist {
			// Always keep refs that carry a DOI.
			if _, ok := row["doi"]; ok {
				new_ref = append(new_ref, row)
				continue
			}
			if cite_type, ok := row["cite_type"]; ok {
				// Exact-match whitelist of scholarly cite types.
				switch cite_type {
				case "journal", "patent", "book", "thesis", "press release", "encyclopedia", "report", "magazine", "conference", "document", "arxiv", "paper", "americana", "work", "serial", "archive", "manual", "chapter", "periodical", "dissertation", "proceedings":
					new_ref = append(new_ref, row)
					continue REFLOOP
				}
				// Fuzzy match: cite types like "journal article".
				for _, key := range []string{"journal", "book"} {
					if strings.Contains(cite_type, key) {
						new_ref = append(new_ref, row)
						continue REFLOOP
					}
				}
			}

			// Fallbacks: look for markers in the raw ref string / ref name.
			if refString, ok := row["refString"]; ok {
				for _, key := range []string{"journal", "book", "doi"} {
					if strings.Contains(refString, key) {
						new_ref = append(new_ref, row)
						continue REFLOOP
					}
				}
			}

			if refname, ok := row["refname"]; ok {
				for _, key := range []string{"journal", "book", "doi"} {
					if strings.Contains(refname, key) {
						new_ref = append(new_ref, row)
						continue REFLOOP
					}
				}
			}
		}

		if len(new_ref) > 0 {
			item.Reflist = new_ref

			// Deduplicate refs within one article by an MD5 of
			// artID+revID+selected fields.
			set := hashset.New()
			artID := strconv.Itoa(int(item.ArtID))
			revID := strconv.Itoa(int(item.RevID))
			for _, sub := range new_ref {
				calKey := artID + revID
				for _, field := range []string{"title", "cite_type", "year", "url", "doi"} {
					if v := sub[field]; v != "" {
						calKey += v
					}
				}

				hashKey := GetMD5Hash(calKey)

				if set.Contains(hashKey) {
					log.Println("dup hashKey", hashKey, item)
					continue
				}

				mongo.Insert_ref(&model.WikipediaRefObj{
					ID:    hashKey,
					ArtID: item.ArtID,
					RevID: item.RevID,
					Ns:    item.Ns,
					Ref:   sub,
				})
				// FIX: was set.Add(title) — the set was queried with hashKey
				// but populated with the title, so duplicates were never caught.
				set.Add(hashKey)
			}
			outchan <- item
		}
	}
	close(outchan)
}

// refHandle drains inchan, counting the items, and logs the total once the
// channel is closed. It is the terminal stage of the pipeline.
func refHandle(inchan chan row_data) {
	ct := 0
	// `for range` (not `for _ = range`) is the idiomatic form when the
	// received value is unused.
	for range inchan {
		ct++
	}
	log.Println("all:", ct)
}

// GetMD5Hash returns the lowercase hex encoding of the MD5 digest of text.
func GetMD5Hash(text string) string {
	sum := md5.Sum([]byte(text))
	digest := hex.EncodeToString(sum[:])
	return digest
}

func main() {
	files_1, _ := getAllFile("/mnt/sas/home/ni/data/wiki/tmpdata_allref_2022new_no3.5G")
	files_2, _ := getAllFile("/mnt/sas/home/ni/data/wiki/tmpdata_allref_2022new_3.5G")
	files_1 = append(files_1, files_2...)
	log.Println("file count", len(files_1))
	inchan := make(chan row_data, 1000)
	outchan := make(chan row_data, 100)
	go filterReflist(inchan, outchan)
	go refHandle(outchan)
	fct := 0
	for _, path := range files_1 {
		fct += 1
		f, err := os.Open(path)
		if err != nil {
			log.Println(err)
		}
		r, err := gzip.NewReader(f)
		if err != nil {
			log.Println(err)
		}
		defer r.Close()

		reader := bufio.NewReader(r)
		dct := 0
		adct := 0
		for {
			adct += 1
			line, err := reader.ReadString('\n')

			if err == io.EOF {
				break
			}
			var rd row_data
			err = json.Unmarshal([]byte(line), &rd)
			if err != nil {
				log.Println(err, path, string(line))
			}
			if len(rd.Reflist) == 0 || rd.Redirect != "" {
				continue
			}
			dct += 1
			inchan <- rd

		}

		// scanner := bufio.NewScanner(r)
		// dct := 0
		// adct := 0
		// for scanner.Scan() {
		// 	adct += 1
		// 	var rd row_data
		// 	bts := scanner.Bytes()
		// 	// log.Println("bt", bts)
		// 	err := json.Unmarshal(bts, &rd)
		// 	if err != nil {
		// 		log.Println(err, path, string(bts))
		// 	}
		// 	if len(rd.Reflist) == 0 || rd.Redirect != "" {
		// 		continue
		// 	}
		// 	dct += 1
		// 	inchan <- rd
		// 	// log.Println(rd)
		// }
		log.Println("fct,dct,", path, fct, dct, adct)
		// break
	}
	close(inchan)
	<-time.After(9000000 * time.Second)
}
