/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-09-02 12:53:03
 * @LastEditors: ider
 * @LastEditTime: 2021-09-03 09:53:55
 * @Description:将 ref 与数据库匹配
 */

package main

import (
	"bytes"
	"context"
	"encoding/json"
	"log"
	"regexp"
	"sort"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/agnivade/levenshtein"
	"github.com/elastic/go-elasticsearch/v8"

	"historyOfWikipedia/internal/database/elastic"
	"historyOfWikipedia/internal/database/mongodb"
	"historyOfWikipedia/internal/model"
)

// main starts the reference-matching pipeline and blocks until it finishes.
func main() {
	work()
}

// work drains the stream of unlinked references from MongoDB and tries to
// match each one to a MAG paper ID, first by DOI, then by title. Matching is
// fanned out across two Elasticsearch clusters: 15 workers on the first and
// 35 on the second, all consuming the same channel. Results are written back
// with mongo.Updata_match.
func work() {
	// Matches the first 4-digit run (the year) inside a free-form date string.
	re := regexp.MustCompile(`\d{4}`)
	es := elastic.InitElastic("http://192.168.1.231:9200")
	es1 := elastic.InitElastic("http://192.168.1.229:9200")

	mongo := mongodb.NewMongoHistoryHandingRefDataBase("mongodb://knogen:knogen@r730xd-2.lmd.wuzhen.ai:27017", "20220201")

	// Channel of not-yet-matched references; every worker reads from it.
	linkChan := mongo.Get_unlink()

	var wg sync.WaitGroup
	// worker processes items until linkChan is closed, querying the given
	// Elasticsearch client. Defined as a closure so it can share linkChan,
	// mongo and re without naming the item's project-internal type.
	worker := func(esClient *elasticsearch.Client) {
		defer wg.Done()
		for item := range linkChan {
			log.Println("start id:", item.ID)
			// Step 1: exact DOI match (mode 1).
			if item.Ref["doi"] != "" {
				if magID := doiQuery(item.Ref["doi"], esClient); magID != 0 {
					mongo.Updata_match(item.ID, model.RefMatch{MagID: magID, Mode: 1})
					continue
				}
			}
			// Step 2: title match. A reference without a title is recorded
			// as unmatchable (mode 10) right away.
			if item.Ref["title"] == "" {
				mongo.Updata_match(item.ID, model.RefMatch{Mode: 10})
				continue
			}
			// Extract a 4-digit year from "year", or from "date" only when
			// "year" is absent; 0 means "year unknown".
			year := 0
			for _, key := range []string{"year", "date"} {
				if item.Ref[key] == "" {
					continue
				}
				if m := re.FindString(item.Ref[key]); m != "" {
					year, _ = strconv.Atoi(m)
				}
				break
			}
			// First/last author name, falling back to the "*1" variants.
			first := item.Ref["first"]
			if first == "" {
				first = item.Ref["first1"]
			}
			last := item.Ref["last"]
			if last == "" {
				last = item.Ref["last1"]
			}
			rm := titleQuery(item.Ref["title"], year, first, last, esClient)
			// Mode >= 100 means "no decision was reached"; don't persist it.
			if rm.Mode < 100 {
				mongo.Updata_match(item.ID, rm)
			}
		}
	}

	// 15 workers against the first cluster, 35 against the second.
	for i := 0; i < 15; i++ {
		wg.Add(1)
		go worker(es)
	}
	for i := 0; i < 35; i++ {
		wg.Add(1)
		go worker(es1)
	}

	// Exit as soon as every worker has drained the channel, keeping the
	// original 120-hour wall-clock limit only as a safety cap.
	done := make(chan struct{})
	go func() {
		wg.Wait()
		close(done)
	}()
	select {
	case <-done:
	case <-time.After(120 * time.Hour):
		log.Println("work: 120h safety timeout reached before workers finished")
	}
}

//  Match rules, recorded in Mode:
//  1:  DOI matched exactly.
//  2:  title and year match exactly; first author is a substring of a MAG author name.
//  3:  title and year match exactly; author not confirmed.
//  4:  title matches exactly, year unknown; first author is a substring of a MAG author name.
//  5:  title matches exactly, year unknown.
//  6:  title matches exactly, year differs; first author is a substring of a MAG author name.
//  7:  title within 10% edit distance; year matches; author matches.
//  10: minimum title edit distance above 5, or title missing; MagID not saved.
//  99: exceptional case, left unhandled for now.

// titleQuery matches a reference against the mag2020_des index by title.
// It returns a model.RefMatch whose Mode encodes how the match was decided
// (see the mode table above); Mode 99 means "cannot decide", Mode 10 means
// "confidently not present", and in both of those cases MagID is zero.
// Any transport/decode error is fatal for the whole process (log.Fatalf).
func titleQuery(title string, year int, first, last string, es *elasticsearch.Client) model.RefMatch {
	var buf bytes.Buffer
	query := map[string]interface{}{
		"size": 10,
		"query": map[string]interface{}{
			"match": map[string]interface{}{
				"title": title,
			},
		},
		"_source": []string{"title", "year", "authors"},
	}
	if err := json.NewEncoder(&buf).Encode(query); err != nil {
		log.Fatalf("Error encoding query: %s", err)
	}
	// Perform the search request.
	res, err := es.Search(
		es.Search.WithContext(context.Background()),
		es.Search.WithIndex("mag2020_des"),
		es.Search.WithBody(&buf),
		es.Search.WithTrackTotalHits(true),
		es.Search.WithPretty(),
	)
	if err != nil {
		log.Fatalf("Error getting response: %s", err)
	}
	defer res.Body.Close()

	if res.IsError() {
		var e map[string]interface{}
		if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
			log.Fatalf("Error parsing the response body: %s", err)
		} else {
			// Print the response status and error information.
			log.Fatalf("[%s] %s: %s",
				res.Status(),
				e["error"].(map[string]interface{})["type"],
				e["error"].(map[string]interface{})["reason"],
			)
		}
	}
	var r map[string]interface{}

	if err := json.NewDecoder(res.Body).Decode(&r); err != nil {
		log.Fatalf("Error parsing the response body: %s", err)
	}
	// Candidate matches accumulated across hits; the best Mode wins below.
	var cacheRms []model.RefMatch
	// Sentinel: stays 1000 if no non-exact hit was seen.
	minDistance := 1000
	// Walk the up-to-10 hits in score order (highest first).
	for _, hit := range r["hits"].(map[string]interface{})["hits"].([]interface{}) {
		retTitle := hit.(map[string]interface{})["_source"].(map[string]interface{})["title"].(string)
		var retYear int
		// "year" may be absent in the document; treat missing as 0.
		ys := hit.(map[string]interface{})["_source"].(map[string]interface{})["year"]
		if ys != nil {
			retYear = int(ys.(float64))
		}
		authors := hit.(map[string]interface{})["_source"].(map[string]interface{})["authors"].([]interface{})
		magID, _ := strconv.ParseInt(hit.(map[string]interface{})["_id"].(string), 10, 64)
		// Compare titles case-insensitively with all spaces stripped.
		if strings.EqualFold(strings.ReplaceAll(retTitle, " ", ""), strings.ReplaceAll(title, " ", "")) {
			// Mode 100 is a placeholder meaning "exact title but no
			// confident classification"; it is filtered out later.
			rm := model.RefMatch{
				MagID: magID,
				Mode:  100,
			}
			switch year {
			case retYear:
				// Year matches.
				if authorCheck(authors, first, last) {
					// Title, year and author all agree.
					rm.Mode = 2
				} else {
					// Author could not be confirmed.
					rm.Mode = 3
				}
			case 0:
				// Reference year is unknown.
				if authorCheck(authors, first, last) {
					// Title and author agree.
					rm.Mode = 4
				} else {
					// Author could not be confirmed.
					rm.Mode = 5
				}
			default:
				// Year is present but differs from the hit's year; only
				// accept (mode 6) when the author matches, otherwise the
				// placeholder Mode 100 stands and the hit is discarded.
				if authorCheck(authors, first, last) {
					rm.Mode = 6
				}
			}
			cacheRms = append(cacheRms, rm)
		} else {
			// Title differs: fall back to edit distance (lowercased, spaces
			// stripped) and remember the smallest distance seen.
			distance := levenshtein.ComputeDistance(strings.ReplaceAll(strings.ToLower(retTitle), " ", ""), strings.ReplaceAll(strings.ToLower(title), " ", ""))
			// NOTE(review): len(retTitle) counts bytes, not runes, so the
			// 10% threshold is only approximate for non-ASCII titles.
			if len(retTitle)/10 >= distance {
				// Within 10% edit distance: accept (mode 7) only when both
				// year and author also match.
				if year == retYear && authorCheck(authors, first, last) {
					cacheRms = append(cacheRms, model.RefMatch{
						MagID: magID,
						Mode:  7,
					})
					continue
				}

			}
			if minDistance > distance {
				minDistance = distance
			}
		}
	}

	// Pick the best candidate: lowest Mode wins; placeholder Mode 100
	// entries are excluded by the <= 10 check.
	if len(cacheRms) > 0 {
		sort.SliceStable(cacheRms, func(i, j int) bool {
			return cacheRms[i].Mode < cacheRms[j].Mode
		})
		if cacheRms[0].MagID != 0 && cacheRms[0].Mode <= 10 {
			return cacheRms[0]
		}
	}

	// No acceptable candidate. If even the closest non-exact hit is more
	// than 5 edits away, record the reference as not present (mode 10).
	if minDistance != 1000 && minDistance > 5 {
		rm := model.RefMatch{Mode: 10}
		return rm
	}
	// Cannot decide either way; leave for later handling (mode 99).
	log.Println("title not match:", title, year, first, last, minDistance)
	return model.RefMatch{Mode: 99}
}

// 判断名字是否包含
func authorCheck(authors []interface{}, first, last string) bool {
	if len(authors) == 0 {
		return false
	}
	for _, item := range authors {
		author := item.(map[string]interface{})
		if name, ok := author["name"]; ok {
			if first != "" && strings.Contains(strings.ToLower(name.(string)), strings.ToLower(first)) {
				return true
			} else if last != "" && strings.Contains(strings.ToLower(name.(string)), strings.ToLower(last)) {
				return true
			}
		}
	}
	return false
}

// doiQuery looks up a reference DOI in the mag2020_des index and returns the
// matching MAG paper ID, or 0 when the top hit's DOI is not an exact
// (case-insensitive) match.
//
// NOTE(review): any transport/decode error is fatal for the whole process
// (log.Fatalf), which also kills all sibling workers — consider returning an
// error instead.
func doiQuery(doi string, es *elasticsearch.Client) (magID int64) {
	var buf bytes.Buffer
	query := map[string]interface{}{
		"size": 1,
		"query": map[string]interface{}{
			"match": map[string]interface{}{
				"doi": doi,
			},
		},
		"_source": []string{"title", "year", "doi"},
	}
	if err := json.NewEncoder(&buf).Encode(query); err != nil {
		log.Fatalf("Error encoding query: %s", err)
	}
	// Perform the search request.
	res, err := es.Search(
		es.Search.WithContext(context.Background()),
		es.Search.WithIndex("mag2020_des"),
		es.Search.WithBody(&buf),
		es.Search.WithTrackTotalHits(true),
		es.Search.WithPretty(),
	)
	if err != nil {
		log.Fatalf("Error getting response: %s", err)
	}
	defer res.Body.Close()

	if res.IsError() {
		var e map[string]interface{}
		if err := json.NewDecoder(res.Body).Decode(&e); err != nil {
			log.Fatalf("Error parsing the response body: %s", err)
		}
		// Report the response status and error details, then exit.
		log.Fatalf("[%s] %s: %s",
			res.Status(),
			e["error"].(map[string]interface{})["type"],
			e["error"].(map[string]interface{})["reason"],
		)
	}
	var r map[string]interface{}
	if err := json.NewDecoder(res.Body).Decode(&r); err != nil {
		log.Fatalf("Error parsing the response body: %s", err)
	}
	// At most one hit (size: 1). Accept it only on an exact case-insensitive
	// DOI comparison so fuzzy "match" scoring cannot return the wrong paper.
	for _, hit := range r["hits"].(map[string]interface{})["hits"].([]interface{}) {
		source := hit.(map[string]interface{})["_source"].(map[string]interface{})
		// Guarded assertion: a document without a "doi" field yields a nil
		// interface, and the previous unchecked .(string) would panic.
		retdoi, ok := source["doi"].(string)
		if !ok {
			continue
		}
		if strings.EqualFold(doi, retdoi) {
			magID, _ = strconv.ParseInt(hit.(map[string]interface{})["_id"].(string), 10, 64)
			return magID
		}
		log.Println("not match doi:", doi, retdoi)
	}
	return 0
}
