/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-05-21 14:59:57
 * @LastEditors: ider
 * @LastEditTime: 2021-06-09 08:48:09
 * @Description: 从历史文件中导入所有资料
 */
package main

import (
	"encoding/xml"
	"fmt"
	"historyOfWikipedia/internal/config"
	"historyOfWikipedia/internal/database/mongodb"
	"historyOfWikipedia/internal/model"
	"historyOfWikipedia/internal/rpc"
	"io"
	"io/ioutil"
	"regexp"
	"strings"
	"sync"
	"time"

	"github.com/saracen/go7z"
	log "github.com/sirupsen/logrus"
)

// text mirrors the <text> element of a revision in a MediaWiki XML dump.
type text struct {
	Value   string `xml:",chardata"`    // raw wikitext body (element character data)
	Bytes   int32  `xml:"bytes,attr"`   // byte length as reported by the dump
	Deleted string `xml:"deleted,attr"` // non-empty when the revision text was suppressed; such revisions are skipped downstream
}

// redirect mirrors the optional <redirect> element of a page; a non-empty
// Title marks the page as a redirect, which is filtered out downstream.
type redirect struct {
	Title string `xml:"title,attr"` // target page title of the redirect
}

// contributor mirrors the <contributor> element of a revision: either a
// registered user (Username + ID) or an anonymous editor (Ip).
// NOTE(review): Go naming convention would prefer `IP` over `Ip`, but the
// field may be referenced from other files — left unchanged.
type contributor struct {
	Username string `xml:"username"`     // account name for registered editors
	Ip       string `xml:"ip"`           // IP address for anonymous editors
	ID       int64  `xml:"id"`           // numeric user id for registered editors
	Deleted  string `xml:"deleted,attr"` // non-empty when the contributor was suppressed
}

// revision mirrors one <revision> element of a page in the dump.
type revision struct {
	ID          int64       `xml:"id"`        // revision id
	Text        text        `xml:"text"`      // revision wikitext payload
	Parentid    int64       `xml:"parentid"`  // id of the parent revision (0 for the first)
	Timestamp   string      `xml:"timestamp"` // RFC3339-style "2006-01-02T15:04:05Z"; parsed in retHandle
	Comment     string      `xml:"comment"`   // edit summary
	Model       string      `xml:"model"`     // content model (e.g. wikitext)
	Format      string      `xml:"format"`    // content serialization format
	Contributor contributor `xml:"contributor"`
}

// Page mirrors one <page> element of the dump: an article plus its full
// revision history.
type Page struct {
	ID        int64      `xml:"id"`       // page id
	Ns        int32      `xml:"ns"`       // namespace; only ns 0 (articles) is processed downstream
	Title     string     `xml:"title"`    // page title
	Redirect  redirect   `xml:"redirect"` // non-empty Title means the page is a redirect
	Revisions []revision `xml:"revision"` // all revisions of the page, in dump order
}

var (
	// VERSION tags the dump snapshot; passed to NewMongoHistoryDataBase
	// (presumably selects the target database/collection — confirm there).
	VERSION = "20230401"
	// RPC is the shared client for the remote wikitext word service,
	// initialized in main.
	RPC *rpc.RemoteWikiTextServer
	// Compiled once in init(): tab matcher, 3+-newline matcher, and
	// interlanguage-link matcher, respectively (used by wikiTextHandle).
	reCt *regexp.Regexp
	reCn *regexp.Regexp
	reCl *regexp.Regexp
)

// init configures logrus (full timestamps + caller reporting) and compiles
// the regexps used by wikiTextHandle. MustCompile panics at startup if a
// pattern is invalid, which is the conventional place for it.
func init() {
	log.SetFormatter(&log.TextFormatter{
		FullTimestamp: true,
	})
	log.SetReportCaller(true)

	// Tabs are later replaced with single spaces.
	reCt = regexp.MustCompile("\t")

	// Runs of three or more newlines are later collapsed to a blank line.
	reCn = regexp.MustCompile("\n{3,}")

	// Interlanguage links ([[zh:…]], [[fr:…]], …) starting on a new line.
	// Fix: the original pattern listed the `\n\[\[es:` alternative twice;
	// the redundant duplicate was removed (no behavior change).
	reCl = regexp.MustCompile(`\n\[\[zh:|\n\[\[fr:|\n\[\[ja:|\n\[\[de:|\n\[\[pl:|\n\[\[it:|\n\[\[nl:|\n\[\[pt:|\n\[\[es:|\n\[\[ru:|\n\[\[ceb:|\n\[\[sv:|\n\[\[vi:`)
}

// main wires the import pipeline: not-yet-imported .7z dump archives are fed
// to a pool of XML-parsing workers, whose pages flow to a pool of result
// handlers that clean each revision's wikitext, query the RPC service, and
// persist the results to MongoDB.
func main() {
	RPC = rpc.NewRemoteWikiTextServer()
	mongoobj := mongodb.NewMongoHistoryDataBase(config.Env.MongoUri, VERSION)

	// Queue of archive paths still to import. Filled and closed from a
	// goroutine: the original filled it to completion before starting any
	// consumer, which deadlocks if more than cap(fileChan) files qualify.
	fileChan := make(chan string, 10000)
	go func() {
		paths, err := getAllFile(config.Env.HistoryDumpFileFolder)
		if err != nil {
			// Previously discarded with `_`; surface it so an unreadable
			// dump folder is visible in the logs.
			log.Error("list dump folder: ", err)
		}
		for _, path := range paths {
			parts := strings.Split(path, "/")
			// Only .7z archives, skipping files already recorded as imported.
			if strings.HasSuffix(path, "7z") && !mongoobj.Get_kv(parts[len(parts)-1]) {
				fileChan <- path
			}
		}
		close(fileChan)
	}()

	// Result handlers: consume parsed pages until pageChan is closed.
	pageChan := make(chan *Page, 100)
	retHandleThreadCount := 10
	retWg := sync.WaitGroup{}
	retWg.Add(retHandleThreadCount)
	for i := 0; i < retHandleThreadCount; i++ {
		go retHandle(pageChan, &retWg)
	}

	// XML parsers: each goroutine decompresses and parses one archive at a time.
	threadCount := 90
	wg := sync.WaitGroup{}
	wg.Add(threadCount)
	for i := 0; i < threadCount; i++ {
		go func() {
			defer wg.Done()
			for path := range fileChan {
				worker(path, mongoobj, pageChan)
			}
		}()
	}

	wg.Wait()       // all archives parsed
	close(pageChan) // tell handlers no more pages will arrive
	retWg.Wait()    // wait for handlers to flush their last inserts
	log.Info("finish")
}

func getAllFile(pathname string) ([]string, error) {
	var s []string
	rd, err := ioutil.ReadDir(pathname)
	if err != nil {
		fmt.Println("read dir fail:", err)
		return s, err
	}

	for _, fi := range rd {
		if !fi.IsDir() {
			fullName := pathname + "/" + fi.Name()
			s = append(s, fullName)
		}
	}
	return s, nil
}

// worker decompresses one .7z dump archive, streams its XML, and sends every
// decoded <page> element to pageChan. Only after the whole archive parses
// cleanly is its basename recorded in MongoDB, so failed files are retried
// on the next run.
//
// Fixes: the original used panic() on open/header errors and log.Fatal on a
// token error — either would kill the entire importer (log.Fatal additionally
// calls os.Exit, skipping the deferred sz.Close and every other goroutine).
// All three now log and abandon just this file.
func worker(filepath string, mongoobj *mongodb.MongoHistoryDataBase, pageChan chan *Page) {
	log.Info("start:", filepath)

	sz, err := go7z.OpenReader(filepath)
	if err != nil {
		log.Error("open archive ", filepath, ": ", err)
		return
	}
	defer sz.Close()

	// Position the reader at the first (and only expected) archive entry.
	if _, err = sz.Next(); err != nil {
		if err != io.EOF { // io.EOF: empty archive, nothing to parse
			log.Error("read archive header ", filepath, ": ", err)
		}
		return
	}

	d := xml.NewDecoder(sz)
	log.Println("开始读取")
	count := 0
	for {
		tok, tokenErr := d.Token()
		if tokenErr != nil {
			if tokenErr == io.EOF {
				break // archive fully parsed
			}
			log.Error("xml token error in ", filepath, ": ", tokenErr)
			return // file stays unrecorded and will be retried
		}
		if se, ok := tok.(xml.StartElement); ok && se.Name.Local == "page" {
			var page Page
			if err := d.DecodeElement(&page, &se); err != nil {
				// Partial pages are still forwarded, matching the original
				// behavior; the handler filters empty revisions.
				log.Warning(err)
			}
			pageChan <- &page
			count++
		}
	}
	log.Infof("finish %d:%s", count, filepath)

	// Mark the archive as done so subsequent runs skip it.
	parts := strings.Split(filepath, "/")
	mongoobj.Insert_kv(parts[len(parts)-1])
}

// retHandle consumes parsed pages, drops redirects and non-article
// namespaces, cleans each surviving revision's wikitext, sends it to the
// remote word service, and bulk-inserts the results into MongoDB.
// wg.Done() fires once pageChan is closed and drained.
//
// Fixes: the per-page WaitGroup shadowed the *sync.WaitGroup parameter
// (renamed revWg); the time.Parse error was silently discarded, which would
// persist a zero timestamp unnoticed (now logged); the result slice is
// preallocated.
func retHandle(pageChan chan *Page, wg *sync.WaitGroup) {
	defer wg.Done()

	mongoobj := mongodb.NewMongoHistoryDataBase(config.Env.MongoUri, VERSION)
	for pageItem := range pageChan {
		// Only real articles: skip redirects and anything outside ns 0.
		if pageItem.Redirect.Title != "" || pageItem.Ns != 0 {
			continue
		}

		// Keep only revisions that actually carry text (not suppressed,
		// non-zero byte count, non-empty body).
		var clearRevisions []*revision
		for i := range pageItem.Revisions {
			r := &pageItem.Revisions[i]
			if r.Text.Deleted != "" || r.Text.Bytes == 0 || len(r.Text.Value) == 0 {
				continue
			}
			clearRevisions = append(clearRevisions, r)
		}

		// Process the surviving revisions concurrently. outChan is buffered
		// to len(clearRevisions) so no sender can block after revWg releases.
		revWg := sync.WaitGroup{}
		revWg.Add(len(clearRevisions))
		outChan := make(chan *model.LoadRevisionWords, len(clearRevisions))
		for i := range clearRevisions {
			go func(rev *revision) {
				defer revWg.Done()

				timestamp, err := time.Parse("2006-01-02T15:04:05Z", rev.Timestamp)
				if err != nil {
					log.Warn("time parse fail:", rev.Timestamp, err)
				}

				retString := wikiTextHandle(&rev.Text.Value)
				value := RPC.ShortestDistance(*retString)

				outChan <- &model.LoadRevisionWords{
					Revid:     rev.ID,
					ArticleID: pageItem.ID,
					Timestamp: timestamp,
					Ns:        pageItem.Ns,
					Words:     value,
				}
			}(clearRevisions[i])
		}
		revWg.Wait()
		close(outChan)

		revsizes := make([]*model.LoadRevisionWords, 0, len(clearRevisions))
		for lrp := range outChan {
			revsizes = append(revsizes, lrp)
		}
		mongoobj.Insert_many_revision_words(revsizes)
	}
}

// wikiTextHandle 清理 wikitext， 超过3个 \n 会被缩减到 2 个， \t 会被替换为空格。
// 清理之后语言链接的规则
// 最后一行含 category ，则跳过
func wikiTextHandle(s *string) *string {

	//将匹配到的部分替换为"##.#"
	rets := reCt.ReplaceAllString(*s, " ")

	// 去首尾差异字符串
	rets = strings.Trim(rets, "\n ")

	// 超过两行的空格替换为两行
	//将匹配到的部分替换为"##.#"
	cleanString := reCn.ReplaceAllString(rets, "\n\n")

	// 分割行数
	stringList := strings.Split(cleanString, "\n\n")

	// 只有一行
	if len(stringList) <= 1 {
		return &cleanString
	}

	// 最后一行含 category  跳出
	if strings.Contains(stringList[len(stringList)-1], "[[Category:") {
		return &cleanString
	}

	// 最后第二段含 category 的，去掉最后一段
	if strings.Contains(stringList[len(stringList)-2], "[[Category:") {
		stringList = stringList[:len(stringList)-1]
		cleanString = strings.Join(stringList, "\n\n")
		return &cleanString
	}

	// 最后一段有语言链接，去掉最后一段
	mt := reCl.MatchString(stringList[len(stringList)-1])
	if mt {
		stringList = stringList[:len(stringList)-1]
		cleanString = strings.Join(stringList, "\n\n")
		return &cleanString
	}

	// 不清理
	return &cleanString
}
