/*
 * @Version: 0.0.1
 * @Author: ider
 * @Date: 2021-05-21 14:59:57
 * @LastEditors: ider
 * @LastEditTime: 2022-03-04 15:21:30
 * @Description: Import all data from the history dump files; only processes snapshots (snap)
 */
package main

import (
	"encoding/xml"
	"fmt"
	"historyOfWikipedia/internal/config"
	"historyOfWikipedia/internal/database/mongodb"
	"historyOfWikipedia/internal/model"
	"io"
	"io/ioutil"
	"sort"
	"strings"
	"sync"
	"time"

	"github.com/iwuzhen/filehub"
	"github.com/iwuzhen/filehub/seaweeds3"
	"github.com/jinzhu/now"
	"github.com/saracen/go7z"
	log "github.com/sirupsen/logrus"
	"go.mongodb.org/mongo-driver/bson/primitive"

	_ "net/http/pprof"
)

var (
	// VERSION tags this import run; it is passed to NewMongoHistoryDataBase,
	// presumably selecting the target database/collection set — verify there.
	VERSION = "20230401"
	// Filehub is the shared blob-store handle, initialized in init().
	Filehub filehub.Filehub
)

// init configures logrus (full timestamps, caller reporting) and opens the
// package-level Filehub handle against the SeaweedFS S3 "revision" bucket.
// The process aborts immediately if the store cannot be opened.
func init() {
	log.SetFormatter(&log.TextFormatter{
		FullTimestamp: true,
	})
	log.SetReportCaller(true)
	var err error
	// SECURITY(review): access credentials are hard-coded in this URL;
	// consider loading them from config.Env like the mongo URI.
	Filehub, err = seaweeds3.NewSeaweedS3("seaweeds3://dafqeQG2141RFq:gegio4gnberianowanewF@127.0.0.1:18335/revision")
	if err != nil {
		log.Fatal(err)
	}
}

// text mirrors the <text> element of a revision: the article body plus
// the attributes used to detect hidden/empty content.
type text struct {
	Value   string `xml:",chardata"`    // raw wikitext of the revision
	Bytes   int32  `xml:"bytes,attr"`   // declared byte size; 0 is treated as hidden/empty downstream
	Deleted string `xml:"deleted,attr"` // non-empty when the text was suppressed in the dump
}

// redirect mirrors the <redirect> element; Title is empty for normal pages.
// Pages with a non-empty redirect title are skipped during snapshot tagging.
type redirect struct {
	Title string `xml:"title,attr"`
}

// contributor mirrors the <contributor> element of a revision. Per the
// handling in retHandle, a revision carries either Ip (anonymous edit) or
// Username+ID (registered user), not both.
type contributor struct {
	Username string `xml:"username"`
	Ip       string `xml:"ip"`
	ID       int64  `xml:"id"`
	Deleted  string `xml:"deleted,attr"` // non-empty when the contributor was suppressed
}

// revision mirrors one <revision> element of a page in the history dump.
type revision struct {
	ID          int64       `xml:"id"`
	Text        text        `xml:"text"`
	Parentid    int64       `xml:"parentid"`  // id of the revision this one was based on
	Timestamp   string      `xml:"timestamp"` // RFC 3339 UTC, parsed as "2006-01-02T15:04:05Z" downstream
	Comment     string      `xml:"comment"`
	Model       string      `xml:"model"`  // expected to be "wikitext"; anything else is logged
	Format      string      `xml:"format"` // expected to be "text/x-wiki"; anything else is logged
	Contributor contributor `xml:"contributor"`
}

// Page mirrors one <page> element of the dump: the article identity plus
// its full revision history.
type Page struct {
	ID        int64      `xml:"id"`
	Ns        int32      `xml:"ns"` // namespace number
	Title     string     `xml:"title"`
	Redirect  redirect   `xml:"redirect"`
	Revisions []revision `xml:"revision"`
}

// main wires up the import pipeline:
//
//	dump folder -> file_chan -> 30 worker goroutines (7z+XML decode)
//	            -> pageChan  -> 10 retHandle goroutines (mongo writes)
//
// Already-imported files (recorded via Insert_kv) are skipped.
func main() {

	// go func() {
	// 	log.Println(http.ListenAndServe("0.0.0.0:10000", nil))
	// }()

	mongoobj := mongodb.NewMongoHistoryDataBase(config.Env.MongoUri, VERSION)
	// Queue of 7z dump-file paths still waiting to be imported.
	file_chan := make(chan string, 10000)
	paths, err := getAllFile(config.Env.HistoryDumpFileFolder)
	if err != nil {
		// Was silently ignored; an unreadable dump folder should abort the run.
		log.Fatal("list dump folder fail: ", err)
	}
	// Feed the queue from its own goroutine: filling it synchronously here
	// would deadlock if the folder ever holds more than cap(file_chan)
	// pending files, because the consumers are only started below.
	go func() {
		for _, path := range paths {
			pathes := strings.Split(path, "/")
			// Only .7z archives, and only those not already marked done in mongo.
			if strings.HasSuffix(path, "7z") && !mongoobj.Get_kv(pathes[len(pathes)-1]) {
				file_chan <- path
			}
		}
		close(file_chan)
	}()

	// Page consumers: persist parsed pages to mongo.
	pageChan := make(chan Page, 1000)
	retHandleThreadCount := 10
	retWg := sync.WaitGroup{}
	retWg.Add(retHandleThreadCount)
	for i := 0; i < retHandleThreadCount; i++ {
		go retHandle(pageChan, &retWg)
	}

	// XML parsers: decompress each archive and stream pages to pageChan.
	threadCount := 30
	wg := sync.WaitGroup{}
	wg.Add(threadCount)
	for i := 0; i < threadCount; i++ {
		go func() {
			for path := range file_chan {
				worker(path, mongoobj, pageChan)
			}
			wg.Done()
		}()
	}
	wg.Wait()
	close(pageChan)
	retWg.Wait()
	log.Info("finish")
}

func getAllFile(pathname string) ([]string, error) {
	var s []string
	rd, err := ioutil.ReadDir(pathname)
	if err != nil {
		fmt.Println("read dir fail:", err)
		return s, err
	}

	for _, fi := range rd {
		if !fi.IsDir() {
			fullName := pathname + "/" + fi.Name()
			s = append(s, fullName)
		}
	}
	return s, nil
}

// worker decompresses one 7z dump archive, streams its XML content, and
// sends every <page> element to pageChan. On success the file name is
// recorded in mongo (Insert_kv) so it is skipped on later runs; on any
// error the function returns WITHOUT recording, so the file is retried.
func worker(filepath string, mongoobj *mongodb.MongoHistoryDataBase, pageChan chan Page) {
	log.Info("start:", filepath)
	sz, err := go7z.OpenReader(filepath)
	if err != nil {
		// A corrupt/unreadable archive must not take down the whole import
		// (a panic in this goroutine would crash the process).
		log.Error("open 7z fail: ", filepath, " ", err)
		return
	}
	defer sz.Close()

	// Position on the first (and only expected) entry of the archive.
	if _, err = sz.Next(); err != nil {
		if err != io.EOF { // io.EOF: empty archive, nothing to do
			log.Error("read 7z entry fail: ", filepath, " ", err)
		}
		return
	}

	d := xml.NewDecoder(sz)
	log.Println("开始读取")
	count := 0
	for {
		t, tokenErr := d.Token()
		if tokenErr == io.EOF {
			break
		}
		if tokenErr != nil {
			// Was log.Fatal: killing the process mid-import loses all other
			// in-flight work. Skip this file; it stays unmarked and retried.
			log.Error("unknown err", tokenErr)
			return
		}
		switch t := t.(type) {
		case xml.StartElement:
			if t.Name.Local == "page" {
				var page Page
				if err := d.DecodeElement(&page, &t); err != nil {
					log.Warning(err)
				}
				pageChan <- page
				count++
			}
		}
	}
	log.Infof("finish %d:%s", count, filepath)
	pathes := strings.Split(filepath, "/")
	mongoobj.Insert_kv(pathes[len(pathes)-1])
}

// retHandle is a consumer goroutine: it drains pageChan and writes three
// kinds of documents to mongo for every page — the article record, the
// per-revision metadata list (revision info), and the revisions that serve
// as monthly snapshots (revision detail). Returns when pageChan closes.
func retHandle(pageChan chan Page, wg *sync.WaitGroup) {

	timetags := generate_static_time_tag()
	// Cache of user ObjectIDs keyed by ip or by "username-id", so each
	// contributor is upserted into mongo at most once per goroutine.
	userCacheMap := make(map[string]primitive.ObjectID)

	mongoobj := mongodb.NewMongoHistoryDataBase(config.Env.MongoUri, VERSION)
	for pageItem := range pageChan {
		newPage := &model.LoadPagesArticlesObj{
			ID:       pageItem.ID,
			Ns:       pageItem.Ns,
			Redirect: pageItem.Redirect.Title,
			Title:    pageItem.Title,
		}

		// Sort revisions ascending by timestamp. Dump timestamps are
		// fixed-width RFC 3339 UTC ("2006-01-02T15:04:05Z"), so the
		// lexicographic order of the raw strings equals chronological
		// order; comparing strings avoids re-parsing every timestamp on
		// each of the O(n log n) comparisons.
		sort.SliceStable(pageItem.Revisions, func(i, j int) bool {
			return pageItem.Revisions[i].Timestamp < pageItem.Revisions[j].Timestamp
		})
		// Record the page's earliest and latest revisions.
		if len(pageItem.Revisions) > 0 {
			newPage.FirstRevisionID = pageItem.Revisions[0].ID
			newPage.FirstRevisionUpdate, _ = time.Parse("2006-01-02T15:04:05Z", pageItem.Revisions[0].Timestamp)
			newPage.LastRevisionID = pageItem.Revisions[len(pageItem.Revisions)-1].ID
			newPage.LastRevisionUpdate, _ = time.Parse("2006-01-02T15:04:05Z", pageItem.Revisions[len(pageItem.Revisions)-1].Timestamp)
		}
		// Upsert the article document.
		err := mongoobj.Insert_pages_articles(newPage)
		if err != nil {
			log.Info("insert many article err:", err)
		}

		// Build a metadata document for every revision and resolve its
		// contributor to a user ObjectID.
		var revision_info_list []*model.LoadRevisionInfoObj

		for i := range pageItem.Revisions {
			timestamp, err := time.Parse("2006-01-02T15:04:05Z", pageItem.Revisions[i].Timestamp)
			if err != nil {
				log.Warn("time parse fail:", pageItem.Revisions[i].Timestamp, err)
			}
			revision_info_doc := &model.LoadRevisionInfoObj{
				Revid:     pageItem.Revisions[i].ID,
				ArticleID: pageItem.ID,
				Comment:   pageItem.Revisions[i].Comment,
				ParentID:  pageItem.Revisions[i].Parentid,
				Timestamp: timestamp,
				Bytes:     pageItem.Revisions[i].Text.Bytes,
			}

			// Contributor resolution: a revision carries either an ip
			// (anonymous edit) or a username+id, never both.
			if pageItem.Revisions[i].Contributor.Ip != "" {
				if userObjID, stats := userCacheMap[pageItem.Revisions[i].Contributor.Ip]; !stats {
					userObjID, err := mongoobj.Upsert_user_by_ip(pageItem.Revisions[i].Contributor.Ip)
					if err != nil {
						log.Warn("user id 插入失败:", pageItem.ID, pageItem.Revisions[i].ID, pageItem.Revisions[i].Contributor)
					} else {
						userCacheMap[pageItem.Revisions[i].Contributor.Ip] = userObjID
						revision_info_doc.User = userObjID
					}
				} else {
					revision_info_doc.User = userObjID
				}

			} else if pageItem.Revisions[i].Contributor.Username != "" {
				key := fmt.Sprintf("%s-%d", pageItem.Revisions[i].Contributor.Username, pageItem.Revisions[i].Contributor.ID)
				if userObjID, stats := userCacheMap[key]; !stats {
					userObjID, err := mongoobj.Upsert_user_by_name_id(pageItem.Revisions[i].Contributor.Username, pageItem.Revisions[i].Contributor.ID)
					if err != nil {
						log.Warn("user id 插入失败:", pageItem.ID, pageItem.Revisions[i].ID, pageItem.Revisions[i].Contributor)
					} else {
						userCacheMap[key] = userObjID
						revision_info_doc.User = userObjID
					}
				} else {
					revision_info_doc.User = userObjID
				}
			}

			// Flag unexpected formats/models so schema drift is visible.
			if pageItem.Revisions[i].Format != "text/x-wiki" {
				log.Warn("意外的Format:", pageItem.Revisions[i].Format)
			}
			if pageItem.Revisions[i].Model != "wikitext" {
				log.Warn("意外的wikitext:", pageItem.Revisions[i].Model)
			}

			// Mark revisions whose text is suppressed or empty.
			if pageItem.Revisions[i].Text.Deleted != "" {
				revision_info_doc.TextHidden = true
			} else if pageItem.Revisions[i].Text.Bytes == 0 {
				revision_info_doc.TextHidden = true
			} else if len(pageItem.Revisions[i].Text.Value) == 0 {
				revision_info_doc.TextHidden = true
			}
			revision_info_list = append(revision_info_list, revision_info_doc)
		}
		if len(revision_info_list) > 0 {
			err := mongoobj.Insert_many_revision_info(revision_info_list)
			if err != nil {
				log.Warn("insert 出了点小问题：", err)
			}
		}

		// Redirect pages carry no content worth snapshotting.
		if pageItem.Redirect.Title != "" {
			continue
		}

		revision_detail_map := make(map[int64]*model.LoadRevisionDetailObj)
		// For every monthly tag, pick the latest non-hidden revision at or
		// before that instant; one revision may back several tags.
		// NOTE(review): rescans the sorted revision list once per tag,
		// O(tags*revisions); fine for typical pages, could become a single
		// merged pass if it ever shows up in profiles.
		for _, timeItem := range timetags {
			timgTag := time_format(timeItem)
			// The revision that will represent this snapshot tag.
			var cacheSnapRevision *revision
			for i := range pageItem.Revisions {
				it, err := time.Parse("2006-01-02T15:04:05Z", pageItem.Revisions[i].Timestamp)
				if err != nil {
					log.Warn("time parse fail:", pageItem.Revisions[i].Timestamp, err)
				}
				// Revisions are sorted ascending: once past the tag, stop.
				if it.After(timeItem) {
					break
				} else if pageItem.Revisions[i].Text.Deleted != "" {
					continue
				} else if pageItem.Revisions[i].Text.Bytes == 0 {
					continue
				} else if len(pageItem.Revisions[i].Text.Value) == 0 {
					continue
				}

				cacheSnapRevision = &pageItem.Revisions[i]
			}
			// Only record a snapshot when some valid revision precedes the tag.
			if cacheSnapRevision != nil {
				if snapRevisionDetail, stats := revision_detail_map[cacheSnapRevision.ID]; stats {
					snapRevisionDetail.Snapshot = append(snapRevisionDetail.Snapshot, timgTag)

				} else {
					timestamp, _ := time.Parse("2006-01-02T15:04:05Z", cacheSnapRevision.Timestamp)
					snapRevisionDetail = &model.LoadRevisionDetailObj{
						Revid:     cacheSnapRevision.ID,
						ArticleID: pageItem.ID,
						Timestamp: timestamp,
						Text:      cacheSnapRevision.Text.Value,
						Snapshot:  []string{timgTag},
						Format:    cacheSnapRevision.Format,
					}
					revision_detail_map[cacheSnapRevision.ID] = snapRevisionDetail
				}
			}
		}
		if len(revision_detail_map) > 0 {
			var cache_list []*model.LoadRevisionDetailObj
			for k := range revision_detail_map {
				cache_list = append(cache_list, revision_detail_map[k])
			}
			err := mongoobj.Insert_many_revision_snap(cache_list)
			if err != nil {
				log.Warn("insert 出了点小问题：", err)
			}
		} else {
			// Was "%d,%s" with three args and %s on an int64 — a go vet
			// verb/argument mismatch that printed %!s(...) garbage.
			log.Printf("revision_detail_map:%d,%d,%s", len(revision_detail_map), pageItem.ID, pageItem.Title)
		}
	}
	wg.Done()
}

// generate_static_time_tag builds one snapshot tag per calendar month from
// January 2001 up to now. Each tag is the final instant of its month
// (jinzhu/now EndOfMonth) in the local timezone; only tags strictly in the
// past are emitted.
func generate_static_time_tag() (timetags []time.Time) {
	current := time.Now()
	for year := 2001; year <= current.Year(); year++ {
		for month := time.January; month <= time.December; month++ {
			// Day 11 is an arbitrary mid-month anchor; EndOfMonth snaps
			// it to the last instant of that month.
			anchor := time.Date(year, month, 11, 0, 0, 0, 0, time.Local)
			tag := now.With(anchor).EndOfMonth()
			if tag.Before(current) {
				timetags = append(timetags, tag)
			}
		}
	}
	return
}

func time_format(t time.Time) string {
	return t.Format("2006-01-02")
}

// loop_put_filer stores data under fileName in the global Filehub, retrying
// up to maxAttempts times with a one-second pause between attempts.
// Previously it gave up silently after exhausting retries; it now logs a
// final error so the data loss is visible.
func loop_put_filer(fileName string, data []byte, contType string) {
	const maxAttempts = 100
	for i := 0; i < maxAttempts; i++ {
		_, err := Filehub.Put(fileName, data, contType)
		if err == nil {
			return
		}
		log.Warnf("wait 1s: %s %s %d : %s", fileName, contType, len(data), err)
		time.Sleep(time.Second)
	}
	log.Errorf("put failed after %d attempts, giving up: %s", maxAttempts, fileName)
}
