package wx_article_catcher

import (
	"encoding/json"
	"flag"
	"fmt"
	"github.com/bitly/go-simplejson"
	"os"
	"strconv"
	"strings"
	"time"
	"webDownloader/engine/base"
	"webDownloader/engine/generic"
	"webDownloader/engine/wx_article_catcher/utils"
)

// init registers this engine with the generic engine registry at program
// startup so the dispatcher can route matching URLs (see Accept) to it.
func init() {
	generic.RegisterEngine(&EngineWxHomepage{})
}

// EngineWxHomepage is a download engine for WeChat official-account homepage
// URLs. It embeds the generic engine, which supplies shared state such as the
// configuration map (Conf) and the collected task list (Tasks).
type EngineWxHomepage struct {
	generic.EngineGeneric
}

// Accept reports whether url is a WeChat official-account homepage
// ("profile_ext?action=home" on mp.weixin.qq.com) that this engine handles.
func (e *EngineWxHomepage) Accept(url string) bool {
	onWxDomain := strings.Contains(url, "://mp.weixin.qq.com")
	isHomepage := strings.Contains(url, "profile_ext?action=home")
	return onWxDomain && isHomepage
}

// ParseConfig reads the command-line flags that control this engine, stores
// the values into the engine's configuration map, and returns a pointer to
// that map. It also propagates the requested goroutine count via SetThreads.
func (e *EngineWxHomepage) ParseConfig() *map[string]interface{} {
	fmt.Println("正在解析命令行配置...")
	// Lazily create the configuration map on first use.
	if e.Conf == nil {
		e.Conf = map[string]interface{}{}
	}

	// Declare and parse all command-line flags.
	var (
		numArticle      = flag.Int("n", -1, "指定下载文章的数量，为整数，0表示不下载（无意义），负值表示全部下载，默认值-1")
		savePath        = flag.String("p", "./", "用于指定下载路径，默认为下载到当前工作目录，带空格的路径需要用引号包裹")
		book            = flag.Bool("b", false, "用于指定处理成对pandoc友好的格式，方便制作电子书")
		offset          = flag.Int("off", 0, "偏移量，指定偏移量后程序会跳过一定数量的文章，直接从后面开始下载，默认值0")
		numThreads      = flag.Int("t", 10, "协程数，在一般情况下等价于允许同时下载的文章数，默认值10")
		noOriginCheck   = flag.Bool("O", false, "表示关闭原创校验，下载范围内所有能正常浏览的文章都会被下载")
		printCss        = flag.Bool("css", false, "输出评论区的css样式文件，用于部署静态页面，若没有此文件评论区格式会出错")
		keepPaidArticle = flag.Bool("keep", false, "保留未购买的付费文章，不加此参数未购买的付费文章不会被保存到本地，防止覆盖从他人处获取的已购文章")
		hideDonate      = flag.Bool("d", false, "在下载完成后不显示捐助信息，直接退出")
	)
	flag.Parse()

	// Normalize the save path so it always ends with "/": later code builds
	// file paths by plain string concatenation onto it.
	dir := *savePath
	if !strings.HasSuffix(dir, "/") {
		dir += "/"
	}

	// Copy the parsed values into the configuration map.
	e.Conf["numArticle"] = *numArticle
	e.Conf["savePath"] = dir
	e.Conf["book"] = *book
	e.Conf["offset"] = *offset
	e.Conf["numThreads"] = *numThreads
	e.Conf["noOriginCheck"] = *noOriginCheck
	e.Conf["printCss"] = *printCss
	e.Conf["keep"] = *keepPaidArticle
	e.Conf["hideDonate"] = *hideDonate

	SetThreads(*numThreads)

	return &e.Conf
}

// homePageEnvInit prepares the environment for a download run: it ensures the
// save directory exists and, when the "-css" flag was given, writes the
// comment-section CSS file instead of downloading.
//
// A non-nil return aborts the download flow; the CSS-only mode deliberately
// returns an error to stop further processing after writing the file.
func homePageEnvInit(e *EngineWxHomepage) error {
	path := e.Conf["savePath"].(string)
	if err := os.MkdirAll(path, 0755); err != nil {
		return err
	}
	if !e.Conf["printCss"].(bool) {
		return nil
	}
	// Fix: the previous code discarded the os.OpenFile error, so a failed open
	// (e.g. permission denied) left a nil *os.File and panicked on WriteString.
	// os.WriteFile also truncates any stale existing file, which OpenFile
	// without O_TRUNC did not.
	if err := os.WriteFile(path+"wxMessage.css", []byte(utils.CSS), 0666); err != nil {
		return err
	}
	return fmt.Errorf("优先打印css，不再执行下载流程")
}

// parseHomepage pages through the official-account message-list API,
// collecting article download tasks into e.Tasks. It stops when the server
// reports no more messages (can_msg_continue != 1) or when the requested
// article count (conf["numArticle"]) has been reached; a negative count means
// "collect everything".
//
// baseUrl must already end with "...&offset=" so the numeric page offset can
// be appended directly.
func parseHomepage(baseUrl string, e *EngineWxHomepage) {
	offset := e.Conf["offset"].(int)
	canMsgContinue := 1

	articleList := &e.Tasks

	num := e.Conf["numArticle"].(int)
	// Hoisted out of the loop: the flag cannot change while collecting.
	noOrigCheck := e.Conf["noOriginCheck"].(bool)

	for canMsgContinue == 1 && (num < 0 || len(*articleList) < num) {
		url := baseUrl + strconv.Itoa(offset)
		dataByte := utils.WxRequest(url, &e.Conf, true)
		var dataJson map[string]interface{}
		// Fix: the Unmarshal error used to be ignored, after which the type
		// assertions below panicked on the empty map. Bail out cleanly instead.
		if err := json.Unmarshal(dataByte, &dataJson); err != nil {
			fmt.Printf("\n接口响应解析失败: %v\n", err)
			return
		}
		// Remember the offset of the page just fetched so the final "-off" hint
		// (and a later resume) points at a valid position.
		e.Conf["offset"] = offset
		offset = int(dataJson["next_offset"].(float64))
		canMsgContinue = int(dataJson["can_msg_continue"].(float64))
		generalMsgList := []byte(dataJson["general_msg_list"].(string))
		gmList, err := simplejson.NewJson(generalMsgList)
		if err != nil {
			fmt.Printf("\ngeneral_msg_list 解析失败: %v\n", err)
			return
		}
		gmListArr, _ := gmList.Get("list").Array()
		for _, rawMsg := range gmListArr {
			msg := rawMsg.(map[string]interface{})
			commMsgInfo := msg["comm_msg_info"].(map[string]interface{})
			// Text-only pushes carry no app_msg_ext_info; skip them instead of
			// crashing (comma-ok also guards against a non-object value).
			extInfo, ok := msg["app_msg_ext_info"].(map[string]interface{})
			if !ok {
				continue
			}
			// Secondary articles bundled in the same push, if any.
			multiItems, _ := extInfo["multi_app_msg_item_list"].([]interface{})

			dateTime, _ := commMsgInfo["datetime"].(json.Number).Int64()

			// tryCollect normalizes one article entry and appends it to the task
			// list when it passes the origin check. It returns true once the
			// requested article count has been reached.
			tryCollect := func(rawUrl, title string, cpStatRaw interface{}) bool {
				task := DownloadTaskWxArticle{Context: e}
				task.Date = dateTime
				task.Url = strings.ReplaceAll(rawUrl, "&amp;", "&")
				// Force the https scheme: collapse any scheme to http, then
				// upgrade every http to https.
				task.Url = strings.ReplaceAll(task.Url, "https", "http")
				task.Url = strings.ReplaceAll(task.Url, "http", "https")
				task.FileName = title
				// Entries without a usable URL (deleted articles) are skipped.
				if len(task.Url) <= 4 {
					return false
				}
				var cpStat int64
				// Fix: guard the json.Number assertion — a missing
				// copyright_stat used to panic here.
				if n, ok := cpStatRaw.(json.Number); ok {
					cpStat, _ = n.Int64()
				}
				// Collect when the origin check is disabled, or when the
				// article is marked original (copyright_stat == 11).
				// (Simplified from noOrigCheck || (!noOrigCheck && cpStat == 11).)
				if noOrigCheck || cpStat == 11 {
					*articleList = append(*articleList, &task)
					if num >= 0 && len(*articleList) >= num {
						return true
					}
				}
				return false
			}

			// Main article of the push.
			if tryCollect(extInfo["content_url"].(string), extInfo["title"].(string), extInfo["copyright_stat"]) {
				return
			}
			// Secondary articles of the push.
			for _, sub := range multiItems {
				subMap := sub.(map[string]interface{})
				if tryCollect(subMap["content_url"].(string), subMap["title"].(string), subMap["copyright_stat"]) {
					return
				}
			}
		}
		fmt.Printf("正在收集文章，已收集%v篇，偏移量%v\r", len(*articleList), offset)
		// Throttle paging to avoid tripping anti-crawler limits.
		time.Sleep(time.Second * 2)
	}
	fmt.Printf("\n收集完毕,在命令行中添加 -off %v 可以从（接近）当前位置处继续下载\n\n", offset)
}

// ParseDownloadTask builds the list of article download tasks for the
// official-account homepage referenced by conf["refUrl"] and returns it.
// The process exits immediately if environment initialization fails (or if
// the CSS-only mode short-circuits the run).
func (e *EngineWxHomepage) ParseDownloadTask() []base.IDownload {
	e.Tasks = make([]base.IDownload, 0)
	if err := homePageEnvInit(e); err != nil {
		fmt.Println("程序中止，原因: ", err)
		os.Exit(-1)
	}

	// Start with an empty cookie jar for the upcoming requests.
	e.Conf["cookies"] = map[string]string{}
	homeUrl := e.Conf["refUrl"].(string)

	// The first homepage request yields the app-message token required by the
	// message-list endpoint; extract it if present.
	body := utils.WxRequest(homeUrl, &e.Conf, true)
	if matches := utils.TokenReg.FindAllStringSubmatch(string(body), 1); len(matches) > 0 {
		e.Conf["appMsgToken"] = matches[0][1]
	}

	// Convert the homepage URL into the paged article-list API endpoint.
	listUrl := strings.Replace(homeUrl, "home", "getmsg", 1) + "&f=json&count=10&is_ok=1&offset="
	parseHomepage(listUrl, e)
	return e.Tasks
}

// SyncDownloadTask reconciles the collected tasks with files already on disk:
// tasks whose target file exists with a matching modification time are treated
// as already downloaded and dropped; name collisions with a different time get
// a millisecond-timestamp suffix. It returns the remaining tasks to download.
func (e *EngineWxHomepage) SyncDownloadTask() []base.IDownload {
	tasks := make([]base.IDownload, 0)

	for _, task := range e.Tasks {
		// Fix: dropped the redundant .(interface{}) hop — task is already an
		// interface value and can be asserted directly.
		article := task.(*DownloadTaskWxArticle)

		// Sanitize the file name for use on disk.
		article.FileName = utils.IllegalChar1.ReplaceAllString(article.FileName, "_")
		article.FileName = utils.IllegalChar2.ReplaceAllString(article.FileName, "")

		filePath := e.Conf["savePath"].(string) + article.FileName + ".html"
		// Idiomatic existence check: Stat succeeds iff the file is reachable.
		if info, err := os.Stat(filePath); err == nil {
			if info.ModTime().Unix() == article.Date {
				fmt.Printf("文章【%s】已存在，且修改日期相同，判定为已下载，跳过\n", article.FileName)
				continue
			}
			fmt.Printf("文章【%s】已存在，但修改日期不同，判定为重复标题，追加时间戳\n", article.FileName)
			// Append the publish time in milliseconds to disambiguate.
			article.FileName += "_" + strconv.FormatInt(article.Date*1000, 10)
		}
		tasks = append(tasks, article)
	}
	return tasks
}
