package service

import (
	"encoding/json"
	"errors"
	"fmt"
	"log"
	"net/http"
	"os"
	"path"
	"time"

	r "gitee.com/nanakura/go-ramda"
	"github.com/PuerkitoBio/goquery"
	"github.com/imroc/req/v3"
)

var (
	// globalClient is the shared proxy-rotating HTTP client used by all
	// requests in this package; initialized once in init().
	globalClient *req.Client
	// proxies is the proxy URL pool loaded from config/config.json by
	// InitProxies; may be empty if the config is missing or invalid.
	proxies      []string
	// globalDoc caches the most recently fetched document; written by
	// SetCurrentDocument and read by LazyCrawl.
	globalDoc    *goquery.Document
)

// configModel mirrors the structure of config/config.json.
type configModel struct {
	Proxy []string `json:"proxy"` // list of proxy URLs
}

// InitProxies loads the proxy pool from config/config.json into the
// package-level proxies slice. Failures are logged and leave the pool
// unchanged; they never abort startup (best-effort by design).
func InitProxies() {
	bytes, err := os.ReadFile("config/config.json")
	if err != nil {
		// Bug fix: this branch is a file-read failure, not a parse failure;
		// the old message ("config parse error") was misleading.
		log.Printf("read config failed: %s\n", err)
		return
	}
	var config configModel
	if err = json.Unmarshal(bytes, &config); err != nil {
		// Bug fix: this is the actual parse error (was logged as
		// "get proxy failed").
		log.Printf("config parse error: %s\n", err)
		return
	}
	proxies = config.Proxy
	log.Printf("parse success, proxy pool count: %d\n", len(proxies))
}

// NewAutoChangeProxyClient builds a req client that rotates through the proxy
// pool on failure: each request retries up to len(proxies) times (on a
// transport error or HTTP 429), switching to the next proxy from the pool on
// every attempt. The first attempt goes through a hard-coded default proxy.
// NOTE(review): consider making the default proxy URL configurable.
func NewAutoChangeProxyClient() *req.Client {
	InitProxies()
	if len(proxies) == 0 {
		log.Println("proxy pool is empty")
	}
	client := req.C()
	client.SetTimeout(5 * time.Second).
		EnableDumpEachRequest().
		SetCommonRetryCount(len(proxies)).
		SetCommonRetryCondition(func(resp *req.Response, err error) bool {
			return err != nil || resp.StatusCode == http.StatusTooManyRequests
		}).
		SetProxyURL("http://112.80.248.73:80").
		SetCommonRetryHook(func(resp *req.Response, err error) {
			// Bug fix: guard the attempt index so an empty or short pool can
			// never panic the hook with an out-of-range access.
			idx := resp.Request.RetryAttempt - 1
			if idx < 0 || idx >= len(proxies) {
				return
			}
			// Switch the request to a clone of the client that uses the next
			// proxy from the pool.
			resp.Request.SetClient(client.Clone().SetProxyURL(proxies[idx]))
		}).
		SetOutputDirectory("tmp")
	return client
}

// init builds the shared HTTP client once at package load time.
// NOTE(review): init() performs file I/O via InitProxies, which makes startup
// order and failure handling implicit — consider an explicit constructor.
func init() {
	globalClient = NewAutoChangeProxyClient()
}

// GetDocument performs a GET on url with browser-like headers through the
// shared proxy-rotating client and parses the response body as an HTML
// document. On failure the returned error embeds the request/response dump
// to aid debugging.
func GetDocument(url string) (doc *goquery.Document, err error) {
	resp, err := globalClient.R().
		SetHeaders(map[string]string{
			"User-Agent":      "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.104 Safari/537.36 Core/1.53.4882.400 QQBrowser/9.7.13059.400",
			"Accept":          "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
			"Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6,zh-TW;q=0.5",
		}).
		Get(url)
	if err != nil {
		// Bug fix: this branch is a request failure, not an HTML parse
		// failure (old message said "failed to parse html"); wrap with %w so
		// callers can use errors.Is/As.
		err = fmt.Errorf("request failed: %w, raw content:\n%s", err, resp.Dump())
		return
	}
	// Release the body once parsed so the underlying connection can be reused.
	defer resp.Body.Close()
	doc, err = goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		err = fmt.Errorf("failed to parse html: %w, raw content:\n%s", err, resp.Dump())
	}
	return
}

// SetCurrentDocument fetches url and stores the parsed document in the
// package-level globalDoc cache (nil on failure), for later LazyCrawl calls.
func SetCurrentDocument(url string) error {
	doc, err := GetDocument(url)
	globalDoc = doc
	return err
}

// Crawl fetches url, caches the resulting document in globalDoc, and then
// invokes callback on it. It returns the fetch error, if any, otherwise the
// callback's error.
func Crawl(url string, callback func(doc *goquery.Document) error) error {
	if err := SetCurrentDocument(url); err != nil {
		return err
	}
	return callback(globalDoc)
}

// LazyCrawl runs callback against the document cached by the most recent
// SetCurrentDocument/Crawl call, without issuing a new request.
func LazyCrawl(callback func(doc *goquery.Document) error) error {
	return callback(globalDoc)
}

// DownloadToTmpDir downloads url into the client's configured output
// directory ("tmp") under the given filename.
func DownloadToTmpDir(url, filename string) error {
	request := globalClient.R().SetOutputFile(filename)
	_, err := request.Get(url)
	return err
}

// DownloadPics crawls url, collects the src attribute of every element
// matched by picSelector, downloads each image into the tmp directory, and
// returns the local paths ("tmp/<filename>") of the saved files.
func DownloadPics(url, picSelector string) (res []string, err error) {
	var srcList []string
	err = Crawl(url, func(doc *goquery.Document) error {
		var selErr error
		doc.
			Find(picSelector).
			Each(func(i int, selection *goquery.Selection) {
				src, exists := selection.Attr("src")
				if !exists || src == "" {
					// Bug fix: the error used to be assigned to the outer
					// named return, which `err = Crawl(...)` then overwrote
					// with nil. Propagate it through the callback instead.
					selErr = errors.New("selector error")
					return
				}
				srcList = append(srcList, src)
			})
		return selErr
	})
	if err != nil {
		return
	}
	for _, src := range srcList {
		// path.Base replaces the hand-rolled split/last composition and is
		// correct for slash-separated URLs.
		filename := path.Base(src)
		if err = DownloadToTmpDir(src, filename); err != nil {
			return
		}
		// Bug fix: record the path the file was actually saved to
		// (tmp/<filename>); previously the full source URL was appended
		// after "tmp/".
		res = append(res, fmt.Sprintf("tmp/%s", filename))
	}
	return
}

// GetTitleTable scrapes the page at url, pairing the text of each element
// matched by textSelector with the href attribute of each element matched by
// urlSelector. Both selectors must yield the same number of matches,
// otherwise an error is returned. The result is a list of
// {"title": ..., "url": ...} maps zipped in document order.
func GetTitleTable(url, urlSelector, textSelector string) (titleTableList []map[string]string, err error) {
	if err = SetCurrentDocument(url); err != nil {
		return
	}
	var (
		titleList []string
		urlList   []string
	)
	// Extract titles and hrefs from the same cached document in one pass.
	err = LazyCrawl(func(doc *goquery.Document) error {
		doc.Find(textSelector).Each(func(_ int, sel *goquery.Selection) {
			titleList = append(titleList, sel.Text())
		})
		doc.Find(urlSelector).Each(func(_ int, sel *goquery.Selection) {
			if href, ok := sel.Attr("href"); ok {
				urlList = append(urlList, href)
			}
		})
		return nil
	})
	if err != nil {
		return
	}
	if len(titleList) != len(urlList) {
		err = errors.New("can't map title and url")
		return
	}
	titleTableList = r.ZipWith(func(title, url string) map[string]string {
		return map[string]string{
			"title": title,
			"url":   url,
		}
	})(titleList)(urlList)
	return
}

// GetContent fetches the page at url and returns its content as a map:
//   - if any element matched by imgSelector carries an href attribute, the
//     result is {"content": []string of hrefs, "isPics": true};
//   - otherwise the result is the inner HTML of the textSelector match,
//     as {"content": []string{html}, "isPics": false}.
func GetContent(url, imgSelector, textSelector string) (res map[string]any, err error) {
	// Bug fix: the fetch error was previously assigned but never checked, so
	// a failed request could leave globalDoc nil/stale and panic below.
	if err = SetCurrentDocument(url); err != nil {
		return
	}
	res = make(map[string]any)
	var hrefs []string
	// The callback always returns nil, so this LazyCrawl cannot fail.
	_ = LazyCrawl(func(doc *goquery.Document) error {
		doc.Find(imgSelector).Each(func(i int, selection *goquery.Selection) {
			if href, exists := selection.Attr("href"); exists {
				hrefs = append(hrefs, href)
			}
		})
		return nil
	})
	if len(hrefs) > 0 {
		res["content"] = hrefs
		res["isPics"] = true
		return
	}
	// No image links found: fall back to the inner HTML of textSelector.
	var text string
	err = LazyCrawl(func(doc *goquery.Document) error {
		html, htmlErr := doc.Find(textSelector).Html()
		if htmlErr != nil {
			return htmlErr
		}
		text = html
		return nil
	})
	if err != nil {
		// Bug fix: the Html() error was previously discarded, silently
		// returning an empty content string.
		return
	}
	res["content"] = []string{text}
	res["isPics"] = false
	return
}
