package fa

import (
	"bytes"
	"fmt"
	"gallery/crawler"
	"gallery/crawler/utils"
	"log"
	"os"
	"path/filepath"
	"strings"
	"sync"
	"time"

	"github.com/PuerkitoBio/goquery"
	"github.com/fatih/color"
)

// Package-level crawler state, wired up in Start. (Original note: "migration".)
var (
	sem chan struct{}   // semaphore limiting concurrent downloads; borrowed from crawler.MyClient.Ch
	wg  *sync.WaitGroup // tracks in-flight download goroutines; borrowed from crawler.MyClient
	bar *utils.Bar      // progress bar sized to the total submission count
)

// Start 启动爬虫
func Start(user string) {
	// 更新输出目录
	crawler.Dir = filepath.Join(crawler.Dir, user, "/")
	// 创建目标路径
	os.MkdirAll(crawler.Dir, os.ModeDir)

	sem = crawler.MyClient.Ch
	wg = &crawler.MyClient.WaitGroup

	// 开始计时
	timeStart := time.Now()

	subs, pages := getSubmissions(user)
	// 注意覆盖问题
	bar = utils.NewBar(int32(subs))
	switch {
	case crawler.Page > 0:
		// 获取指定页数
		log.Println("获取第", crawler.Page, "页")
		data := get(fmt.Sprintf("%s/%s/%d", start, user, crawler.Page))
		downloadImg(data, wg)
	default:
		log.Printf("获取全部图片，总计：%d个项目，%d页\n\n", subs, pages)
		for p := 0; p < pages; p++ {
			data := get(fmt.Sprintf("%s/%s/%d", start, user, p+1))
			downloadImg(data, wg)
		}
	}
	wg.Wait()
	close(sem)
	timeEnd := time.Since(timeStart)
	log.Println(crawler.Info("\n完成!\t耗时："), timeEnd)
}

// downloadImg 从预览界面获取所有的图片
func downloadImg(data []byte, wg *sync.WaitGroup) {
	links := parseLink("figure b a", "href", data)
	for _, e := range links {
		// 链接筛选
		if strings.Contains(e, "/view") {
			wg.Add(1)
			sem <- struct{}{}
			// 避免意外的抢占情况，使用uri作为参数
			go func(uri string) {
				// 进入每一个图片的详细界面然后下载图片
				uri = fmt.Sprintf("%s%s", host, uri)
				tmp := get(uri)
				doc, _ := goquery.NewDocumentFromReader(bytes.NewReader(tmp))
				alt := doc.Find(".submission-title p").First().Text()
				dlink := doc.Find(".aligncenter.auto_link a").FilterFunction(func(i int, s *goquery.Selection) bool {
					return strings.Contains(s.Text(), "Download")
				}).AttrOr("href", "")

				defer func() {
					wg.Done()
					<-sem
					fmt.Print(color.MagentaString(bar.AddAndShow(1)))
				}()

				if alt == "" || dlink == "" {
					color.Red("已经触发反爬措施!降低速度...")
					crawler.Duration = 3
					return
				}
				dlink = fmt.Sprintf("https:%s", dlink)
				// 处理图片名称
				fname := genName(dlink, alt)
				// 以当前用户权限写入
				out := filepath.Join(crawler.Dir, fname)
				dFile(dlink, out)
			}(e)
		}
		// 降低访问频率
		time.Sleep(time.Second * time.Duration(crawler.Duration))
	}
}
