package crawler

import (
	"fmt"
	"github.com/go-redis/redis/v8"
	"github.com/gocolly/colly"
	"myai/core/types"
	"myai/service/crawler/format"
	"myai/utils"
)

// CrawlersManagerService coordinates a set of site-specific crawlers. It owns
// one shared colly collector and a registry mapping each target URL to the
// handler that knows how to parse that URL's response body.
type CrawlersManagerService struct {
	Config    *types.AppConfig // application configuration (usage not visible in this chunk — TODO confirm)
	RedisCli  *redis.Client    // Redis client, passed through to the per-site format services
	Collector *colly.Collector // shared collector used to visit every registered URL
	Urls      map[string]format.CrawlerDataFormatInter // target URL -> formatter for that URL's response
}

// NewCrawlerManagerService wires up a crawler manager: it creates the shared
// collector (async, URL revisits allowed) and registers the per-site
// formatters (Weibo, Toutiao), each keyed by its target URL.
func NewCrawlerManagerService(config *types.AppConfig, redisCli *redis.Client) *CrawlersManagerService {
	weibo := format.NewWeiBoFormatService(redisCli)
	touTiao := format.NewTouTiaoFormatService(redisCli)
	return &CrawlersManagerService{
		Config:    config,
		RedisCli:  redisCli,
		Collector: NewCollector(userAgent, true, true),
		Urls: map[string]format.CrawlerDataFormatInter{
			weibo.Url:   weibo,
			touTiao.Url: touTiao,
		},
	}
}

// NewCollector builds a colly collector with the given revisit policy and
// async flag. An empty userAgent keeps colly's default user agent.
func NewCollector(userAgent string, allowURLRevisit bool, async bool) *colly.Collector {
	collector := colly.NewCollector()
	collector.AllowURLRevisit = allowURLRevisit
	collector.Async = async
	if userAgent != "" {
		collector.UserAgent = userAgent
	}
	return collector
}

// ExecCrawler registers the request/response/error callbacks on the shared
// collector, visits every registered URL, and blocks until all async
// requests have completed.
func (s *CrawlersManagerService) ExecCrawler() {
	s.Collector.OnRequest(s.CallRequest)
	s.Collector.OnResponse(s.CallResponse)
	s.Collector.OnError(s.CallError)
	// Idiom fix: `for url, _ := range` -> `for url := range` (gofmt -s / vet).
	for url := range s.Urls {
		if err := s.Collector.Visit(url); err != nil {
			log.Errorf("访问API：%s 错误：%v\n", url, err)
		}
	}
	// Block until all in-flight async requests finish.
	s.Collector.Wait()
}

// CallRequest is the colly pre-request callback. Requests bound for
// weibo.com get the browser-like headers that endpoint expects; all other
// hosts are left untouched.
func (s *CrawlersManagerService) CallRequest(r *colly.Request) {
	if r.URL.Host != "weibo.com" {
		return
	}
	weiboHeaders := [][2]string{
		{"Accept", "application/json, text/plain, */*"},
		{"Accept-Language", "zh-CN,zh;q=0.9,en;q=0.8"},
		{"Connection", "keep-alive"},
		{"Referer", "https://weibo.com/"},
		{"Sec-Fetch-Dest", "empty"},
		{"Sec-Fetch-Mode", "cors"},
		{"Sec-Fetch-Site", "same-origin"},
		{"X-Requested-With", "XMLHttpRequest"},
	}
	for _, kv := range weiboHeaders {
		r.Headers.Set(kv[0], kv[1])
	}
}

// CallResponse is the colly response callback. It rebuilds the request's
// full URL (scheme://host/path[?query]) and dispatches the response body to
// the handler registered for that URL in s.Urls.
func (s *CrawlersManagerService) CallResponse(r *colly.Response) {
	u := r.Request.URL
	queryMap := u.Query()
	params := ""
	if len(queryMap) >= 1 {
		params = utils.MapToQueryMulti(queryMap)
	}
	// u.Path already includes its leading "/", so no separator is needed
	// between host and path.
	fullUrl := fmt.Sprintf("%s://%s%s", u.Scheme, u.Host, u.Path)
	if params != "" {
		fullUrl += "?" + params
	}
	// Bug fix: the original indexed s.Urls without an ok-check; an
	// unregistered URL (e.g. after a redirect) yielded a nil interface and
	// handler.Format panicked.
	handler, ok := s.Urls[fullUrl]
	if !ok || handler == nil {
		log.Errorf("未找到URL对应的处理器：%s\n", fullUrl)
		return
	}
	handler.Format(r.Body)
}

// CallError is the colly error callback; it logs the failing URL, the HTTP
// status code, and the underlying error.
func (s *CrawlersManagerService) CallError(r *colly.Response, err error) {
	// Bug fix: URL.Path already starts with "/", so the original
	// "%s://%s/%s" format produced a double slash (https://host//path),
	// inconsistent with CallResponse's URL reconstruction.
	fullUrl := fmt.Sprintf("%s://%s%s", r.Request.URL.Scheme, r.Request.URL.Host, r.Request.URL.Path)
	log.Errorf("请求失败：%s - %d\n错误：%v", fullUrl, r.StatusCode, err)
}
