package engine

/**
简单的单任务版爬虫
*/

import (
	"demo02/crawler/fetcher"
	"demo02/crawler/models"
	"log"
)

// SimpleEngine is a sequential, single-task crawler engine: it processes
// requests one at a time from an in-memory queue and never spawns
// goroutines. It carries no state, so the zero value is ready to use.
type SimpleEngine struct{}

// Run crawls the seed requests and every request discovered while parsing,
// sequentially, until the work queue is empty. A request whose download
// fails is pushed back to the end of the queue; after maxRetries failed
// attempts its URL is dropped with a log message. Parsed items implementing
// models.Models are printed via their Print method, everything else is
// logged as-is.
func (s SimpleEngine) Run(rs ...Request) {
	// Maximum download attempts per URL before giving up on it.
	const maxRetries = 3

	// Per-URL failure counter, keyed by the MD5 of the URL.
	errorDown := map[string]int{}

	requests := make([]Request, 0, len(rs))
	requests = append(requests, rs...)

	for len(requests) > 0 {
		r := requests[0]
		requests = requests[1:]

		md5str := md5encode(r.Url)

		// A missing key yields 0, so no ok-check is needed here.
		if errorDown[md5str] >= maxRetries {
			log.Printf("下载失败 \t url:%s \n", r.Url)
			continue
		}

		parseResult, err := worker(r)
		if err != nil {
			// Record the failure and re-queue the request for a later retry.
			errorDown[md5str]++
			requests = append(requests, r)
			log.Printf("下载失败，当前第 %d 次，URL:%s，Error：%s", errorDown[md5str], r.Url, err.Error())
			continue
		}

		// Success: forget any earlier failures for this URL.
		// delete is a no-op when the key is absent, so no existence check.
		delete(errorDown, md5str)

		// append handles a nil slice, so no nil guard is needed.
		requests = append(requests, parseResult.Requests...)

		for _, item := range parseResult.Items {
			// Bind the asserted value in the switch to avoid a second
			// type assertion on the models.Models branch.
			switch m := item.(type) {
			case models.Models:
				m.Print()
			default:
				log.Printf("爬取到的值是: %v", item)
			}
		}
	}
}

// worker downloads the page addressed by r.Url (decoded with r.Code) and
// feeds the body to the request's parser. On a fetch failure it returns a
// zero ParseRequest together with the underlying error; the caller is
// responsible for retry handling.
func worker(r Request) (ParseRequest, error) {
	log.Printf("下载：%s \n", r.Url)
	body, fetchErr := fetcher.Fetcher(r.Url, r.Code)
	if fetchErr != nil {
		return ParseRequest{}, fetchErr
	}
	return r.ParserFunc(body), nil
}
