package spider

import (
	. "spider/common"
	. "spider/schedule"

	"spider/thirdparty/log4go.v1"
)

var SpiderName string = "CrazySpider"     // Name given to the package-default spider instance.
var StartUrls []string                    // Seed URLs; Crawl enqueues one request per entry.
var DefaultSpider = NewSpider(SpiderName) // Shared base spider embedded by every CrawlSpider from NewCrawlSpider.
var AllowedDomains = []string{}           // Domains the spider may visit — NOTE(review): never read in this file; verify usage at callers.
var log = log4go.NewDefaultLogger(log4go.DEBUG) // Package-level logger at DEBUG verbosity.

// Deprecated: appears to be a misspelled ("Alllowed") duplicate of
// AllowedDomains; it is never assigned or read in this file. Kept only for
// backward compatibility with external callers — use AllowedDomains instead.
var AlllowedDomain []string

// CrawlSpider is the default crawler. It embeds the base *Spider for the
// request/response plumbing and delegates run ordering to a pluggable
// schedule, so the scheduling strategy can be swapped without touching
// the spider itself.
type CrawlSpider struct {
	*Spider
	schedule ISchedule // Scheduling strategy; implement ISchedule to plug in your own.
}

// NewCrawlSpider builds a CrawlSpider around the shared DefaultSpider and
// wires it to the default scheduling strategy.
//
// NOTE(review): every CrawlSpider returned here embeds the same package-level
// DefaultSpider, so multiple instances share state — confirm this is intended
// before constructing more than one.
func NewCrawlSpider() *CrawlSpider {
	crawler := &CrawlSpider{Spider: DefaultSpider}
	crawler.schedule = NewDefaultSchedule(DefaultSpider)
	return crawler
}
// InitConfig applies an optional configuration callback to the spider's
// config, then allocates a bloom filter if the resulting config enables it.
// A nil callback leaves the config untouched.
func (self *CrawlSpider) InitConfig(config func(config *SpiderConfig)) {
	cfg := self.Context.Config
	if config != nil {
		config(cfg)
	}
	if !cfg.ENABLE_BLOOM_FILTER {
		return
	}
	self.Context.BloomFilter = new(BloomFilter)
}

// HandleRequest registers a callback that is given each outgoing request for
// adjustment before it is sent. It returns the spider to allow call chaining.
func (self *CrawlSpider) HandleRequest(handler func(req *Request)) *CrawlSpider {
	ctx := self.Context
	ctx.Handler = handler
	return self
}

// Process registers the function that turns a fetched Response into a
// PageItem (or an error). It returns the spider to allow call chaining.
//
// NOTE(review): the callback returns error first, which is non-idiomatic Go
// (error should come last); kept as-is because the signature is part of the
// public API that callers already implement.
func (self *CrawlSpider) Process(processer func(resp *Response) (error, *PageItem)) *CrawlSpider {
	ctx := self.Context
	ctx.Processer = processer
	return self
}

// Crawl seeds the context with one request per StartUrls entry, using the
// registered processer for each, then hands control to the schedule, which
// drives the crawl.
func (self *CrawlSpider) Crawl() {
	log.Info("========start crawl spider========")
	ctx := self.Context
	for i := range StartUrls {
		ctx.AddRequest(StartUrls[i], ctx.Processer)
	}
	self.schedule.Schedule()
}
