package common

import (
	"net/http"
	"path"
	"time"
)

// Crawl outcome flags stored in WorkResult.Result.
// NOTE(review): ALL_CAPS names are un-idiomatic Go (convention is MixedCaps),
// but they are exported and may be referenced elsewhere, so they are kept.
const (
	CRAWLED_SUCCESS = true
	CRAWLED_FAILED  = !CRAWLED_SUCCESS
)

// UrlContext pairs a URL with an optional per-URL callback. It is the unit
// of work sent through UrlContextPipe and DownloadUrlPipe; a nil Callback
// means the global SpiderContext.Processer handles the response instead.
type UrlContext struct {
	Url      string
	Callback func(resp *Response) (error, *PageItem)
}
// WorkResult reports the outcome of crawling one request on WorkResultPipe.
// Result is CRAWLED_SUCCESS or CRAWLED_FAILED.
type WorkResult struct {
	Req    *Request
	Result bool
}

// NewWorkResult wraps req in a WorkResult. Result is left at its zero
// value (false, i.e. CRAWLED_FAILED) until a worker sets it.
func NewWorkResult(req *Request) *WorkResult {
	result := new(WorkResult)
	result.Req = req
	return result
}

// SpiderContext holds the shared state for one spider run: configuration,
// the HTTP client, URL filters, and the channels that connect producers
// (AddRequest/AddDownload) to the worker goroutines started by
// SpiderWork/DownloaderWork.
type SpiderContext struct {
	Config          *SpiderConfig
	HttpClient      *http.Client
	Stats           *SpiderStats
	ExcludeFilter   UrlFilter
	IncludeFilter   UrlFilter
	BloomFilter     *BloomFilter // optional URL dedup; used only when ENABLE_BLOOM_FILTER is set
	Processer       func(resp *Response) (error, *PageItem) //process the response
	Handler         func(req *Request)                      //handle the request for setting
	UrlContextPipe  chan *UrlContext                        //url transport pipe
	WorkResultPipe  chan *WorkResult                        //work result out
	DownloadUrlPipe chan *UrlContext                        //put download urls.
	SpiderClose     chan bool                               //when spider close
}

// NewSpdierContext builds a SpiderContext for config, wiring up the HTTP
// client, the buffered work channels and the stats collector.
// NOTE(review): the name keeps its historical misspelling ("Spdier") because
// renaming the exported function would break callers.
func NewSpdierContext(config *SpiderConfig) (context *SpiderContext) {
	transport := &SpiderTransport{config: config}
	context = &SpiderContext{
		Config:          config,
		HttpClient:      transport.Client(),
		UrlContextPipe:  make(chan *UrlContext, 1000),
		DownloadUrlPipe: make(chan *UrlContext, 1000),
		WorkResultPipe:  make(chan *WorkResult, 1000),
		SpiderClose:     make(chan bool, 1),
	}
	context.Stats = NewStats(context)
	return context
}
// urlFilter normalizes url and, when the bloom filter is enabled, drops
// URLs that were already seen (recording unseen ones). It returns the
// normalized URL, or "" when the URL should be skipped.
func (self *SpiderContext) urlFilter(url string) string {
	url = NormalizeUrl(url, "")
	if url == "" {
		return ""
	}
	if !self.Config.ENABLE_BLOOM_FILTER || self.BloomFilter == nil {
		return url
	}
	if self.BloomFilter.Match(url) {
		logger.Info("url filter#BloomFilter#url exists:%s", url)
		return ""
	}
	logger.Info("url filter#BloomFilter#add url:%s", url)
	self.BloomFilter.Set(url)
	return url
}
// AddRequest queues url for crawling with callback invoked on its response.
// URLs rejected by urlFilter (empty after normalization, or already seen)
// are silently dropped.
func (self *SpiderContext) AddRequest(url string, callback func(resp *Response) (error, *PageItem)) {
	if filtered := self.urlFilter(url); filtered != "" {
		self.UrlContextPipe <- &UrlContext{Url: filtered, Callback: callback}
	}
}
// AddDownload queues url for file download (no callback). URLs rejected
// by urlFilter are silently dropped.
func (self *SpiderContext) AddDownload(url string) {
	if filtered := self.urlFilter(url); filtered != "" {
		self.DownloadUrlPipe <- &UrlContext{Url: filtered, Callback: nil}
	}
}
// AddExcludeFilter installs filter as the exclusion filter, replacing any
// previously set one.
func (self *SpiderContext) AddExcludeFilter(filter UrlFilter) { self.ExcludeFilter = filter }
// AddIncludeFilter installs filter as the inclusion filter, replacing any
// previously set one.
func (self *SpiderContext) AddIncludeFilter(filter UrlFilter) { self.IncludeFilter = filter }
// DownloaderWork starts DOWNLOADER_NUM goroutines that consume
// DownloadUrlPipe and save each URL's body under DOWNLOAD_DIR, using the
// URL's last path segment as the file name.
//
// Fix: a failed request previously executed `return`, permanently killing
// the worker goroutine on the first bad URL; it now logs the error and
// continues serving the pipe. The single-case select is replaced by a
// range over the channel, which also lets workers exit cleanly if the
// pipe is ever closed.
func (self *SpiderContext) DownloaderWork() {
	taskNum := self.Config.DOWNLOADER_NUM
	for i := 0; i < taskNum; i++ {
		go func() {
			for url := range self.DownloadUrlPipe {
				// Throttle between downloads.
				time.Sleep(self.Config.DELAY)
				request := NewRequestWithContext(self)
				request, err := request.NewRequest(url.Url)
				if err != nil {
					// Skip this URL but keep the worker alive.
					logger.Error("download#request:%s", err)
					continue
				}
				response := NewResponse(request)
				if err = response.Download(self.Config.DOWNLOAD_DIR, path.Base(url.Url)); err != nil {
					logger.Error("download#response:%s", err)
				}
			}
		}()
	}
}
// SpiderWork starts CONCURRENT_REQUEST worker goroutines that consume
// UrlContextPipe and crawl each queued URL.
//
// Fix: the original registered `defer recover()` inside the infinite
// receive loop, which (a) accumulated one pending defer per processed URL
// — an unbounded leak — and (b) could not actually resume the loop,
// because recover only runs when the goroutine's function returns. The
// per-URL work is now a separate method whose own deferred recover
// confines a panic to that single URL.
func (self *SpiderContext) SpiderWork() {
	taskNum := self.Config.CONCURRENT_REQUEST
	logger.Debug("%d tasks work", taskNum)
	for i := 0; i < taskNum; i++ {
		go func() {
			for urlContext := range self.UrlContextPipe {
				self.crawlOne(urlContext)
			}
		}()
	}
}

// crawlOne crawls a single queued URL: it throttles, builds the request,
// lets the optional Handler adjust it, then runs either the per-URL
// Callback or the global Processer on the response, reporting the outcome
// on WorkResultPipe. A panic anywhere in this method is recovered and
// logged so the calling worker goroutine survives.
func (self *SpiderContext) crawlOne(urlContext *UrlContext) {
	defer func() {
		if x := recover(); x != nil {
			logger.Critical("work#fuck:%s", x)
		}
	}()
	time.Sleep(self.Config.DELAY)
	logger.Debug("work#crawled:%s", urlContext.Url)
	request := NewRequestWithContext(self)
	request, err := request.NewRequest(urlContext.Url)
	if err != nil {
		logger.Error("work#request:%s", err)
		return
	}
	result := NewWorkResult(request)
	if self.Handler != nil {
		self.Handler(request)
	}
	response := NewResponse(request)
	if urlContext.Callback != nil {
		if err, _ = urlContext.Callback(response); err != nil {
			logger.Error("work#callback:%s", err.Error())
			result.Result = CRAWLED_FAILED
		} else {
			logger.Info("crawled %s ok", urlContext.Url)
			result.Result = CRAWLED_SUCCESS
		}
		self.WorkResultPipe <- result
		return
	}
	if self.Processer != nil {
		if err, _ = self.Processer(response); err != nil {
			logger.Error("work#processer:%s", err.Error())
			result.Result = CRAWLED_FAILED
		} else {
			logger.Info("crawled %s ok", urlContext.Url)
			result.Result = CRAWLED_SUCCESS
		}
		self.WorkResultPipe <- result
	}
}

// Spider is a thin facade over a SpiderContext for one-off fetches.
type Spider struct {
	Context *SpiderContext
}

// NewSpider returns a Spider backed by the package-wide default context.
// NOTE(review): the name argument is currently unused — confirm whether it
// was meant to select or label a per-spider context.
func NewSpider(name string) *Spider {
	spider := new(Spider)
	spider.Context = DefaultSpiderContext
	return spider
}
// Fetch builds a request for url using the spider's context and returns
// the resulting Response, or an error if the request could not be built.
func (self *Spider) Fetch(url string) (*Response, error) {
	req := NewRequestWithContext(self.Context)
	req, err := req.NewRequest(url)
	if err != nil {
		return nil, err
	}
	resp := NewResponse(req)
	return resp, nil
}
