package colite

import (
	"bytes"
	"crypto/rand"
	"encoding/json"
	"errors"
	"fmt"
	"hash/fnv"
	"io"
	"io/ioutil"
	"net/http"
	"net/http/cookiejar"
	"net/url"
	"strings"
	"sync"
	"sync/atomic"
	"time"

	"gitee.com/i2worker/colite/debug"
	"gitee.com/i2worker/colite/storage"
	"github.com/PuerkitoBio/goquery"
)

// Collector is the crawling controller: it issues HTTP requests and
// dispatches results through the registered callback chains.
type Collector struct {
	// ID uniquely identifies this Collector instance.
	ID uint32

	// UserAgent is the value sent in the User-Agent request header.
	UserAgent string

	// MaxBodySize is the maximum number of response-body bytes to read;
	// 0 means unlimited. Defaults to 10MB (set in Init).
	MaxBodySize int

	// Async enables asynchronous fetching; use c.Wait() to block until
	// all outstanding requests have finished.
	Async bool

	// AllowURLRevisit allows the same URL to be fetched more than once.
	AllowURLRevisit bool

	// ParseHTTPErrorResponse enables processing of non-2xx HTTP responses.
	ParseHTTPErrorResponse bool

	// redirectHandler is the HTTP redirect policy, installed via
	// c.SetRedirectHandler().
	redirectHandler func(req *http.Request, via []*http.Request) error

	requestCallbacks         []RequestCallback         // OnRequest callback chain
	responseHeadersCallbacks []ResponseHeadersCallback // OnResponseHeaders callback chain
	errorCallbacks           []ErrorCallback           // OnError callback chain
	responseCallbacks        []ResponseCallback        // OnResponse callback chain
	htmlCallbacks            []*htmlCallbackContainer  // OnHTML callback chain
	scrapedCallbacks         []ScrapedCallback         // OnScraped callback chain
	requestCount             uint32                    // number of requests issued (updated atomically)
	responseCount            uint32                    // number of responses received (updated atomically)
	debugger                 debug.Debugger            // optional debugger receiving trace events
	store                    storage.Storage           // visit-history and cookie storage backend
	backend                  *httpBackend              // underlying HTTP client wrapper
	wg                       *sync.WaitGroup           // tracks in-flight fetches for Wait()
	lock                     *sync.RWMutex             // guards the callback slices
}

// RequestCallback is the signature of OnRequest callbacks.
type RequestCallback func(*Request)

// ResponseHeadersCallback is the signature of OnResponseHeaders callbacks.
type ResponseHeadersCallback func(*Response)

// ErrorCallback is the signature of OnError callbacks.
type ErrorCallback func(*Response, error)

// ResponseCallback is the signature of OnResponse callbacks.
type ResponseCallback func(*Response)

// HTMLCallback is the signature of OnHTML callbacks.
type HTMLCallback func(*HTMLElement)

// htmlCallbackContainer pairs an OnHTML callback with its goquery selector.
type htmlCallbackContainer struct {
	Selector string
	Function HTMLCallback
}

// ScrapedCallback is the signature of OnScraped callbacks.
type ScrapedCallback func(*Response)

// ProxyFunc selects the proxy URL to use for a given request.
type ProxyFunc func(*http.Request) (*url.URL, error)

// key is an unexported context-key type to avoid collisions.
type key int

// ProxyURLKey is the request-context key under which the proxy address is stored.
const ProxyURLKey key = iota

// collectorCounter is the global counter used to assign Collector IDs.
var collectorCounter uint32

var (
	// ErrMissingURL is returned when the request URL is empty.
	ErrMissingURL = errors.New("Missing URL")

	// ErrAlreadyVisited is returned when revisiting an already-visited URL.
	ErrAlreadyVisited = errors.New("URL already visited")

	// ErrNoPattern is returned when a LimitRule has no pattern defined.
	ErrNoPattern = errors.New("No pattern defined in LimitRule")

	// ErrAbortedAfterHeaders is returned when an OnResponseHeaders
	// callback aborts the transfer.
	ErrAbortedAfterHeaders = errors.New("Aborted after receiving response headers")

	// ErrNoCookieJar is returned when cookie handling is disabled.
	ErrNoCookieJar = errors.New("Cookie jar is not available")

	// ErrEmptyProxyURL is returned when the proxy URL list is empty.
	ErrEmptyProxyURL = errors.New("Proxy URL list is empty")

	// ErrQueueFull is returned when the request queue reached MaxSize.
	ErrQueueFull = errors.New("Queue MaxSize reached")
)

// NewCollector creates a Collector instance with the default configuration.
func NewCollector() *Collector {
	collector := new(Collector)
	collector.Init()
	return collector
}

// Init initializes the Collector with default settings, an in-memory
// storage backend, an HTTP client with a cookie jar, and the
// synchronization primitives used for async fetching.
func (c *Collector) Init() {
	// Default settings.
	c.ID = atomic.AddUint32(&collectorCounter, 1)
	c.UserAgent = "colite - Colly Lite: Scraping Framework for Gophers"
	c.MaxBodySize = 10 * 1024 * 1024

	// Storage backend (in-memory by default).
	c.store = &storage.InMemoryStorage{}
	c.store.Init()

	// HTTP backend with cookie support and the default redirect policy.
	cookieJar, _ := cookiejar.New(nil) // cookiejar.New with nil options never fails
	c.backend = &httpBackend{}
	c.backend.Init(cookieJar)
	c.backend.Client.CheckRedirect = c.checkRedirectFunc()

	// Synchronization primitives.
	c.wg = &sync.WaitGroup{}
	c.lock = &sync.RWMutex{}
}

// checkRedirectFunc builds the redirect policy installed on the HTTP
// client. A user-provided redirectHandler takes precedence; otherwise
// redirects are capped at 10 (the last response is used as-is) and the
// Authorization header is stripped on cross-host redirects.
func (c *Collector) checkRedirectFunc() func(req *http.Request, via []*http.Request) error {
	return func(req *http.Request, via []*http.Request) error {
		if handler := c.redirectHandler; handler != nil {
			return handler(req, via)
		}
		// Stop following after 10 redirects; keep the last response.
		if len(via) >= 10 {
			return http.ErrUseLastResponse
		}
		// Drop credentials when the redirect leaves the original host.
		if prev := via[len(via)-1]; prev.URL.Host != req.URL.Host {
			req.Header.Del("Authorization")
		}
		return nil
	}
}

// Visit issues a GET request to URL (without carrying over a context).
func (c *Collector) Visit(URL string) error {
	return c.scrape(URL, "GET", nil, nil, nil, true)
}

// HasVisited reports whether the given URL has already been visited.
func (c *Collector) HasVisited(URL string) (bool, error) {
	hasher := fnv.New64a()
	hasher.Write([]byte(URL))
	return c.store.IsVisited(hasher.Sum64())
}

// Head issues a HEAD request to URL (revisit checking disabled).
func (c *Collector) Head(URL string) error {
	return c.scrape(URL, "HEAD", nil, nil, nil, false)
}

// Post issues a POST request to URL with requestData form-encoded as the body.
func (c *Collector) Post(URL string, requestData map[string]string) error {
	return c.scrape(URL, "POST", createFormReader(requestData), nil, nil, true)
}

// PostRaw issues a POST request to URL with requestData as the raw body.
func (c *Collector) PostRaw(URL string, requestData []byte) error {
	return c.scrape(URL, "POST", bytes.NewReader(requestData), nil, nil, true)
}

// PostMultipart issues a POST request to URL with a multipart/form-data
// body assembled from the given name -> content map.
func (c *Collector) PostMultipart(URL string, requestData map[string][]byte) error {
	boundary := randomBoundary()
	header := http.Header{}
	header.Set("Content-Type", "multipart/form-data; boundary="+boundary)
	header.Set("User-Agent", c.UserAgent)
	body := createMultipartReader(boundary, requestData)
	return c.scrape(URL, "POST", body, nil, header, true)
}

// Request issues a request with the given method, body, context and headers.
func (c *Collector) Request(method, URL string, requestData io.Reader, ctx *Context, hdr http.Header) error {
	return c.scrape(URL, method, requestData, ctx, hdr, true)
}

// createFormReader encodes the given key/value pairs as an
// application/x-www-form-urlencoded body and returns a reader over it.
func createFormReader(data map[string]string) io.Reader {
	values := url.Values{}
	for field, value := range data {
		values.Add(field, value)
	}
	return strings.NewReader(values.Encode())
}

// randomBoundary returns a random 60-character lowercase-hex string
// suitable as a multipart boundary. It panics if the system's secure
// random source is unavailable.
func randomBoundary() string {
	var raw [30]byte
	if _, err := io.ReadFull(rand.Reader, raw[:]); err != nil {
		panic(err)
	}
	return fmt.Sprintf("%x", raw[:])
}

// createMultipartReader builds a multipart/form-data style body from the
// given field name -> content map using the supplied boundary.
// NOTE(review): this hand-rolled format writes a leading "Content-type:"
// line into the body itself and uses "\n" rather than CRLF; kept as-is
// for compatibility (mime/multipart.Writer would be the standard way).
func createMultipartReader(boundary string, data map[string][]byte) io.Reader {
	buffer := &bytes.Buffer{}
	delimiter := "--" + boundary
	fmt.Fprintf(buffer, "Content-type: multipart/form-data; boundary=%s\n\n", boundary)
	for name, content := range data {
		fmt.Fprintf(buffer, "%s\n", delimiter)
		fmt.Fprintf(buffer, "Content-Disposition: form-data; name=%s\n", name)
		fmt.Fprintf(buffer, "Content-Length: %d \n\n", len(content))
		buffer.Write(content)
		buffer.WriteString("\n")
	}
	fmt.Fprintf(buffer, "%s--\n\n", delimiter)
	return buffer
}

// scrape validates the target URL, builds an *http.Request and hands it
// to fetch — on a new goroutine when Async is enabled.
func (c *Collector) scrape(u, method string, requestData io.Reader, ctx *Context, hdr http.Header, checkRevisit bool) error {
	// Parse and validate the request URL (including the revisit policy).
	parsedURL, err := url.Parse(u)
	if err != nil {
		return err
	}
	if err := c.checkRequest(u, parsedURL, method, checkRevisit); err != nil {
		return err
	}

	// Build the HTTP request; default headers carry only the User-Agent.
	if hdr == nil {
		hdr = http.Header{"User-Agent": []string{c.UserAgent}}
	}
	rc, ok := requestData.(io.ReadCloser)
	if !ok && requestData != nil {
		rc = ioutil.NopCloser(requestData)
	}
	// An explicit "Host" header entry overrides the host from the URL.
	host := parsedURL.Host
	if hostHeader := hdr.Get("Host"); hostHeader != "" {
		host = hostHeader
	}
	req := &http.Request{
		Method:     method,
		URL:        parsedURL,
		Header:     hdr,
		Body:       rc,
		Host:       host,
		Proto:      "HTTP/1.1",
		ProtoMajor: 1,
		ProtoMinor: 1,
	}
	// Make the body replayable (for redirects) where the type allows it.
	setRequestBody(req, requestData)

	// Launch the request; the WaitGroup entry added here is released by fetch.
	u = parsedURL.String()
	c.wg.Add(1)
	if c.Async {
		// NOTE: in async mode fetch errors surface only through the
		// OnError callbacks; this call always returns nil.
		go c.fetch(u, method, requestData, ctx, hdr, req)
		return nil
	}
	return c.fetch(u, method, requestData, ctx, hdr, req)
}

// checkRequest validates the request URL and, for plain GET requests with
// revisit checking enabled, enforces the revisit policy against storage.
// The parsedURL parameter is currently unused but kept for interface
// stability.
func (c *Collector) checkRequest(u string, parsedURL *url.URL, method string, checkRevisit bool) error {
	if u == "" {
		return ErrMissingURL
	}

	// Revisit bookkeeping applies only to GET requests when enabled.
	if !checkRevisit || c.AllowURLRevisit || method != "GET" {
		return nil
	}

	hasher := fnv.New64a()
	hasher.Write([]byte(u))
	uHash := hasher.Sum64()
	visited, err := c.store.IsVisited(uHash)
	if err != nil {
		return err
	}
	if visited {
		return ErrAlreadyVisited
	}
	// Mark as visited up front, before the request is performed.
	return c.store.Visited(uHash)
}

// setRequestBody 设置HTTP请求数据获取方法(复制数据关闭响应后仍能使用)
func setRequestBody(req *http.Request, body io.Reader) {
	if body != nil {
		switch v := body.(type) {
		case *bytes.Buffer:
			req.ContentLength = int64(v.Len())
			buf := v.Bytes()
			req.GetBody = func() (io.ReadCloser, error) {
				r := bytes.NewReader(buf)
				return ioutil.NopCloser(r), nil
			}
		case *bytes.Reader:
			req.ContentLength = int64(v.Len())
			snapshot := *v
			req.GetBody = func() (io.ReadCloser, error) {
				r := snapshot
				return ioutil.NopCloser(&r), nil
			}
		case *strings.Reader:
			req.ContentLength = int64(v.Len())
			snapshot := *v
			req.GetBody = func() (io.ReadCloser, error) {
				r := snapshot
				return ioutil.NopCloser(&r), nil
			}
		}
		if req.GetBody != nil && req.ContentLength == 0 {
			req.Body = http.NoBody
			req.GetBody = func() (io.ReadCloser, error) { return http.NoBody, nil }
		}
	}
}

// fetch executes the prepared HTTP request and drives the callback
// chains in order: OnRequest -> OnResponseHeaders -> OnError ->
// OnResponse -> OnHTML -> OnScraped. It always releases the WaitGroup
// entry added by scrape.
func (c *Collector) fetch(u, method string, requestData io.Reader, ctx *Context, hdr http.Header, req *http.Request) error {
	// Build the Request wrapper handed to callbacks.
	defer c.wg.Done()
	if ctx == nil {
		ctx = NewContext()
	}
	request := &Request{
		ID:        atomic.AddUint32(&c.requestCount, 1),
		URL:       req.URL,
		Method:    method,
		Headers:   &req.Header,
		Body:      requestData,
		Ctx:       ctx,
		collector: c,
	}

	// Run the OnRequest chain; a callback may abort the request.
	c.handleOnRequest(request)
	if request.abort {
		return nil
	}

	// Fill in default request headers.
	if req.Header.Get("Accept") == "" {
		req.Header.Set("Accept", "*/*")
	}
	if method == "POST" && req.Header.Get("Content-Type") == "" {
		req.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	}

	// checkHeadersFunc lets OnResponseHeaders callbacks abort the
	// transfer before the body is downloaded.
	checkHeadersFunc := func(statusCode int, headers http.Header) bool {
		c.handleOnResponseHeaders(&Response{StatusCode: statusCode, Headers: &headers, Request: request, Ctx: ctx})
		return !request.abort
	}

	// Perform the request and read the response body.
	origURL := req.URL
	response, err := c.backend.Do(req, c.MaxBodySize, checkHeadersFunc)

	// Record the proxy actually used, if one was stored in the context.
	if proxyURL, ok := req.Context().Value(ProxyURLKey).(string); ok {
		request.ProxyURL = proxyURL
	}

	// Run the OnError chain; stop here if the request failed.
	if err := c.handleOnError(response, err, request, ctx); err != nil {
		return err
	}

	// The URL/headers may have changed after redirects; reflect that.
	if req.URL != origURL {
		request.URL = req.URL
		request.Headers = &req.Header
	}
	atomic.AddUint32(&c.responseCount, 1)
	response.Request = request
	response.Ctx = ctx
	err = response.fixCharset(request.ResponseCharacterEncoding)
	if err != nil {
		return err
	}

	// Run the OnResponse chain.
	c.handleOnResponse(response)

	// Run the OnHTML chain; parse errors are routed to OnError
	// (the returned error is deliberately not propagated here).
	err = c.handleOnHTML(response)
	if err != nil {
		c.handleOnError(response, err, request, ctx)
	}

	// Run the OnScraped chain.
	c.handleOnScraped(response)

	return nil
}

// handleOnRequest reports the request to the debugger (if attached) and
// runs every registered OnRequest callback in registration order.
func (c *Collector) handleOnRequest(r *Request) {
	if d := c.debugger; d != nil {
		d.Event(createEvent("request", r.ID, c.ID, map[string]string{
			"url": r.URL.String(),
		}))
	}
	for _, callback := range c.requestCallbacks {
		callback(r)
	}
}

// handleOnResponseHeaders reports the received headers to the debugger
// (if attached) and runs every OnResponseHeaders callback in order.
func (c *Collector) handleOnResponseHeaders(r *Response) {
	if d := c.debugger; d != nil {
		d.Event(createEvent("responseHeaders", r.Request.ID, c.ID, map[string]string{
			"url":    r.Request.URL.String(),
			"status": http.StatusText(r.StatusCode),
		}))
	}
	for _, callback := range c.responseHeadersCallbacks {
		callback(r)
	}
}

// handleOnResponse reports the response to the debugger (if attached)
// and runs every OnResponse callback in registration order.
func (c *Collector) handleOnResponse(r *Response) {
	if d := c.debugger; d != nil {
		d.Event(createEvent("response", r.Request.ID, c.ID, map[string]string{
			"url":    r.Request.URL.String(),
			"status": http.StatusText(r.StatusCode),
		}))
	}
	for _, callback := range c.responseCallbacks {
		callback(r)
	}
}

// handleOnHTML parses HTML responses with goquery and invokes every
// OnHTML callback whose selector matches. Responses whose Content-Type
// does not contain "html" are skipped, as is the whole pass when no
// OnHTML callbacks are registered.
func (c *Collector) handleOnHTML(resp *Response) error {
	if len(c.htmlCallbacks) == 0 || !strings.Contains(strings.ToLower(resp.Headers.Get("Content-Type")), "html") {
		return nil
	}

	// Parse the body into a goquery document.
	doc, err := goquery.NewDocumentFromReader(bytes.NewBuffer(resp.Body))
	if err != nil {
		return err
	}

	// Honor a <base href> element for relative-URL resolution.
	if href, found := doc.Find("base[href]").Attr("href"); found {
		resp.Request.baseURL, _ = url.Parse(href)
	}

	// Run each selector query, then invoke its callback once per matched
	// node; i is the running match index across all selections.
	for _, cc := range c.htmlCallbacks {
		i := 0
		doc.Find(cc.Selector).Each(func(_ int, s *goquery.Selection) {
			for _, n := range s.Nodes {
				e := NewHTMLElementFromSelectionNode(resp, s, n, i)
				i++
				if c.debugger != nil {
					c.debugger.Event(createEvent("html", resp.Request.ID, c.ID, map[string]string{
						"selector": cc.Selector,
						"url":      resp.Request.URL.String(),
					}))
				}
				cc.Function(e)
			}
		})
	}
	return nil
}

// handleOnError decides whether the response counts as an error and, if
// so, runs the OnError chain. Returns nil for successful responses (or
// any response when ParseHTTPErrorResponse is set), otherwise the error.
func (c *Collector) handleOnError(response *Response, err error, request *Request, ctx *Context) error {
	// A status below 203 counts as success. The err == nil term also
	// guards the dereference: a nil response always arrives with err set.
	if err == nil && (c.ParseHTTPErrorResponse || response.StatusCode < 203) {
		return nil
	}
	// Synthesize an error from the HTTP status when none was returned.
	if err == nil && response.StatusCode >= 203 {
		err = errors.New(http.StatusText(response.StatusCode))
	}
	// Build a placeholder response so callbacks always receive one.
	if response == nil {
		response = &Response{
			Request: request,
			Ctx:     ctx,
		}
	}
	if c.debugger != nil {
		c.debugger.Event(createEvent("error", request.ID, c.ID, map[string]string{
			"url":    request.URL.String(),
			"status": http.StatusText(response.StatusCode),
		}))
	}
	if response.Request == nil {
		response.Request = request
	}
	if response.Ctx == nil {
		response.Ctx = request.Ctx
	}
	for _, f := range c.errorCallbacks {
		f(response, err)
	}
	return err
}

// handleOnScraped reports scrape completion to the debugger (if
// attached) and runs every OnScraped callback in registration order.
func (c *Collector) handleOnScraped(r *Response) {
	if d := c.debugger; d != nil {
		d.Event(createEvent("scraped", r.Request.ID, c.ID, map[string]string{
			"url": r.Request.URL.String(),
		}))
	}
	for _, callback := range c.scrapedCallbacks {
		callback(r)
	}
}

// createEvent builds a debug.Event carrying the given key/value details.
func createEvent(eventType string, requestID, collectorID uint32, kvargs map[string]string) *debug.Event {
	return &debug.Event{
		CollectorID: collectorID,
		RequestID:   requestID,
		Type:        eventType,
		Values:      kvargs,
	}
}

// OnRequest registers a callback that runs before every request is sent.
func (c *Collector) OnRequest(f RequestCallback) {
	c.lock.Lock()
	defer c.lock.Unlock()
	if c.requestCallbacks == nil {
		c.requestCallbacks = make([]RequestCallback, 0, 4)
	}
	c.requestCallbacks = append(c.requestCallbacks, f)
}

// OnResponseHeaders registers a callback that runs once response headers
// are received, before the response body is read.
func (c *Collector) OnResponseHeaders(f ResponseHeadersCallback) {
	c.lock.Lock()
	defer c.lock.Unlock()
	if c.responseHeadersCallbacks == nil {
		c.responseHeadersCallbacks = make([]ResponseHeadersCallback, 0, 4)
	}
	c.responseHeadersCallbacks = append(c.responseHeadersCallbacks, f)
}

// OnResponse registers a callback that runs after a response is received.
func (c *Collector) OnResponse(f ResponseCallback) {
	c.lock.Lock()
	defer c.lock.Unlock()
	if c.responseCallbacks == nil {
		c.responseCallbacks = make([]ResponseCallback, 0, 4)
	}
	c.responseCallbacks = append(c.responseCallbacks, f)
}

// OnHTML registers a callback that runs for every HTML element matching
// the given goquery selector.
func (c *Collector) OnHTML(goquerySelector string, f HTMLCallback) {
	c.lock.Lock()
	defer c.lock.Unlock()
	if c.htmlCallbacks == nil {
		c.htmlCallbacks = make([]*htmlCallbackContainer, 0, 4)
	}
	container := &htmlCallbackContainer{
		Selector: goquerySelector,
		Function: f,
	}
	c.htmlCallbacks = append(c.htmlCallbacks, container)
}

// OnHTMLDetach removes the first OnHTML callback registered for the
// given goquery selector, if any.
func (c *Collector) OnHTMLDetach(goquerySelector string) {
	c.lock.Lock()
	defer c.lock.Unlock()
	for i, container := range c.htmlCallbacks {
		if container.Selector == goquerySelector {
			c.htmlCallbacks = append(c.htmlCallbacks[:i], c.htmlCallbacks[i+1:]...)
			return
		}
	}
}

// OnError registers a callback that runs when a request fails or the
// response status is treated as an error.
func (c *Collector) OnError(f ErrorCallback) {
	c.lock.Lock()
	defer c.lock.Unlock()
	if c.errorCallbacks == nil {
		c.errorCallbacks = make([]ErrorCallback, 0, 4)
	}
	c.errorCallbacks = append(c.errorCallbacks, f)
}

// OnScraped registers a callback that runs after all other callback
// chains have finished for a response.
func (c *Collector) OnScraped(f ScrapedCallback) {
	c.lock.Lock()
	defer c.lock.Unlock()
	if c.scrapedCallbacks == nil {
		c.scrapedCallbacks = make([]ScrapedCallback, 0, 4)
	}
	c.scrapedCallbacks = append(c.scrapedCallbacks, f)
}

// Wait blocks until all in-flight requests finish (used with Async mode).
func (c *Collector) Wait() {
	c.wg.Wait()
}

// SetDebugger initializes d and attaches it to the Collector.
func (c *Collector) SetDebugger(d debug.Debugger) {
	d.Init()
	c.debugger = d
}

// SetStorage replaces the Collector's storage backend. The cookie jar
// is rebuilt on top of the same storage so cookies are shared through it.
func (c *Collector) SetStorage(s storage.Storage) error {
	if err := s.Init(); err != nil {
		return err
	}
	c.store = s
	c.backend.Client.Jar = createJar(s) // share cookies via the storage backend
	return nil
}

// Limit adds a rate-limit rule to the HTTP backend.
func (c *Collector) Limit(rule *LimitRule) error {
	return c.backend.Limit(rule)
}

// Limits adds multiple rate-limit rules to the HTTP backend.
func (c *Collector) Limits(rules []*LimitRule) error {
	return c.backend.Limits(rules)
}

// WithTransport sets the http.RoundTripper used by the HTTP client.
func (c *Collector) WithTransport(transport http.RoundTripper) {
	c.backend.Client.Transport = transport
}

// SetClient replaces the underlying http.Client.
func (c *Collector) SetClient(client *http.Client) {
	c.backend.Client = client
}

// SetRequestTimeout sets the HTTP client timeout (per the original
// comment the backend default is 10 seconds).
func (c *Collector) SetRequestTimeout(timeout time.Duration) {
	c.backend.Client.Timeout = timeout
}

// SetRedirectHandler installs a custom redirect policy and rebuilds the
// client's CheckRedirect function around it.
func (c *Collector) SetRedirectHandler(f func(req *http.Request, via []*http.Request) error) {
	c.redirectHandler = f
	c.backend.Client.CheckRedirect = c.checkRedirectFunc()
}

// SetCookieJar replaces the HTTP client's cookie jar.
func (c *Collector) SetCookieJar(j http.CookieJar) {
	c.backend.Client.Jar = j
}

// DisableCookies turns off cookie handling by removing the cookie jar.
func (c *Collector) DisableCookies() {
	c.backend.Client.Jar = nil
}

// SetCookies stores the given cookies for URL in the cookie jar.
// Returns ErrNoCookieJar when cookie handling is disabled.
func (c *Collector) SetCookies(URL string, cookies []*http.Cookie) error {
	jar := c.backend.Client.Jar
	if jar == nil {
		return ErrNoCookieJar
	}
	u, err := url.Parse(URL)
	if err != nil {
		return err
	}
	jar.SetCookies(u, cookies)
	return nil
}

// Cookies returns the cookies stored for URL, or nil when cookie
// handling is disabled or the URL cannot be parsed.
func (c *Collector) Cookies(URL string) []*http.Cookie {
	jar := c.backend.Client.Jar
	if jar == nil {
		return nil
	}
	u, err := url.Parse(URL)
	if err != nil {
		return nil
	}
	return jar.Cookies(u)
}

// SetProxy routes subsequent requests through the proxy at proxyURL.
func (c *Collector) SetProxy(proxyURL string) error {
	parsed, err := url.Parse(proxyURL)
	if err != nil {
		return err
	}
	c.SetProxyFunc(http.ProxyURL(parsed))
	return nil
}

// SetProxyFunc installs p as the proxy-selection function on the
// underlying transport, creating an http.Transport when the current
// transport is absent or not an *http.Transport.
func (c *Collector) SetProxyFunc(p ProxyFunc) {
	// A successful type assertion already implies a non-nil transport,
	// so no separate nil check is needed.
	if t, ok := c.backend.Client.Transport.(*http.Transport); ok {
		t.Proxy = p
		return
	}
	c.backend.Client.Transport = &http.Transport{Proxy: p}
}

// UnmarshalRequest rebuilds a Request from serialized JSON data,
// assigning it a fresh request ID and a new context populated from the
// serialized one.
func (c *Collector) UnmarshalRequest(r []byte) (*Request, error) {
	// Decode into the intermediate serializable form first.
	req := &serializableRequest{}
	if err := json.Unmarshal(r, req); err != nil {
		return nil, err
	}

	// Restore the URL and context.
	u, err := url.Parse(req.URL)
	if err != nil {
		return nil, err
	}
	ctx := NewContext()
	for key, value := range req.Ctx {
		ctx.Put(key, value)
	}

	return &Request{
		ID:        atomic.AddUint32(&c.requestCount, 1),
		URL:       u,
		Method:    req.Method,
		Headers:   &req.Headers,
		Body:      bytes.NewReader(req.Body),
		Ctx:       ctx,
		collector: c,
	}, nil
}

// Clone returns a new Collector that shares this one's HTTP backend,
// storage, debugger and lock, but starts with empty callback lists and
// its own WaitGroup. Configuration fields are copied.
func (c *Collector) Clone() *Collector {
	return &Collector{
		ID:                     atomic.AddUint32(&collectorCounter, 1),
		UserAgent:              c.UserAgent,
		MaxBodySize:            c.MaxBodySize,
		Async:                  c.Async,
		AllowURLRevisit:        c.AllowURLRevisit,
		ParseHTTPErrorResponse: c.ParseHTTPErrorResponse,

		redirectHandler:  c.redirectHandler,
		requestCallbacks: make([]RequestCallback, 0, 8),
		// FIX: responseHeadersCallbacks was the only callback list not
		// initialized here; add it for consistency with its siblings.
		responseHeadersCallbacks: make([]ResponseHeadersCallback, 0, 8),
		errorCallbacks:           make([]ErrorCallback, 0, 8),
		responseCallbacks:        make([]ResponseCallback, 0, 8),
		htmlCallbacks:            make([]*htmlCallbackContainer, 0, 8),
		scrapedCallbacks:         make([]ScrapedCallback, 0, 8),
		debugger:                 c.debugger,
		store:                    c.store,
		backend:                  c.backend,
		wg:                       &sync.WaitGroup{},
		lock:                     c.lock,
	}
}

// String returns a human-readable summary of the Collector's activity.
// FIX: requestCount/responseCount are written with atomic.AddUint32 from
// concurrently running fetches (Async mode), so they must be read with
// atomic.LoadUint32 to avoid a data race.
func (c *Collector) String() string {
	return fmt.Sprintf(
		"Requests made: %d (%d responses) | Callbacks: OnRequest: %d, OnResponse: %d, OnHTML: %d, OnError: %d",
		atomic.LoadUint32(&c.requestCount),
		atomic.LoadUint32(&c.responseCount),
		len(c.requestCallbacks),
		len(c.responseCallbacks),
		len(c.htmlCallbacks),
		len(c.errorCallbacks),
	)
}

// cookieJarSerializer is an http.CookieJar implementation that persists
// cookies through a storage.Storage backend.
type cookieJarSerializer struct {
	store storage.Storage
	lock  *sync.RWMutex
}

// createJar creates a storage-backed cookie jar.
func createJar(s storage.Storage) http.CookieJar {
	return &cookieJarSerializer{store: s, lock: &sync.RWMutex{}}
}

// SetCookies merges the given cookies into the storage for u: incoming
// cookies win, and previously stored cookies whose names are not being
// replaced are kept.
func (j *cookieJarSerializer) SetCookies(u *url.URL, cookies []*http.Cookie) {
	j.lock.Lock()
	defer j.lock.Unlock()

	// Start from the incoming cookies, then retain any stored cookie
	// whose name is not overridden by them.
	merged := make([]*http.Cookie, len(cookies))
	copy(merged, cookies)
	for _, old := range storage.UnstringifyCookies(j.store.Cookies(u)) {
		if !storage.ContainsCookie(merged, old.Name) {
			merged = append(merged, old)
		}
	}
	j.store.SetCookies(u, storage.StringifyCookies(merged))
}

// Cookies returns the cookies stored for u, dropping expired cookies and
// Secure cookies on non-HTTPS URLs.
// FIX: take the read lock while reading the store — SetCookies mutates
// the same store under j.lock, so an unguarded read races with it.
func (j *cookieJarSerializer) Cookies(u *url.URL) []*http.Cookie {
	j.lock.RLock()
	cookies := storage.UnstringifyCookies(j.store.Cookies(u))
	j.lock.RUnlock()

	now := time.Now()
	filtered := make([]*http.Cookie, 0, len(cookies))
	for _, c := range cookies {
		// Only cookies that carried an explicit Expires value can be
		// detected as expired here.
		if c.RawExpires != "" && c.Expires.Before(now) {
			continue
		}
		// Secure cookies must not be exposed over plain HTTP.
		if c.Secure && u.Scheme != "https" {
			continue
		}
		filtered = append(filtered, c)
	}
	return filtered
}
