package crawl

import (
	"context"
	"log"
	"net/http"
	"net/http/cookiejar"
	"net/url"
	"time"
)

// Crawler schedules Crawl jobs: it holds shared client/request options,
// an optional cookie jar, and a retry policy (maxTry + trySleep) applied
// by Start to every queued crawl.
type Crawler struct {
	cOpts    []ClientOption             // accumulated by ClientOpt; presumably applied when building HTTP clients — consumer not visible here
	rOpts    []RequestOption            // accumulated by RequestOpt; presumably applied when building requests — consumer not visible here
	jar      http.CookieJar             // shared cookie jar; nil when cookies are disabled
	next     chan *Crawl                // queue of crawls consumed by Start (buffered, see New)
	errs     chan error                 // error stream logged by Start; unbuffered (see New)
	maxTry   int                        // retry budget: maxTry+1 total attempts, negative means one attempt (see getExecTimes)
	trySleep func(i int) time.Duration  // optional per-attempt back-off; i is the zero-based attempt index
}

// New returns a Crawler ready to accept options and queued crawls.
// The crawl queue is buffered (capacity 5) so a handful of Enqueue calls
// can land before Start drains them; the error channel is unbuffered.
func New() *Crawler {
	c := &Crawler{
		next: make(chan *Crawl, 5),
		errs: make(chan error),
	}
	return c
}

// CookieEnabled toggles the in-memory cookie jar. Called with no argument
// (or with true) it installs a fresh jar; called with false it removes the
// jar entirely. It returns the receiver for chaining.
func (c *Crawler) CookieEnabled(enabled ...bool) *Crawler {
	if len(enabled) > 0 && !enabled[0] {
		c.jar = nil
		return c
	}
	// cookiejar.New currently never returns a non-nil error for any
	// options, so the error is deliberately dropped.
	jar, _ := cookiejar.New(nil)
	c.jar = jar
	return c
}

// GetCookie returns the value of the cookie named key that the jar would
// send for u, or "" when the jar is disabled or no such cookie exists.
func (c *Crawler) GetCookie(u *url.URL, key string) string {
	if c.jar == nil {
		return ""
	}
	for _, ck := range c.jar.Cookies(u) {
		if ck.Name == key {
			return ck.Value
		}
	}
	return ""
}

// SetCookie stores cookie for u in the jar; it is a no-op when the jar is
// disabled.
//
// The jar's SetCookies already inserts-or-replaces a cookie with the same
// name/domain/path, so submitting the single cookie is sufficient. The
// previous implementation re-submitted every cookie returned by
// Cookies(u); since Cookies strips all attributes except Name and Value,
// that silently discarded Path/Domain/Expires/Secure on every other
// cookie stored for the URL.
func (c *Crawler) SetCookie(u *url.URL, cookie *http.Cookie) {
	if c.jar != nil {
		c.jar.SetCookies(u, []*http.Cookie{cookie})
	}
}

// ClientOpt appends the given options to the crawler's client-option list
// and returns the receiver for chaining.
func (c *Crawler) ClientOpt(options ...ClientOption) *Crawler {
	for _, opt := range options {
		c.cOpts = append(c.cOpts, opt)
	}
	return c
}

// RequestOpt appends the given options to the crawler's request-option
// list and returns the receiver for chaining.
func (c *Crawler) RequestOpt(options ...RequestOption) *Crawler {
	for _, opt := range options {
		c.rOpts = append(c.rOpts, opt)
	}
	return c
}

// MaxTry sets the retry budget: a failing crawl is attempted up to
// maxTry+1 times in total; negative values mean a single attempt
// (see getExecTimes). It returns the receiver for chaining.
func (c *Crawler) MaxTry(maxTry int) *Crawler {
	c.maxTry = maxTry
	return c
}

// ErrTry installs a back-off function: before attempt i of a crawl
// (including the very first, i == 0), Start sleeps for trySleep(i),
// cancellable via Start's context. It returns the receiver for chaining.
func (c *Crawler) ErrTry(trySleep func(i int) time.Duration) *Crawler {
	c.trySleep = trySleep
	return c
}

// Crawl builds a new Crawl bound to this Crawler via With (defined
// elsewhere in the package).
func (c *Crawler) Crawl() *Crawl {
	return With(c)
}

// Enqueue submits crawl to the run queue without blocking the caller: the
// send happens on a fresh goroutine, which parks until Start (or the
// queue's buffer) accepts it. It returns the receiver for chaining.
//
// NOTE(review): if Start never runs, every Enqueue beyond the channel's
// buffer leaks one goroutine blocked on the send — confirm Start's
// lifetime covers all producers.
func (c *Crawler) Enqueue(crawl *Crawl) *Crawler {
	go func() { c.next <- crawl }()
	return c
}

// Start runs the crawler's main loop until ctx is cancelled (returning
// ctx.Err()): it logs errors arriving on c.errs, and executes each queued
// crawl with up to getExecTimes(c.maxTry) attempts, sleeping trySleep(i)
// before attempt i when a back-off function is configured.
//
// Bug fix: the previous version sent failures on c.errs from inside this
// loop. c.errs is unbuffered and Start is its receiver, so a send made
// while Start is busy in a select case can never be matched — the send
// after a failed Run deadlocked the loop, and the send before returning a
// Sleep error blocked forever instead of returning. Errors produced here
// are now handled directly.
func (c *Crawler) Start(ctx context.Context) error {
	for {
		select {
		case err := <-c.errs:
			log.Printf("%v\n", err)
		case <-ctx.Done():
			return ctx.Err()
		case crawl := <-c.next:
			attempts := getExecTimes(c.maxTry) // hoisted: invariant across the retry loop
			for i := 0; i < attempts; i++ {
				if c.trySleep != nil {
					// Sleep only fails when ctx is cancelled; propagate
					// the cancellation instead of self-sending on c.errs.
					if err := Sleep(ctx, c.trySleep(i)); err != nil {
						return err
					}
				}
				if err := crawl.Run(ctx); err != nil {
					// Log directly: sending on c.errs here would deadlock,
					// since Start is the channel's only receiver.
					log.Printf("%v\n", err)
				} else {
					break
				}
			}
		}
	}
}

func Sleep(ctx context.Context, sleep time.Duration) error {
	if sleep <= 0 {
		return nil
	}
	tc, cancel := context.WithTimeout(context.Background(), sleep)
	defer cancel()

	select {
	case <-ctx.Done():
		return ctx.Err()
	case <-tc.Done():
		return nil
	}
}

// getExecTimes converts a retry budget into a total attempt count:
// maxTry extra tries on top of the first attempt. Negative budgets are
// clamped to a single attempt.
func getExecTimes(maxTry int) int {
	if maxTry >= 0 {
		return maxTry + 1
	}
	return 1
}
