package main

import "io"
import "fmt"
import "bytes"
import "time"
import "net/http"
import "net/url"
import "github.com/golang/glog"
import "github.com/PuerkitoBio/goquery"
import "github.com/ncabatoff/fetchbot"
import "code.google.com/p/fetchbolt/db"
import "strings"

// fetchResult describes the outcome of executing one fetchCommand:
// the links extracted from the page (if it was HTML), the error
// encountered (if any), and when the fetch completed.
type fetchResult struct {
	*fetchCommand
	hrefs []string  // href values harvested from the fetched document; nil on failure or non-HTML
	err   error     // non-nil if the HEAD/GET or HTML parse failed
	at    time.Time // time the (last) request for this command finished
}

// fetchCommand pairs a fetchbot command (URL + HTTP method) with the
// DB node identifying that URL, so results can be written back to the
// right record.
type fetchCommand struct {
	fetchbot.Cmd
	db.NodeId
}

// String renders the result compactly for logging: URL, fetch time,
// error, and extracted links.
func (f *fetchResult) String() string {
	u := f.URL().String()
	return fmt.Sprintf("U=%s T=%s E=%v L=%v", u, f.at, f.err, f.hrefs)
}

// dispatcher manages the queue of work to do and the worker pool
type dispatcher struct {
	baseurl *url.URL          // root URL of the host being crawled; queue entries resolve relative to it
	nexturl chan string       // NOTE(review): appears unused in this file — confirm before removing
	results chan fetchResult  // completed fetches delivered by the worker (wired to worker.out in main)
	db      *db.DB            // bolt-backed store of nodes, edges and refresh times
	obeyRobots bool           // when true the fetcher honors robots.txt
	CrawlDelay time.Duration  // minimum delay between requests, passed to the fetchbot config
}

// newDispatcher opens the bolt db at path and the named bucket within.
// It then spawns goroutines to crawl all the items in the bucket.
//
// hostscheme is the base URL (scheme + host) to crawl; obeyRobots
// controls robots.txt handling and crawlDelay is the minimum delay
// between requests. Returns a non-nil error if hostscheme does not
// parse or the DB cannot be opened.
func newDispatcher(dbpath string, hostscheme string, obeyRobots bool, crawlDelay time.Duration) (*dispatcher, error) {
	u, err := url.Parse(hostscheme)
	if err != nil {
		// Error strings carry no trailing newline; the caller decides layout.
		return nil, fmt.Errorf("resolve URL '%s': %v", hostscheme, err)
	}
	d := dispatcher{
		baseurl:    u,
		CrawlDelay: crawlDelay,
		obeyRobots: obeyRobots,
		nexturl:    make(chan string),
		results:    make(chan fetchResult),
	}
	if err := d.initdb(dbpath); err != nil {
		werr := fmt.Errorf("error initializing bolt: %v", err)
		glog.Error(werr)
		return nil, werr
	}
	return &d, nil
}

// initdb opens the bolt database at path, scoped to the dispatcher's
// base URL, and stores the handle on the dispatcher.
func (d *dispatcher) initdb(path string) error {
	// Name the local "dbh" rather than "db" to avoid shadowing the db package.
	dbh, err := db.OpenDbForUrl(path, d.baseurl.String())
	if err != nil {
		return err
	}
	d.db = dbh
	return nil
}

// getNextCmd returns either the next fetch command to execute, or a
// timer channel that fires when the earliest queued item becomes due.
// Exactly one of the two return values is non-nil. If the queue is
// empty it seeds it with the base URL and reads again. Queue errors
// are fatal.
func (d *dispatcher) getNextCmd() (*fetchCommand, <-chan time.Time) {
	next, t, err := d.db.GetFirst()
	if err != nil {
		glog.Fatalf("error reading queue: %v", err)
	}

	if next == nil {
		// Queue is empty: seed it with the base URL so crawling can start.
		ns, err := d.db.GetNodes([]string{d.baseurl.String()})
		if err != nil {
			glog.Fatal(err)
		}
		// Schedule it one second in the past so it is immediately due.
		err = d.db.SetRefreshTimeIfUnset(ns, time.Now().Add(time.Second*-1))
		if err != nil {
			glog.Fatal(err)
		}
		next, t, err = d.db.GetFirst()
		if err != nil {
			glog.Fatalf("error reading queue: %v", err)
		}
		if next == nil {
			// BUG fixed: this branch previously logged "error reading
			// queue: %v" with a guaranteed-nil err, printing a useless
			// "<nil>" instead of the actual problem.
			glog.Fatal("queue still empty after seeding base URL")
		}
	}

	glog.V(1).Infof("next url=[%s] with fetchAt=%s", next.NodeName(), t)

	now := time.Now()
	if now.Before(t) {
		// Nothing is due yet: hand back a timer channel instead of a command.
		delay := t.Sub(now)
		glog.Infof("will sleep up to %s", delay)
		return nil, time.NewTimer(delay).C
	}

	nexturl := string(next.NodeName())
	u, err := d.baseurl.Parse(nexturl)
	if err != nil {
		// This is a more plausible failure, though in principle the
		// only URLs we write to the DB are ones that have already been
		// parsed. TODO make sure this is enforced everywhere.
		glog.Fatalf("error parsing queue item '%s': %v", nexturl, err)
	}
	// Keyed literal (was unkeyed, which go vet flags). Start with HEAD;
	// worker.fetch upgrades to GET when the target is HTML.
	return &fetchCommand{fetchbot.Cmd{U: u, M: "HEAD"}, next}, nil
}

// main runs the dispatch loop forever: it pulls the next due URL from
// the DB-backed queue, hands it to the worker, and records each result
// as it arrives.
func (d *dispatcher) main() {
	w := d.newWorker(d.baseurl, d.obeyRobots, d.CrawlDelay)
	w.out = d.results
	go w.main()

	// getNextCmd returns either a command or a sleep chan.
	// If it's a command, send it and wait for the result.
	// Otherwise wait for the sleep to complete.
	nextcmd, sleepchan := d.getNextCmd()
	if nextcmd != nil {
		w.in <- nextcmd
	}

	// Re-use the hrefs array to avoid re-allocation
	hrefs := make([]string, 0, 1000)
	last := ""
	for {
		select {
		case r := <-d.results:
			hrefs = getUniqueSortedStrings(r.hrefs, hrefs[:0])
			d.handleResult(w, r, hrefs)
		case <-sleepchan:
		}

		nextcmd, sleepchan = d.getNextCmd()
		s := ""
		if nextcmd != nil {
			s = nextcmd.URL().String()
			if s == last {
				// Back off when the queue keeps returning the same URL,
				// e.g. because its refresh time isn't advancing.
				sleepTime := time.Second * 5
				// BUG fixed: was glog.V(1).Info(...), which does not do
				// printf substitution — the %s verb was never expanded.
				glog.V(1).Infof("same URL as last, sleeping %s", sleepTime)
				time.Sleep(sleepTime)
			}
			w.in <- nextcmd
		}
		last = s
	}
}

// handleResult persists the outcome of a fetch: it records the outgoing
// link edges, schedules newly discovered same-host URLs for fetching,
// and pushes this URL's own refresh time into the future.
func (d *dispatcher) handleResult(w *worker, r fetchResult, uniqueHrefs []string) {
	glog.V(1).Info("got result: ", r)
	if r.err != nil {
		// TODO record failure, bump URL or host schedule
		return
	}

	linked, err := d.db.GetNodes(uniqueHrefs)
	if err != nil {
		glog.Errorf("error handling result: %v", err)
		return
	}

	if err := d.db.SetEdges(r.NodeId, linked); err != nil {
		glog.Fatalf("error recording edges: %v", err)
	}

	// Only URLs under our base are candidates for further crawling.
	prefix := d.baseurl.String() + "/"
	sameHost := db.FilterNodes(linked, func(s string) bool {
		return strings.HasPrefix(s, prefix)
	})

	// Make newly discovered same-host pages due immediately (one second
	// in the past), without clobbering any existing schedule.
	if err := d.db.SetRefreshTimeIfUnset(sameHost, time.Now().Add(time.Second*-1)); err != nil {
		glog.Fatalf("error updating refresh times: %v", err)
	}

	// Revisit the page we just fetched in 12 hours.
	if err := d.db.SetRefreshTime(r.NodeId, time.Now().Add(time.Hour*12)); err != nil {
		glog.Fatalf("error updating refresh times: %v", err)
	}
}

// botResult carries one HTTP response from the fetchbot callback
// (HandleCmd) back to the worker, with the body already copied out
// because the caller closes res.Body as soon as HandleCmd returns.
type botResult struct {
	cmd fetchbot.Command // the command that produced this response
	res *http.Response   // may be nil on failure
	body []byte          // copy of res.Body; nil when res is nil
	err error            // transport-level error, if any
}

// worker executes fetch commands one at a time, bridging between the
// dispatcher (in/out) and the fetchbot fetcher goroutine (bot/res).
type worker struct {
	baseurl *url.URL            // host being crawled; passed to the fetchbot fetcher
	in   chan *fetchCommand     // commands from the dispatcher
	res  chan botResult         // raw HTTP responses from HandleCmd
	out  chan fetchResult       // finished results back to the dispatcher
	bot  chan fetchbot.Command  // commands forwarded to the fetchbot fetcher
	obeyRobots bool             // honor robots.txt when true
	CrawlDelay time.Duration    // minimum delay between requests
}

// newWorker constructs a worker bound to base, wires up its channels,
// and starts its fetchbot fetcher goroutine via init.
func (d *dispatcher) newWorker(base *url.URL, obeyRobots bool, crawlDelay time.Duration) *worker {
	w := &worker{
		baseurl:    base,
		obeyRobots: obeyRobots,
		CrawlDelay: crawlDelay,
		in:         make(chan *fetchCommand),
		bot:        make(chan fetchbot.Command),
		out:        make(chan fetchResult),
		res:        make(chan botResult),
	}
	w.init()
	return w
}

// init configures and starts the fetchbot fetcher goroutine that
// performs the actual HTTP requests. The fetcher reads commands from
// w.bot and calls back into w.HandleCmd with each response.
func (w *worker) init() {
	cfg := fetchbot.DefaultCrawlConfig
	cfg.CrawlDelay = w.CrawlDelay
	cfg.UserAgent = "fetchbolt (code.google.com/p/ncabatoff/fetchbolt)"

	// "Unsafe" here means robots.txt is ignored.
	if w.obeyRobots {
		go fetchbot.NewHostFetcher(cfg, w.baseurl, w, w.bot).Run()
	} else {
		go fetchbot.NewUnsafeHostFetcher(cfg, w.baseurl, w, w.bot).Run()
	}
}

// main consumes fetch commands from w.in until the channel is closed,
// executing them sequentially.
func (w *worker) main() {
	for {
		cmd, ok := <-w.in
		if !ok {
			return
		}
		w.fetch(cmd)
	}
}

// fetch executes fc: it first issues the HEAD request, and if the
// target reports text/html, upgrades the command to GET and extracts
// the links from the returned document. A fetchResult (success or
// failure) is always delivered on w.out via the deferred send.
func (w *worker) fetch(fc *fetchCommand) {
	glog.V(2).Infof("b fetch %v", *fc)
	// Hand the command to the fetcher goroutine and wait for its reply.
	w.bot <- fc
	bres := <-w.res
	fr := fetchResult{fetchCommand: fc, at: time.Now(), err: bres.err}
	defer func() {
		glog.V(2).Infof("e fetch %v", *fc)
		w.out <- fr
	}()

	// Non-HTML targets (or a failed HEAD) are reported as-is with no body fetch.
	if bres.err != nil || "text/html" != getContentType(bres.res) {
		return
	}

	// Upgrade to GET to retrieve the document body.
	fc.Cmd.M = "GET"
	w.bot <- fc
	bres = <-w.res
	fr.at = time.Now()
	fr.err = bres.err
	if fr.err != nil {
		return
	}

	glog.V(2).Infof("parsing HTML")
	doc, err := goquery.NewDocumentFromReader(bytes.NewReader(bres.body))
	if err != nil {
		fr.err = err
		return
	}

	fr.hrefs = getLinks(fc.Cmd.URL(), doc)
}

// HandleCmd is the fetchbot callback: it logs the outcome of each HTTP
// request, copies out the response body, and forwards everything to the
// worker on w.res.
func (w *worker) HandleCmd(cmd fetchbot.Command, res *http.Response, err error) {
	if err != nil {
		// %v (not %s): *http.Response has no String method.
		glog.Errorf("[HTTP failure (res=%v)] %s %s", res, cmd.Method(), cmd.URL())
	} else {
		glog.V(2).Infof("[HTTP result %d] %s %s - %s", res.StatusCode, cmd.Method(), cmd.URL(), res.Header.Get("Content-Type"))
	}

	var body []byte

	if res != nil {
		// We need to read and copy the body now because Fetcher (our caller)
		// is keen to close it and will do so as soon as we return.
		buflen := 1000
		if res.ContentLength > 0 {
			buflen = int(res.ContentLength)
		}
		// BUG fixed: bytes.NewBuffer(make([]byte, buflen)) treated the
		// buflen zero bytes as existing CONTENT, so every body ended up
		// prefixed with NULs. Pre-size capacity only (length 0).
		b := bytes.NewBuffer(make([]byte, 0, buflen))
		if _, err := io.Copy(b, res.Body); err != nil {
			glog.Fatalf("Error reading response body: %v", err)
		}
		body = b.Bytes()
	}
	w.res <- botResult{cmd, res, body, err}
}

