package rsswatchergaeapp

import (
	"bytes"
	"encoding/xml"
	"fmt"
	"html"
	"html/template"
	"io"
	"io/ioutil"
	"net/http"
	nmail "net/mail"
	"net/url"
	"strings"
	"time"

	"appengine"
	"appengine/datastore"
	amail "appengine/mail"
	"appengine/memcache"
	"appengine/taskqueue"
	"appengine/urlfetch"
)

// bodyTemplate renders the HTML body of each outgoing mail: a clickable
// link to the item followed by its content. Content is template.HTML,
// so it is embedded without further escaping.
var bodyTemplate = template.Must(template.New("").Parse(
	`<a href="{{.Link}}">{{.Link}}</a><br><br>{{.Content}}`))

// init registers every HTTP entry point of the app with the default mux.
func init() {
	routes := []struct {
		path    string
		handler http.HandlerFunc
	}{
		{"/", serveRoot},
		{"/crawl", serveCrawl},
		{"/cron", serveCron},
		{"/delDatastore", serveDelDatastore},
		{"/delMemcache", serveDelMemcache},
	}
	for _, rt := range routes {
		http.HandleFunc(rt.path, rt.handler)
	}
}

// firstThatIsnt returns the first error in src that is neither nil nor
// equal to filter. An appengine.MultiError is scanned element by
// element; any other error is compared directly. It returns nil when
// nothing but nil/filter errors remain.
func firstThatIsnt(src, filter error) error {
	multi, ok := src.(appengine.MultiError)
	if !ok {
		// Plain (possibly nil) error: filter it directly.
		if src == filter {
			return nil
		}
		return src
	}
	for _, e := range multi {
		if e == nil || e == filter {
			continue
		}
		return e
	}
	return nil
}

func serveXXX(w http.ResponseWriter, code int, msg string) {
	w.WriteHeader(code)
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	w.Write([]byte(msg))
}

// serveCrawl fetches one subscribed feed (selected by ?key=...) and
// mails out any items not seen on a previous crawl.
//
// A per-key counter in memcache acts as a circuit breaker: once 3
// errors have accumulated the feed is skipped until the counter is
// cleared (serveCron deletes it on each run). Passing ?debug=<anything>
// streams diagnostics into the response instead of sending mail.
func serveCrawl(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)

	key := r.FormValue("key")
	sub, ok := subscriptions[key]
	if !ok {
		// 200 rather than an error status so taskqueue does not retry a
		// task for a key that no longer exists.
		serveXXX(w, 200, fmt.Sprintf("no key %q", key))
		return
	}
	// Increment by 0 reads the current counter (initializing it to 0 if
	// absent) without changing it.
	if prevErrCount, _ := memcache.Increment(c, key, 0, 0); prevErrCount >= 3 {
		c.Infof("crawl: too many recent errors for %s (%q)", key, sub)
		serveXXX(w, 200, fmt.Sprintf("too many recent errors for %s (%q)", key, sub))
		return
	}
	// Bump the error counter on every failure path; crawlOK is set only
	// after the whole crawl succeeds, suppressing the bump.
	crawlOK := false
	defer func() {
		if !crawlOK {
			n, _ := memcache.Increment(c, key, +1, 0)
			c.Infof("crawl: %d recent errors for %s (%q)", n, key, sub)
		}
	}()

	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	// In debug mode, diagnostics from parse/sendDiff are written straight
	// into the response body.
	debug := (io.Writer)(nil)
	if r.FormValue("debug") != "" {
		debug = w
	}
	res, err := urlfetch.Client(c).Get(sub)
	if err != nil {
		c.Errorf("%s (%q): urlfetch get: %v", key, sub, err)
		serveXXX(w, 500, "Internal Server Error")
		return
	}
	defer res.Body.Close()
	items, err := parse(debug, key, res.Body)
	if err != nil {
		c.Errorf("%s (%q): %v", key, sub, err)
		serveXXX(w, 500, "Internal Server Error")
		return
	}
	if err := sendDiff(c, debug, key, items); err != nil {
		c.Errorf("%s (%q): %v", key, sub, err)
		serveXXX(w, 500, "Internal Server Error")
		return
	}
	// In debug mode the body already holds the diagnostics; don't append
	// an extra "OK".
	if debug == nil {
		serveXXX(w, 200, "OK")
	}
	crawlOK = true
}

// serveCron fans the subscription list out to the task queue: one
// /crawl POST task per feed key. It first clears every key's error
// counter in memcache so the per-feed circuit breaker resets each run
// (a cache miss on delete is expected and not an error).
func serveCron(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	var (
		keys  = make([]string, 0, len(subscriptions))
		tasks = make([]*taskqueue.Task, 0, len(subscriptions))
	)
	for k := range subscriptions {
		keys = append(keys, k)
		tasks = append(tasks, taskqueue.NewPOSTTask("/crawl", url.Values{"key": {k}}))
	}
	err := firstThatIsnt(memcache.DeleteMulti(c, keys), memcache.ErrCacheMiss)
	if err != nil {
		c.Errorf("memcache delete-multi: %v", err)
		serveXXX(w, 500, "Internal Server Error")
		return
	}
	if _, err := taskqueue.AddMulti(c, tasks, ""); err != nil {
		c.Errorf("taskqueue add-multi: %v", err)
		serveXXX(w, 500, "Internal Server Error")
		return
	}
	serveXXX(w, 200, "OK")
}

// serveDelDatastore is a manual admin endpoint: it deletes the stored
// crawlState entity for the feed named by ?key=..., so the next crawl
// treats every item in that feed as new.
//
// Fix: the original discarded the datastore.Delete error, so the
// operator could never tell the state was not actually cleared.
// (Deleting a nonexistent entity returns nil, so any error here is a
// real failure.)
func serveDelDatastore(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	key := r.FormValue("key")
	if err := datastore.Delete(c, datastore.NewKey(c, "crawlState", key, 0, nil)); err != nil {
		c.Errorf("datastore delete %q: %v", key, err)
		serveXXX(w, 500, "Internal Server Error")
		return
	}
	serveXXX(w, 200, "OK")
}

// serveDelMemcache is a manual admin endpoint: it clears the per-feed
// error counter (the crawl circuit breaker) for ?key=... from memcache.
//
// Fix: the original discarded the memcache.Delete error. A cache miss
// just means there was no counter to clear and stays a success; any
// other error is logged and reported.
func serveDelMemcache(w http.ResponseWriter, r *http.Request) {
	c := appengine.NewContext(r)
	key := r.FormValue("key")
	if err := memcache.Delete(c, key); err != nil && err != memcache.ErrCacheMiss {
		c.Errorf("memcache delete %q: %v", key, err)
		serveXXX(w, 500, "Internal Server Error")
		return
	}
	serveXXX(w, 200, "OK")
}

// serveRoot answers the landing path with a static version banner,
// doubling as a trivial health check.
func serveRoot(w http.ResponseWriter, r *http.Request) {
	const banner = "Version 1"
	serveXXX(w, http.StatusOK, banner)
}

// item is one feed entry, normalized across RSS 2.0 and Atom.
// Content is template.HTML, i.e. it is trusted as-is and embedded in
// the outgoing mail body without escaping.
type item struct {
	Title, Link string
	Content     template.HTML
}

// sendDiff mails every item whose link is not already recorded in the
// feed's crawlState datastore entity, then persists the updated link
// list. In debug mode it writes what it would do to debug instead of
// sending mail. If a send fails it stops, stores the links mailed so
// far (so the failed item is retried next crawl), and returns the error.
func sendDiff(c appengine.Context, debug io.Writer, key string, items []item) error {
	// crawlState holds the links seen on the previous crawl; noindex
	// because the list is only ever read back whole, never queried.
	var crawlState struct {
		Link []string `datastore:",noindex"`
	}

	prevMap := map[string]bool{}
	dKey := datastore.NewKey(c, "crawlState", key, 0, nil)
	if err := datastore.Get(c, dKey, &crawlState); err != nil {
		// First crawl of this feed: no entity yet is fine.
		if err != datastore.ErrNoSuchEntity {
			return fmt.Errorf("datastore get: %v", err)
		}
	} else {
		for _, link := range crawlState.Link {
			prevMap[link] = true
		}
	}
	// Rebuild the stored link list from what the feed serves now, so
	// links that dropped out of the feed age out of the state.
	crawlState.Link = make([]string, 0, len(items))

	if debug != nil {
		if _, err := fmt.Fprintf(debug, "%d items.", len(items)); err != nil {
			return fmt.Errorf("debug write: %v", err)
		}
	}

	// This is unscientific, but these little naps might help mail
	// being sent in chronological order.
	nap := 2 * time.Second
	for time.Duration(len(items))*nap > 30*time.Second {
		nap /= 2
	}

	to := []string{admin}
	// List-Id lets the recipient filter each feed into its own folder.
	headers := nmail.Header{
		"List-Id": []string{fmt.Sprintf("<%s.example.com>", key)},
	}
	mailErr := error(nil)
	// Walk the items slice from oldest to newest.
	for i := len(items) - 1; i >= 0; i-- {
		t := items[i]
		if prevMap[t.Link] {
			if debug != nil {
				if _, err := fmt.Fprintf(debug, "\n\n%q is a dupe.", t.Link); err != nil {
					return fmt.Errorf("debug write: %v", err)
				}
			}
			// Already mailed on a previous crawl: keep it in the state
			// but send nothing.
			crawlState.Link = append(crawlState.Link, t.Link)
			continue
		}
		// Mark now so a feed that repeats the same link only mails once.
		prevMap[t.Link] = true

		buf := &bytes.Buffer{}
		if err := bodyTemplate.Execute(buf, t); err != nil {
			return fmt.Errorf("template execute: %v", err)
		}
		msg := &amail.Message{
			Sender:   admin,
			To:       to,
			Subject:  t.Title,
			HTMLBody: buf.String(),
			Headers:  headers,
		}
		if debug != nil {
			if _, err := fmt.Fprintf(debug, "\n\n%q", msg); err != nil {
				return fmt.Errorf("debug write: %v", err)
			}
		} else {
			if mailErr = amail.Send(c, msg); mailErr != nil {
				// Send failed: rebuild the stored list from everything
				// seen so far EXCEPT this item, so it is retried on the
				// next crawl. Map iteration order is irrelevant here.
				crawlState.Link = crawlState.Link[:0]
				delete(prevMap, t.Link)
				for link := range prevMap {
					crawlState.Link = append(crawlState.Link, link)
				}
				break
			}
			time.Sleep(nap)
		}
		crawlState.Link = append(crawlState.Link, t.Link)
	}

	// Persist even after a mail failure, so successfully-sent items are
	// not re-mailed on the retry.
	if _, err := datastore.Put(c, dKey, &crawlState); err != nil {
		return fmt.Errorf("datastore put: %v", err)
	}
	if mailErr != nil {
		return fmt.Errorf("mail send: %v", mailErr)
	}
	return nil
}

// Byte patterns used to sniff the feed flavor from the head of the
// response: a <channel> element marks RSS 2.0, a <feed> element marks
// Atom. Both the attribute-bearing and bare forms are checked.
var (
	bChannel0 = []byte(`<channel `)
	bChannel1 = []byte(`<channel>`)
	bFeed0    = []byte(`<feed `)
	bFeed1    = []byte(`<feed>`)
)

// parse reads the whole feed from r, applies any per-feed hack
// registered under key, sniffs whether the payload is RSS 2.0 or Atom
// (whichever marker appears first in the first KiB wins), and
// dispatches to the matching parser.
func parse(debug io.Writer, key string, r io.Reader) (items []item, err error) {
	raw, err := ioutil.ReadAll(r)
	if err != nil {
		return nil, err
	}
	if fix := hacks[key]; fix != nil {
		raw = fix(raw)
	}
	// Only sniff the first KiB; the markers appear near the top.
	head := raw
	if len(head) > 1024 {
		head = head[:1024]
	}
	// find reports the position of the first of two byte patterns,
	// preferring pat0, or -1 if neither occurs.
	find := func(pat0, pat1 []byte) int {
		if pos := bytes.Index(head, pat0); pos >= 0 {
			return pos
		}
		return bytes.Index(head, pat1)
	}
	rssAt := find(bChannel0, bChannel1)
	atomAt := find(bFeed0, bFeed1)
	switch {
	case rssAt >= 0 && (rssAt < atomAt || atomAt < 0):
		return parseRSS20(debug, raw)
	case atomAt >= 0 && (atomAt < rssAt || rssAt < 0):
		return parseAtom(debug, raw)
	}
	return nil, fmt.Errorf("unrecognized head: %q", head)
}

// parseAtom extracts items from an Atom feed. For each entry the last
// <link> href wins (falling back to "#"), and <content> is preferred
// over <summary>. Non-xhtml bodies are HTML-unescaped, since their
// inner XML carries entity-encoded markup.
func parseAtom(debug io.Writer, b []byte) (items []item, err error) {
	var doc struct {
		Entry []struct {
			Title string `xml:"title"`
			Link  []struct {
				Href string `xml:"href,attr"`
			} `xml:"link"`
			Content struct {
				Type     string `xml:"type,attr"`
				InnerXML string `xml:",innerxml"`
			} `xml:"content"`
			Summary struct {
				Type     string `xml:"type,attr"`
				InnerXML string `xml:",innerxml"`
			} `xml:"summary"`
		} `xml:"entry"`
	}
	if err := xml.Unmarshal(b, &doc); err != nil {
		return nil, fmt.Errorf("xml decode: %v", err)
	}
	for _, entry := range doc.Entry {
		link := "#"
		if n := len(entry.Link); n > 0 {
			link = entry.Link[n-1].Href
		}
		typ := entry.Content.Type
		body := strings.TrimSpace(entry.Content.InnerXML)
		if body == "" {
			typ, body = entry.Summary.Type, strings.TrimSpace(entry.Summary.InnerXML)
		}
		if typ != "xhtml" {
			body = html.UnescapeString(body)
		}
		items = append(items, item{entry.Title, link, template.HTML(body)})
	}
	return items, nil
}

// parseRSS20 extracts items from an RSS 2.0 feed. Each item's body
// prefers the content:encoded element (matched by local name "encoded")
// and falls back to <description>.
func parseRSS20(debug io.Writer, b []byte) (items []item, err error) {
	var doc struct {
		Channel struct {
			Item []struct {
				Title       string `xml:"title"`
				Link        string `xml:"link"`
				Encoded     string `xml:"encoded"`
				Description string `xml:"description"`
			} `xml:"item"`
		} `xml:"channel"`
	}
	if err := xml.Unmarshal(b, &doc); err != nil {
		return nil, fmt.Errorf("xml decode: %v", err)
	}
	for _, entry := range doc.Channel.Item {
		body := strings.TrimSpace(entry.Encoded)
		if body == "" {
			body = strings.TrimSpace(entry.Description)
		}
		items = append(items, item{entry.Title, entry.Link, template.HTML(body)})
	}
	return items, nil
}
