package feedfinder

import (
	"net/http"
	"regexp"
	"io/ioutil"
	"strings"
	"encoding/xml"
	"time"
	"errors"
)

// RSS is the top-level container for an RSS 2.0 document.
//
// NOTE(review): the xml.Name field is named RSS, not XMLName, so
// encoding/xml does NOT use it to enforce the <rss> root element —
// as written it is inert. Renaming it XMLName would pin the root
// name, but would change the exported field — TODO confirm intent.
type RSS struct {
	RSS xml.Name `xml:"rss"`
	// C holds the document's <channel> elements; RSS feeds normally
	// carry exactly one (only C[0] is read by FetchStories).
	C []Channel `xml:"channel"`
}

// Channel models an RSS <channel>: feed-level metadata plus its items.
//
// NOTE(review): the Channel xml.Name field is not named XMLName, so it
// does not constrain the element name — it appears to be inert.
type Channel struct {
	Channel xml.Name `xml:"channel"`
	Title string `xml:"title"`
	Description string `xml:"description"`
	Link string `xml:"link"`
	Items []Item `xml:"item"`
}

// Item models a single RSS <item> (one story/entry in the feed).
//
// NOTE(review): the Item xml.Name field is not named XMLName, so it
// does not constrain the element name — it appears to be inert.
type Item struct {
	Item xml.Name `xml:"item"`
	Title string `xml:"title"`
	Description string `xml:"description"`
	Link string `xml:"link"`
	Guid string `xml:"guid"`
	// PubDate is kept as the raw string; findDate parses it later
	// because feeds use many incompatible date formats.
	PubDate string `xml:"pubDate"`
}

// Atom is the top-level container for an Atom document (<feed>).
//
// NOTE(review): there is no field for the feed-level <link>, which is
// why Atom stories built in FetchStories never get a FeedLink. The
// Feed xml.Name field is not named XMLName, so it does not constrain
// the root element name.
type Atom struct {
	Feed xml.Name `xml:"feed"`
	Id string `xml:"id"`
	Title string `xml:"title"`
	E []Entry `xml:"entry"`
}

// Entry models a single Atom <entry>.
//
// NOTE(review): the Entry xml.Name field is not named XMLName, so it
// does not constrain the element name — it appears to be inert.
type Entry struct {
	Entry xml.Name `xml:"entry"`
	Id string `xml:"id"`
	// Updated is kept as the raw string and parsed later by findDate.
	Updated string `xml:"updated"`
	// NOTE(review): Atom carries the URL in the href attribute of a
	// self-closing <link/>, not in its character data, so this field is
	// probably always empty as declared; capturing it would need a
	// nested struct with `xml:"href,attr"`. TODO confirm on real feeds.
	Link string `xml:"link"`
	Title string `xml:"title"`
	Summary string `xml:"summary"`
}

type Stories []*Story

// Story is the feed-format-agnostic representation of one feed entry,
// produced by FetchStories from either an RSS Item or an Atom Entry.
type Story struct {
	Title string
	Summary string
	Link string
	// Date is the parsed publication/update time (see findDate).
	Date time.Time
	// Feed and FeedLink identify the originating feed. FeedLink is only
	// populated for RSS sources (the Atom struct has no link field).
	Feed string
	FeedLink string
}

// Len reports the number of stories; part of sort.Interface.
func (s Stories) Len() int {
	return len(s)
}

// Less orders stories newest-first (reverse chronological); part of
// sort.Interface.
func (s Stories) Less(i, j int) bool {
	// Story i sorts first when story j is strictly older.
	return s[j].Date.Before(s[i].Date)
}

// Swap exchanges the stories at indexes i and j; part of sort.Interface.
func (s Stories) Swap(i, j int) {
	s[i], s[j] = s[j], s[i]
}

// Feed fetches the page at url and returns the first RSS or Atom feed
// URL advertised by a tag of the form
//
//	<link ... type="application/rss+xml" ... href="...">
//
// (or atom+xml). It returns "" with a nil error when the page advertises
// no feed. A url without a scheme gets "http://" prepended.
func Feed(url string) (string, error) {
	if !strings.HasPrefix(url, "http") {
		url = "http://" + url
	}

	resp, err := http.Get(url)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()

	// Don't scrape error pages as if they were the requested document.
	if resp.StatusCode != http.StatusOK {
		return "", errors.New("unexpected HTTP status: " + resp.Status)
	}

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return "", err
	}

	// [^>]*? (rather than .*?) keeps the match inside a single tag: with
	// .*? a page holding several <link> tags on one line could produce a
	// match spanning tag boundaries and yield an href from the wrong tag.
	// The patterns are constants, so MustCompile cannot panic and the
	// previous per-call Compile error checks were dead code.
	linkRe := regexp.MustCompile(`<link[^>]*?type="application/(?:atom|rss)\+xml"[^>]*?>`)
	hrefRe := regexp.MustCompile(`href="([^"]*)"`)

	tag := linkRe.FindString(string(body))
	if m := hrefRe.FindStringSubmatch(tag); m != nil {
		href := m[1]
		// Resolve site-relative links against the page URL.
		if strings.HasPrefix(href, "/") {
			href = url + href
		}
		return href, nil
	}

	return "", nil
}

// FetchStories downloads the feed at url and converts it into Stories.
// Both RSS and Atom documents are accepted: the body is unmarshaled as
// each format and whichever one produced content wins. A url without a
// scheme gets "http://" prepended.
func FetchStories(url string) (Stories, error) {
	if !strings.HasPrefix(url, "http") {
		url = "http://" + url
	}

	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	// A 404/500 page is not a feed; the old code parsed it anyway and
	// silently returned an empty result.
	if resp.StatusCode != http.StatusOK {
		return nil, errors.New("unexpected HTTP status: " + resp.Status)
	}

	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}

	// Try both formats. Only fail when neither parses: previously an
	// unmarshal error in the format that is not even in use aborted the
	// whole fetch.
	var r RSS
	var a Atom
	rssErr := xml.Unmarshal(body, &r)
	atomErr := xml.Unmarshal(body, &a)
	if rssErr != nil && atomErr != nil {
		return nil, rssErr
	}

	var ret Stories
	switch {
	case rssErr == nil && len(r.C) > 0: // RSS feed
		c := r.C[0] // RSS feeds carry a single channel
		for _, v := range c.Items {
			d, err := findDate(v.PubDate)
			if err != nil {
				return nil, err
			}
			ret = append(ret, &Story{
				Title:    v.Title,
				Summary:  v.Description,
				Link:     v.Link,
				Date:     d,
				Feed:     c.Title,
				FeedLink: c.Link,
			})
		}
	case atomErr == nil && len(a.E) > 0: // Atom feed
		for _, v := range a.E {
			d, err := findDate(v.Updated)
			if err != nil {
				return nil, err
			}
			ret = append(ret, &Story{
				Title:   v.Title,
				Summary: v.Summary,
				Link:    v.Link,
				Date:    d,
				Feed:    a.Title,
				// FeedLink stays empty: the Atom struct captures no
				// feed-level link.
			})
		}
	}

	return ret, nil
}

func findDate(d string) (t time.Time, err error) {
	// RSS and Atom are notorious for NOT following date standards specified in their own standard. Thanks guys.
	formats := []string{
		time.RFC822,
		time.RFC822Z,
		time.RFC850,
		time.RFC1123,
		time.RFC1123Z,
		time.RFC3339,
		time.RFC3339Nano,
	}
	for _, v := range formats {
		t, err = time.Parse(v, d)
		if err == nil {
			return
		}
	}
	err = errors.New("could not parse time")
	return
}

