import feedparser
import urllib
import urllib2
import time
import traceback
import string
import random
import os
import xmlrpclib
from datetime import datetime, timedelta
from instafeed.utils import utc
import xml.etree.ElementTree as ET
from django.db import models
from django.conf import settings
from instafeed.conduits.models import Conduit
from django.contrib.auth.models import User

class SubscriptionError(RuntimeError):
    """Raised when an rssCloud subscription request cannot be honored."""

class CloudServer(models.Model):
    """Server side of the rssCloud protocol for one feed.

    Tracks the feed's last seen entry (the poll bookmark) and notifies the
    registered ServerSubscriber rows whenever new entries appear.
    """
    name = models.CharField("Server Name", max_length=100)
    url = models.URLField("Feed URL")
    user = models.ForeignKey(User)
    # Poll bookmark used by getNewEntries() to detect which entries are new.
    last_update = models.DateTimeField(blank=True, null=True)
    last_entry_id = models.CharField(max_length=200, blank=True)

    def __unicode__(self):
        return self.name + " - URL: " + self.url

    def get_cloudfeed_url(self):
        "returns the url of the feed with the cloud tag added"
        return "http://%s/feed_cloudserver/%s" % (settings.SITE_NAME, self.id)

    def subscribe(self, notifyProcedure, port, path, protocol, urls,
        ip=None, domain=None):
        """Handle an rssCloud subscription (pleaseNotify) request.

        Checks that one of the ``urls`` the client sent matches a feed we
        serve, performs the challenge handshake for domain-based subscribers,
        then creates or renews the matching ServerSubscriber row.

        Raises SubscriptionError on any validation or notification failure.
        Returns True on success.
        """
        # Compare case-insensitively on BOTH sides: the original code
        # lowercased only the candidate, so a stored URL containing
        # uppercase characters could never match.
        valid_urls = (self.url.lower(), self.get_cloudfeed_url().lower())
        urlCheckOk = False
        for url in urls:
            if url.lower() in valid_urls:
                urlCheckOk = True
                break
        if not urlCheckOk:
            raise SubscriptionError("The url you sent (%s) doesn't match the one I'm serving (%s)" % (urls,
                valid_urls))
        if ip is None and domain is None:
            raise SubscriptionError("IP or Domain required")
        if path[0] != '/':
            path = '/' + path
        url = "http://%s:%s%s" % (domain or ip, port, path)
        params = {'url': self.url}
        challenge = None
        if domain:
            # NOTE(review): random.sample is not cryptographically strong;
            # acceptable here since the challenge only proves the subscriber
            # controls the claimed domain (and py2 has no `secrets` module).
            challenge = "".join(random.sample(string.ascii_letters, 20))
            params['challenge'] = challenge
        encodedParams = urllib.urlencode(params)
        try:
            answer = urllib2.urlopen(url, encodedParams).read().strip()
        except urllib2.HTTPError as e:
            raise SubscriptionError("HTTPError notifying %s: %s" % (url, e))
        except urllib2.URLError as e:
            raise SubscriptionError("URLError notifying %s: %s" % (url, e))
        if domain and answer != challenge:
            raise SubscriptionError("Challenge verification failed. Expected: '%s', Received: '%s'." %
                    (challenge, answer))
        # get_or_create-like lookup: renew the row if this subscriber is
        # already registered, otherwise create a new one.
        try:
            suscriptor = ServerSubscriber.objects.get(server=self, host=domain or ip, port=port,
                path=path or "", protocol=protocol, notifyProcedure=notifyProcedure or "")
        except ServerSubscriber.DoesNotExist:
            suscriptor = ServerSubscriber(server=self, host=domain or ip, port=port,
                path=path or "", protocol=protocol, notifyProcedure=notifyProcedure or "")
        suscriptor.fail_count = 0
        # rssCloud subscriptions last 25 hours unless renewed.
        suscriptor.expiration_date = datetime.now() + timedelta(hours=25)
        suscriptor.save()
        return True

    def getNewEntries(self):
        """Fetch the feed and guess which entries are new since last update.

        Returns the new entries in chronological (oldest first) order and
        advances last_update/last_entry_id. On the very first poll only the
        bookmark is recorded and an empty list is returned.
        """
        feed = feedparser.parse(self.url)
        if not feed.entries:
            return []
        # Newest first. NOTE(review): relies on entries exposing date_parsed;
        # newer feedparser versions expose published_parsed/updated_parsed
        # instead -- confirm against the deployed feedparser version.
        sorted_entries = sorted(feed.entries, key=lambda e: e.date_parsed, reverse=True)
        if not self.last_entry_id:
            # No previous entries received on this feed: just set the bookmark.
            # date_parsed[:6] is (year, month, day, hour, minute, second);
            # the original [:6+1] slice passed tm_wday as the microsecond.
            self.last_update = datetime(tzinfo=utc, *sorted_entries[0].date_parsed[:6])
            self.last_entry_id = sorted_entries[0].id
            self.save()
            return []
        last_entry_index = -1
        for i, entry in enumerate(sorted_entries):
            if entry.id == self.last_entry_id:
                last_entry_index = i
                break
        if last_entry_index == -1:
            # The last_update entry doesn't exist anymore. Take only the
            # most recent entry as new.
            new_entries = [sorted_entries[0]]
        elif last_entry_index > 0:
            # Everything newer than the bookmark, oldest first.
            new_entries = sorted_entries[:last_entry_index]
            new_entries.reverse()
        else:
            # Bookmark is the newest entry: nothing new.
            return []
        self.last_update = datetime(tzinfo=utc, *new_entries[-1].date_parsed[:6])
        self.last_entry_id = new_entries[-1].id
        self.save()
        return new_entries

    def notifySubscribers(self):
        """Check for new entries and notify every subscriber about them.

        Returns (ok, errors) where ok is the count of successful
        notifications and errors is a list of (subscriber, traceback) pairs.
        """
        newEntries = self.getNewEntries()
        ok = 0
        errors = []
        if len(newEntries) > 0:
            for subscriber in self.serversubscriber_set.all():
                try:
                    subscriber.notify()
                    ok += 1
                # Exception (not bare except) so KeyboardInterrupt/SystemExit
                # still propagate; any delivery failure is collected.
                except Exception:
                    errors.append((subscriber, traceback.format_exc()))
        return ok, errors

    def get_cloudfeed_path(self):
        "return the path where the cloud feed should reside"
        return os.path.join(settings.FEEDS_PATH, 'cloudfeed-%s.xml' % str(self.id))

    def gen_cloudfeed(self):
        "read feed from url, adds a cloud tag, writes it to disk"
        attrs = {'domain': settings.SITE_NAME,
                 'port': '80',
                 'path': '/subscribe_to_cloudserver/%s' % self.id,
                 'registerProcedure': '',
                 'protocol': 'http-post'}
        # Close the HTTP response once parsed instead of leaking the socket.
        source = urllib2.urlopen(self.url)
        try:
            original_feed = ET.parse(source)
        finally:
            source.close()
        # RSS layout: root is <rss>, its first child is <channel>.
        channel = original_feed.getroot()[0]
        ET.SubElement(channel, 'cloud', attrs)
        with open(self.get_cloudfeed_path(), 'w') as feed_file:
            original_feed.write(feed_file)

class ServerSubscriber(models.Model):
    """A client subscribed to one CloudServer, pinged when new entries appear."""
    server = models.ForeignKey(CloudServer)
    host = models.CharField("IP or domain name of suscriptor", max_length=200)
    port = models.PositiveIntegerField()
    path = models.CharField(max_length=200)
    notifyProcedure = models.CharField(max_length=200)
    protocol = models.CharField(max_length=10,
                choices=[("xml-rpc", "xml-rpc"), ("http-post", "http-post")])
    # Consecutive delivery failures; reset to 0 after a successful notify().
    fail_count = models.PositiveIntegerField(default=0)
    expiration_date = models.DateTimeField()

    class Meta:
        unique_together = ("server", "host", "port", "path", "protocol", "notifyProcedure")

    def __unicode__(self):
        return u"subscriber %s to %s" % (self.id, self.server.name)

    def get_notify_url(self):
        "returns the url where the subscriber should be notified about updates"
        path = self.path
        # startswith instead of path[0] so an empty path no longer raises
        # IndexError.
        if path.startswith('/'):
            path = path[1:]
        return "http://%s:%s/%s" % (self.host, self.port, path)

    def notify(self):
        """Notify this subscriber about new entries on self.server.

        On failure increments fail_count, saves, and re-raises the original
        exception; on success resets fail_count to 0.
        """
        if self.protocol == 'http-post':
            params = urllib.urlencode({'url': self.server.url})
            try:
                urllib2.urlopen(self.get_notify_url(), params)
            except urllib2.URLError:
                self.fail_count += 1
                self.save()
                # Bare re-raise keeps the original exception (HTTPError is a
                # URLError subclass, so existing URLError catchers still work);
                # the old URLError(e) wrapper hid the real error details.
                raise
        elif self.protocol == 'xml-rpc':
            xmlrpc_server = xmlrpclib.ServerProxy(self.get_notify_url())
            try:
                #TODO: use notifyProcedure instead of rssCloud.ping
                xmlrpc_server.rssCloud.ping(self.server.url)
            except xmlrpclib.Error:
                self.fail_count += 1
                self.save()
                # Re-raise the original Fault/ProtocolError instead of the
                # py2-only `raise xmlrpclib.Error, e` re-wrapping form.
                raise
        self.fail_count = 0
        self.save()

class FeedSubscription(models.Model):
    """Client side of rssCloud: a feed we follow and fan out to conduits."""
    name = models.CharField("Feed Name", max_length=100)
    url = models.URLField("URL")
    user = models.ForeignKey(User)
    conduits = models.ManyToManyField(Conduit, blank=True)
    # Poll bookmark used by getNewEntries() to detect which entries are new.
    last_update = models.DateTimeField(blank=True, null=True)
    last_entry_id = models.CharField(max_length=200, blank=True)

    def __unicode__(self):
        return self.name + " - URL: " + self.url

    def subscribe(self, domain=None):
        """Send an rssCloud pleaseNotify request to the feed's cloud server.

        Reads the <cloud> element from the feed, posts the notification
        parameters ("url1" is the spec's name for the first feed URL), and
        parses the <notifyResult> answer.

        Returns (success, message).
        """
        feed = feedparser.parse(self.url)
        cloud = feed.feed.cloud
        url = "http://%s:%s%s" % (cloud.domain, cloud.port, cloud.path)
        data = {"notifyProcedure": "",
            "port": settings.RSSCLOUD_NOTIFY_PORT,
            "path": "/notifyUpdate/%s" % self.id,
            "protocol": "http-post", "url1": self.url}
        if domain is not None:
            data['domain'] = domain
        encodedData = urllib.urlencode(data)
        try:
            answer = urllib2.urlopen(url, encodedData)
        except urllib2.URLError as e:
            # str(e) instead of e.read(): a plain URLError has no read()
            # method (only HTTPError does), so the old code crashed while
            # reporting the error.
            return False, "Error connecting to url (%s) %s." % (url, e)
        tree = ET.parse(answer)
        root = tree.getroot()
        if not root.tag == "notifyResult":
            return False, "Invalid response, doesn't have 'notifyResult' as root-node."
        else:
            return root.attrib["success"] == "true", root.attrib["msg"]

    def _fetchLastEntry(self):
        """Fetch the feed and return its most recent entry.

        The original body referenced an undefined name ``feed`` and always
        raised NameError; the feed must be parsed here.
        Raises IndexError if the feed has no entries.
        """
        feed = feedparser.parse(self.url)
        return feed.entries[0]

    def getNewEntries(self):
        """Fetch the feed and guess which entries are new since last update.

        Returns the new entries in chronological (oldest first) order and
        advances last_update/last_entry_id. On the very first poll only the
        bookmark is recorded and an empty list is returned.
        """
        feed = feedparser.parse(self.url)
        if not feed.entries:
            return []
        # Newest first. NOTE(review): relies on entries exposing date_parsed;
        # newer feedparser versions expose published_parsed/updated_parsed
        # instead -- confirm against the deployed feedparser version.
        sorted_entries = sorted(feed.entries, key=lambda e: e.date_parsed, reverse=True)
        if not self.last_entry_id:
            # No previous entries received on this feed: just set the bookmark.
            # date_parsed[:6] is (year, month, day, hour, minute, second);
            # the original [:6+1] slice passed tm_wday as the microsecond.
            self.last_update = datetime(tzinfo=utc, *sorted_entries[0].date_parsed[:6])
            self.last_entry_id = sorted_entries[0].id
            self.save()
            return []
        last_entry_index = -1
        for i, entry in enumerate(sorted_entries):
            if entry.id == self.last_entry_id:
                last_entry_index = i
                break
        if last_entry_index == -1:
            # The last_update entry doesn't exist anymore. Take only the
            # most recent entry as new.
            new_entries = [sorted_entries[0]]
        elif last_entry_index > 0:
            # Everything newer than the bookmark, oldest first.
            new_entries = sorted_entries[:last_entry_index]
            new_entries.reverse()
        else:
            # Bookmark is the newest entry: nothing new.
            return []
        self.last_update = datetime(tzinfo=utc, *new_entries[-1].date_parsed[:6])
        self.last_entry_id = new_entries[-1].id
        self.save()
        return new_entries

    def sendNewEntries(self):
        """Fan out every new entry to all attached conduits.

        Yields (entry, ok, errors) per entry, where ok counts successful
        conduit deliveries and errors is a list of (conduit, traceback).
        """
        newEntries = self.getNewEntries()
        for feedEntry in newEntries:
            # Fill in the attributes conduits rely on when the feed omits them.
            if not hasattr(feedEntry, "title"):
                feedEntry.title = "--No title--"
            if not hasattr(feedEntry, "link"):
                feedEntry.link = self.url
            ok = 0
            errors = []
            for conduit in self.conduits.all():
                try:
                    conduit.sendFeed(self, feedEntry)
                    ok += 1
                # Exception (not bare except) so KeyboardInterrupt/SystemExit
                # still propagate; any delivery failure is collected.
                except Exception:
                    errors.append((conduit, traceback.format_exc()))
            yield feedEntry, ok, errors
