# -*- coding: utf-8 -*-

import urllib2
from stream import feeds, utils

def _read(path):
    """Read a fixture file and return its contents, closing the handle promptly.

    The previous inline `open(path).read()` calls leaked file handles (they
    were only closed whenever the garbage collector ran).
    """
    with open(path) as f:
        return f.read()

# Canned responses keyed by the exact URL the code under test will request.
twitter_url = 'http://twitter.com/statuses/user_timeline.json?screen_name=command000&count=10'
twitter_data = { twitter_url : _read('stream/testcases/command000_twitter.json') }
lj_url = 'http://command0.livejournal.com/data/rss'
lj_data = { lj_url : feeds.feedparser.parse(_read('stream/testcases/command0_livejournal.xml')) }
delicious_url = 'http://feeds.delicious.com/v2/json/command0?count=10'
delicious_data = { delicious_url : _read('stream/testcases/command0_delicious.json') }
# Feed documents pre-parsed with feedparser, keyed by feed URL.
feed_data = { 'http://company.yandex.ru/blog/index.rss' : feeds.feedparser.parse(_read('stream/testcases/data/russian.xml')),
        'http://feeds.feedburner.com/codinghorror' : feeds.feedparser.parse(_read('stream/testcases/codinghorror_atom.xml')) }
# Page URL -> feed URL mapping for feed auto-discovery tests.
feed_discover = { 'http://yandex.ru' : 'http://company.yandex.ru/blog/index.rss',
        'http://codinghorror.com' : 'http://feeds.feedburner.com/codinghorror' }

class FeedProvider:
    """
    A mock for feed finding and retrieving.

    data_dict maps URLs to canned contents; discover_dict maps page URLs to
    feed URLs. Every URL passed to retrieve_feed or find_feed is recorded in
    self.requests so tests can assert on access patterns.
    """
    def __init__(self, data_dict=None, discover_dict=None, not_found_callback=None):
        '''
        data_dict maps URLs to their contents.
        discover_dict maps URLs to other URLs (like links to feeds on pages).
        not_found_callback(url) is invoked for URLs missing from data_dict;
        defaults to default_not_found_callback.
        '''
        # Use None sentinels instead of `= {}` defaults: a mutable default
        # dict is evaluated once and would be shared by every FeedProvider
        # instance, leaking state between tests.
        self.data_dict = {} if data_dict is None else data_dict
        self.discover_dict = {} if discover_dict is None else discover_dict

        if not_found_callback is None:
            self.not_found_callback = self.default_not_found_callback
        else:
            self.not_found_callback = not_found_callback

        # Chronological log of every URL requested through this provider.
        self.requests = []

    def retrieve_feed(self, url):
        """Return the canned content for url, or delegate to the not-found callback."""
        self.requests.append(url)
        if url in self.data_dict:
            return self.data_dict[url]
        return self.not_found_callback(url)

    def default_not_found_callback(self, url):
        """Mimic real failures: JSON APIs raise HTTPError, feeds return a bozo bag."""
        if url.startswith(('http://feeds.delicious.com', 'http://twitter.com')):
            # Call-style raise instead of the legacy Python-2-only
            # `raise Class, args` statement; behavior is identical.
            raise urllib2.HTTPError(url, 404, '', '', None)
        return utils.PropertyBag(bozo = 1, bozo_exception = urllib2.HTTPError(url, 404, '', '', None))

    def find_feed(self, url):
        """Return the feed URL discovered at url, or url itself if none is known."""
        self.requests.append(url)
        return self.discover_dict.get(url, url)

    def get_requests(self):
        """Return the chronological list of all URLs requested so far."""
        return self.requests
