#
# peerscape.replication.crawl
#
# Copyright 2008-2009 Helsinki Institute for Information Technology
# and the authors.
#
# Authors: Ken Rimey <rimey@hiit.fi>
#

# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import with_statement

import time
import logging

from peerscape import DB
from peerscape.replication.relate import *

# XXX Don't automatically subscribe to the friends of a friend or the
# members of a group when there are very many.

class Crawler(object):
    """Background crawler that incrementally subscribes the local node
    to related datasets (friends, friends-of-friends, group members,
    and linked content), one new subscription per update pass."""

    def __init__(self, path):
        # Filesystem path handed to DB() on each update() pass.
        self.path = path
        # Iids already known to have a local dataset; cached so later
        # passes skip the get_dataset_info() lookup for them.
        self.subscribed = set()

    def loop(self, throttle=50):
        """Run update() forever.

        After each pass, sleep `throttle` times as long as the pass
        took (but at least 2 seconds), so the crawler never consumes
        more than ~1/throttle of real time.  Any exception is logged
        and followed by a 60-second back-off instead of killing the
        loop.
        """
        while True:
            try:
                # Use wall-clock time: update() is I/O-bound, and the
                # original time.clock() measured CPU time on POSIX
                # (and was removed in Python 3.8), which made the
                # throttle sleep far too short.
                t0 = time.time()
                self.update()
                t1 = time.time()
                time.sleep(max(2, throttle * (t1 - t0)))
            except Exception:
                logging.exception('Exception in crawler:')
                time.sleep(60)

    def update(self):
        """Open the database, subscribe to at most one new iid, close."""
        self.db = DB(self.path, create=False, cache_persona=True)
        try:
            iid, description = self.pick_unsubscribed_iid()
            if iid:
                logging.info('Crawler subscribing to %s: %s', description, iid)
                self.db.subscribe(iid)
        finally:
            # Always release the DB handle, even if picking/subscribing
            # raised.
            self.db.close()

    def pick_unsubscribed_iid(self):
        """Return (iid, description) for one dataset we do not yet hold,
        preferring the most closely related sources first.

        Returns (None, None) when every source is exhausted or when no
        local identity exists yet.
        """
        db = self.db
        with db.read_transaction():
            try:
                me = db.get_my_iid()
            except LookupError:
                # No local identity yet, so nothing to crawl.
                return None, None

            # Each pick_one() returns None when its source yields
            # nothing new, so the `or` chain falls through to the next
            # (less closely related) source.
            return (self.pick_one(list_friends(db, me), "friend")
                    or self.pick_one(list_links(db, me),
                                     "my content")
                    or self.pick_one(list_friends_of_friends(db, me),
                                     "friend of friend")
                    or self.pick_one(list_fellow_members(db, me),
                                     "fellow group member")
                    or self.pick_one(list_links_of_friends(db, me),
                                     "friend's content")
                    or self.pick_one(list_links_of_groups(db, me),
                                     "group content")
                    or (None, None))

    def pick_one(self, iids, description):
        """Return (iid, description) for the first iid in `iids` that has
        no local dataset yet, or None when there is no such iid.

        Iids found to already have a dataset are added to
        self.subscribed so future passes skip them without a lookup.
        """
        for iid in iids:
            if iid in self.subscribed:
                continue
            elif self.db.get_dataset_info(iid):
                # Dataset already present locally; remember and move on.
                self.subscribed.add(iid)
            else:
                return iid, description
        # Explicit None keeps the caller's `or` chain honest.
        return None
