#
# fusion.sync.sync_session
#
# Copyright 2007 Helsinki Institute for Information Technology
# and the authors.
#
# Authors: Ken Rimey <rimey@hiit.fi>
#

# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import with_statement

import hmac
import time
import hashlib
import logging
from os import urandom
from heapq import heappush, heappop
from binascii import hexlify, unhexlify

from messkit.session import Session
from fusion.core.item import decode_item
from fusion.db.exceptions import *

class SyncSession(Session):
    """One endpoint of a Fusion database synchronization session.

    The protocol is symmetric: each side announces the datasets it
    subscribes to (identified only by a SHA-1 hash of the IID plus a
    random challenge), requests the datasets it also knows by proving
    knowledge of the IID with an HMAC over the challenges, streams
    batches of item IDs for the datasets it serves, answers item
    requests, and finally exchanges 'sync_complete' in both
    directions.

    IIDs and item IDs are hex strings on this side (as the database
    API uses them); they are converted to raw bytes with unhexlify
    before sending and converted back with hexlify on receipt.
    """

    def __init__(self, stream, db, mgr=None, batch_size=200,
                 my_id=None, peer_id=None, listening_address=None,
                 progress=None, sync_complete=None, **options):
        # stream            -- message stream (see messkit.session.Session).
        # db                -- Fusion database handle.
        # mgr               -- optional session manager; the session adds
        #                      itself here on creation and discards itself
        #                      on shutdown.
        # batch_size        -- maximum number of item IDs offered per
        #                      'items_available' message.
        # my_id, peer_id    -- optional endpoint identifiers exchanged in
        #                      the 'hello' message.
        # listening_address -- (host, port) advertised to the peer.
        # progress          -- callback(count): number of items still
        #                      expected from the peer.
        # sync_complete     -- callback(): both directions have finished.
        Session.__init__(self, stream, handler_prefix='handle_', **options)

        self.db = db
        self.mgr = mgr
        self.batch_size = batch_size
        self.progress_callback = progress
        self.sync_complete_callback = sync_complete

        self.my_id = my_id
        self.peer_ids = set()
        if peer_id:
            self.peer_ids.add(peer_id)

        self.listening_address = listening_address
        self.my_inward_address = None   # getsockname() of our socket.
        self.my_outward_address = None  # Our address as reported by the peer.
        self.peer_address = None        # getpeername() of our socket.
        self.remote_host = None
        self.remote_port = None

        self.datasets = set()           # IIDs we currently subscribe to.
        self.datasets_offered = {} # Map IID hash to (challenge, IID) pair.
        self.datasets_available = {} # Map IID hash to challenge.
        self.datasets_requested = set() # IIDs we have asked the peer for.
        self.datasets_served = set()    # IIDs we are feeding to the peer.

        self.mark = 0                   # DB mark up to which items are queued.
        self.items_to_offer = []  # heap of (num, id, depends) triples
        self.items_to_filter = set()    # IDs not to offer back to the peer.

        self.started = False
        self.waiting_for_ack = 0        # Outstanding unacknowledged offers.
        self.remaining_to_receive = 0
        self.sync_complete_to_be_sent = True
        self.sync_complete_sent = False
        self.sync_complete_received = False

        # Traffic counters backing the keep-alive/timeout logic.
        self.__bytes_in = self.__bytes_out = 0
        self.__in_timestamp = self.__out_timestamp = time.time()

        if self.mgr is not None:
            self.mgr.add(self)

    def startup(self, error):
        """Handle completion of the connection attempt.

        'error' is true if the connection failed, in which case we
        just log and return.  On success, record the local and remote
        addresses, send our 'hello', start offering data via update(),
        and register the protocol message handlers.
        """
        if error:
            logging.debug('%s failed.' % self.get_description())
            return

        logging.info('%s established.' % self.get_description())

        self.my_inward_address = self.stream.socket.getsockname()
        self.peer_address = self.stream.socket.getpeername()

        host, port = self.peer_address
        self.remote_host = host
        # Only the initiator can trust the observed port as the peer's
        # service port; an accepted connection comes from an ephemeral
        # port, so remote_port waits for the peer's 'hello' instead.
        if self.stream.initiating:
            self.remote_port = port

        self.started = True
        self.send_hello()
        self.update()

        self.register_function(self.handle_hello)
        self.register_function(self.handle_dataset_available)
        self.register_function(self.handle_dataset_available_ack)
        self.register_function(self.handle_dataset_request)
        self.register_function(self.handle_dataset_cancel)
        self.register_function(self.handle_items_available)
        self.register_function(self.handle_items_available_ack)
        self.register_function(self.handle_item_request)
        self.register_function(self.handle_item_response)
        self.register_function(self.handle_sync_complete)

    def shutdown(self):
        """Log termination and deregister from the session manager."""
        if self.started:
            logging.info('%s terminated.' % self.get_description())

        if self.mgr is not None:
            self.mgr.discard(self)

    def send_hello(self):
        """Introduce ourselves: our ID, where we listen, and the
        address we see the peer at (so it learns its outward address)."""
        self.send('hello',
                  my_id = self.my_id,
                  listening_address = self.listening_address,
                  your_address = self.peer_address)

    def handle_hello(self, my_id=None,
                     listening_address=None,
                     your_address=None,
                     **future_expansion):
        """Record what the peer tells us about itself.

        All fields are optional, and unknown keyword arguments are
        tolerated (**future_expansion) so newer peers can extend the
        message without breaking us.  A bare 'hello' with no arguments
        also serves as the keep-alive ping (see keep_alive()).
        """
        if my_id:
            self.peer_ids.add(my_id)

        if listening_address:
            # Only the port is taken from the advertised address; the
            # host we use is the one actually seen on the connection
            # (set in startup()).
            host, port = listening_address
            self.remote_port = port

        if your_address:
            host, port = your_address
            self.my_outward_address = host, port

    def update(self):
        """Drive the protocol forward.

        While no offer message is awaiting acknowledgment, announce
        dataset and item availability inside a single exclusive DB
        transaction, then send 'sync_complete' if one is due.  The
        waiting_for_ack counter is re-checked between steps because
        each step may itself send an offer that must be acknowledged
        before we send more.  Always finishes with keep-alive
        housekeeping.
        """
        if self.started and not self.waiting_for_ack:
            with self.db.exclusive_transaction():
                if not self.waiting_for_ack:
                    self.update_datasets()

                if not self.waiting_for_ack:
                    self.update_items()

                if not self.waiting_for_ack and self.sync_complete_to_be_sent:
                    self.send_sync_complete()

        self.keep_alive()

    def keep_alive(self, interval=30):
        """Time out a dead connection and keep a quiet one alive.

        Tracks the stream's byte counters to notice traffic.  If
        nothing has been received for 1.5 * interval seconds, close
        the connection; if nothing has been sent for interval seconds,
        send a bare 'hello' as a ping.
        """
        t = time.time()

        if self.stream.bytes_in > self.__bytes_in:
            self.__bytes_in = self.stream.bytes_in
            self.__in_timestamp = t

        if self.stream.bytes_out > self.__bytes_out:
            self.__bytes_out = self.stream.bytes_out
            self.__out_timestamp = t

        if t > self.__in_timestamp + 1.5 * interval:
            if self.started:
                logging.debug('%s timed out.' % self.get_description())
            self.close()

        # NOTE(review): this can still send after close() above when
        # both timers have expired -- presumably harmless, but confirm
        # against the stream implementation.
        if t > self.__out_timestamp + interval:
            if self.started:
                self.send('hello')

    def update_datasets(self):
        """Reconcile our subscription list with what we have told the peer.

        Newly removed datasets stop being served and any outstanding
        request for them is cancelled; newly added datasets are
        announced, and requested immediately if the peer has already
        announced the same dataset to us.
        """
        # XXX Need to detect when a dataset is removed and then
        # immediately reimported.
        old_datasets = self.datasets
        self.datasets = set(self.db.list_subscriptions())
        removed = old_datasets - self.datasets
        added = self.datasets - old_datasets

        for iid in removed:
            self.datasets_served.discard(iid)
            if iid in self.datasets_requested:
                self.send_dataset_cancel(iid)

        for iid in added:
            self.send_dataset_available(iid)
            if self.hash(iid) in self.datasets_available:
                self.send_dataset_request(iid)

    def update_items(self):
        """Offer the next batch of item IDs, refilling the queue from
        the database when it runs dry."""
        ids = self.pop_items()

        if not ids:
            self.refill()
            ids = self.pop_items()

        if ids:
            self.send_items_available(ids)

    def refill(self):
        """Queue items committed since the last refill for every
        dataset currently being served."""
        old_mark = self.mark
        self.mark = self.db.get_current_mark()
        for iid in self.datasets_served:
            self.serve_items(iid, last_mark=old_mark)

    def pop_items(self):
        """Pop up to batch_size offerable item IDs off the heap.

        A triple whose 'depends' field names a dataset causes that
        dataset to start being served, and the triple is re-pushed
        with the dependency cleared so the item itself is offered
        later.  IDs in items_to_filter (ones the peer already told us
        about) are skipped and dropped from the filter.
        """
        ids = []
        while len(ids) < self.batch_size and self.items_to_offer:
            num, id, depends = heappop(self.items_to_offer)
            if depends:
                self.serve_dataset(depends)
                heappush(self.items_to_offer, (num, id, None))
            elif id not in self.items_to_filter:
                ids.append(id)
            else:
                # We might as well clean up a little, but the list of
                # IDs to filter will still tend to grow.
                self.items_to_filter.remove(id)

        return ids

    def serve_dataset(self, iid):
        """Begin serving a dataset to the peer (idempotent)."""
        if iid not in self.datasets_served:
            self.datasets_served.add(iid)
            self.serve_items(iid)

    def serve_items(self, iid, last_mark=0):
        """Queue the dataset's sync agenda between last_mark and the
        current mark onto the items_to_offer heap."""
        if last_mark < self.mark:
            for triple in self.db.gen_sync_agenda(iid, last_mark = last_mark,
                                                  current_mark = self.mark):
                heappush(self.items_to_offer, triple)

    def send_dataset_available(self, iid):
        """Announce a dataset by its IID hash along with a fresh random
        challenge, and wait for the peer's acknowledgment."""
        challenge = urandom(20)
        hash = self.hash(iid)
        self.datasets_offered[hash] = challenge, iid
        self.send('dataset_available', hash, challenge)
        self.waiting_for_ack += 1

    def handle_dataset_available(self, hash, challenge):
        """Record a dataset announced by the peer; request it at once
        if we subscribe to a dataset with the same IID hash."""
        if hash in self.datasets_available:
            # A re-announcement invalidates our filter state.
            # XXX There could be IDs queued up at the other end for
            # items that are no longer present, such that these will
            # be added to the filter after it has been cleared.
            self.items_to_filter.clear()

        self.datasets_available[hash] = challenge

        iid = self.invert_hash(hash)
        if iid is not None:
            self.send_dataset_request(iid)

        self.send_dataset_available_ack()

    def send_dataset_available_ack(self):
        """Acknowledge a 'dataset_available' announcement."""
        self.send('dataset_available_ack')

    def handle_dataset_available_ack(self):
        """Peer acknowledged our announcement; resume the update loop."""
        self.waiting_for_ack -= 1
        self.update()

    def send_dataset_request(self, iid):
        """Request a dataset, proving we know its IID.

        The proof is an HMAC keyed on the IID over our fresh
        counterchallenge concatenated with the peer's challenge, so
        the IID itself never crosses the wire.
        """
        self.datasets_requested.add(iid)

        # This is like SKID2, except that SKID2 would add the identity
        # of the other endpoint as a third component of the content
        # subject to the hmac below, would not accept multiple
        # responses to the same challenge, and is probably intended
        # for use with a secret shared by just two endpoints.
        hash = self.hash(iid)
        challenge = self.datasets_available[hash]
        counterchallenge = urandom(20)
        response = self.hmac(iid, counterchallenge + challenge)
        self.send('dataset_request', hash, counterchallenge, response)

    def handle_dataset_request(self, hash, counterchallenge, response):
        """Verify the requester's HMAC proof and, if it checks out,
        start serving the dataset.  Requests for hashes we never
        offered are silently ignored."""
        if hash in self.datasets_offered:
            challenge, iid = self.datasets_offered[hash]
            expected = self.hmac(iid, counterchallenge + challenge)
            if response == expected:
                self.serve_dataset(iid)
            else:
                logging.warning('Subscription request failed challenge-response.')

    def send_dataset_cancel(self, iid):
        """Withdraw an earlier dataset request (we unsubscribed)."""
        self.datasets_requested.remove(iid)
        self.send('dataset_cancel', iid)

    def handle_dataset_cancel(self, iid):
        """Stop serving a dataset the peer no longer wants."""
        self.datasets_served.discard(iid)

    def send_items_available(self, ids):
        """Offer a batch of item IDs (as raw bytes on the wire) plus a
        count of how many more are queued behind them.  Any new batch
        means sync completion must be re-announced afterwards."""
        self.send('items_available', map(unhexlify, ids),
                  remaining=len(self.items_to_offer))
        self.sync_complete_to_be_sent = True
        self.waiting_for_ack += 1

    def handle_items_available(self, ids, remaining):
        """Process a batch of offered item IDs.

        Every offered ID goes into items_to_filter so we never offer
        it back; IDs unknown to the database are requested.  The
        progress count reported is the peer's remaining queue plus the
        requests we just issued.
        """
        ids = map(hexlify, ids)

        requested = 0
        with self.db.transaction():
            for id in ids:
                self.items_to_filter.add(id)
                if self.db.hash_to_num(id) is None:
                    self.send_item_request(id)
                    requested += 1
        remaining += requested

        self.send_items_available_ack()
        if remaining:
            self.progress(remaining)

    def send_items_available_ack(self):
        """Acknowledge an 'items_available' batch."""
        self.send('items_available_ack')

    def handle_items_available_ack(self):
        """Peer acknowledged our item batch; resume the update loop."""
        self.waiting_for_ack -= 1
        self.update()

    def send_item_request(self, id):
        """Ask the peer for the item with the given (hex) ID."""
        self.send('item_request', unhexlify(id))

    def handle_item_request(self, id):
        """Send the requested item, but only if it exists and belongs
        to a dataset we have agreed to serve."""
        id = hexlify(id)
        item = self.db.get_item_by_hash(id)
        if item is not None:
            iid = hexlify(item.iid)
            if iid in self.datasets_served:
                self.send_item_response(item)

    def send_item_response(self, item):
        """Ship an item's encoded form to the peer."""
        self.send('item_response', item.encode())

    def handle_item_response(self, item):
        """Decode and store an item received from the peer.

        Root items implicitly subscribe us to their dataset.  An
        Unauthorized refusal from the database is logged and ignored.
        """
        # XXX Check that the item belongs to a dataset that we have
        # directly or indirectly requested.
        item = decode_item(item)
        try:
            if item.is_root():
                self.db.subscribe(item.iid) # XXX
            self.db.put_item(item)
        except Unauthorized:
            # This is possible in innocent circumstances.
            logging.debug('Refused unauthorized item.')

    def send_sync_complete(self):
        """Tell the peer we have nothing more to offer.  Fire the
        completion callback only the first time, and only if the
        peer's own 'sync_complete' has already arrived."""
        self.send('sync_complete')
        self.sync_complete_to_be_sent = False
        if not self.sync_complete_sent:
            self.sync_complete_sent = True
            if self.sync_complete_received:
                self.sync_complete()

    def handle_sync_complete(self):
        """The peer has nothing more to offer.  Reset the progress
        count and fire the completion callback once both directions
        are done."""
        self.progress(0)
        if not self.sync_complete_received:
            self.sync_complete_received = True
            if self.sync_complete_sent:
                self.sync_complete()

    def progress(self, count):
        """Record and report how many items we still expect."""
        logging.debug('%d remaining.' % count)
        self.remaining_to_receive = count
        self.issue_callback(self.progress_callback, count)

    def sync_complete(self):
        """Both directions finished: notify via the user callback."""
        logging.debug('Sync complete.')
        self.issue_callback(self.sync_complete_callback)

    def hmac(self, iid, content):
        """HMAC-SHA1 of content keyed on the raw IID bytes."""
        iid = unhexlify(iid)
        return hmac.new(iid, content, hashlib.sha1).digest()

    def hash(self, iid):
        """SHA-1 digest of the raw IID bytes (the wire identifier for
        a dataset)."""
        iid = unhexlify(iid)
        return hashlib.sha1(iid).digest()

    def invert_hash(self, hash):
        """Return the subscribed IID whose hash matches, or None.

        Linear scan over our subscriptions; only datasets we already
        know can be recognized, which is the point of hashing.
        """
        for iid in self.db.list_subscriptions():
            if self.hash(iid) == hash:
                return iid

        return None

    def get_description(self):
        """Human-readable connection description for log messages."""
        host, port = self.stream.address
        if self.stream.initiating:
            return 'Connection to %s port %s' % (host, port)
        else:
            return 'Connection from %s' % host