#
# openlookup/server/sync.py
#
# Copyright 2008-2009 Helsinki Institute for Information Technology
# and the authors.
#
# Authors: Ken Rimey <rimey@hiit.fi>
#

# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

from __future__ import with_statement

import time
import logging

from messkit.simple_stream import StreamManager
from messkit.session import Session

from openlookup.version import VERSION, Version
from openlookup.lib.meter import Meter
from openlookup.engine.item import recreate_item, MAX_TTL
from openlookup.engine.db import DB

# TCP port peers connect to for synchronization.
SYNC_PORT = 5850

# Default cap on simultaneous sync sessions; overridable via the
# 'connection_limit' database info key (see SyncServer.handle_accept).
CONNECTION_LIMIT = 5

# Default per-session read-rate limit passed to set_rate_limit_read():
# 10**6 // 8 = 125000 bytes/sec (presumably 1 Mbit/s — confirm units
# against messkit's rate limiter), averaged over RATE_LIMIT_INTERVAL
# seconds.  Overridable via the 'rate_limit' database info key.
RATE_LIMIT = 10**6 // 8
RATE_LIMIT_INTERVAL = 5.0

# Maximum number of item hashes remembered per session to avoid
# echoing an item back to the peer that just sent it.
ECHO_SUPPRESSION_CACHE_SIZE = 10**4

# Per-session flow control for outgoing items: at most MAX_ITEMS_TO_SEND
# items are transmitted per update; the stack of pending item numbers is
# lazily refilled below MIN_STACK_SIZE and truncated above MAX_STACK_SIZE.
MAX_ITEMS_TO_SEND = 100
MIN_STACK_SIZE = 1000
MAX_STACK_SIZE = 10**7

# Send a keep-alive hello after KEEP_ALIVE_INTERVAL idle seconds on the
# write side; drop the connection after KEEP_ALIVE_TIMEOUT idle seconds
# on the read side (both in seconds).
KEEP_ALIVE_INTERVAL = 20
KEEP_ALIVE_TIMEOUT = 120

# Event-loop pacing (seconds): time budget spent in network eventing per
# iteration, minimum interval between database cleaning passes, and
# interval between maintenance passes (peer reconnection).
EVENTING_TIME = 0.1
CLEANING_INTERVAL = 1.0
MAINTENANCE_INTERVAL = 60

class SyncManager(object):
    """Drives item synchronization with peer nodes.

    Owns the item database, the set of active SyncSessions, and the
    event loop that interleaves network I/O, database updates, and
    periodic maintenance (reconnecting to configured peers).
    """

    def __init__(self, filename, fast, daemon=None):
        self.filename = filename    # database file; opened in loop()
        self.daemon = daemon        # owning daemon, if any (supplies ports)
        self.fast = fast            # passed through to DB()

        self.sessions = set()       # active SyncSession objects
        self.incoming = set()       # received items awaiting storage
        self.outward_ip_addresses = set()  # our addresses as reported by peers

        self.last_cleaning = 0
        self.cleaning_needed = False

        self.meter = Meter()        # tracks the database mark over time
        self.last_maintenance = 0
        self.stream_mgr = StreamManager()

    def create_server(self, host='', port=SYNC_PORT):
        """Start listening for incoming sync connections; return the server."""
        if not host:
            logging.info('Listening for sync connections on port %s.', port)
        else:
            logging.info('Listening for sync connections on %s port %s.',
                         host, port)
        server = SyncServer(self)
        server.listen((host, port))
        return server

    def create_session(self, host='localhost', port=SYNC_PORT):
        """Initiate an outgoing sync connection; return the new session."""
        logging.debug('Connecting to host %s, port %s.', host, port)
        stream = self.stream_mgr.create_stream()
        session = SyncSession(self, stream)
        session.connect((host, port))
        return session

    def loop(self):
        """Open the database and run the event loop until no streams remain."""
        self.db = DB(self.filename, create=False, fast=self.fast)
        try:
            while self.stream_mgr.map:
                try:
                    t0 = time.time()
                    self.once()
                    dt = time.time() - t0
                    if dt > 2.0:
                        # A slow iteration delays keep-alives and item flow.
                        logging.warning('Event loop time = %.1f', dt)
                except Exception:
                    # Keep the loop running; individual failures are logged.
                    logging.exception('Exception in sync manager:')
        except KeyboardInterrupt:
            pass
        finally:
            self.db.close()

    def once(self):
        """Run one iteration of the event loop."""
        # Whether low-priority items are forwarded; read fresh each
        # iteration so it can be changed in the database at runtime.
        self.forward = self.db.get_info('forward', True)

        self.do_eventing()

        if time.time() - self.last_cleaning >= CLEANING_INTERVAL:
            self.cleaning_needed = True

        if self.incoming or self.cleaning_needed:
            self.update_database()

        self.meter.update(self.db.get_mark())

        if time.time() - self.last_maintenance >= MAINTENANCE_INTERVAL:
            self.last_maintenance = time.time()
            self.do_maintenance()

    def do_eventing(self, remaining=EVENTING_TIME):
        """Process network I/O and session updates for ~`remaining` seconds."""
        deadline = time.time() + remaining
        while remaining > 0:
            self.stream_mgr.once(timeout=remaining)
            # Iterate over a snapshot: session.update() can close a
            # timed-out session, whose shutdown() removes it from
            # self.sessions, and mutating a set while iterating it
            # raises RuntimeError.
            for session in list(self.sessions):
                session.update()
            remaining = deadline - time.time()

    def update_database(self):
        """Store received items and perform an incremental cleaning pass."""
        with self.db.transaction():
            now = time.time()

            while self.incoming:
                # If an exception occurs here, for instance because the
                # disk is full, we throw the incoming items away rather
                # than let them accumulate without limit in self.incoming.
                item = self.incoming.pop()
                ttl = item.expiration - now
                # Discard items that are (nearly) expired or whose TTL is
                # implausibly long, allowing 5 minutes of clock skew
                # beyond MAX_TTL.
                if 5 <= ttl <= MAX_TTL + 5 * 60:
                    self.db.put(item)

            self.last_cleaning = now
            self.cleaning_needed = self.db.clean()

    def do_maintenance(self):
        """Reconnect to any configured peer we have no session with."""
        for address in self.db.get_info('peers', []):
            if not any(address in session.addresses
                       for session in self.sessions):
                self.create_session(*address)

    def gen_connection_info(self):
        """Yield a header row, then one info tuple per started session,
        ordered by session start time."""
        sessions = list(self.sessions)
        sessions.sort(key=lambda session: session.stats.start_timestamp)

        yield SyncSession.INFO_LABELS
        for session in sessions:
            if session.started:
                yield session.get_info()

class SyncServer(object):
    """Accepts incoming sync connections on behalf of a SyncManager."""

    def __init__(self, mgr):
        self.mgr = mgr
        self.server = mgr.stream_mgr.create_server()

    def listen(self, address):
        """Bind to `address` and begin accepting connections."""
        self.server.listen(address)
        self.server.accept(self.handle_accept)

    def handle_accept(self, stream_mgr, *args):
        """Handle one incoming connection, then re-arm the accept callback."""
        stream = stream_mgr.create_stream(*args)

        # Refuse the connection when the configured session limit
        # (database-overridable) has already been reached.
        max_sessions = self.mgr.db.get_info('connection_limit',
                                            CONNECTION_LIMIT)
        if len(self.mgr.sessions) >= max_sessions:
            stream.close()
        else:
            SyncSession(self.mgr, stream)

        self.server.accept(self.handle_accept)

class SyncSession(Session):
    """One sync connection to a peer node.

    After the stream is established, each side sends a 'hello' message
    describing itself; once the peer's hello arrives, this session
    starts pushing database items to it via 'put' messages, using a
    stack of pending item numbers and an echo-suppression cache so
    items just received from the peer are not sent straight back.
    Also tracks per-session traffic statistics and enforces a
    keep-alive/timeout policy.
    """

    def __init__(self, mgr, stream):
        Session.__init__(self, stream,
                         handler_prefix='handle_')
        self.mgr = mgr
        self.started = False          # True once startup() succeeds
        self.hello_received = False   # True once the peer's hello arrives

        # Peer attributes learned from its hello message.
        self.name = None
        self.version = None
        self.sync_port = None
        self.http_port = None

        # Item counters and byte-rate meters for status reporting.
        self.items_in = 0
        self.items_out = 0
        self.meter_in = Meter()
        self.meter_out = Meter()

        # High-water marks and timestamps of last observed traffic,
        # used only by keep_alive() (name-mangled to avoid clashing
        # with Session attributes).
        self.__bytes_in = self.__bytes_out = 0
        self.__in_timestamp = self.__out_timestamp = time.time()

        self.mgr.sessions.add(self)

    @property
    def addresses(self):
        # Generator of (host, port) pairs under which this peer is
        # known; used by SyncManager.do_maintenance() to avoid opening
        # duplicate sessions to a configured peer.
        if self.stream.initiating:
            yield self.stream.address   # (hostname, port)
            if self.started:
                yield self.peer_address # (ip address, port)
        else:
            # For accepted connections the peer's source port is
            # ephemeral; its listening port comes from its hello.
            if self.started and self.sync_port is not None:
                yield self.peer_address[0], self.sync_port

    def startup(self, error):
        # Called by the Session framework once the connection attempt
        # completes; `error` is truthy when it failed.
        if error:
            self.debug('Failed.')
            return

        self.info('Established.')

        self.peer_address = self.stream.socket.getpeername()

        # Throttle reads per the database-configurable rate limit.
        rate = self.mgr.db.get_info('rate_limit', RATE_LIMIT)
        self.stream.set_rate_limit_read(rate, rate * RATE_LIMIT_INTERVAL)

        # Echo-suppression cache: hashes of items recently received
        # from this peer (see handle_put / send_some_items).
        self.recent = set()

        # Stack of database item numbers pending transmission; items in
        # (stack_bottom, stack_top] are either on the stack or already
        # sent.  Both start at the current database mark.
        self.stack = []
        self.stack_top = self.stack_bottom = self.mgr.db.get_mark()

        self.started = True

        self.register_function(self.handle_hello)
        self.send_hello()

    def shutdown(self):
        # Called by the Session framework when the connection ends.
        self.mgr.sessions.remove(self)
        if self.started:
            self.info('Terminated.')

    def update(self):
        # Called once per event-loop iteration by SyncManager.
        # Only queue more items when the write buffer has drained.
        if self.hello_received and self.stream.wbuf.size == 0:
            self.send_some_items()

        self.keep_alive()

        self.meter_in.update(self.stats.bytes_in)
        self.meter_out.update(self.stats.bytes_out)

    def keep_alive(self):
        # Track when traffic last flowed in each direction; time out a
        # silent peer and ping an idle connection.
        # NOTE(review): reads stream.bytes_in/out here but stats.bytes_in/out
        # in update() — presumably equivalent counters; confirm in messkit.
        t = time.time()

        if self.stream.bytes_in > self.__bytes_in:
            self.__bytes_in = self.stream.bytes_in
            self.__in_timestamp = t

        if self.stream.bytes_out > self.__bytes_out:
            self.__bytes_out = self.stream.bytes_out
            self.__out_timestamp = t

        if t > self.__in_timestamp + KEEP_ALIVE_TIMEOUT:
            # Nothing received for too long: drop without flushing.
            if self.started:
                self.info('Timed out.')
            self.close(flush=False)
        elif t > self.__out_timestamp + KEEP_ALIVE_INTERVAL:
            # Nothing sent recently: emit an empty hello so the peer's
            # read-side timer keeps getting reset.
            if self.started and self.stream.wbuf.size == 0:
                self.debug('Sending keep-alive hello.')
                self.send('hello')

    def send_hello(self):
        # Introduce ourselves: version, optional node name, our
        # listening ports, the peer's address as we see it, and our
        # clock, so the peer can log the clock delta.
        info = dict()

        info['version'] = VERSION

        try:
            info['name'] = self.mgr.db.get_info('name')
        except LookupError:
            pass

        if self.mgr.daemon:
            info['sync_port'] = self.mgr.daemon.sync_port
            info['http_port'] = self.mgr.daemon.http_port

        info['your_address'] = self.peer_address

        info['timestamp'] = time.time()

        self.send('hello', **info)

    def handle_hello(self, name=None, sync_port=None, http_port=None,
                     your_address=None, version=None, timestamp=None,
                     **future_expansion):
        # Record what the peer tells us about itself.  All fields are
        # optional (keep-alive hellos are empty), and unknown fields
        # are absorbed by **future_expansion for forward compatibility.
        if not self.hello_received:
            self.hello_received = True
            # Only accept 'put' messages after the handshake.
            self.register_function(self.handle_put)

        if timestamp:
            self.info('Clock delta = %.1f', timestamp - time.time())

        if name:
            self.name = name
            self.debug('Name = "%s"', name)

        if version:
            self.version = str(version)
            self.debug('Version = %s', version)

        if sync_port:
            self.sync_port = sync_port

        if http_port:
            self.http_port = http_port

        if your_address:
            # Learn our own outward-facing IP address, ignoring loopback.
            ip = your_address[0]
            if not ip.startswith('127.'):
                self.mgr.outward_ip_addresses.add(ip)

    def handle_put(self, data):
        # Receive one item from the peer and queue it for storage.
        try:
            item = recreate_item(**data)
            item.validate()
        except (TypeError, ValueError):
            logging.exception('Error decoding received item.')
        else:
            self.items_in += 1
            self.mgr.incoming.add(item)

            # Remember the item's hash so we don't echo it back; evict
            # arbitrary entries (set.pop) once the cache is full.
            self.recent.add(hash(item))
            while len(self.recent) > ECHO_SUPPRESSION_CACHE_SIZE:
                self.recent.pop()

    def send_some_items(self):
        # Push up to MAX_ITEMS_TO_SEND database items to the peer,
        # maintaining the stack of pending item numbers.
        # When forwarding is disabled, only priority >= 1 items are sent.
        min_priority = 0 if self.mgr.forward else 1

        # Add new items to the top of the stack of unsent items.
        nums = self.mgr.db.list_from(self.stack_top, priority=min_priority)
        if nums:
            self.stack.extend(nums)
            self.stack_top = nums[-1]

        # Lazily fill the stack with unsent items from below the bottom.
        if len(self.stack) < MIN_STACK_SIZE and self.stack_bottom > 0:
            nums = self.mgr.db.list_to(self.stack_bottom, MIN_STACK_SIZE,
                                       priority=min_priority)
            if nums:
                self.stack[:0] = nums
                self.stack_bottom = nums[0] - 1
            else:
                # Nothing older remains; the history is fully covered.
                self.stack_bottom = 0

        # Prevent the stack from growing without bound by truncating
        # it from the bottom.  Older items that we are unable to track
        # never get sent.
        if len(self.stack) > MAX_STACK_SIZE:
            excess = len(self.stack) - MAX_STACK_SIZE
            # Drop 10% extra to avoid truncating again immediately.
            sacrifice = excess + MAX_STACK_SIZE // 10
            self.stack[:sacrifice] = []
            if self.stack_bottom > 0:
                self.stack_bottom = 0
                self.warning('Sync failed on stack overflow.')
            else:
                self.warning('Stack overflow.')

        # Now pop some items off the top of the stack and transmit them.
        count = 0
        while self.stack and count < MAX_ITEMS_TO_SEND:
            num = self.stack.pop()
            item = self.mgr.db.get_by_num(num)
            if item:
                if hash(item) in self.recent:
                    # Echo suppression: this item just came from this
                    # peer, so don't send it back.
                    self.recent.discard(hash(item))
                elif item.type == 'singleton' and Version(self.version) < Version('2.0'):
                    continue    # XXX temporary migration hack
                else:
                    # The following is a no-op if the stream has been closed.
                    # Unfortunately, items_out gets incremented nevertheless.
                    self.send('put', item.to_dict())
                    self.items_out += 1
                    count += 1

    # Column headers matching the tuples produced by get_info(); yielded
    # first by SyncManager.gen_connection_info().
    INFO_LABELS = ('Kind', 'IP address', 'Peering port', 'Access port',
                   'KB/sec in', 'KB/sec out', 'Items in', 'Items out',
                   'Backlog')

    def get_info(self):
        # Build one status row for this session (see INFO_LABELS).
        if self.stream.initiating:
            kind = 'Initiating'
        else:
            kind = 'Accepting'

        host = self.peer_address[0]

        # A trailing '+' flags that older items below stack_bottom have
        # not yet been pulled into the stack.
        backlog = str(len(self.stack))
        if self.stack_bottom > 0:
            backlog += '+'

        return (kind, host,
                self.sync_port, self.http_port,
                self.meter_in.get_rate(1000),
                self.meter_out.get_rate(1000),
                self.items_in, self.items_out, backlog)

    def warning(self, msg, *args):
        """Log a warning prefixed with this session's description."""
        logging.warning('%s: ' + msg, self.get_description(), *args)

    def info(self, msg, *args):
        """Log an info message prefixed with this session's description."""
        logging.info('%s: ' + msg, self.get_description(), *args)

    def debug(self, msg, *args):
        """Log a debug message prefixed with this session's description."""
        logging.debug('%s: ' + msg, self.get_description(), *args)
