"""Backend to update project information from the Cheeseshop.

The expectation is that this backend will be launched by a daily cron job to
update every project. It is done daily so that stats do not get too outdated
w/o wasting quota on more frequent updates that are not needed (eg. getting
every change since a specified time which can include individual updates to
classifiers, etc.).

Because staying within free quota is a key design restraint, everything is done
as asynchronously and quickly as possible (within reason) in order to minimize
wasting instance-hours waiting for things. This means that (nearly) all network
requests are done asynchronously so as to not waste time waiting for the
Cheeseshop to reply. Calls to the Cheeseshop are done as XML-RPC multi-call
requests (unfortunately the JSON interface does not support multi-calls).
Everything is also done using callbacks (which, while not threaded, are
assumed to make the control flow easier to reason about) so as to not waste
time figuring out what to do next.

"""
from __future__ import unicode_literals

import _strptime;import time  # Works around threaded race condition
import datetime
import functools
import logging
import operator
import Queue as queue
import time
import xmlrpclib

import webapp2

from google.appengine import runtime
from google.appengine.api import taskqueue
from google.appengine.api import urlfetch
from google.appengine.ext import db

import distutils2.version
import distutils2.errors

import models
import util


class AsyncMultiXMLRPC(object):

    """Make asynchronous, multi-call, XML-RPC calls.

    Class exists entirely because functools.partial() objects won't let you
    modify their 'args' attribute and the callback to an urlfetch.RPC object is
    not passed the RPC object itself.

    """

    CALL_LIMIT = 100  # Cap on a multi-call

    def __init__(self, fxn_name, xmlrpc_args, callback, callback_args,
                 async_queue):
        """Fire off a system.multicall of fxn_name, once per entry in
        xmlrpc_args.

        Each entry in xmlrpc_args is the params sequence for one individual
        call. This instance is registered as the urlfetch RPC callback (see
        __call__), which in turn invokes ``callback`` with
        (async_queue, callback_args, results). The RPC object is placed on
        async_queue so the main loop can wait on it.

        """
        assert len(xmlrpc_args) <= self.CALL_LIMIT

        self.callback = callback
        self.callback_args = callback_args
        # The instance itself is the callback: __call__ runs when the main
        # loop waits on this RPC.
        self.rpc = urlfetch.create_rpc(deadline=30, callback=self)
        self.async_queue = async_queue

        # Build the system.multicall payload: one {methodName, params}
        # struct per individual call.
        multi_args = []
        for arg in xmlrpc_args:
            multi_args.append({'methodName': fxn_name, 'params': arg})
        xmlrpc_call = xmlrpclib.dumps((multi_args,), 'system.multicall')
        urlfetch.make_fetch_call(self.rpc, 'http://pypi.python.org/pypi',
                                 payload=xmlrpc_call, method=urlfetch.POST,
                                 headers={'Content-Type': 'text/xml'})
        # Don't put the RPC object in the queue before there is actually
        # something to wait on.
        async_queue.put_nowait(self.rpc)

    def __call__(self):
        """Call the callback with its arguments and data received from the
        XML-RPC call."""
        args = [self.async_queue]
        args.append(self.callback_args)
        # Main loop's wait() guarantees no errors.
        response = self.rpc.get_result()
        # TODO check for XML-RPC call errors
        data = []
        # Response comes back as (([[[result]], ...],), None)
        data_lists = xmlrpclib.loads(response.content, use_datetime=True)[0][0]
        for data_list in data_lists:
            # Unfortunately all basestrings are str and not unicode, and in
            # order to fix that problem would require unpacking the list and
            # reconstructing it.
            data.append(data_list[0])
        args.append(data)
        self.callback(*args)


def group_by_len(content, length):
    """Split the items of 'content' into consecutive batches of at most
    'length' items each; the final batch holds any remainder."""
    items = list(content)
    return [items[start:start + length]
            for start in range(0, len(items), length)]


def project_names():
    """Return every project name registered on the Cheeseshop.

    Unlike the rest of this backend, this call blocks: nothing else can
    proceed without the full list.

    """
    proxy = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    return proxy.list_packages()


def process_releases(async_queue, project_names, data):
    """Pick the newest sane release for every project in project_names and
    chain into process_metadata via a 'release_data' multi-call."""
    args = []
    for name, project_versions in zip(project_names, data):
        parsed = []
        for version in project_versions:
            try:
                normalized = distutils2.version.NormalizedVersion(
                        version, error_on_huge_major_num=False)
            except distutils2.errors.IrrationalVersionError:
                # Don't care about ridiculous version numbers.
                continue
            parsed.append((version, normalized))
        # Projects with no sane version numbers are skipped entirely.
        if parsed:
            newest_version = max(parsed, key=operator.itemgetter(1))[0]
            args.append((name, newest_version))
    AsyncMultiXMLRPC('release_data', args, process_metadata, args, async_queue)


def process_metadata(async_queue, project_releases, data):
    """Pair each (name, release) in project_releases with its fetched
    metadata and chain into process_files via a 'release_urls' multi-call."""
    # Technically re-packaging isn't required, but it keeps later callbacks
    # easier to reason about.
    callback_args = [(name, version, metadata)
                     for (name, version), metadata
                     in zip(project_releases, data)]
    AsyncMultiXMLRPC('release_urls', project_releases, process_files,
                     callback_args, async_queue)


def process_files(async_queue, project_details, data):
    """Process release URL details for a project based on (name, release,
    metadata) triples in project_details, then persist all entities.

    This is the end of the callback chain; async_queue is unused here but
    kept for signature compatibility with AsyncMultiXMLRPC callbacks.

    """
    # Fixed: removed the unused local 'call_limit'.
    entities = []
    for (name, version, metadata), files_data in zip(project_details, data):
        entities.append(process(name, version, metadata, files_data))
    db.put(entities)  # TODO Async?


def process(name, version, metadata, file_data):
    """Return the updated/created entity for one specific project release."""
    entity = models.ProjectModel.get_for_mutating(name, version)
    entity.check_classifiers(metadata)
    entity.update_downloads(file_data)
    return entity


def enqueue(project_names):
    """Add a push-queue task carrying the given project names (one per
    line) for the update handler to process."""
    payload = '\n'.join(project_names)
    taskqueue.add(url='/_queue/update', method='POST', payload=payload,
                  queue_name='Cheeseshop-updates')


# How much a single push queue task can handle w/o going over quota.
UPDATE_BATCH_SIZE = 3 * AsyncMultiXMLRPC.CALL_LIMIT

def enqueue_updates(limit=None, name=None):
    """Fetch the list of projects on the Cheeseshop and enqueue tasks to
    update their details.

    If 'name' is not None, only that single project is considered instead
    of fetching the full list. If 'limit' is not None, limit the number of
    projects enqueued to that number.

    Returns the list of project names that were enqueued.

    """
    if name is not None:
        projects = [name]
    else:
        projects = project_names()
    if limit is not None:
        projects = projects[:limit]
    logging.info('Updating {} projects'.format(len(projects)))
    # Fixed: dropped the unused enumerate() index and the dead
    # 'projects_enqueued' counter, which was incremented but never read.
    for batch in group_by_len(projects, UPDATE_BATCH_SIZE):
        enqueue(batch)
    return projects


class StartHandler(webapp2.RequestHandler):

    """Request handler to begin enqueuing the tasks that update project
    details."""

    def get(self):
        raw_limit = self.request.get('limit', None)
        name = self.request.get('name', None)
        limit = int(raw_limit) if raw_limit is not None else None
        projects = enqueue_updates(limit, name)
        self.response.out.write('<br>'.join(projects))


def update_projects(project_names):
    """Update every project listed in project_names.

    Kicks off the chain of asynchronous multi-call XML-RPC requests
    (package_releases -> release_data -> release_urls) and then drains the
    queue of outstanding RPCs. Callbacks running during wait() may enqueue
    further RPCs, which the drain loop also picks up.

    """
    def batch_call(batch_args):
        """Fire off an XML-RPC batch call to start things going."""
        # Fixed: this helper previously ignored its parameter and relied on
        # the enclosing loop variable 'batch' by closure.
        xmlrpc_args = [(project_name,) for project_name in batch_args]
        AsyncMultiXMLRPC('package_releases', xmlrpc_args,
                         process_releases, batch_args, async_queue)

    async_queue = queue.Queue()
    for batch in group_by_len(project_names, AsyncMultiXMLRPC.CALL_LIMIT):
        batch_call(batch)
    rpc_waits = 0
    try:
        while True:
            rpc = async_queue.get_nowait()
            try:
                rpc.wait()
            except urlfetch.DownloadError as exc:
                logging.warning('an RPC call timed out: {}'.format(str(exc)))
            except runtime.DeadlineExceededError as exc:
                logging.warning('an RPC call triggered a DeadlineExceededError: '
                             '{}'.format(str(exc)))
            else:
                rpc_waits += 1
    except queue.Empty:
        pass
    # Fixed: 'rpc_waits' was tallied but never used; report it.
    logging.info('{} RPC calls completed'.format(rpc_waits))


class UpdateHandler(webapp2.RequestHandler):

    """Request handler for testing purposes of backends."""

    def post(self):
        # The task payload is one project name per line (see enqueue()).
        names = self.request.body.splitlines()
        update_projects(names)


# WSGI application: /_queue/start kicks off enqueuing; /_queue/update is the
# push-queue task endpoint that performs the actual updates.
app = webapp2.WSGIApplication([('/_queue/start', StartHandler),
                               ('/_queue/update', UpdateHandler),
                              ],
                               debug=util.DEBUG)
