# XXX make all workers idempotent
import datetime
import logging
import operator
import os
import pickle
import re
import urllib2
from google.appengine.api import taskqueue
from google.appengine.ext import db, webapp
from google.appengine.ext.webapp import util
import simplejson as json
import cheeseshop
from main import DEBUG, Handler, normalized_version
import models


class CreateIndexHandler(Handler):

    """Dispatch handling the creation of the project index."""

    def post(self):
        """Create entities for projects that already exist on the
        Cheeseshop."""
        db.run_in_transaction(self.txn)

    def txn(self):
        """Pop one pending project, enqueue its add-project task, and
        re-enqueue this worker while projects remain."""
        # XXX Enqueue up to four projects at once
        details = models.ProjectDetails.singleton()
        pickled = details.new_projects_pickle
        if not pickled:
            return
        pending = pickle.loads(pickled)
        name = pending.pop(0)
        taskqueue.add(transactional=True, **self.add_project(name))
        if not pending:
            details.new_projects_pickle = None
        else:
            details.new_projects_pickle = pickle.dumps(pending)
            taskqueue.add(url='/worker/create_index', transactional=True)
        details.put()

    def add_project(self, project_name):
        """Build the taskqueue.add() keyword arguments for adding
        *project_name* during an index refresh."""
        params = {'project': project_name}
        self.json_dict(params)
        return {'url': '/worker/add_project',
                'params': params,
                'queue_name': 'pypi-queue'}


class AddProjectHandler(Handler):

    """Add a project to the index for the first time.

    It is assumed the project already existed on the Cheeseshop and thus has a
    history to process.

    """

    def post(self):
        """Add a "new" project to the website index."""
        project_name = self.request_json('project')
        details = self.project_version_details(project_name)
        if details:
            db.run_in_transaction(self.txn, project_name, *details)
        else:
            db.run_in_transaction(self.txn, project_name,
                                  None, None, None, None)

    def project_version_details(self, project_name):
        """Figure out the version details for a pre-existing project.

        Returns (classifiers, (latest release, downloads),
        (largest release, downloads), timestamp), or None when the
        Cheeseshop has nothing for the project.

        """
        # XXX rework to use Project.data_update() and .urls_update()
        # XXX rework to batch version updates and then send to another worker
        #     which does the batch fetch of version details to populate entity
        refresh = cheeseshop.project_refresh(project_name)
        if refresh is None:
            return None
        (latest_release, latest_details), version_stuff, timestamp = refresh
        classifiers = latest_details.get('classifiers', [])
        assert latest_release == version_stuff[-1][0]
        totals = [[version, sum(url.get('downloads', 0) for url in urls)]
                  for version, urls in version_stuff]
        latest_downloads = totals[-1][1]
        # Stable sort by download count; the last entry afterwards is the
        # most-downloaded release.
        totals.sort(key=lambda pair: pair[1])
        largest_release, largest_downloads = totals[-1]
        return (classifiers, (latest_release, latest_downloads),
                (largest_release, largest_downloads), timestamp)

    def txn(self, project_name, classifiers, latest_details, largest_details,
            timestamp):
        """Create a new project entity with the relevant version details
        set.

        * project_name: str
        * classifiers: [str]
            Classifiers for the latest release.
        * latest_details: (str, int)
            The latest release and its download total.
        * largest_details: (str, int)
            The largest release and its download total.
        * timestamp: datetime.datetime

        """
        # XXX make so that this can act as a refresh of a project's details w/o
        #     losing its py3k details?
        entity = models.Project.create(project_name)
        if latest_details:
            entity.latest_release, entity.latest_downloads = latest_details
        if largest_details:
            entity.largest_release, entity.largest_downloads = largest_details
        if classifiers:
            entity.py3k_classifier(classifiers)
        entity.put()

    def txn_brand_spanking_new(self, project_name):
        """Create a bare entity for a never-before-seen project."""
        models.Project.create(project_name).put()


class LatestChangesHandler(Handler):

    """Sort and dispatch changes since the last sync with the Cheeseshop."""

    def get(self):
        """Enqueue the update from the Cheeseshop.

        Expected to be called from a cron job.

        """
        taskqueue.add(url='/worker/latest_changes', method='POST')

    def post(self):
        """Fetch and sort/group project updates."""
        changes = cheeseshop.updates()
        now = datetime.datetime.utcnow()
        db.run_in_transaction(self.txn, now, self.group(changes))

    def newest_release(self, old, new):
        """Return whichever of *old*/*new* is the newer release."""
        if not old:
            return new
        if not new:
            return old
        if normalized_version(new) >= normalized_version(old):
            return new
        return old

    def group(self, updates):
        """Group project updates together.

        Each project gets a dict that flags specifically what needs updating:

        * create: bool
            New project, so need to create a new entity.
        * update: str
            Version has received a relevant update.
        * Python 3 file: (str, str)
            The project added a Python 3-specific file.
        * remove: str
            A version of the project has been removed.

        """
        # Do not handle: docupdate, remove <...> <...>,
        # update <non-classifier>, add <non-3.x> file <...>
        groupings = {}
        py3k_added_file = re.compile(r'add (3\.\d) file (.+)')
        updates.sort(key=operator.itemgetter(2))  # Sort on timestamp
        # Without tracking every *visible* release and then deleting
        # appropriately when a release is removed -- essentially becoming a
        # PyPI mirror -- there is no reason to track removals and instead do
        # occasional sanity checks. So keep this simple.
        missing = object()
        for name, version, timestamp, action in updates:
            if action == 'create':
                groupings.setdefault(name, {})['create'] = True
            elif action.startswith('update') and 'classifiers' in action:
                groupings.setdefault(name, {})['update'] = version
            elif action == 'new release':
                changes = groupings.setdefault(name, {})
                changes['update'] = self.newest_release(
                    changes.get('update'), version)
            elif action == 'remove':
                changes = groupings.get(name)
                if changes is None:
                    continue
                # Only discard the pending update if it is for the exact
                # version being removed.
                if changes.get('update', missing) == version:
                    del changes['update']
                    if not changes:
                        del groupings[name]
            else:
                match = py3k_added_file.match(action)
                if match:
                    groupings.setdefault(name, {})['Python 3 file'] = \
                        match.groups()
        return groupings

    def txn(self, timestamp, groupings):
        """Dispatch the project updates to be processed and record the fact
        that the project index has been updated."""
        params = {'updates': groupings}
        self.json_dict(params)
        taskqueue.add(url='/worker/update_projects', params=params,
                      transactional=True, queue_name='pypi-queue')
        details = models.ProjectDetails.singleton()
        details.latest_pypi_changes = timestamp
        details.put()


class UpdateProjectsHandler(Handler):

    """Process the updates as sent over from the Cheeseshop.

    See LatestChangesHandler.group() for possible updates.

    """

    def post(self):
        """Batch-fetch Cheeseshop data for the updates and commit them."""
        groupings = self.request_json('updates')
        if not groupings:
            return
        db.run_in_transaction(self.txn, groupings, self.prep(groupings))

    def prep(self, groupings):
        """Do the requisite Cheeseshop requests in a batch.

        Returns {name: {version: {'data'|'urls': result}}} for the projects
        handled this run.

        """
        # Cap how many projects are handled in any one task run as the
        # Cheeseshop can handle 100 batched calls and each project can end
        # up triggering up to 3 calls.
        to_handle = groupings.keys()[:20]
        requesting = []
        for name in to_handle:
            changes = groupings[name]
            if 'update' in changes:
                requesting.append((name, changes['update'], 'data'))
                requesting.append((name, changes['update'], 'urls'))
            if 'Python 3 file' in changes:
                requesting.append((name, changes['Python 3 file'][0], 'urls'))
        results = cheeseshop.batch_request(requesting)
        returning = {}
        for (name, version, want), result in zip(requesting, results):
            info = returning.setdefault(name, {})
            if want == 'urls':
                for thing in result:
                    try:
                        # XXX can use datetime.date.toordinal()
                        del thing['upload_time']
                    except KeyError:
                        pass
            elif want == 'data':
                try:
                    # Dead weight
                    del result['description']
                except KeyError:
                    pass
            info.setdefault(version, {})[want] = result
        return returning

    def txn(self, groupings, pypi_prep):
        """Commit the changes.

        Projects handled this run are popped from *groupings*; anything left
        over is re-enqueued for another run of this worker.

        """
        # Record the starting size so the sanity check below can verify that
        # progress was made before re-enqueueing (otherwise the task could
        # loop forever); this name was previously never defined.
        initial_grouping_size = len(groupings)
        project_details = models.ProjectDetails.singleton()
        latest_changes_pickle = project_details.latest_changes_pickle
        if latest_changes_pickle:
            latest_changes = pickle.loads(latest_changes_pickle)
        else:
            latest_changes = {}
        sending_everything = {}
        for name, info in pypi_prep.iteritems():
            try:
                changes = groupings.pop(name)
            except KeyError:
                continue
            sending = {}
            # Get/create entity.
            if 'create' in changes:
                sending['create'] = True
            # Add Python 3 support if a Python 3 file was found.
            if 'Python 3 file' in changes:
                v, f = changes['Python 3 file']
                sending.setdefault(v, {})['Python 3 file'] = f
            # Freshen up the entity based on info from the Cheeseshop.
            for version, stuff in info.iteritems():
                if 'data' in stuff and stuff['data']:
                    sending.setdefault(version, {})['data'] = stuff['data']
                if 'urls' in stuff and stuff['urls']:
                    sending.setdefault(version, {})['urls'] = stuff['urls']
            sending_everything[name] = sending
        latest_changes.update(sending_everything)
        project_details.latest_changes_pickle = pickle.dumps(latest_changes)
        project_details.put()
        # If there is still stuff to update, then keep on going.
        if groupings and pypi_prep:
            assert len(groupings) < initial_grouping_size
            params = {'updates': groupings}
            self.json_dict(params)
            taskqueue.add(url='/worker/update_projects', params=params,
                          queue_name='pypi-queue', transactional=True)

        # Send off stuff to update the entities in another queue, one by one.
        else:
            taskqueue.add(url='/worker/project_update_dispatch',
                          transactional=True)


class ProjectUpdateDispatchHandler(Handler):

    """Fire off individual project updates."""

    def post(self):
        """Dispatch one queued project update.

        If the update payload was too large to enqueue transactionally,
        apply it directly here, outside the transaction.

        """
        returned = db.run_in_transaction(self.txn)
        if returned:
            name, update = returned
            handler = ProjectUpdateHandler()
            handler.txn(name, update)

    def txn(self):
        """Pop one project's pending changes and enqueue a task to apply them.

        Returns (name, update) only when the payload is too large for the
        task queue, so post() can apply it outside the transaction.

        """
        entity = models.ProjectDetails.singleton()
        if not entity.latest_changes_pickle:
            return
        stuff = pickle.loads(entity.latest_changes_pickle)
        try:
            name, update = stuff.popitem()
        except KeyError:
            return
        entity.latest_changes_pickle = pickle.dumps(stuff) if stuff else None
        entity.put()
        params = {'name': name, 'update': update}
        self.json_dict(params)
        if stuff:
            # More projects remain; keep the dispatch loop going.
            taskqueue.add(url='/worker/project_update_dispatch',
                          transactional=True)
        try:
            taskqueue.add(url='/worker/project_update', params=params,
                          transactional=True)
        except taskqueue.TaskTooLargeError:
            # Lazy %-args (no eager formatting) and fixed "transation" typo.
            logging.warning('project update for %s is too large; updating '
                            'outside of the transaction', name)
            return name, update


class ProjectUpdateHandler(Handler):

    """Update a project based on data downloaded by /worker/update_projects."""

    def post(self):
        """Apply the posted per-project update inside a transaction."""
        name = self.request_json('name')
        update = self.request_json('update')
        db.run_in_transaction(self.txn, name, update)

    def txn(self, name, stuff):
        """Apply *stuff* (per-version update info) to the project's entity."""
        entity = models.Project.get_by_key_name(name)
        # Taking a "I screwed up" view instead of being very strict about this
        if not entity:
            entity = models.Project.create(name)
        stuff.pop('create', None)
        for version, info in stuff.iteritems():
            if 'Python 3 file' in info:
                note = '%s file specified to support Python %s' % (
                    info['Python 3 file'], version)
                entity.py3k_support_added(note)
            if 'data' in info:
                entity.data_update(version, info['data'])
            if 'urls' in info:
                entity.urls_update(version, info['urls'])
        entity.put()


class UpdateDownloadsHandler(Handler):

    """Update the download totals for projects."""

    def get(self):
        """Start the process.

        Expected to be called as a cron job.

        """
        taskqueue.add(url='/worker/update_downloads', method='POST',
                      queue_name='pypi-queue')

    def post(self):
        """Update the download count for some projects."""
        cursor = self.request.get('start_cursor', None)
        query = models.Project.all()
        if cursor:
            query.with_cursor(cursor)
        if not query.count(1):  # All done!
            return
        urls, new_cursor = self.prep(query)
        # Since downloads are updated daily, don't worry about a failure.
        self.txn(urls, new_cursor)

    def prep(self, query):
        """Query the Cheeseshop for the appropriate information.

        Returns ({name: {version: urls}}, cursor); the cursor is None when
        the query was exhausted, ending the update run.

        """
        requests = []
        query_iter = iter(query)
        premature_ending = False
        # PyPI can take a 100 queries, but a project can add up to two queries.
        # Also need enough task queue addition space to continue with the
        # query.
        while len(requests) < 98:
            try:
                entity = query_iter.next()
                requests.append((entity.name, entity.latest_release, 'urls'))
                if entity.latest_release != entity.largest_release:
                    requests.append((entity.name, entity.largest_release,
                                     'urls'))
            except StopIteration:
                premature_ending = True
                break
        results = cheeseshop.batch_request(requests)
        returning = {}
        for (name, version, _), result in zip(requests, results):
            for thing in result:
                # Do what we can to cut the size of the data down to fit
                # within 10K.  pop() each key individually; a single try
                # around chained del statements would bail on the first
                # missing key and leave the remaining dead weight behind.
                for key in ('url', 'filename', 'md5_digest', 'has_sig',
                            'comment_text'):
                    thing.pop(key, None)
                try:  # JSON doesn't like datetime
                    upload_time = thing['upload_time']
                except KeyError:
                    pass
                else:
                    if upload_time is None:
                        del thing['upload_time']
                    else:
                        thing['upload_time'] = upload_time.toordinal()
            returning.setdefault(name, {})[version] = result
        return returning, query.cursor() if not premature_ending else None

    def txn(self, urls_data, cursor):
        """Update the project download counts and continue the update process
        (if necessary)."""
        for name, details in urls_data.iteritems():
            params = {'name': name, 'update': details}
            self.json_dict(params)
            taskqueue.add(url='/worker/download_update', params=params)
        if cursor:
            taskqueue.add(url='/worker/update_downloads',
                          params={'start_cursor': cursor},
                          queue_name='pypi-queue')


class DownloadUpdateHandler(Handler):

    """Update the download statistics for a project."""

    def post(self):
        """Apply the posted download-count update inside a transaction."""
        name = self.request_json('name')
        update = self.request_json('update')
        db.run_in_transaction(self.txn, name, update)

    def txn(self, name, urls_dict):
        """Update a project's download stats."""
        entity = models.Project.get_by_key_name(name)
        if entity is None:
            # The project may have been removed between the dispatch and
            # this task running; previously this raised AttributeError and
            # caused the task to retry forever.
            logging.warning('no entity for project %s; skipping download '
                            'update', name)
            return
        for version, urls in urls_dict.iteritems():
            entity.urls_update(version, urls)
        entity.put()


class UpdatePollHandler(Handler):

    """Refresh the Python 3 poll counts for the most-voted projects."""

    def get(self):
        """Enqueue the poll update (expected to be called from a cron job)."""
        taskqueue.add(url='/worker/update_poll', method='POST')

    def post(self):
        """Fetch the poll results and store the top vote counts."""
        poll_data = self.poll_data()
        sorted_data = sorted(poll_data.iteritems(), key=operator.itemgetter(1),
                             reverse=True)
        self.store_data(sorted_data)

    def poll_data(self):
        """Fetch and decode the Python 3 poll results as JSON."""
        json_file = urllib2.urlopen('http://www.python.org/3kpoll?json')
        try:
            json_data = json_file.read()
        finally:
            # urlopen results are not context managers in Python 2; close
            # explicitly so the underlying socket is not leaked.
            json_file.close()
        return json.loads(json_data)

    def store_data(self, sorted_data):
        """Record the poll count for the top 50 projects with entities."""
        poll_data = sorted_data[:50]
        for name, count in poll_data:
            entity = models.Project.get_by_key_name(name)
            if entity is None:
                continue
            entity.poll_count = count
            entity.put()


# URL-to-handler routing table for the worker WSGI application.
urls = [('/worker/create_index', CreateIndexHandler),
        ('/worker/add_project', AddProjectHandler),
        ('/worker/latest_changes', LatestChangesHandler),
        ('/worker/update_projects', UpdateProjectsHandler),
        ('/worker/project_update_dispatch', ProjectUpdateDispatchHandler),
        ('/worker/project_update', ProjectUpdateHandler),
        ('/worker/update_downloads', UpdateDownloadsHandler),
        ('/worker/download_update', DownloadUpdateHandler),
        ('/worker/update_poll', UpdatePollHandler),
       ]


def application():
    """Build the WSGI application for the worker URL handlers."""
    app = webapp.WSGIApplication(urls, debug=DEBUG)
    return app

def main():
    """Run the worker WSGI application under the CGI adapter."""
    util.run_wsgi_app(application())


# CGI-style entry point used by App Engine.
if __name__ == '__main__':
    main()
