#!/usr/bin/env python
"""Code to find the 'root revision' of a remote hg repo.

The 'root revision' in this case is the oldest revision which has no
parents - in some cases, this serves as a useful identifier for
determining which repositories are related.
"""
import logging
import optparse
import os
import pickle
import sys

from mercurial import commands
from mercurial import error
from mercurial import hg
from mercurial import node
from mercurial import ui

logger = logging.getLogger(__name__)

def find_hg_repos(path):
    """Yield every directory at or below *path* that is an hg repository.

    A directory is considered a repository when it directly contains a
    ``.hg`` control directory.
    """
    for dirpath, dirnames, _filenames in os.walk(path):
        if '.hg' in dirnames:
            yield dirpath


def local_root_revisions(repos):
    """Map remote URLs to root-revision shas using local repositories.

    Every URL found in the ``[paths]`` section of a local repo's
    ``.hg/hgrc`` is associated with the hex sha of that repo's
    revision 0, on the theory that a repo's remotes share its root.
    Returns a dict of {url: hex sha}.
    """
    guesses = {}
    hgui = ui.ui()
    for repo_path in repos:
        repo = hg.repository(hgui, repo_path)
        # Revision 0's sha is the same for every remote of this repo.
        root_sha = node.hex(repo[0].node())
        for _name, remote_url in repo.ui.configitems('paths'):
            guesses[remote_url] = root_sha
    return guesses


def find_root_revision(url, predictions):
    """Given a URL and some predictions, try to determine the root rev.

    Strategy, in order:

    1. The equivalent of ``hg id -r 0 -R $url``; this works whenever the
       remote exposes revlog-style numeric revisions.
    2. If that fails, look *url* up in *predictions* and try the sha
       recorded there; a successful lookup means the prediction was right.
    3. Failing that, brute-force every remaining candidate sha from
       *predictions*.

    Returns the hex sha of the root revision, or None if every attempt
    failed.

    NOTE: we could probably do slightly better between the second and
    third attempts, e.g. ranking candidate URLs by Levenshtein distance
    before resorting to brute force.
    """
    remote = hg.repository(ui.ui(), url)

    def try_lookup(rev):
        # Return the hex sha for rev, or None when the remote rejects it.
        try:
            return node.hex(remote.lookup(rev))
        except error.RepoError:
            return None

    root = try_lookup(0)
    if root is not None:
        return root
    logger.info('rev 0 lookup failed, using fallbacks')

    best_guess = predictions.get(url, None)
    if best_guess:
        logger.info('lookup %s as first guess', best_guess)
        root = try_lookup(best_guess)
        if root is not None:
            return root

    for candidate in set(predictions.values()) - set([best_guess]):
        logger.info('lookup %s', candidate)
        root = try_lookup(candidate)
        if root is not None:
            return root
    return None


def main(argv=sys.argv):
    os.environ['HGRCPATH'] = '/dev/null'
    p = optparse.OptionParser()
    p.add_option('-l', '--repo-search-path', default='.',
                 dest='repos_search_path',
                 help='Directory in which we should find Mercurial repositories.')
    p.add_option('-r', '--remote-repo', default='',
                 dest='remote_repo_url',
                 help='Remote repo whose root commit we want to identify.')
    p.add_option('-c', '--cache-file', default='/tmp/what-remote-cache.pck',
                 dest='cache_file',
                 help='Cache file where we can store remote root node IDs.')
    p.add_option('-v', '--verbose', default=False, action='store_true',
                 dest='verbose',
                 help='Display some logging information.')
    p.add_option('-d', '--debug', default=False, action='store_true',
                 dest='debug',
                 help='Display lots of logging information.')
    opts, args = p.parse_args(argv)

    if opts.debug:
      log_level = logging.DEBUG
    elif opts.verbose:
      log_level = logging.INFO
    else:
      log_level = logging.WARNING
    logging.basicConfig(level=log_level, stream=sys.stderr)

    cache = {}
    if os.path.exists(opts.cache_file):
        try:
            cache = pickle.load(open(opts.cache_file))
            logger.debug('found cache: %r', cache)
        except EOFError:
            pass
    repos = list(find_hg_repos(opts.repos_search_path))
    predictions = local_root_revisions(repos)
    logger.debug('predictions using local repositories: %r', predictions)
    predictions.update(cache)
    if opts.remote_repo_url:
        found = find_root_revision(opts.remote_repo_url, predictions)
        if found:
            cache[opts.remote_repo_url] = found
            pickle.dump(cache, open(opts.cache_file, 'w'))
            print 'root rev is', found
        else:
            print 'root rev unknown'


if __name__ == '__main__':
    main()
