Dataset Viewer
Auto-converted to Parquet

Column             Type             Values / length range
-----------------  ---------------  ---------------------
repo               stringclasses    283 values
pull_number        int64            5 to 44.1k
instance_id        stringlengths    13 to 45
issue_numbers      sequencelengths  1 to 4
base_commit        stringlengths    40 to 40
patch              stringlengths    179 to 224k
test_patch         stringlengths    94 to 7.54M
problem_statement  stringlengths    4 to 256k
hints_text         stringlengths    0 to 294k
created_at         timestamp[us]    -
version            stringclasses    1 value
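Each record below is one conda/conda pull request in this layout: the gold patch, the test patch, and the originating issue text. As a rough sketch of how a Parquet-backed dataset like this might be inspected locally — the dataset id `org/dataset-name` and the `train` split are placeholders, not taken from this page:

```
# Hypothetical loading sketch; substitute the real dataset id from the hosting page.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train")  # placeholder id and split

# Each record mirrors the schema above.
row = ds[0]
print(row["instance_id"])          # e.g. "conda__conda-620"
print(row["issue_numbers"])        # e.g. ["599"]
print(row["problem_statement"][:200])
```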
repo: conda/conda
pull_number: 620
instance_id: conda__conda-620
issue_numbers: ["599"]
base_commit: c453be49e865297bf12858548a6b3e7891a8cb43
diff --git a/conda/resolve.py b/conda/resolve.py --- a/conda/resolve.py +++ b/conda/resolve.py @@ -30,6 +30,11 @@ def normalized_version(version): return version +class NoPackagesFound(RuntimeError): + def __init__(self, msg, pkg): + super(NoPackagesFound, self).__init__(msg) + self.pkg = pkg + const_pat = re.compile(r'([=<>!]{1,2})(\S+)$') def ver_eval(version, constraint): """ @@ -243,7 +248,7 @@ def track_features(self, fn): def get_pkgs(self, ms, max_only=False): pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)] if not pkgs: - raise RuntimeError("No packages found matching: %s" % ms) + raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec) if max_only: maxpkg = max(pkgs) ret = [] @@ -262,7 +267,7 @@ def get_pkgs(self, ms, max_only=False): def get_max_dists(self, ms): pkgs = self.get_pkgs(ms, max_only=True) if not pkgs: - raise RuntimeError("No packages found matching: %s" % ms) + raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec) for pkg in pkgs: yield pkg.fn @@ -371,11 +376,22 @@ def generate_version_eq(self, v, dists, include0=False): def get_dists(self, specs, max_only=False): dists = {} for spec in specs: + found = False + notfound = [] for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only): if pkg.fn in dists: + found = True continue - dists.update(self.all_deps(pkg.fn, max_only=max_only)) - dists[pkg.fn] = pkg + try: + dists.update(self.all_deps(pkg.fn, max_only=max_only)) + except NoPackagesFound as e: + # Ignore any package that has nonexisting dependencies. + notfound.append(e.pkg) + else: + dists[pkg.fn] = pkg + found = True + if not found: + raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None) return dists @@ -387,25 +403,31 @@ def solve2(self, specs, features, guess=True, alg='sorter', returnall=False): # complicated cases that the pseudo-boolean solver does, but it's also # much faster when it does work. - dists = self.get_dists(specs, max_only=True) - - v = {} # map fn to variable number - w = {} # map variable number to fn - i = -1 # in case the loop doesn't run - for i, fn in enumerate(sorted(dists)): - v[fn] = i + 1 - w[i + 1] = fn - m = i + 1 - - dotlog.debug("Solving using max dists only") - clauses = self.gen_clauses(v, dists, specs, features) - solutions = min_sat(clauses) - - if len(solutions) == 1: - ret = [w[lit] for lit in solutions.pop(0) if 0 < lit] - if returnall: - return [ret] - return ret + try: + dists = self.get_dists(specs, max_only=True) + except NoPackagesFound: + # Handle packages that are not included because some dependencies + # couldn't be found. + pass + else: + v = {} # map fn to variable number + w = {} # map variable number to fn + i = -1 # in case the loop doesn't run + for i, fn in enumerate(sorted(dists)): + v[fn] = i + 1 + w[i + 1] = fn + m = i + 1 + + dotlog.debug("Solving using max dists only") + clauses = self.gen_clauses(v, dists, specs, features) + solutions = min_sat(clauses) + + + if len(solutions) == 1: + ret = [w[lit] for lit in solutions.pop(0) if 0 < lit] + if returnall: + return [ret] + return ret dists = self.get_dists(specs)
diff --git a/tests/test_resolve.py b/tests/test_resolve.py --- a/tests/test_resolve.py +++ b/tests/test_resolve.py @@ -3,13 +3,15 @@ import unittest from os.path import dirname, join -from conda.resolve import ver_eval, VersionSpec, MatchSpec, Package, Resolve +from conda.resolve import ver_eval, VersionSpec, MatchSpec, Package, Resolve, NoPackagesFound from .helpers import raises with open(join(dirname(__file__), 'index.json')) as fi: - r = Resolve(json.load(fi)) + index = json.load(fi) + +r = Resolve(index) f_mkl = set(['mkl']) @@ -672,9 +674,183 @@ def test_unsat(): def test_nonexistent(): r.msd_cache = {} - assert raises(RuntimeError, lambda: r.solve(['notarealpackage 2.0*']), 'No packages found') + assert raises(NoPackagesFound, lambda: r.solve(['notarealpackage 2.0*']), 'No packages found') # This exact version of NumPy does not exist - assert raises(RuntimeError, lambda: r.solve(['numpy 1.5']), 'No packages found') + assert raises(NoPackagesFound, lambda: r.solve(['numpy 1.5']), 'No packages found') + +def test_nonexistent_deps(): + index2 = index.copy() + index2['mypackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.0', + } + index2['mypackage-1.1-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.1', + } + r = Resolve(index2) + + assert set(r.find_matches(MatchSpec('mypackage'))) == { + 'mypackage-1.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + } + assert set(r.get_dists(['mypackage']).keys()) == { + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.1.2-py26_0.tar.bz2', + 'nose-1.1.2-py27_0.tar.bz2', + 'nose-1.1.2-py33_0.tar.bz2', + 'nose-1.2.1-py26_0.tar.bz2', + 'nose-1.2.1-py27_0.tar.bz2', + 'nose-1.2.1-py33_0.tar.bz2', + 'nose-1.3.0-py26_0.tar.bz2', + 'nose-1.3.0-py27_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-2.6.8-1.tar.bz2', + 'python-2.6.8-2.tar.bz2', + 'python-2.6.8-3.tar.bz2', + 'python-2.6.8-4.tar.bz2', + 'python-2.6.8-5.tar.bz2', + 'python-2.6.8-6.tar.bz2', + 'python-2.7.3-2.tar.bz2', + 'python-2.7.3-3.tar.bz2', + 'python-2.7.3-4.tar.bz2', + 'python-2.7.3-5.tar.bz2', + 'python-2.7.3-6.tar.bz2', + 'python-2.7.3-7.tar.bz2', + 'python-2.7.4-0.tar.bz2', + 'python-2.7.5-0.tar.bz2', + 'python-3.3.0-2.tar.bz2', + 'python-3.3.0-3.tar.bz2', + 'python-3.3.0-4.tar.bz2', + 'python-3.3.0-pro0.tar.bz2', + 'python-3.3.0-pro1.tar.bz2', + 'python-3.3.1-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + } + + assert set(r.get_dists(['mypackage'], max_only=True).keys()) == { + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py26_0.tar.bz2', + 'nose-1.3.0-py27_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-2.6.8-6.tar.bz2', + 'python-2.7.5-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + } + + assert r.solve(['mypackage']) == r.solve(['mypackage 1.1']) == [ + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 
'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + assert raises(RuntimeError, lambda: r.solve(['mypackage 1.0'])) + + # This time, the latest version is messed up + index3 = index.copy() + index3['mypackage-1.1-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.1', + } + index3['mypackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.0', + } + r = Resolve(index3) + + assert set(r.find_matches(MatchSpec('mypackage'))) == { + 'mypackage-1.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + } + assert set(r.get_dists(['mypackage']).keys()) == { + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.1.2-py26_0.tar.bz2', + 'nose-1.1.2-py27_0.tar.bz2', + 'nose-1.1.2-py33_0.tar.bz2', + 'nose-1.2.1-py26_0.tar.bz2', + 'nose-1.2.1-py27_0.tar.bz2', + 'nose-1.2.1-py33_0.tar.bz2', + 'nose-1.3.0-py26_0.tar.bz2', + 'nose-1.3.0-py27_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-2.6.8-1.tar.bz2', + 'python-2.6.8-2.tar.bz2', + 'python-2.6.8-3.tar.bz2', + 'python-2.6.8-4.tar.bz2', + 'python-2.6.8-5.tar.bz2', + 'python-2.6.8-6.tar.bz2', + 'python-2.7.3-2.tar.bz2', + 'python-2.7.3-3.tar.bz2', + 'python-2.7.3-4.tar.bz2', + 'python-2.7.3-5.tar.bz2', + 'python-2.7.3-6.tar.bz2', + 'python-2.7.3-7.tar.bz2', + 'python-2.7.4-0.tar.bz2', + 'python-2.7.5-0.tar.bz2', + 'python-3.3.0-2.tar.bz2', + 'python-3.3.0-3.tar.bz2', + 'python-3.3.0-4.tar.bz2', + 'python-3.3.0-pro0.tar.bz2', + 'python-3.3.0-pro1.tar.bz2', + 'python-3.3.1-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + } + + assert raises(RuntimeError, lambda: r.get_dists(['mypackage'], max_only=True)) + + assert r.solve(['mypackage']) == r.solve(['mypackage 1.0']) == [ + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.1'])) def test_package_ordering(): sympy_071 = Package('sympy-0.7.1-py27_0.tar.bz2', r.index['sympy-0.7.1-py27_0.tar.bz2'])
problem_statement:
Don't bail when a dependency can't be found

When a dependency for a package can't be found, conda bails completely, but this can happen e.g., just for some old builds of something. So we should just exclude any package like this from the solver.

hints_text:
It can also happen if someone has a package on their binstar but not all the dependencies for it.

created_at: 2014-03-24T18:23:13
version: -1.0
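The patch in this record replaces a bare RuntimeError with a NoPackagesFound exception that carries the offending spec, so the resolver can skip packages with missing dependencies instead of aborting the whole solve. A minimal standalone sketch of that exception-with-payload pattern, using a made-up index rather than the real conda resolver:

```
class NoPackagesFound(RuntimeError):
    """Raised when no package satisfies a spec; carries the spec for reporting."""
    def __init__(self, msg, pkg):
        super(NoPackagesFound, self).__init__(msg)
        self.pkg = pkg

# Hypothetical index: package name -> list of dependency specs (not real conda data).
INDEX = {"mypackage": ["nose", "notarealpackage 2.0*"], "nose": []}

def missing_deps(spec):
    name = spec.split()[0]
    if name not in INDEX:
        raise NoPackagesFound("No packages found matching: %s" % spec, spec)
    missing = []
    for dep in INDEX[name]:
        try:
            missing.extend(missing_deps(dep))
        except NoPackagesFound as e:
            missing.append(e.pkg)  # collect instead of aborting the whole solve
    return missing

print(missing_deps("mypackage"))  # ['notarealpackage 2.0*']
```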
repo: conda/conda
pull_number: 662
instance_id: conda__conda-662
issue_numbers: ["464"]
base_commit: 3d4118668fca738984cce13d9235e0fc11a79df4
diff --git a/conda/cli/main_remove.py b/conda/cli/main_remove.py --- a/conda/cli/main_remove.py +++ b/conda/cli/main_remove.py @@ -63,6 +63,7 @@ def execute(args, parser): from conda.api import get_index from conda.cli import pscheck from conda.install import rm_rf, linked + from conda import config if not (args.all or args.package_names): sys.exit('Error: no package names supplied,\n' @@ -71,12 +72,11 @@ def execute(args, parser): prefix = common.get_prefix(args) common.check_write('remove', prefix) - index = None + common.ensure_override_channels_requires_channel(args) + channel_urls = args.channel or () + index = get_index(channel_urls=channel_urls, + prepend=not args.override_channels) if args.features: - common.ensure_override_channels_requires_channel(args) - channel_urls = args.channel or () - index = get_index(channel_urls=channel_urls, - prepend=not args.override_channels) features = set(args.package_names) actions = plan.remove_features_actions(prefix, index, features) diff --git a/conda/plan.py b/conda/plan.py --- a/conda/plan.py +++ b/conda/plan.py @@ -20,7 +20,7 @@ from conda import install from conda.fetch import fetch_pkg from conda.history import History -from conda.resolve import MatchSpec, Resolve +from conda.resolve import MatchSpec, Resolve, Package from conda.utils import md5_file, human_bytes log = getLogger(__name__) @@ -60,7 +60,7 @@ def split_linkarg(arg): linktype = install.LINK_HARD return dist, pkgs_dir, int(linktype) -def display_actions(actions, index=None): +def display_actions(actions, index): if actions.get(FETCH): print("\nThe following packages will be downloaded:\n") @@ -79,19 +79,113 @@ def display_actions(actions, index=None): print(" " * 43 + "Total: %14s" % human_bytes(sum(index[dist + '.tar.bz2']['size'] for dist in actions[FETCH]))) - if actions.get(UNLINK): - print("\nThe following packages will be UN-linked:\n") - print_dists([ - (dist, None) - for dist in actions[UNLINK]]) - if actions.get(LINK): - print("\nThe following packages will be linked:\n") - lst = [] - for arg in actions[LINK]: - dist, pkgs_dir, lt = split_linkarg(arg) - extra = ' %s' % install.link_name_map.get(lt) - lst.append((dist, extra)) - print_dists(lst) + + # package -> [oldver-oldbuild, newver-newbuild] + packages = defaultdict(lambda: list(('', ''))) + features = defaultdict(lambda: list(('', ''))) + + # This assumes each package will appear in LINK no more than once. + Packages = {} + linktypes = {} + for arg in actions.get(LINK, []): + dist, pkgs_dir, lt = split_linkarg(arg) + pkg, ver, build = dist.rsplit('-', 2) + packages[pkg][1] = ver + '-' + build + Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2']) + linktypes[pkg] = lt + features[pkg][1] = index[dist + '.tar.bz2'].get('features', '') + for arg in actions.get(UNLINK, []): + dist, pkgs_dir, lt = split_linkarg(arg) + pkg, ver, build = dist.rsplit('-', 2) + packages[pkg][0] = ver + '-' + build + Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2']) + features[pkg][0] = index[dist + '.tar.bz2'].get('features', '') + + # Put a minimum length here---. 
.--For the : + # v v + maxpkg = max(len(max(packages or [''], key=len)), 0) + 1 + maxoldver = len(max(packages.values() or [['']], key=lambda i: len(i[0]))[0]) + maxnewver = len(max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1]) + maxoldfeatures = len(max(features.values() or [['']], key=lambda i: len(i[0]))[0]) + maxnewfeatures = len(max(features.values() or [['', '']], key=lambda i: len(i[1]))[1]) + maxoldchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' + + packages[pkg][0]].channel) for pkg in packages if packages[pkg][0]] or + [''], key=len)) + maxnewchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' + + packages[pkg][1]].channel) for pkg in packages if packages[pkg][1]] or + [''], key=len)) + new = {pkg for pkg in packages if not packages[pkg][0]} + removed = {pkg for pkg in packages if not packages[pkg][1]} + updated = set() + downgraded = set() + oldfmt = {} + newfmt = {} + for pkg in packages: + # That's right. I'm using old-style string formatting to generate a + # string with new-style string formatting. + oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver) + if config.show_channel_urls: + oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel + if packages[pkg][0]: + newfmt[pkg] = '{vers[1]:<%s}' % maxnewver + else: + newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver) + if config.show_channel_urls: + newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel + # TODO: Should we also care about the old package's link type? + if pkg in linktypes and linktypes[pkg] != install.LINK_HARD: + newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]] + + if features[pkg][0]: + oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures + if features[pkg][1]: + newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures + + if pkg in new or pkg in removed: + continue + P0 = Packages[pkg + '-' + packages[pkg][0]] + P1 = Packages[pkg + '-' + packages[pkg][1]] + try: + # <= here means that unchanged packages will be put in updated + newer = (P0.name, P0.norm_version, P0.build_number) <= (P1.name, P1.norm_version, P1.build_number) + except TypeError: + newer = (P0.name, P0.version, P0.build_number) <= (P1.name, P1.version, P1.build_number) + if newer: + updated.add(pkg) + else: + downgraded.add(pkg) + + arrow = ' --> ' + lead = ' '*4 + + def format(s, pkg): + channel = ['', ''] + for i in range(2): + if packages[pkg][i]: + channel[i] = config.canonical_channel_name(Packages[pkg + '-' + packages[pkg][i]].channel) + return lead + s.format(pkg=pkg+':', vers=packages[pkg], + channel=channel, features=features[pkg]) + + if new: + print("\nThe following NEW packages will be INSTALLED:\n") + for pkg in sorted(new): + print(format(newfmt[pkg], pkg)) + + if removed: + print("\nThe following packages will be REMOVED:\n") + for pkg in sorted(removed): + print(format(oldfmt[pkg], pkg)) + + if updated: + print("\nThe following packages will be UPDATED:\n") + for pkg in sorted(updated): + print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) + + if downgraded: + print("\nThe following packages will be DOWNGRADED:\n") + for pkg in sorted(downgraded): + print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) + print() # the order matters here, don't change it
diff --git a/tests/helpers.py b/tests/helpers.py --- a/tests/helpers.py +++ b/tests/helpers.py @@ -5,6 +5,8 @@ import sys import os +from contextlib import contextmanager + def raises(exception, func, string=None): try: a = func() @@ -35,3 +37,31 @@ def run_conda_command(*args): stdout, stderr = p.communicate() return (stdout.decode('utf-8').replace('\r\n', '\n'), stderr.decode('utf-8').replace('\r\n', '\n')) + +class CapturedText(object): + pass + +@contextmanager +def captured(): + """ + Context manager to capture the printed output of the code in the with block + + Bind the context manager to a variable using `as` and the result will be + in the stdout property. + + >>> from tests.helpers import capture + >>> with captured() as c: + ... print('hello world!') + ... + >>> c.stdout + 'hello world!\n' + """ + from conda.compat import StringIO + import sys + + stdout = sys.stdout + sys.stdout = file = StringIO() + c = CapturedText() + yield c + c.stdout = file.getvalue() + sys.stdout = stdout diff --git a/tests/test_plan.py b/tests/test_plan.py --- a/tests/test_plan.py +++ b/tests/test_plan.py @@ -1,15 +1,20 @@ import json import unittest from os.path import dirname, join +from collections import defaultdict from conda.config import default_python, pkgs_dirs +import conda.config from conda.install import LINK_HARD import conda.plan as plan +from conda.plan import display_actions from conda.resolve import Resolve +from tests.helpers import captured with open(join(dirname(__file__), 'index.json')) as fi: - r = Resolve(json.load(fi)) + index = json.load(fi) + r = Resolve(index) def solve(specs): return [fn[:-8] for fn in r.solve(specs)] @@ -77,3 +82,685 @@ def test_4(self): (['anaconda', 'python 3*'], []), ]: self.check(specs, added) + +def test_display_actions(): + conda.config.show_channel_urls = False + actions = defaultdict(list, {"FETCH": ['sympy-0.7.2-py27_0', + "numpy-1.7.1-py27_0"]}) + # The older test index doesn't have the size metadata + index['sympy-0.7.2-py27_0.tar.bz2']['size'] = 4374752 + index["numpy-1.7.1-py27_0.tar.bz2"]['size'] = 5994338 + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be downloaded: + + package | build + ---------------------------|----------------- + sympy-0.7.2 | py27_0 4.2 MB + numpy-1.7.1 | py27_0 5.7 MB + ------------------------------------------------------------ + Total: 9.9 MB + +""" + + actions = defaultdict(list, {'PREFIX': + '/Users/aaronmeurer/anaconda/envs/test', 'SYMLINK_CONDA': + ['/Users/aaronmeurer/anaconda'], 'LINK': ['python-3.3.2-0', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + python: 3.3.2-0 \n\ + readline: 6.2-0 \n\ + sqlite: 3.7.13-0 + tk: 8.5.13-0 + zlib: 1.2.7-0 \n\ + +""" + + actions['UNLINK'] = actions['LINK'] + actions['LINK'] = [] + + with captured() as c: + display_actions(actions, index) + + + assert c.stdout == """ +The following packages will be REMOVED: + + python: 3.3.2-0 \n\ + readline: 6.2-0 \n\ + sqlite: 3.7.13-0 + tk: 8.5.13-0 + zlib: 1.2.7-0 \n\ + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The 
following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-1.5-py33_0', 'numpy-1.7.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + numpy: 1.7.1-py33_0 \n\ + +The following packages will be REMOVED: + + pip: 1.3.1-py33_1 + +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + +The following packages will be DOWNGRADED: + + dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + dateutil: 1.5-py33_0 --> 2.1-py33_1 \n\ + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 + dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ + +""" + + + +def test_display_actions_show_channel_urls(): + conda.config.show_channel_urls = True + actions = defaultdict(list, {"FETCH": ['sympy-0.7.2-py27_0', + "numpy-1.7.1-py27_0"]}) + # The older test index doesn't have the size metadata + index['sympy-0.7.2-py27_0.tar.bz2']['size'] = 4374752 + index["numpy-1.7.1-py27_0.tar.bz2"]['size'] = 5994338 + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be downloaded: + + package | build + ---------------------------|----------------- + sympy-0.7.2 | py27_0 4.2 MB <unknown> + numpy-1.7.1 | py27_0 5.7 MB <unknown> + ------------------------------------------------------------ + Total: 9.9 MB + +""" + + + actions = defaultdict(list, {'PREFIX': + '/Users/aaronmeurer/anaconda/envs/test', 'SYMLINK_CONDA': + ['/Users/aaronmeurer/anaconda'], 'LINK': ['python-3.3.2-0', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + python: 3.3.2-0 <unknown> + readline: 6.2-0 <unknown> + sqlite: 3.7.13-0 <unknown> + tk: 8.5.13-0 <unknown> + zlib: 1.2.7-0 <unknown> + +""" + + actions['UNLINK'] = actions['LINK'] + actions['LINK'] = [] + + with captured() as c: + display_actions(actions, index) + + + assert c.stdout == """ +The following packages will be REMOVED: + + python: 3.3.2-0 <unknown> + readline: 6.2-0 <unknown> + sqlite: 3.7.13-0 <unknown> + tk: 8.5.13-0 <unknown> + zlib: 1.2.7-0 <unknown> + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be 
UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown> + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown> + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-1.5-py33_0', 'numpy-1.7.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + numpy: 1.7.1-py33_0 <unknown> + +The following packages will be REMOVED: + + pip: 1.3.1-py33_1 <unknown> + +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown> + +The following packages will be DOWNGRADED: + + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 <unknown> + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown> + dateutil: 1.5-py33_0 <unknown> --> 2.1-py33_1 <unknown> + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown> + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 <unknown> + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + index['cython-0.19.1-py33_0.tar.bz2']['channel'] = 'my_channel' + index['dateutil-1.5-py33_0.tar.bz2']['channel'] = 'my_channel' + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 my_channel + dateutil: 1.5-py33_0 my_channel --> 2.1-py33_1 <unknown> \n\ + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 my_channel --> 0.19-py33_0 <unknown> \n\ + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 my_channel + +""" + + +def test_display_actions_link_type(): + conda.config.show_channel_urls = False + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 2', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 2', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 2', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 2', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 2']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 (soft-link) + dateutil: 1.5-py33_0 (soft-link) + numpy: 1.7.1-py33_0 (soft-link) + python: 3.3.2-0 (soft-link) + readline: 6.2-0 (soft-link) + sqlite: 3.7.13-0 (soft-link) + tk: 8.5.13-0 (soft-link) + zlib: 1.2.7-0 (soft-link) + +""" + + actions = 
defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', + 'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 2'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 (soft-link) + dateutil: 1.5-py33_0 --> 2.1-py33_1 (soft-link) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 (soft-link) + dateutil: 2.1-py33_1 --> 1.5-py33_0 (soft-link) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 + dateutil: 1.5-py33_0 \n\ + numpy: 1.7.1-py33_0 \n\ + python: 3.3.2-0 \n\ + readline: 6.2-0 \n\ + sqlite: 3.7.13-0 \n\ + tk: 8.5.13-0 \n\ + zlib: 1.2.7-0 \n\ + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', + 'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 1'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + dateutil: 1.5-py33_0 --> 2.1-py33_1 \n\ + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 + dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 (copy) + dateutil: 1.5-py33_0 (copy) + numpy: 1.7.1-py33_0 (copy) + python: 3.3.2-0 (copy) + readline: 6.2-0 (copy) + sqlite: 3.7.13-0 (copy) + tk: 8.5.13-0 (copy) + zlib: 1.2.7-0 (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 
'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 (copy) + dateutil: 1.5-py33_0 --> 2.1-py33_1 (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 (copy) + dateutil: 2.1-py33_1 --> 1.5-py33_0 (copy) + +""" + + conda.config.show_channel_urls = True + + index['cython-0.19.1-py33_0.tar.bz2']['channel'] = 'my_channel' + index['dateutil-1.5-py33_0.tar.bz2']['channel'] = 'my_channel' + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 my_channel (copy) + dateutil: 1.5-py33_0 my_channel (copy) + numpy: 1.7.1-py33_0 <unknown> (copy) + python: 3.3.2-0 <unknown> (copy) + readline: 6.2-0 <unknown> (copy) + sqlite: 3.7.13-0 <unknown> (copy) + tk: 8.5.13-0 <unknown> (copy) + zlib: 1.2.7-0 <unknown> (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 my_channel (copy) + dateutil: 1.5-py33_0 my_channel --> 2.1-py33_1 <unknown> (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 my_channel --> 0.19-py33_0 <unknown> (copy) + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 my_channel (copy) + +""" + +def test_display_actions_features(): + conda.config.show_channel_urls = False + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19-py33_0 \n\ + numpy: 1.7.1-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be REMOVED: + + cython: 0.19-py33_0 \n\ + numpy: 1.7.1-py33_p0 [mkl] + +""" + + actions = 
defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + numpy: 1.7.1-py33_p0 [mkl] --> 1.7.0-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.0-py33_p0 [mkl] --> 1.7.1-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + # NB: Packages whose version do not changed are put in UPDATED + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_0 --> 1.7.1-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_p0 [mkl] --> 1.7.1-py33_0 + +""" + + conda.config.show_channel_urls = True + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19-py33_0 <unknown> + numpy: 1.7.1-py33_p0 <unknown> [mkl] + +""" + + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be REMOVED: + + cython: 0.19-py33_0 <unknown> + numpy: 1.7.1-py33_p0 <unknown> [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + numpy: 1.7.1-py33_p0 <unknown> [mkl] --> 1.7.0-py33_p0 <unknown> [mkl] + +""" + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.0-py33_p0 <unknown> [mkl] --> 1.7.1-py33_p0 <unknown> [mkl] + +""" + + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + # NB: Packages whose version do not changed are put in UPDATED + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_0 <unknown> --> 1.7.1-py33_p0 <unknown> [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_p0 <unknown> [mkl] --> 1.7.1-py33_0 <unknown> + +"""
problem_statement:
Make the conda install table easier to read

The table of what packages will be installed and removed is hard to read. For one thing, it's hard to tell easily what packages are not removed but just upgraded or downgraded. Also, the "link" terminology is confusing. A suggestion by @jklowden:

```
$ conda update conda
Updating Anaconda environment at /usr/local/anaconda

The following packages will be downloaded:

    conda-2.2.3-py27_0.tar.bz2 [http://repo.continuum.io/pkgs/free/osx-64/]

The following packages will be upgraded:

    Old version                 Replace with
    -------------------------   -------------------------
    conda-1.4.4                 conda-2.2.3
```

> or, if you really want the build (I don't, it's not meaningful to the user)

```
    package      Old version        New version
    ------------ ------------------ ------------------
    conda        1.4.4, py27_0      2.3.3, py27_0
```

I think the build is meaningful as it tells you what Python version is being used. It also tells you if you are using mkl. And also some people might use the build string to put other information which may be useful to users.

created_at: 2014-04-11T20:19:51
version: -1.0
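The display_actions rewrite in this record first computes the maximum column widths and then uses old-style % formatting to build per-package new-style format templates, so the INSTALLED/REMOVED/UPDATED tables line up. A small self-contained sketch of that width-then-template idea, with made-up package data rather than a real conda index:

```
# Toy data: package -> (old version-build, new version-build); not real conda output.
packages = {"cython": ("0.19-py33_0", "0.19.1-py33_0"),
            "dateutil": ("1.5-py33_0", "2.1-py33_1")}

maxpkg = max(len(p) for p in packages) + 1
maxold = max(len(v[0]) for v in packages.values())

for pkg, vers in sorted(packages.items()):
    # Old-style %-formatting generates a new-style template with fixed widths.
    fmt = "    {pkg:<%s} {old:<%s} --> {new}" % (maxpkg, maxold)
    print(fmt.format(pkg=pkg + ":", old=vers[0], new=vers[1]))
```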
repo: conda/conda
pull_number: 667
instance_id: conda__conda-667
issue_numbers: ["666"]
base_commit: f2934aea3f32ac94907b742a800d82c1e08757fe
diff --git a/conda/resolve.py b/conda/resolve.py --- a/conda/resolve.py +++ b/conda/resolve.py @@ -278,12 +278,25 @@ def all_deps(self, root_fn, max_only=False): def add_dependents(fn1, max_only=False): for ms in self.ms_depends(fn1): + found = False + notfound = [] for pkg2 in self.get_pkgs(ms, max_only=max_only): if pkg2.fn in res: + found = True continue - res[pkg2.fn] = pkg2 - if ms.strictness < 3: - add_dependents(pkg2.fn, max_only=max_only) + try: + if ms.strictness < 3: + add_dependents(pkg2.fn, max_only=max_only) + except NoPackagesFound as e: + if e.pkg not in notfound: + notfound.append(e.pkg) + else: + found = True + res[pkg2.fn] = pkg2 + + if not found: + raise NoPackagesFound("Could not find some dependencies " + "for %s: %s" % (ms, ', '.join(notfound)), str(ms)) add_dependents(root_fn, max_only=max_only) return res @@ -394,7 +407,7 @@ def get_dists(self, specs, max_only=False): dists[pkg.fn] = pkg found = True if not found: - raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None) + raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), spec) return dists
diff --git a/tests/test_resolve.py b/tests/test_resolve.py --- a/tests/test_resolve.py +++ b/tests/test_resolve.py @@ -696,6 +696,22 @@ def test_nonexistent_deps(): 'requires': ['nose 1.2.1', 'python 3.3'], 'version': '1.1', } + index2['anotherpackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage 1.1'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage 1.1'], + 'version': '1.0', + } + index2['anotherpackage-2.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage'], + 'version': '2.0', + } r = Resolve(index2) assert set(r.find_matches(MatchSpec('mypackage'))) == { @@ -772,6 +788,32 @@ def test_nonexistent_deps(): ] assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.0'])) + assert r.solve(['anotherpackage 1.0']) == [ + 'anotherpackage-1.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + + assert r.solve(['anotherpackage']) == [ + 'anotherpackage-2.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + # This time, the latest version is messed up index3 = index.copy() index3['mypackage-1.1-py33_0.tar.bz2'] = { @@ -790,6 +832,22 @@ def test_nonexistent_deps(): 'requires': ['nose 1.2.1', 'python 3.3'], 'version': '1.0', } + index3['anotherpackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage 1.0'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage 1.0'], + 'version': '1.0', + } + index3['anotherpackage-2.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage'], + 'version': '2.0', + } r = Resolve(index3) assert set(r.find_matches(MatchSpec('mypackage'))) == { @@ -852,6 +910,35 @@ def test_nonexistent_deps(): ] assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.1'])) + + assert r.solve(['anotherpackage 1.0']) == [ + 'anotherpackage-1.0-py33_0.tar.bz2', + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + + # If recursive checking is working correctly, this will give + # anotherpackage 2.0, not anotherpackage 1.0 + assert r.solve(['anotherpackage']) == [ + 'anotherpackage-2.0-py33_0.tar.bz2', + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + def test_package_ordering(): sympy_071 = Package('sympy-0.7.1-py27_0.tar.bz2', r.index['sympy-0.7.1-py27_0.tar.bz2']) sympy_072 = Package('sympy-0.7.2-py27_0.tar.bz2', r.index['sympy-0.7.2-py27_0.tar.bz2'])
problem_statement:
NoPackagesFound does not work correctly for missing recursive dependencies

created_at: 2014-04-14T22:05:18
version: -1.0
repo: conda/conda
pull_number: 1807
instance_id: conda__conda-1807
issue_numbers: ["1751"]
base_commit: f46c73c3cb6353a409449fdba08fdbd0856bdb35
diff --git a/conda/cli/main_clean.py b/conda/cli/main_clean.py --- a/conda/cli/main_clean.py +++ b/conda/cli/main_clean.py @@ -151,9 +151,13 @@ def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True): for pkgs_dir in pkgs_dirs: for fn in pkgs_dirs[pkgs_dir]: - if verbose: - print("removing %s" % fn) - os.unlink(os.path.join(pkgs_dir, fn)) + if os.access(os.path.join(pkgs_dir, fn), os.W_OK): + if verbose: + print("Removing %s" % fn) + os.unlink(os.path.join(pkgs_dir, fn)) + else: + if verbose: + print("WARNING: cannot remove, file permissions: %s" % fn) def find_pkgs(): diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -138,6 +138,13 @@ def _remove_readonly(func, path, excinfo): os.chmod(path, stat.S_IWRITE) func(path) +def warn_failed_remove(function, path, exc_info): + if exc_info[1].errno == errno.EACCES: + log.warn("Cannot remove, permission denied: {0}".format(path)) + elif exc_info[1].errno == errno.ENOTEMPTY: + log.warn("Cannot remove, not empty: {0}".format(path)) + else: + log.warn("Cannot remove, unknown reason: {0}".format(path)) def rm_rf(path, max_retries=5, trash=True): """ @@ -152,12 +159,15 @@ def rm_rf(path, max_retries=5, trash=True): # Note that we have to check if the destination is a link because # exists('/path/to/dead-link') will return False, although # islink('/path/to/dead-link') is True. - os.unlink(path) + if os.access(path, os.W_OK): + os.unlink(path) + else: + log.warn("Cannot remove, permission denied: {0}".format(path)) elif isdir(path): for i in range(max_retries): try: - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) return except OSError as e: msg = "Unable to delete %s\n%s\n" % (path, e) @@ -189,7 +199,7 @@ def rm_rf(path, max_retries=5, trash=True): log.debug(msg + "Retrying after %s seconds..." % i) time.sleep(i) # Final time. pass exceptions to caller. - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) def rm_empty_dir(path): """
diff --git a/tests/test_install.py b/tests/test_install.py --- a/tests/test_install.py +++ b/tests/test_install.py @@ -8,7 +8,7 @@ from conda import install -from conda.install import PaddingError, binary_replace, update_prefix +from conda.install import PaddingError, binary_replace, update_prefix, warn_failed_remove from .decorators import skip_if_no_mock from .helpers import mock @@ -140,6 +140,11 @@ def generate_mock_isfile(self, value): with patch.object(install, 'isfile', return_value=value) as isfile: yield isfile + @contextmanager + def generate_mock_os_access(self, value): + with patch.object(install.os, 'access', return_value=value) as os_access: + yield os_access + @contextmanager def generate_mock_unlink(self): with patch.object(install.os, 'unlink') as unlink: @@ -173,25 +178,27 @@ def generate_mock_check_call(self): yield check_call @contextmanager - def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False): + def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False, os_access=True): with self.generate_mock_islink(islink) as mock_islink: with self.generate_mock_isfile(isfile) as mock_isfile: - with self.generate_mock_isdir(isdir) as mock_isdir: - with self.generate_mock_unlink() as mock_unlink: - with self.generate_mock_rmtree() as mock_rmtree: - with self.generate_mock_sleep() as mock_sleep: - with self.generate_mock_log() as mock_log: - with self.generate_mock_on_win(on_win): - with self.generate_mock_check_call() as check_call: - yield { - 'islink': mock_islink, - 'isfile': mock_isfile, - 'isdir': mock_isdir, - 'unlink': mock_unlink, - 'rmtree': mock_rmtree, - 'sleep': mock_sleep, - 'log': mock_log, - 'check_call': check_call, + with self.generate_mock_os_access(os_access) as mock_os_access: + with self.generate_mock_isdir(isdir) as mock_isdir: + with self.generate_mock_unlink() as mock_unlink: + with self.generate_mock_rmtree() as mock_rmtree: + with self.generate_mock_sleep() as mock_sleep: + with self.generate_mock_log() as mock_log: + with self.generate_mock_on_win(on_win): + with self.generate_mock_check_call() as check_call: + yield { + 'islink': mock_islink, + 'isfile': mock_isfile, + 'isdir': mock_isdir, + 'os_access': mock_os_access, + 'unlink': mock_unlink, + 'rmtree': mock_rmtree, + 'sleep': mock_sleep, + 'log': mock_log, + 'check_call': check_call, } def generate_directory_mocks(self, on_win=False): @@ -219,6 +226,13 @@ def test_calls_unlink_on_true_islink(self): install.rm_rf(some_path) mocks['unlink'].assert_called_with(some_path) + @skip_if_no_mock + def test_does_not_call_unlink_on_os_access_false(self): + with self.generate_mocks(os_access=False) as mocks: + some_path = self.generate_random_path + install.rm_rf(some_path) + self.assertFalse(mocks['unlink'].called) + @skip_if_no_mock def test_does_not_call_isfile_if_islink_is_true(self): with self.generate_mocks() as mocks: @@ -259,7 +273,8 @@ def test_calls_rmtree_at_least_once_on_isdir_true(self): with self.generate_directory_mocks() as mocks: some_path = self.generate_random_path install.rm_rf(some_path) - mocks['rmtree'].assert_called_with(some_path) + mocks['rmtree'].assert_called_with( + some_path, onerror=warn_failed_remove, ignore_errors=False) @skip_if_no_mock def test_calls_rmtree_only_once_on_success(self): @@ -342,7 +357,7 @@ def test_tries_extra_kwarg_on_windows(self): install.rm_rf(random_path) expected_call_list = [ - mock.call(random_path), + mock.call(random_path, ignore_errors=False, onerror=warn_failed_remove), mock.call(random_path, 
onerror=install._remove_readonly) ] mocks['rmtree'].assert_has_calls(expected_call_list)
problem_statement:
conda clean -pt as non-root user with root anaconda install

I have installed root miniconda at /opt/anaconda. When running ``` conda clean -pt ``` as a lesser user than root, I am seeing errors indicating conda is not checking permissions before attempting to delete package dirs:

```
conda clean -pt
Cache location: /opt/anaconda/pkgs
Will remove the following tarballs:

/opt/anaconda/pkgs
------------------
conda-3.18.3-py27_0.tar.bz2               175 KB
conda-env-2.4.4-py27_0.tar.bz2             24 KB
itsdangerous-0.24-py27_0.tar.bz2           16 KB
markupsafe-0.23-py27_0.tar.bz2             30 KB
flask-0.10.1-py27_1.tar.bz2               129 KB
jinja2-2.8-py27_0.tar.bz2                 263 KB
anaconda-build-0.12.0-py27_0.tar.bz2       69 KB
flask-wtf-0.8.4-py27_1.tar.bz2             12 KB
flask-ldap-login-0.3.0-py27_1.tar.bz2      13 KB
---------------------------------------------------
Total:                                    730 KB

removing conda-3.18.3-py27_0.tar.bz2
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:

    https://github.com/conda/conda/issues

Include the output of the command 'conda info' in your report.

Traceback (most recent call last):
  File "/opt/anaconda/envs/anaconda.org/bin/conda", line 5, in <module>
    sys.exit(main())
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 195, in main
    args_func(args, p)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 202, in args_func
    args.func(args, p)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 331, in execute
    rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 156, in rm_tarballs
    os.unlink(os.path.join(pkgs_dir, fn))
OSError: [Errno 13] Permission denied: '/opt/anaconda/pkgs/conda-3.18.3-py27_0.tar.bz2'
```

hints_text:
Hi @csoja - this issue is blocking some key Build System stability fixes. LMK if this can be prioritized (along with #1752 above)

@stephenakearns and @PeterDSteinberg and @csoja this issue is also now preventing us from moving forward with the anaconda-cluster build scripts.

@csoja - I know you're strapped for resources and may have a new person taking a look at this. Can we bring it up in the platform meeting to discuss a possible solution

created_at: 2015-11-11T20:20:17
version: -1.0
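The fix in this record guards deletions with an os.access writability check and routes shutil.rmtree failures through an onerror handler that logs instead of raising. A minimal standalone sketch of that pattern (hypothetical path, print-based logging rather than conda's logger):

```
import errno
import os
import shutil

def warn_failed_remove(function, path, exc_info):
    # onerror callback for shutil.rmtree: report the failure and keep going.
    err = exc_info[1]
    if getattr(err, "errno", None) == errno.EACCES:
        print("WARNING: cannot remove, permission denied: %s" % path)
    else:
        print("WARNING: cannot remove (%s): %s" % (err, path))

def safe_remove(path):
    if os.path.islink(path) or os.path.isfile(path):
        if os.access(path, os.W_OK):
            os.unlink(path)
        else:
            print("WARNING: cannot remove, permission denied: %s" % path)
    elif os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove)

safe_remove("/tmp/example-path-to-clean")  # hypothetical path; a no-op if it does not exist
```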
repo: conda/conda
pull_number: 1808
instance_id: conda__conda-1808
issue_numbers: ["1752"]
base_commit: f46c73c3cb6353a409449fdba08fdbd0856bdb35
diff --git a/conda/cli/main_clean.py b/conda/cli/main_clean.py --- a/conda/cli/main_clean.py +++ b/conda/cli/main_clean.py @@ -151,9 +151,13 @@ def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True): for pkgs_dir in pkgs_dirs: for fn in pkgs_dirs[pkgs_dir]: - if verbose: - print("removing %s" % fn) - os.unlink(os.path.join(pkgs_dir, fn)) + if os.access(os.path.join(pkgs_dir, fn), os.W_OK): + if verbose: + print("Removing %s" % fn) + os.unlink(os.path.join(pkgs_dir, fn)) + else: + if verbose: + print("WARNING: cannot remove, file permissions: %s" % fn) def find_pkgs(): @@ -163,6 +167,9 @@ def find_pkgs(): pkgs_dirs = defaultdict(list) for pkgs_dir in config.pkgs_dirs: + if not os.path.exists(pkgs_dir): + print("WARNING: {0} does not exist".format(pkgs_dir)) + continue pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and # Only include actual packages isdir(join(pkgs_dir, i, 'info'))] diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -138,6 +138,13 @@ def _remove_readonly(func, path, excinfo): os.chmod(path, stat.S_IWRITE) func(path) +def warn_failed_remove(function, path, exc_info): + if exc_info[1].errno == errno.EACCES: + log.warn( "WARNING: cannot remove, permission denied: %s" % path ) + elif exc_info[1].errno == errno.ENOTEMPTY: + log.warn( "WARNING: cannot remove, not empty: %s" % path ) + else: + log.warn( "WARNING: cannot remove, unknown reason: %s" % path ) def rm_rf(path, max_retries=5, trash=True): """ @@ -152,12 +159,15 @@ def rm_rf(path, max_retries=5, trash=True): # Note that we have to check if the destination is a link because # exists('/path/to/dead-link') will return False, although # islink('/path/to/dead-link') is True. - os.unlink(path) + if os.access(path, os.W_OK): + os.unlink(path) + else: + log.warn("WARNING: cannot remove, permission denied: %s" % path) elif isdir(path): for i in range(max_retries): try: - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) return except OSError as e: msg = "Unable to delete %s\n%s\n" % (path, e) @@ -189,7 +199,7 @@ def rm_rf(path, max_retries=5, trash=True): log.debug(msg + "Retrying after %s seconds..." % i) time.sleep(i) # Final time. pass exceptions to caller. - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) def rm_empty_dir(path): """
diff --git a/tests/test_install.py b/tests/test_install.py --- a/tests/test_install.py +++ b/tests/test_install.py @@ -8,7 +8,7 @@ from conda import install -from conda.install import PaddingError, binary_replace, update_prefix +from conda.install import PaddingError, binary_replace, update_prefix, warn_failed_remove from .decorators import skip_if_no_mock from .helpers import mock @@ -140,6 +140,11 @@ def generate_mock_isfile(self, value): with patch.object(install, 'isfile', return_value=value) as isfile: yield isfile + @contextmanager + def generate_mock_os_access(self, value): + with patch.object(install.os, 'access', return_value=value) as os_access: + yield os_access + @contextmanager def generate_mock_unlink(self): with patch.object(install.os, 'unlink') as unlink: @@ -173,25 +178,27 @@ def generate_mock_check_call(self): yield check_call @contextmanager - def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False): + def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False, os_access=True): with self.generate_mock_islink(islink) as mock_islink: with self.generate_mock_isfile(isfile) as mock_isfile: - with self.generate_mock_isdir(isdir) as mock_isdir: - with self.generate_mock_unlink() as mock_unlink: - with self.generate_mock_rmtree() as mock_rmtree: - with self.generate_mock_sleep() as mock_sleep: - with self.generate_mock_log() as mock_log: - with self.generate_mock_on_win(on_win): - with self.generate_mock_check_call() as check_call: - yield { - 'islink': mock_islink, - 'isfile': mock_isfile, - 'isdir': mock_isdir, - 'unlink': mock_unlink, - 'rmtree': mock_rmtree, - 'sleep': mock_sleep, - 'log': mock_log, - 'check_call': check_call, + with self.generate_mock_os_access(os_access) as mock_os_access: + with self.generate_mock_isdir(isdir) as mock_isdir: + with self.generate_mock_unlink() as mock_unlink: + with self.generate_mock_rmtree() as mock_rmtree: + with self.generate_mock_sleep() as mock_sleep: + with self.generate_mock_log() as mock_log: + with self.generate_mock_on_win(on_win): + with self.generate_mock_check_call() as check_call: + yield { + 'islink': mock_islink, + 'isfile': mock_isfile, + 'isdir': mock_isdir, + 'os_access': mock_os_access, + 'unlink': mock_unlink, + 'rmtree': mock_rmtree, + 'sleep': mock_sleep, + 'log': mock_log, + 'check_call': check_call, } def generate_directory_mocks(self, on_win=False): @@ -219,6 +226,13 @@ def test_calls_unlink_on_true_islink(self): install.rm_rf(some_path) mocks['unlink'].assert_called_with(some_path) + @skip_if_no_mock + def test_does_not_call_unlink_on_os_access_false(self): + with self.generate_mocks(os_access=False) as mocks: + some_path = self.generate_random_path + install.rm_rf(some_path) + self.assertFalse(mocks['unlink'].called) + @skip_if_no_mock def test_does_not_call_isfile_if_islink_is_true(self): with self.generate_mocks() as mocks: @@ -259,7 +273,8 @@ def test_calls_rmtree_at_least_once_on_isdir_true(self): with self.generate_directory_mocks() as mocks: some_path = self.generate_random_path install.rm_rf(some_path) - mocks['rmtree'].assert_called_with(some_path) + mocks['rmtree'].assert_called_with( + some_path, onerror=warn_failed_remove, ignore_errors=False) @skip_if_no_mock def test_calls_rmtree_only_once_on_success(self): @@ -342,7 +357,7 @@ def test_tries_extra_kwarg_on_windows(self): install.rm_rf(random_path) expected_call_list = [ - mock.call(random_path), + mock.call(random_path, ignore_errors=False, onerror=warn_failed_remove), mock.call(random_path, 
onerror=install._remove_readonly) ] mocks['rmtree'].assert_has_calls(expected_call_list)
conda clean -pt with empty package cache and non-root user I have a root miniconda install at /opt/anaconda. I ran ``` conda clean -pt ``` successfully as root then immediately tried running the same command as a lesser user. I got this error even though the package cache was empty: ``` Cache location: There are no tarballs to remove An unexpected error has occurred, please consider sending the following traceback to the conda GitHub issue tracker at: https://github.com/conda/conda/issues Include the output of the command 'conda info' in your report. Traceback (most recent call last): File "/opt/anaconda/envs/anaconda.org/bin/conda", line 5, in <module> sys.exit(main()) File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 195, in main args_func(args, p) File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 202, in args_func args.func(args, p) File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 340, in execute pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs() File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 166, in find_pkgs pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and OSError: [Errno 2] No such file or directory: '/home/user5/envs/.pkgs' ```
Appears somewhat related to #1751. Fixing #1751 and #1752 is necessary for the future needs of anaconda-build.
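The patch above addresses two distinct failure modes: `find_pkgs` crashing on a package cache directory that does not exist for the current user, and removal helpers raising when a file cannot be deleted. A minimal, self-contained sketch of both patterns follows; it is illustrative only, not conda's actual code, and uses plain `print` where conda logs a warning.

```python
import errno
import os
import shutil

def existing_dirs(candidate_dirs):
    # Skip package cache locations that are not present for this user.
    for d in candidate_dirs:
        if not os.path.exists(d):
            print("WARNING: {0} does not exist".format(d))
            continue
        yield d

def _warn_failed_remove(function, path, exc_info):
    # shutil.rmtree calls this instead of raising, so one undeletable file
    # does not abort the whole cleanup.
    if getattr(exc_info[1], 'errno', None) == errno.EACCES:
        print("WARNING: cannot remove, permission denied: %s" % path)
    else:
        print("WARNING: cannot remove: %s" % path)

def remove_path(path):
    # Directories: delegate failures to the onerror handler above.
    if os.path.isdir(path):
        shutil.rmtree(path, ignore_errors=False, onerror=_warn_failed_remove)
    # Files and links: only unlink what we can actually write to.
    elif os.path.lexists(path):
        if os.access(path, os.W_OK):
            os.unlink(path)
        else:
            print("WARNING: cannot remove, permission denied: %s" % path)
```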
2015-11-11T21:17:30
-1.0
conda/conda
2,291
conda__conda-2291
[ "2284" ]
c3d54a1bd6a15dcea2ce900b0900c369bb848907
diff --git a/conda/cli/activate.py b/conda/cli/activate.py --- a/conda/cli/activate.py +++ b/conda/cli/activate.py @@ -124,10 +124,13 @@ def main(): if rootpath: path = path.replace(translate_stream(rootpath, win_path_to_cygwin), "") else: - path = translate_stream(path, win_path_to_unix) + if sys.platform == 'win32': + path = translate_stream(path, win_path_to_unix) + if rootpath: + rootpath = translate_stream(rootpath, win_path_to_unix) # Clear the root path if it is present if rootpath: - path = path.replace(translate_stream(rootpath, win_path_to_unix), "") + path = path.replace(rootpath, "") elif sys.argv[1] == '..deactivate': diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -66,11 +66,12 @@ def win_path_to_unix(path, root_prefix=""): Does not add cygdrive. If you need that, set root_prefix to "/cygdrive" """ - import re - path_re = '[a-zA-Z]:[/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;/\\\\]*' - converted_paths = [root_prefix + "/" + _path.replace("\\", "/").replace(":", "") - for _path in re.findall(path_re, path)] - return ":".join(converted_paths) + path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' + translation = lambda found_path: root_prefix + "/" + found_path.groups()[0].replace("\\", "/")\ + .replace(":", "") + translation = re.sub(path_re, translation, path) + translation = translation.replace(";/", ":/") + return translation on_win = bool(sys.platform == "win32") diff --git a/conda/utils.py b/conda/utils.py --- a/conda/utils.py +++ b/conda/utils.py @@ -81,10 +81,12 @@ def win_path_to_unix(path, root_prefix=""): Does not add cygdrive. If you need that, set root_prefix to "/cygdrive" """ - path_re = '(?<![:/])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' + path_re = '(?<![:/^a-zA-Z])([a-zA-Z]:[\/\\\\]+(?:[^:*?"<>|]+[\/\\\\]+)*[^:*?"<>|;\/\\\\]+?(?![a-zA-Z]:))' translation = lambda found_path: root_prefix + "/" + found_path.groups()[0].replace("\\", "/")\ - .replace(":", "").replace(";", ":") - return re.sub(path_re, translation, path) + .replace(":", "") + translation = re.sub(path_re, translation, path) + translation = translation.replace(";/", ":/") + return translation def unix_path_to_win(path, root_prefix=""): @@ -100,7 +102,7 @@ def unix_path_to_win(path, root_prefix=""): translation = lambda found_path: found_path.group(0)[len(root_prefix)+1] + ":" + \ found_path.group(0)[len(root_prefix)+2:].replace("/", "\\") translation = re.sub(path_re, translation, path) - translation = re.sub(":([a-zA-Z]):", lambda match: ";" + match.group(0)[1] + ":", translation) + translation = re.sub(":?([a-zA-Z]):\\\\", lambda match: ";" + match.group(0)[1] + ":\\", translation) return translation
diff --git a/tests/test_activate.py b/tests/test_activate.py --- a/tests/test_activate.py +++ b/tests/test_activate.py @@ -226,12 +226,12 @@ def _format_vars(shell): def test_path_translation(): - test_cygwin_path = "/usr/bin:/cygdrive/z/documents (x86)/code/conda/tests/envskhkzts/test1:/cygdrive/z/documents/code/conda/tests/envskhkzts/test1/cmd" - test_unix_path = "/usr/bin:/z/documents (x86)/code/conda/tests/envskhkzts/test1:/z/documents/code/conda/tests/envskhkzts/test1/cmd" - test_win_path = "z:\\documents (x86)\\code\\conda\\tests\\envskhkzts\\test1;z:\\documents\\code\\conda\\tests\\envskhkzts\\test1\\cmd" + test_cygwin_path = "test dummy text /usr/bin:/cygdrive/z/documents (x86)/code/conda/tests/envskhkzts/test1:/cygdrive/z/documents/code/conda/tests/envskhkzts/test1/cmd more dummy text" + test_unix_path = "test dummy text /usr/bin:/z/documents (x86)/code/conda/tests/envskhkzts/test1:/z/documents/code/conda/tests/envskhkzts/test1/cmd more dummy text" + test_win_path = "test dummy text /usr/bin;z:\\documents (x86)\\code\\conda\\tests\\envskhkzts\\test1;z:\\documents\\code\\conda\\tests\\envskhkzts\\test1\\cmd more dummy text" assert_equals(test_win_path, unix_path_to_win(test_unix_path)) - assert_equals(test_unix_path.replace("/usr/bin:", ""), win_path_to_unix(test_win_path)) - assert_equals(test_cygwin_path.replace("/usr/bin:", ""), win_path_to_cygwin(test_win_path)) + assert_equals(test_unix_path, win_path_to_unix(test_win_path)) + assert_equals(test_cygwin_path, win_path_to_cygwin(test_win_path)) assert_equals(test_win_path, cygwin_path_to_win(test_cygwin_path))
activate on master is broken in OSX @msarahan I'm on master, and I get: ``` $ source activate koo path:usage: conda [-h] [-V] [--debug] command ... conda: error: argument command: invalid choice: '..changeps1' (choose from 'info', 'help', 'list', 'search', 'create', 'install', 'update', 'upgrade', 'remove', 'uninstall', 'run', 'config', 'init', 'clean', 'package', 'bundle') prepending /Users/ilan/python/envs/koo and /Users/ilan/python/envs/koo/cmd and /Users/ilan/python/envs/koo/bin to PATH -bash: awk: command not found -bash: dirname: command not found Traceback (most recent call last): File "/Users/ilan/python/bin/conda", line 4, in <module> from conda.cli.main import main File "/Users/ilan/conda/conda/__init__.py", line 19, in <module> __version__ = get_version(__file__, __name__) File "/Users/ilan/python/lib/python2.7/site-packages/auxlib/packaging.py", line 93, in get_version if is_git_repo(here): File "/Users/ilan/python/lib/python2.7/site-packages/auxlib/packaging.py", line 70, in is_git_repo return call(('git', 'rev-parse'), cwd=path) == 0 File "/Users/ilan/python/lib/python2.7/subprocess.py", line 522, in call return Popen(*popenargs, **kwargs).wait() File "/Users/ilan/python/lib/python2.7/subprocess.py", line 710, in __init__ errread, errwrite) File "/Users/ilan/python/lib/python2.7/subprocess.py", line 1335, in _execute_child raise child_exception ``` Is it true that the new activate scripts depend on `awk` and `dirname`?
> the new activate scripts The usage of awk and dirname is not new: https://github.com/conda/conda-env/blame/develop/bin/activate#L32 That's conda-env, of course, but that's where the scripts that are now in conda came from. Something else is going wrong. It might be a bug in the path translation stuff. It is supposed to just be a pass-through on *nix to *nix, but I did see some issues like this in fixing the tests. I'll look into it and see if I can improve the tests to catch whatever is happening here.
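For readers unfamiliar with the path translation involved here: activate has to rewrite a Windows-style PATH into a Unix-style one for MSYS/Cygwin-flavoured shells. The sketch below is a deliberately simplified stand-in, not the regex conda ships in `conda/utils.py` (which also handles cygdrive prefixes, forward slashes, and arbitrary surrounding text), but it shows the transformation the patch above is tightening up.

```python
import re

def win_path_to_unix(path, root_prefix=""):
    # "z:\a\b;z:\c" -> "/z/a/b:/z/c" (simplified; assumes backslash separators)
    def translate(match):
        drive, rest = match.group(1), match.group(2)
        return root_prefix + "/" + drive + rest.replace("\\", "/")
    converted = re.sub(r'([a-zA-Z]):((?:\\[^;\\]+)+)', translate, path)
    # The ";" that separated Windows PATH entries becomes ":" once the entry
    # that follows it has been converted to a "/"-rooted path.
    return converted.replace(";/", ":/")

assert win_path_to_unix("z:\\documents\\code;z:\\tools\\cmd") == "/z/documents/code:/z/tools/cmd"
```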
2016-03-19T19:56:18
-1.0
conda/conda
2,445
conda__conda-2445
[ "2444" ]
33e1e45b8aee9df5377931619deacfb250be3986
diff --git a/conda/cli/main_info.py b/conda/cli/main_info.py --- a/conda/cli/main_info.py +++ b/conda/cli/main_info.py @@ -148,7 +148,7 @@ def execute(args, parser): import conda.config as config from conda.resolve import Resolve from conda.cli.main_init import is_initialized - from conda.api import get_index, get_package_versions + from conda.api import get_index if args.root: if args.json: @@ -158,21 +158,19 @@ def execute(args, parser): return if args.packages: - if args.json: - results = defaultdict(list) - for arg in args.packages: - for pkg in get_package_versions(arg): - results[arg].append(pkg._asdict()) - common.stdout_json(results) - return index = get_index() r = Resolve(index) - specs = map(common.arg2spec, args.packages) - - for spec in specs: - versions = r.get_pkgs(spec) - for pkg in sorted(versions): - pretty_package(pkg) + if args.json: + common.stdout_json({ + package: [p._asdict() + for p in sorted(r.get_pkgs(common.arg2spec(package)))] + for package in args.packages + }) + else: + for package in args.packages: + versions = r.get_pkgs(common.arg2spec(package)) + for pkg in sorted(versions): + pretty_package(pkg) return options = 'envs', 'system', 'license'
diff --git a/tests/test_info.py b/tests/test_info.py --- a/tests/test_info.py +++ b/tests/test_info.py @@ -1,6 +1,8 @@ from __future__ import print_function, absolute_import, division +import json from conda import config +from conda.cli import main_info from tests.helpers import run_conda_command, assert_in, assert_equals @@ -32,3 +34,21 @@ def test_info(): assert_in(conda_info_out, conda_info_all_out) assert_in(conda_info_e_out, conda_info_all_out) assert_in(conda_info_s_out, conda_info_all_out) + + +def test_info_package_json(): + out, err = run_conda_command("info", "--json", "numpy=1.11.0=py35_0") + assert err == "" + + out = json.loads(out) + assert set(out.keys()) == {"numpy=1.11.0=py35_0"} + assert len(out["numpy=1.11.0=py35_0"]) == 1 + assert isinstance(out["numpy=1.11.0=py35_0"], list) + + out, err = run_conda_command("info", "--json", "numpy") + assert err == "" + + out = json.loads(out) + assert set(out.keys()) == {"numpy"} + assert len(out["numpy"]) > 1 + assert isinstance(out["numpy"], list)
conda info --json and package lookup If you set the `--json` flag for `conda info` when searching for packages, you sometimes get nothing: ``` bash $ conda info numpy=1.11.0=py35_0 Fetching package metadata: .... numpy 1.11.0 py35_0 ------------------- file name : numpy-1.11.0-py35_0.tar.bz2 name : numpy version : 1.11.0 build number: 0 build string: py35_0 channel : defaults size : 6.1 MB date : 2016-03-28 license : BSD md5 : 1900998c19c5e310687013f95374bba2 installed environments: dependencies: mkl 11.3.1 python 3.5* $ conda info --json numpy=1.11.0=py35_0 {} ``` Things work fine for `conda info --json numpy`, so it's something with the spec format. conda info: ``` platform : linux-64 conda version : 4.0.5 conda-build version : not installed python version : 2.7.11.final.0 requests version : 2.9.1 root environment : /opt/conda (writable) default environment : /opt/conda envs directories : /opt/conda/envs package cache : /opt/conda/pkgs channel URLs : https://repo.continuum.io/pkgs/free/linux-64/ https://repo.continuum.io/pkgs/free/noarch/ https://repo.continuum.io/pkgs/pro/linux-64/ https://repo.continuum.io/pkgs/pro/noarch/ config file : None is foreign system : False ```
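The fix above routes both the JSON and the plain-text paths through the same `Resolve.get_pkgs` lookup, so a full `name=version=build` spec behaves identically either way. The intended output shape, sketched here with made-up records rather than a real conda index (`get_pkgs` below is just a stand-in callable):

```python
import json

def packages_as_json(specs, get_pkgs):
    # get_pkgs stands in for Resolve.get_pkgs(arg2spec(spec)); it should
    # return a list of package records (dicts) for the given spec.
    return {spec: sorted(get_pkgs(spec), key=lambda rec: rec["version"])
            for spec in specs}

fake_index = {
    "numpy": [{"name": "numpy", "version": "1.10.4"},
              {"name": "numpy", "version": "1.11.0"}],
    "numpy=1.11.0=py35_0": [{"name": "numpy", "version": "1.11.0"}],
}
# Both the bare name and the full spec map to non-empty lists, which is the
# case that previously came back as {}.
print(json.dumps(packages_as_json(fake_index, fake_index.__getitem__), indent=2))
```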
2016-05-08T06:59:02
-1.0
conda/conda
2,529
conda__conda-2529
[ "2527" ]
ff8d814b6a8b075e8a2c4bab5b23691b21d0e902
diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -85,10 +85,9 @@ def remove_binstar_tokens(url): # A simpler version of url_channel will do def url_channel(url): - return None, 'defaults' + return url.rsplit('/', 2)[0] + '/' if url and '/' in url else None, 'defaults' - # We don't use the package cache or trash logic in the installer - pkgs_dirs = [] + pkgs_dirs = [join(sys.prefix, 'pkgs')] on_win = bool(sys.platform == "win32") @@ -423,13 +422,14 @@ def create_meta(prefix, dist, info_dir, extra_info): meta = json.load(fi) # add extra info, add to our intenral cache meta.update(extra_info) + if 'url' not in meta: + meta['url'] = read_url(dist) # write into <env>/conda-meta/<dist>.json meta_dir = join(prefix, 'conda-meta') if not isdir(meta_dir): os.makedirs(meta_dir) with open(join(meta_dir, _dist2filename(dist, '.json')), 'w') as fo: json.dump(meta, fo, indent=2, sort_keys=True) - # only update the package cache if it is loaded for this prefix. if prefix in linked_data_: load_linked_data(prefix, dist, meta) @@ -821,12 +821,15 @@ def load_linked_data(prefix, dist, rec=None): rec = json.load(fi) except IOError: return None - _, schannel = url_channel(rec.get('url')) else: linked_data(prefix) + url = rec.get('url') + channel, schannel = url_channel(url) + if 'fn' not in rec: + rec['fn'] = url.rsplit('/', 1)[-1] if url else dname + '.tar.bz2' + rec['channel'] = channel rec['schannel'] = schannel cprefix = '' if schannel == 'defaults' else schannel + '::' - rec['fn'] = dname + '.tar.bz2' linked_data_[prefix][str(cprefix + dname)] = rec return rec @@ -1159,9 +1162,9 @@ def main(): prefix = opts.prefix pkgs_dir = join(prefix, 'pkgs') + pkgs_dirs[0] = [pkgs_dir] if opts.verbose: print("prefix: %r" % prefix) - pkgs_dirs.append(pkgs_dir) if opts.file: idists = list(yield_lines(join(prefix, opts.file)))
diff --git a/tests/test_install.py b/tests/test_install.py --- a/tests/test_install.py +++ b/tests/test_install.py @@ -448,5 +448,20 @@ def test_misc(self): self.assertEqual(duplicates_to_remove(li, [d1, d2]), []) +def test_standalone_import(): + import sys + import conda.install + tmp_dir = tempfile.mkdtemp() + fname = conda.install.__file__.rstrip('co') + shutil.copyfile(fname, join(tmp_dir, basename(fname))) + opath = [tmp_dir] + opath.extend(s for s in sys.path if basename(s) not in ('conda', 'site-packages')) + opath, sys.path = sys.path, opath + try: + import install + finally: + sys.path = opath + + if __name__ == '__main__': unittest.main()
Add test for conda/install.py imports https://github.com/conda/conda/pull/2526#issuecomment-220751869 Since this type of bug was introduced by at least three people independently, I think it would be good to add a test for this. The test could be something along the lines of: ``` tmp_dir = tempfile.mkdtemp() shutil.copyfile(conda.install.__file__, tmp_dir) sys.path = [tmp_dir] # we also need the standard library here import install ``` Basically, put the `install` module into an empty directory, and test if it can be imported by itself.
Perhaps it would be sufficient to scan for `(from|import)\s+(conda|[.])`?
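The merged test (above) performs the import from a scratch directory with a trimmed `sys.path`. The static-scan idea floated in this comment would look roughly like the sketch below; it is shown only as an illustration of the suggestion, not as the test that was actually added.

```python
import re

NON_STANDALONE = re.compile(r'^\s*(?:from|import)\s+(?:conda\b|\.)', re.MULTILINE)

def has_non_standalone_imports(source_text):
    # True if the module imports anything from the conda package or uses a
    # relative import, either of which breaks copying install.py out on its own.
    return bool(NON_STANDALONE.search(source_text))

assert has_non_standalone_imports("from conda.install import rm_rf\n")
assert has_non_standalone_imports("from .compat import iteritems\n")
assert not has_non_standalone_imports("import os\nimport shutil\n")
```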
2016-05-21T21:58:02
-1.0
conda/conda
2,631
conda__conda-2631
[ "2626" ]
38b4966d5c0cf16ebe310ce82ffef63c926f9f3f
diff --git a/conda/misc.py b/conda/misc.py --- a/conda/misc.py +++ b/conda/misc.py @@ -9,7 +9,7 @@ import sys from collections import defaultdict from os.path import (abspath, dirname, expanduser, exists, - isdir, isfile, islink, join, relpath) + isdir, isfile, islink, join, relpath, curdir) from .install import (name_dist, linked as install_linked, is_fetched, is_extracted, is_linked, linked_data, find_new_location, cached_url) @@ -35,8 +35,8 @@ def conda_installed_files(prefix, exclude_self_build=False): res.update(set(meta['files'])) return res - -url_pat = re.compile(r'(?P<url>.+)/(?P<fn>[^/#]+\.tar\.bz2)' +url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?' + r'(?P<fn>[^/\\#]+\.tar\.bz2)' r'(:?#(?P<md5>[0-9a-f]{32}))?$') def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None): actions = defaultdict(list) @@ -55,12 +55,14 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None): m = url_pat.match(spec) if m is None: sys.exit('Could not parse explicit URL: %s' % spec) - url, md5 = m.group('url') + '/' + m.group('fn'), m.group('md5') - if not is_url(url): - if not isfile(url): - sys.exit('Error: file not found: %s' % url) - url = utils_url_path(url) - url_p, fn = url.rsplit('/', 1) + url_p, fn, md5 = m.group('url_p'), m.group('fn'), m.group('md5') + if not is_url(url_p): + if url_p is None: + url_p = curdir + elif not isdir(url_p): + sys.exit('Error: file not found: %s' % join(url_p, fn)) + url_p = utils_url_path(url_p).rstrip('/') + url = "{0}/{1}".format(url_p, fn) # See if the URL refers to a package in our cache prefix = pkg_path = dir_path = None
diff --git a/tests/test_misc.py b/tests/test_misc.py --- a/tests/test_misc.py +++ b/tests/test_misc.py @@ -15,13 +15,13 @@ def test_cache_fn_url(self): def test_url_pat_1(self): m = url_pat.match('http://www.cont.io/pkgs/linux-64/foo.tar.bz2' '#d6918b03927360aa1e57c0188dcb781b') - self.assertEqual(m.group('url'), 'http://www.cont.io/pkgs/linux-64') + self.assertEqual(m.group('url_p'), 'http://www.cont.io/pkgs/linux-64') self.assertEqual(m.group('fn'), 'foo.tar.bz2') self.assertEqual(m.group('md5'), 'd6918b03927360aa1e57c0188dcb781b') def test_url_pat_2(self): m = url_pat.match('http://www.cont.io/pkgs/linux-64/foo.tar.bz2') - self.assertEqual(m.group('url'), 'http://www.cont.io/pkgs/linux-64') + self.assertEqual(m.group('url_p'), 'http://www.cont.io/pkgs/linux-64') self.assertEqual(m.group('fn'), 'foo.tar.bz2') self.assertEqual(m.group('md5'), None)
Installing packages from files broken in master ``` ((p35)) Z:\msarahan\code\conda>conda install --force z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2 Could not parse explicit URL: z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2 ((p35)) Z:\msarahan\code\conda>conda install --offline z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2 Could not parse explicit URL: z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2 ((p35)) Z:\msarahan\code\conda>conda install --force ..\..\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2 Could not parse explicit URL: ..\..\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2 ``` z: is a mapped network drive - not sure if that makes any difference.
Looks like this only affects windows. Perhaps the `\`. Will dig further. https://github.com/conda/conda/blob/master/conda/misc.py#L57 Ok so the regex [here](https://github.com/conda/conda/blob/master/conda/misc.py#L39) needs to be more robust for backslashes I guess?
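For reference, the pattern the patch above settles on accepts `/`, `\`, or no directory part at all, plus an optional `#md5` fragment, which covers every spelling reported in this issue. Exercising it directly (the regex is the one from the patch; the paths are taken from the report):

```python
import re

url_pat = re.compile(r'(?:(?P<url_p>.+)(?:[/\\]))?'
                     r'(?P<fn>[^/\\#]+\.tar\.bz2)'
                     r'(:?#(?P<md5>[0-9a-f]{32}))?$')

for spec in (r'z:\msarahan\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2',
             r'..\..\Downloads\numexpr-2.6.0-np110py35_0.tar.bz2',
             'numexpr-2.6.0-np110py35_0.tar.bz2',
             'http://www.cont.io/pkgs/linux-64/foo.tar.bz2#d6918b03927360aa1e57c0188dcb781b'):
    m = url_pat.match(spec)
    # url_p is None for a bare filename; md5 is None unless a fragment is given.
    print(m.group('url_p'), m.group('fn'), m.group('md5'))
```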
2016-06-09T05:42:02
-1.0
conda/conda
2,729
conda__conda-2729
[ "2642" ]
07e517865bbb98e333a0ba0d217fc5f60c444aeb
diff --git a/conda/config.py b/conda/config.py --- a/conda/config.py +++ b/conda/config.py @@ -195,7 +195,9 @@ def get_rc_urls(): return rc['channels'] def is_url(url): - return url and urlparse.urlparse(url).scheme != "" + if url: + p = urlparse.urlparse(url) + return p.netloc != "" or p.scheme == "file" def binstar_channel_alias(channel_alias): if channel_alias.startswith('file:/'):
diff --git a/tests/test_create.py b/tests/test_create.py --- a/tests/test_create.py +++ b/tests/test_create.py @@ -5,7 +5,7 @@ from glob import glob import json from logging import getLogger, Handler -from os.path import exists, isdir, isfile, join, relpath +from os.path import exists, isdir, isfile, join, relpath, basename import os from shlex import split from shutil import rmtree, copyfile @@ -24,7 +24,7 @@ from conda.cli.main_remove import configure_parser as remove_configure_parser from conda.cli.main_update import configure_parser as update_configure_parser from conda.config import pkgs_dirs, bits -from conda.install import linked as install_linked, linked_data_ +from conda.install import linked as install_linked, linked_data_, dist2dirname from conda.install import on_win from conda.compat import PY3, TemporaryDirectory @@ -63,37 +63,18 @@ def reenable_dotlog(handlers): dotlogger.handlers = handlers -@contextmanager -def make_temp_env(*packages): - prefix = make_temp_prefix() - try: - # try to clear any config that's been set by other tests - config.rc = config.load_condarc('') - - p = conda_argparse.ArgumentParser() - sub_parsers = p.add_subparsers(metavar='command', dest='cmd') - create_configure_parser(sub_parsers) - - command = "create -y -q -p {0} {1}".format(escape_for_winpath(prefix), " ".join(packages)) - - args = p.parse_args(split(command)) - args.func(args, p) - - yield prefix - finally: - rmtree(prefix, ignore_errors=True) - - class Commands: INSTALL = "install" UPDATE = "update" REMOVE = "remove" + CREATE = "create" parser_config = { Commands.INSTALL: install_configure_parser, Commands.UPDATE: update_configure_parser, Commands.REMOVE: remove_configure_parser, + Commands.CREATE: create_configure_parser, } @@ -102,23 +83,38 @@ def run_command(command, prefix, *arguments): sub_parsers = p.add_subparsers(metavar='command', dest='cmd') parser_config[command](sub_parsers) - command = "{0} -y -q -p {1} {2}".format(command, - escape_for_winpath(prefix), - " ".join(arguments)) + prefix = escape_for_winpath(prefix) + arguments = list(map(escape_for_winpath, arguments)) + command = "{0} -y -q -p {1} {2}".format(command, prefix, " ".join(arguments)) args = p.parse_args(split(command)) args.func(args, p) +@contextmanager +def make_temp_env(*packages): + prefix = make_temp_prefix() + try: + # try to clear any config that's been set by other tests + config.rc = config.load_condarc('') + run_command(Commands.CREATE, prefix, *packages) + yield prefix + finally: + rmtree(prefix, ignore_errors=True) + + def package_is_installed(prefix, package, exact=False): + packages = list(install_linked(prefix)) + if '::' not in package: + packages = list(map(dist2dirname, packages)) if exact: - return any(p == package for p in install_linked(prefix)) - return any(p.startswith(package) for p in install_linked(prefix)) + return package in packages + return any(p.startswith(package) for p in packages) def assert_package_is_installed(prefix, package, exact=False): if not package_is_installed(prefix, package, exact): - print([p for p in install_linked(prefix)]) + print(list(install_linked(prefix))) raise AssertionError("package {0} is not in prefix".format(package)) @@ -147,29 +143,29 @@ def test_create_install_update_remove(self): assert not package_is_installed(prefix, 'flask-0.') assert_package_is_installed(prefix, 'python-3') - @pytest.mark.skipif(on_win, reason="windows tarball is broken still") @pytest.mark.timeout(300) def test_tarball_install_and_bad_metadata(self): with make_temp_env("python 
flask=0.10.1") as prefix: - assert_package_is_installed(prefix, 'flask-0.') + assert_package_is_installed(prefix, 'flask-0.10.1') run_command(Commands.REMOVE, prefix, 'flask') - assert not package_is_installed(prefix, 'flask-0.') + assert not package_is_installed(prefix, 'flask-0.10.1') assert_package_is_installed(prefix, 'python') # regression test for #2626 # install tarball with full path flask_tar_file = glob(join(pkgs_dirs[0], 'flask-0.*.tar.bz2'))[-1] - if not on_win: - run_command(Commands.INSTALL, prefix, flask_tar_file) - assert_package_is_installed(prefix, 'flask-0.') + tar_new_path = join(prefix, basename(flask_tar_file)) + copyfile(flask_tar_file, tar_new_path) + run_command(Commands.INSTALL, prefix, tar_new_path) + assert_package_is_installed(prefix, 'flask-0') - run_command(Commands.REMOVE, prefix, 'flask') - assert not package_is_installed(prefix, 'flask-0.') + run_command(Commands.REMOVE, prefix, 'flask') + assert not package_is_installed(prefix, 'flask-0') # regression test for #2626 # install tarball with relative path - flask_tar_file = relpath(flask_tar_file) - run_command(Commands.INSTALL, prefix, flask_tar_file) + tar_new_path = relpath(tar_new_path) + run_command(Commands.INSTALL, prefix, tar_new_path) assert_package_is_installed(prefix, 'flask-0.') # regression test for #2599
tarball install windows @msarahan @mingwandroid What _should_ the `file://` url format be on Windows? ``` ________________________ IntegrationTests.test_python3 ________________________ Traceback (most recent call last): File "C:\projects\conda\tests\test_create.py", line 146, in test_python3 run_command(Commands.INSTALL, prefix, flask_tar_file) File "C:\projects\conda\tests\test_create.py", line 104, in run_command args.func(args, p) File "C:\projects\conda\conda\cli\main_install.py", line 62, in execute install(args, parser, 'install') File "C:\projects\conda\conda\cli\install.py", line 195, in install explicit(args.packages, prefix, verbose=not args.quiet) File "C:\projects\conda\conda\misc.py", line 111, in explicit index.update(fetch_index(channels, **fetch_args)) File "C:\projects\conda\conda\fetch.py", line 266, in fetch_index for url in iterkeys(channel_urls)] File "C:\projects\conda\conda\fetch.py", line 67, in func res = f(*args, **kwargs) File "C:\projects\conda\conda\fetch.py", line 149, in fetch_repodata raise RuntimeError(msg) RuntimeError: Could not find URL: file:///C|/projects/conda/ ---------------------------- Captured stdout call ----------------------------- ``` The relevant lines to look at here are line 64 in `conda/misc.py` ``` url_p = utils_url_path(url_p).rstrip('/') ``` and line 147 in `conda/utils.py` ``` def url_path(path): path = abspath(path) if sys.platform == 'win32': path = '/' + path.replace(':', '|').replace('\\', '/') return 'file://%s' % path ``` Help here is definitely appreciated.
Maybe this might be useful. https://blogs.msdn.microsoft.com/ie/2006/12/06/file-uris-in-windows/ Python's (3.4+) pathlib module might give a hint ``` In [1]: import pathlib In [2]: pathlib.Path(r"C:\projects\conda").as_uri() Out[2]: 'file:///C:/projects/conda' ``` I'm unable to reproduce this, however maybe the code that interprets the file uri might also be relevant. This would include conda's `LocalFSAdapter()` in connection.py (https://github.com/conda/conda/blob/231e9db898b3d7720b49e9e2050a88be6978fd38/conda/connection.py#L205-L235) which makes a call to `url_to_path()` (https://github.com/conda/conda/blob/231e9db898b3d7720b49e9e2050a88be6978fd38/conda/connection.py#L238-L251). These functions seem to interpret conda's `file:///C|/projects/conda/` format correctly. One final observation: line 64 in `misc.py` strips the rightmost `/`, yet the uri in the error message seems to still have one. Is that supposed to be happening? Hi @groutr. thanks for looking at this and good to see you again! @kalefranz yeah, you need to keep the : @mingwandroid no problem. I hope to keep active here as time permits.
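A concrete reference point for the question above, using only the standard library (Python 3 spellings shown; the reverse direction depends on the platform-specific `url2pathname`, so the round-trip comment is only guaranteed on Windows):

```python
import pathlib
from urllib.parse import urlparse
from urllib.request import url2pathname

# Windows path -> file URL, as the standard library defines it:
print(pathlib.PureWindowsPath(r"C:\projects\conda").as_uri())
# -> file:///C:/projects/conda   (colon kept, no "C|" substitution)

# file URL -> local path (on Windows this yields C:\projects\conda):
print(url2pathname(urlparse("file:///C:/projects/conda").path))
```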
2016-06-16T20:23:12
-1.0
conda/conda
2,734
conda__conda-2734
[ "2732" ]
3c6a9b5827f255735993c433488429e5781d4658
diff --git a/conda/cli/main_config.py b/conda/cli/main_config.py --- a/conda/cli/main_config.py +++ b/conda/cli/main_config.py @@ -13,7 +13,7 @@ from ..compat import string_types from ..config import (rc_bool_keys, rc_string_keys, rc_list_keys, sys_rc_path, user_rc_path, rc_other) -from ..utils import yaml_load, yaml_dump +from ..utils import yaml_load, yaml_dump, yaml_bool descr = """ Modify configuration values in .condarc. This is modeled after the git @@ -289,14 +289,14 @@ def execute_config(args, parser): set_bools, set_strings = set(rc_bool_keys), set(rc_string_keys) for key, item in args.set: # Check key and value - yamlitem = yaml_load(item) if key in set_bools: - if not isinstance(yamlitem, bool): + itemb = yaml_bool(item) + if itemb is None: error_and_exit("Key: %s; %s is not a YAML boolean." % (key, item), json=args.json, error_type="TypeError") - rc_config[key] = yamlitem + rc_config[key] = itemb elif key in set_strings: - rc_config[key] = yamlitem + rc_config[key] = item else: error_and_exit("Error key must be one of %s, not %s" % (', '.join(set_bools | set_strings), key), json=args.json, diff --git a/conda/connection.py b/conda/connection.py --- a/conda/connection.py +++ b/conda/connection.py @@ -22,7 +22,7 @@ from . import __version__ as VERSION from .compat import urlparse, StringIO from .config import platform as config_platform, ssl_verify, get_proxy_servers -from .utils import gnu_get_libc_version +from .utils import gnu_get_libc_version, yaml_bool RETRIES = 3 @@ -110,7 +110,7 @@ def __init__(self, *args, **kwargs): self.headers['User-Agent'] = user_agent - self.verify = ssl_verify + self.verify = yaml_bool(ssl_verify, ssl_verify) class NullAuth(requests.auth.AuthBase): diff --git a/conda/utils.py b/conda/utils.py --- a/conda/utils.py +++ b/conda/utils.py @@ -292,6 +292,20 @@ def get_yaml(): return yaml +# Restores YAML 1.1 boolean flexibility. +yaml_bool_ = { + 'true': True, 'yes': True, 'on': True, + 'false': False, 'no': False, 'off': False +} +def yaml_bool(s, passthrough=None): + if type(s) is bool: + return s + try: + return yaml_bool_.get(s.lower(), passthrough) + except AttributeError: + return passthrough + + def yaml_load(filehandle): yaml = get_yaml() try:
diff --git a/tests/test_config.py b/tests/test_config.py --- a/tests/test_config.py +++ b/tests/test_config.py @@ -12,7 +12,7 @@ import pytest import conda.config as config -from conda.utils import get_yaml +from conda.utils import get_yaml, yaml_bool from tests.helpers import run_conda_command @@ -441,20 +441,17 @@ def test_invalid_rc(): def test_config_set(): # Test the config set command - # Make sure it accepts only boolean values for boolean keys and any value for string keys + # Make sure it accepts any YAML 1.1 boolean values + assert yaml_bool(True) is True + assert yaml_bool(False) is False + for str in ('yes', 'Yes', 'YES', 'on', 'On', 'ON', + 'off', 'Off', 'OFF', 'no', 'No', 'NO'): + with make_temp_condarc() as rc: + stdout, stderr = run_conda_command('config', '--file', rc, + '--set', 'always_yes', str) + assert stdout == '' + assert stderr == '' - with make_temp_condarc() as rc: - stdout, stderr = run_conda_command('config', '--file', rc, - '--set', 'always_yes', 'yes') - - assert stdout == '' - assert stderr == 'Error: Key: always_yes; yes is not a YAML boolean.' - - stdout, stderr = run_conda_command('config', '--file', rc, - '--set', 'always_yes', 'no') - - assert stdout == '' - assert stderr == 'Error: Key: always_yes; no is not a YAML boolean.' def test_set_rc_string(): # Test setting string keys in .condarc
conda config --set show_channel_urls yes doesn't work anymore This is happening since the latest conda update: ``` bat λ conda config --set show_channel_urls yes Error: Key: show_channel_urls; yes is not a YAML boolean. ``` It happens with both conda 4.1.1 (local windows py 3.5) and 4.1.0 (appveyor, https://ci.appveyor.com/project/mdboom/matplotlib/build/1.0.1774) and it worked with 4.0.8 (https://ci.appveyor.com/project/mdboom/matplotlib/build/1.0.1765/job/bkldg98f8p087xmf)
This one is CC @mcg1969 I didn't mess with the section of code. I don't mind fixing it, don't get me wrong, but I am guessing that this is a difference between our old YAML library and our new one Try using true/false instead of yes/no... @msarahan? Ahhhh, interesting. I thought I had tested for that. But we can change the ruamel_yaml version back to using yaml 1.1 (if it isn't already), and then yes/no should work for yaml booleans. I think the ship is sailed on the new library. I think we have to work around it Looks like YAML 1.2 drops Yes/No and On/Off support: http://yaml.readthedocs.io/en/latest/pyyaml.html Good news, we can fix Changing `version="1.2"` to `version="1.1"` on line 298 of `utils.py` restores this behavior. I'm concerned we may be breaking other things though by doing this, so my inclination is to create a `yaml_bool` function to wrap around the output of `yaml.load` in cases like this. Even better: drop the use of `yaml.load` to parse the boolean strings in `cli/main_config.py` in the first place.
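The `yaml_bool` idea mentioned at the end of this thread boils down to a small lookup that restores the YAML 1.1 boolean spellings on top of a YAML 1.2 parser. A simplified version of the helper the patch adds:

```python
YAML_1_1_BOOLS = {
    'true': True, 'yes': True, 'on': True,
    'false': False, 'no': False, 'off': False,
}

def yaml_bool(value, default=None):
    # Accept real booleans and any case variant of the YAML 1.1 spellings;
    # anything else falls back to `default` so callers can error out cleanly.
    if isinstance(value, bool):
        return value
    try:
        return YAML_1_1_BOOLS.get(value.lower(), default)
    except AttributeError:  # not a string at all
        return default

assert yaml_bool('yes') is True and yaml_bool('Off') is False
assert yaml_bool('maybe') is None
```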
2016-06-16T23:29:56
-1.0
conda/conda
2,862
conda__conda-2862
[ "2845" ]
b332659482ea5e3b3596dbe89f338f9a7e750d30
diff --git a/conda/fetch.py b/conda/fetch.py --- a/conda/fetch.py +++ b/conda/fetch.py @@ -98,7 +98,7 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None): if "_mod" in cache: headers["If-Modified-Since"] = cache["_mod"] - if 'repo.continuum.io' in url: + if 'repo.continuum.io' in url or url.startswith("file://"): filename = 'repodata.json.bz2' else: headers['Accept-Encoding'] = 'gzip, deflate, compress, identity'
diff --git a/tests/test_create.py b/tests/test_create.py --- a/tests/test_create.py +++ b/tests/test_create.py @@ -260,8 +260,8 @@ def test_tarball_install_and_bad_metadata(self): os.makedirs(subchan) tar_new_path = join(subchan, flask_fname) copyfile(tar_old_path, tar_new_path) - with open(join(subchan, 'repodata.json'), 'w') as f: - f.write(json.dumps(repodata)) + with bz2.BZ2File(join(subchan, 'repodata.json.bz2'), 'w') as f: + f.write(json.dumps(repodata).encode('utf-8')) run_command(Commands.INSTALL, prefix, '-c', channel, 'flask') assert_package_is_installed(prefix, channel + '::' + 'flask-')
file:// URLs don't work anymore with conda 4.1.3 Conda 4.1.3 does not work anymore with **file://** URLs: ``` (E:\Anaconda3) C:\Windows\system32>conda update --override-channels --channel file:///A:/pkgs/free --all Fetching package metadata ....Error: Could not find URL: file:///A:/pkgs/free/win-64/ ``` But `A:\pkgs\free\win-64` really exists: ``` (E:\Anaconda3) C:\Windows\system32>dir A:\pkgs\free\win-64 Volume in drive A is Software Volume Serial Number is 4546-3CD9 Directory of A:\pkgs\free\win-64 06/24/2016 12:31 AM <DIR> . 01/23/2016 06:27 PM <DIR> .. 06/24/2016 12:28 AM 259,605 repodata.json.bz2 07/07/2015 12:54 AM 85,764 argcomplete-0.9.0-py34_0.tar.bz2 ``` Before upgrading from 4.0.8-py35_0 everything worked fine. The same happened to the Linux version.
@mcg1969 I think I was going to write an integration test for this but obviously let it slip. ARGH! No, actually, we have integration tests for file URLs and for file-based channels. This is madness! :-( @ciupicri, I apologize. Hold on! @ciupicri, can you please create an _uncompressed_ `repodata.json` in that channel directory? (You do recall, do you not, @kalefranz, that you removed the `bzip2` support from my test...) :facepalm: Looks like we need to refer to `bzip2` for file:// base URLs. Should be a simple fix, not quite one-character :-) @mcg1969, I've uncompressed the `repodata.json.bz2` that I had, and now conda fails at the next stage: ``` unicodecsv: 0.14.1-py35_0 --> 0.14.1-py35_0 file:///A:/pkgs/free vs2015_runtime: 14.00.23026.0-0 --> 14.00.23026.0-0 file:///A:/pkgs/free wheel: 0.29.0-py35_0 --> 0.29.0-py35_0 file:///A:/pkgs/free Proceed ([y]/n)? y DEBUG:conda.instructions: PREFIX('E:\\Anaconda3') DEBUG:conda.instructions: PRINT('Fetching packages ...') Fetching packages ... INFO:print:Fetching packages ... DEBUG:conda.instructions: FETCH('file:///A:/pkgs/free::libdynd-0.7.2-0') DEBUG:requests.packages.urllib3.util.retry:Converted retries value: 3 -> Retry(total=3, connect=None, read=None, redirect=None) DEBUG:conda.fetch:url='file:///A:/pkgs/free/win-64/libdynd-0.7.2-0.tar.bz2' DEBUG:conda.fetch:HTTPError: 404 Client Error: None for url: file:///A:/pkgs/free/win-64/libdynd-0.7.2-0.tar.bz2: file:///A:/pkgs/free/win-64/libdynd-0.7.2-0.tar.bz2 Error: HTTPError: 404 Client Error: None for url: file:///A:/pkgs/free/win-64/libdynd-0.7.2-0.tar.bz2: file:///A:/pkgs/free/win-64/libdynd-0.7.2-0.tar.bz2 ``` `conda list` shows that I already have libdynd-0.7.2.
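The eventual fix is essentially a one-line dispatch in `fetch_repodata`: local `file://` channels are asked for `repodata.json.bz2` up front instead of relying on HTTP content negotiation. A small sketch of that decision, matching the patch above:

```python
def repodata_filename(channel_url):
    # file:// channels (and repo.continuum.io) are expected to ship a
    # pre-compressed repodata.json.bz2; other HTTP(S) channels can negotiate
    # an encoding for plain repodata.json instead.
    if 'repo.continuum.io' in channel_url or channel_url.startswith('file://'):
        return 'repodata.json.bz2'
    return 'repodata.json'

assert repodata_filename('file:///A:/pkgs/free/win-64/') == 'repodata.json.bz2'
assert repodata_filename('https://conda.anaconda.org/conda-forge/linux-64/') == 'repodata.json'
```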
2016-06-24T22:48:06
-1.0
conda/conda
2,873
conda__conda-2873
[ "2754" ]
895d23dd3c5154b149bdc5f57b1c1e33b3afdd71
diff --git a/conda/egg_info.py b/conda/egg_info.py --- a/conda/egg_info.py +++ b/conda/egg_info.py @@ -29,14 +29,15 @@ def get_site_packages_dir(installed_pkgs): def get_egg_info_files(sp_dir): for fn in os.listdir(sp_dir): - if not fn.endswith(('.egg', '.egg-info')): + if not fn.endswith(('.egg', '.egg-info', '.dist-info')): continue path = join(sp_dir, fn) if isfile(path): yield path elif isdir(path): for path2 in [join(path, 'PKG-INFO'), - join(path, 'EGG-INFO', 'PKG-INFO')]: + join(path, 'EGG-INFO', 'PKG-INFO'), + join(path, 'METADATA')]: if isfile(path2): yield path2 @@ -54,7 +55,7 @@ def parse_egg_info(path): key = m.group(1).lower() info[key] = m.group(2) try: - return '%(name)s-%(version)s-<egg_info>' % info + return '%(name)s-%(version)s-<pip>' % info except KeyError: pass return None
diff --git a/tests/test_create.py b/tests/test_create.py --- a/tests/test_create.py +++ b/tests/test_create.py @@ -230,11 +230,21 @@ def test_create_install_update_remove(self): @pytest.mark.timeout(300) def test_list_with_pip_egg(self): with make_temp_env("python=3 pip") as prefix: - check_call(PYTHON_BINARY + " -m pip install --egg --no-use-wheel flask==0.10.1", + check_call(PYTHON_BINARY + " -m pip install --egg --no-binary flask flask==0.10.1", cwd=prefix, shell=True) stdout, stderr = run_command(Commands.LIST, prefix) stdout_lines = stdout.split('\n') - assert any(line.endswith("<egg_info>") for line in stdout_lines + assert any(line.endswith("<pip>") for line in stdout_lines + if line.lower().startswith("flask")) + + @pytest.mark.timeout(300) + def test_list_with_pip_wheel(self): + with make_temp_env("python=3 pip") as prefix: + check_call(PYTHON_BINARY + " -m pip install flask==0.10.1", + cwd=prefix, shell=True) + stdout, stderr = run_command(Commands.LIST, prefix) + stdout_lines = stdout.split('\n') + assert any(line.endswith("<pip>") for line in stdout_lines if line.lower().startswith("flask")) @pytest.mark.timeout(300)
conda list misses pip-installed wheels As of conda 4.1, `conda list` no longer captures python packages that were pip-installed and were installed from wheels. https://www.python.org/dev/peps/pep-0427/#id14 CC @ilanschnell
This is actually a real issue now because the large majority of packages on PyPI are distributed as sdists, and now on install, pip force-compiles sdists to wheels, then installs those wheels. How about we use something like this: ``` import pkgutil packages = [p[1] for p in pkgutil.iter_modules()] ``` The problem is that from whatever `pkgutil.iter_modules()` returns, it is hard to tell whether the installed package is a conda package or not. The point of the new `conda.egginfo` module, apart from not having to call out to `pip`, is that conda knows which `.egg-info` files are "untracked" (not part of any conda package). I think the best solution is to extend `conda.egginfo` to handle these new meta-data files also.
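The extension described here (and implemented in the patch above) just adds the wheel metadata layout to the scan: a `*.dist-info` directory containing a `METADATA` file, alongside the existing egg handling. A rough sketch, assuming `sp_dir` is a site-packages directory:

```python
import os
from os.path import isdir, isfile, join

def iter_pkg_metadata_files(sp_dir):
    for fn in os.listdir(sp_dir):
        path = join(sp_dir, fn)
        if not fn.endswith(('.egg', '.egg-info', '.dist-info')):
            continue
        if isfile(path):                 # old flat .egg-info files
            yield path
        elif isdir(path):
            for inner in ('PKG-INFO', join('EGG-INFO', 'PKG-INFO'), 'METADATA'):
                candidate = join(path, inner)
                if isfile(candidate):
                    yield candidate      # METADATA is what wheel installs carry
                    break
```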
2016-06-26T19:18:01
-1.0
conda/conda
2,875
conda__conda-2875
[ "2841" ]
8d744a0fab207153da762d615a7e71342fe9a20f
diff --git a/conda/cli/main_config.py b/conda/cli/main_config.py --- a/conda/cli/main_config.py +++ b/conda/cli/main_config.py @@ -257,13 +257,19 @@ def execute_config(args, parser): if isinstance(rc_config[key], (bool, string_types)): print("--set", key, rc_config[key]) - else: + else: # assume the key is a list-type # Note, since conda config --add prepends, these are printed in # the reverse order so that entering them in this order will # recreate the same file - for item in reversed(rc_config.get(key, [])): + items = rc_config.get(key, []) + numitems = len(items) + for q, item in enumerate(reversed(items)): # Use repr so that it can be pasted back in to conda config --add - print("--add", key, repr(item)) + if key == "channels" and q in (0, numitems-1): + print("--add", key, repr(item), + " # lowest priority" if q == 0 else " # highest priority") + else: + print("--add", key, repr(item)) # Add, append for arg, prepend in zip((args.add, args.append), (True, False)):
diff --git a/tests/test_config.py b/tests/test_config.py --- a/tests/test_config.py +++ b/tests/test_config.py @@ -240,8 +240,8 @@ def test_config_command_get(): --set always_yes True --set changeps1 False --set channel_alias http://alpha.conda.anaconda.org ---add channels 'defaults' ---add channels 'test' +--add channels 'defaults' # lowest priority +--add channels 'test' # highest priority --add create_default_packages 'numpy' --add create_default_packages 'ipython'\ """ @@ -251,8 +251,8 @@ def test_config_command_get(): '--get', 'channels') assert stdout == """\ ---add channels 'defaults' ---add channels 'test'\ +--add channels 'defaults' # lowest priority +--add channels 'test' # highest priority\ """ assert stderr == "" @@ -269,8 +269,8 @@ def test_config_command_get(): assert stdout == """\ --set changeps1 False ---add channels 'defaults' ---add channels 'test'\ +--add channels 'defaults' # lowest priority +--add channels 'test' # highest priority\ """ assert stderr == "" @@ -326,12 +326,12 @@ def test_config_command_parser(): with make_temp_condarc(condarc) as rc: stdout, stderr = run_conda_command('config', '--file', rc, '--get') - + print(stdout) assert stdout == """\ --set always_yes True --set changeps1 False ---add channels 'defaults' ---add channels 'test' +--add channels 'defaults' # lowest priority +--add channels 'test' # highest priority --add create_default_packages 'numpy' --add create_default_packages 'ipython'\ """
Would be nice if conda config --get channels listed the channels in priority order As far as I can tell it currently lists them in reverse order.
Good idea! Wow, I was about to change this, but the code has a note ``` # Note, since conda config --add prepends, these are printed in # the reverse order so that entering them in this order will # recreate the same file ``` It's a good point. And as implemented was intended to be a feature. I don't think this is a bug anymore. And if we change it, would have to go in 4.2.x instead of 4.1.x. I have to think more about it. I think one problem is that `--add` prepends. It seems more natural to me that `--add` would append, and there should be a separate `--prepend` flag. As a new feature (maybe not in 4.2.x), we should probably add both `--append` and `--prepend` flags. Keep the `--add` flag as-is for a while, but warn on use, and eventually change the behavior for `--add` from prepend to append. Good news: we already have append and prepend. As a stopgap, you could perhaps add `# lowest priority` and `# highest priority` to the first and last lines of the output? > Good news: we already have append and prepend. Oh perfect. I thought we had added one of them. Didn't remember if we put in both.
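The stopgap suggested here is what the patch above implements: keep printing the reverse-order `--add channels` lines (so they can be pasted back in to rebuild the file), but annotate the two ends. A small sketch of that output logic:

```python
def print_channel_config(channels):
    # `channels` is the list as stored in .condarc, highest priority first;
    # printing is reversed so that re-running the commands recreates the file.
    n = len(channels)
    for i, chan in enumerate(reversed(channels)):
        line = "--add channels %r" % chan
        if n > 1 and i == 0:
            line += "  # lowest priority"
        elif n > 1 and i == n - 1:
            line += "  # highest priority"
        print(line)

print_channel_config(['test', 'defaults'])
# --add channels 'defaults'  # lowest priority
# --add channels 'test'  # highest priority
```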
2016-06-26T21:50:18
-1.0
conda/conda
2,908
conda__conda-2908
[ "2886" ]
767c0a9c06e8d37b06ad2a5afce8a25af1eac795
diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -41,7 +41,6 @@ import tarfile import tempfile import time -import tempfile import traceback from os.path import (abspath, basename, dirname, isdir, isfile, islink, join, normpath) diff --git a/conda/misc.py b/conda/misc.py --- a/conda/misc.py +++ b/conda/misc.py @@ -69,14 +69,18 @@ def explicit(specs, prefix, verbose=False, force_extract=True, fetch_args=None, prefix = pkg_path = dir_path = None if url.startswith('file://'): prefix = cached_url(url) + if prefix is not None: + schannel = 'defaults' if prefix == '' else prefix[:-2] + is_file = False # If not, determine the channel name from the URL if prefix is None: channel, schannel = url_channel(url) + is_file = schannel.startswith('file:') and schannel.endswith('/') prefix = '' if schannel == 'defaults' else schannel + '::' + fn = prefix + fn dist = fn[:-8] - is_file = schannel.startswith('file:') and schannel.endswith('/') # Add explicit file to index so we'll see it later if is_file: index[fn] = {'fn': dist2filename(fn), 'url': url, 'md5': None}
diff --git a/tests/test_create.py b/tests/test_create.py --- a/tests/test_create.py +++ b/tests/test_create.py @@ -260,11 +260,32 @@ def test_tarball_install_and_bad_metadata(self): assert not package_is_installed(prefix, 'flask-0.10.1') assert_package_is_installed(prefix, 'python') - # Regression test for 2812 - # install from local channel from conda.config import pkgs_dirs flask_fname = flask_data['fn'] tar_old_path = join(pkgs_dirs[0], flask_fname) + + # regression test for #2886 (part 1 of 2) + # install tarball from package cache, default channel + run_command(Commands.INSTALL, prefix, tar_old_path) + assert_package_is_installed(prefix, 'flask-0.') + + # regression test for #2626 + # install tarball with full path, outside channel + tar_new_path = join(prefix, flask_fname) + copyfile(tar_old_path, tar_new_path) + run_command(Commands.INSTALL, prefix, tar_new_path) + assert_package_is_installed(prefix, 'flask-0') + + # regression test for #2626 + # install tarball with relative path, outside channel + run_command(Commands.REMOVE, prefix, 'flask') + assert not package_is_installed(prefix, 'flask-0.10.1') + tar_new_path = relpath(tar_new_path) + run_command(Commands.INSTALL, prefix, tar_new_path) + assert_package_is_installed(prefix, 'flask-0.') + + # Regression test for 2812 + # install from local channel for field in ('url', 'channel', 'schannel'): del flask_data[field] repodata = {'info': {}, 'packages':{flask_fname: flask_data}} @@ -279,21 +300,12 @@ def test_tarball_install_and_bad_metadata(self): run_command(Commands.INSTALL, prefix, '-c', channel, 'flask') assert_package_is_installed(prefix, channel + '::' + 'flask-') - # regression test for #2626 - # install tarball with full path - tar_new_path = join(prefix, flask_fname) - copyfile(tar_old_path, tar_new_path) - run_command(Commands.INSTALL, prefix, tar_new_path) - assert_package_is_installed(prefix, 'flask-0') - + # regression test for #2886 (part 2 of 2) + # install tarball from package cache, local channel run_command(Commands.REMOVE, prefix, 'flask') assert not package_is_installed(prefix, 'flask-0') - - # regression test for #2626 - # install tarball with relative path - tar_new_path = relpath(tar_new_path) - run_command(Commands.INSTALL, prefix, tar_new_path) - assert_package_is_installed(prefix, 'flask-0.') + run_command(Commands.INSTALL, prefix, tar_old_path) + assert_package_is_installed(prefix, channel + '::' + 'flask-') # regression test for #2599 linked_data_.clear()
conda install from tarball error? Running into this issue when trying to install directly from a tarball. ``` Traceback (most recent call last): File "/usr/local/bin/conda2", line 6, in <module> sys.exit(main()) An unexpected error has occurred, please consider sending the following traceback to the conda GitHub issue tracker at: https://github.com/conda/conda/issues Include the output of the command 'conda info' in your report. File "/opt/conda2/lib/python2.7/site-packages/conda/cli/main.py", line 120, in main exit_code = args_func(args, p) File "/opt/conda2/lib/python2.7/site-packages/conda/cli/main.py", line 130, in args_func exit_code = args.func(args, p) File "/opt/conda2/lib/python2.7/site-packages/conda/cli/main_install.py", line 69, in execute install(args, parser, 'install') File "/opt/conda2/lib/python2.7/site-packages/conda/cli/install.py", line 196, in install explicit(args.packages, prefix, verbose=not args.quiet) File "/opt/conda2/lib/python2.7/site-packages/conda/misc.py", line 79, in explicit is_file = schannel.startswith('file:') and schannel.endswith('/') UnboundLocalError: local variable 'schannel' referenced before assignment ```
`conda info`? This is in a docker image. ``` $ conda info Current conda install: platform : linux-64 conda version : 4.1.4 conda-env version : 2.5.1 conda-build version : 1.20.0 python version : 2.7.11.final.0 requests version : 2.9.2 root environment : /opt/conda2 (writable) default environment : /opt/conda2 envs directories : /opt/conda2/envs package cache : /opt/conda2/pkgs channel URLs : https://conda.anaconda.org/nanshe/linux-64/ https://conda.anaconda.org/nanshe/noarch/ https://conda.anaconda.org/conda-forge/linux-64/ https://conda.anaconda.org/conda-forge/noarch/ https://repo.continuum.io/pkgs/free/linux-64/ https://repo.continuum.io/pkgs/free/noarch/ https://repo.continuum.io/pkgs/pro/linux-64/ https://repo.continuum.io/pkgs/pro/noarch/ config file : /root/.condarc offline mode : False is foreign system : False ``` I'm having trouble reproducing this. I know this should be obvious but can you give us the exact command line? @jakirkham More details here would be helpful, including the specific package you tried to install by tarball. I'm genuinely having difficulty reproducing. So, this is coming out of an open sourced Docker image. Though I really should come up with a simpler example. I started to and then other stuff came up. I'll give it another try. Until I do here is the [Dockerfile](https://github.com/nanshe-org/docker_nanshe/blob/6fa60ad6f221731c17bf2277f5744f8b781095db/Dockerfile). Sorry it is so ugly. I've tried my best to make it readable given the constraints. The line that cause it to fail is this [one](https://github.com/nanshe-org/docker_nanshe/blob/6fa60ad6f221731c17bf2277f5744f8b781095db/Dockerfile#L29). Basically, what happens is we install everything in the `root` environment of two different `conda`s. One for Python 2 and the other for Python 3. We then remove one package `nanshe` and download the source code matching that version so as to have the test suite. We then remove the package and run the test suite. Once complete we try to reinstall the package from a file which fails. I was able to reproduce the problem---it is specifically limited to installing tarballs _in the package cache_. As a short-term fix you can copy the package out of the cache and then reinstall, I think. #2907 is the same issue. I'm working on a fix now. cc: @kalefranz
2016-06-29T04:29:30
-1.0
conda/conda
2,915
conda__conda-2915
[ "2681" ]
deaccea600d7b80cbcc939f018a5fdfe2a066967
diff --git a/conda/egg_info.py b/conda/egg_info.py --- a/conda/egg_info.py +++ b/conda/egg_info.py @@ -15,6 +15,7 @@ from .misc import rel_path + def get_site_packages_dir(installed_pkgs): for info in itervalues(installed_pkgs): if info['name'] == 'python': diff --git a/conda/exceptions.py b/conda/exceptions.py --- a/conda/exceptions.py +++ b/conda/exceptions.py @@ -8,6 +8,3 @@ class InvalidInstruction(CondaException): def __init__(self, instruction, *args, **kwargs): msg = "No handler for instruction: %r" % instruction super(InvalidInstruction, self).__init__(msg, *args, **kwargs) - -class LockError(RuntimeError, CondaException): - pass diff --git a/conda/lock.py b/conda/lock.py --- a/conda/lock.py +++ b/conda/lock.py @@ -17,11 +17,11 @@ """ from __future__ import absolute_import, division, print_function -import logging import os -import time - -from .exceptions import LockError +import logging +from os.path import join +import glob +from time import sleep LOCKFN = '.conda_lock' @@ -33,13 +33,15 @@ class Locked(object): """ Context manager to handle locks. """ - def __init__(self, path, retries=10): + def __init__(self, path): self.path = path self.end = "-" + str(os.getpid()) - self.lock_path = os.path.join(self.path, LOCKFN + self.end) - self.retries = retries + self.lock_path = join(self.path, LOCKFN + self.end) + self.pattern = join(self.path, LOCKFN + '-*') + self.remove = True def __enter__(self): + retries = 10 # Keep the string "LOCKERROR" in this string so that external # programs can look for it. lockstr = ("""\ @@ -48,24 +50,33 @@ def __enter__(self): If you are sure that conda is not running, remove it and try again. You can also use: $ conda clean --lock\n""") sleeptime = 1 - - for _ in range(self.retries): - if os.path.isdir(self.lock_path): - stdoutlog.info(lockstr % self.lock_path) + files = None + while retries: + files = glob.glob(self.pattern) + if files and not files[0].endswith(self.end): + stdoutlog.info(lockstr % str(files)) stdoutlog.info("Sleeping for %s seconds\n" % sleeptime) - - time.sleep(sleeptime) + sleep(sleeptime) sleeptime *= 2 + retries -= 1 else: - os.makedirs(self.lock_path) - return self + break + else: + stdoutlog.error("Exceeded max retries, giving up") + raise RuntimeError(lockstr % str(files)) - stdoutlog.error("Exceeded max retries, giving up") - raise LockError(lockstr % self.lock_path) + if not files: + try: + os.makedirs(self.lock_path) + except OSError: + pass + else: # PID lock already here --- someone else will remove it. + self.remove = False def __exit__(self, exc_type, exc_value, traceback): - try: - os.rmdir(self.lock_path) - os.rmdir(self.path) - except OSError: - pass + if self.remove: + for path in self.lock_path, self.path: + try: + os.rmdir(path) + except OSError: + pass
diff --git a/tests/test_exceptions.py b/tests/test_exceptions.py --- a/tests/test_exceptions.py +++ b/tests/test_exceptions.py @@ -18,7 +18,3 @@ def test_creates_message_with_instruction_name(self): e = exceptions.InvalidInstruction(random_instruction) expected = "No handler for instruction: %s" % random_instruction self.assertEqual(expected, str(e)) - -def test_lockerror_hierarchy(): - assert issubclass(exceptions.LockError, exceptions.CondaException) - assert issubclass(exceptions.LockError, RuntimeError) diff --git a/tests/test_lock.py b/tests/test_lock.py deleted file mode 100644 --- a/tests/test_lock.py +++ /dev/null @@ -1,32 +0,0 @@ -import os.path -import pytest - -from conda.lock import Locked, LockError - - -def test_lock_passes(tmpdir): - with Locked(tmpdir.strpath) as lock: - path = os.path.basename(lock.lock_path) - assert tmpdir.join(path).exists() and tmpdir.join(path).isdir() - - # lock should clean up after itself - assert not tmpdir.join(path).exists() - assert not tmpdir.exists() - -def test_lock_locks(tmpdir): - with Locked(tmpdir.strpath) as lock1: - path = os.path.basename(lock1.lock_path) - assert tmpdir.join(path).exists() and tmpdir.join(path).isdir() - - with pytest.raises(LockError) as execinfo: - with Locked(tmpdir.strpath, retries=1) as lock2: - assert False # this should never happen - assert lock2.lock_path == lock1.lock_path - assert "LOCKERROR" in str(execinfo) - assert "conda is already doing something" in str(execinfo) - - assert tmpdir.join(path).exists() and tmpdir.join(path).isdir() - - # lock should clean up after itself - assert not tmpdir.join(path).exists() - assert not tmpdir.exists()
[Regression] Conda create environment fails on lock if root environment is not under user control This issue is introduced in Conda 4.1.0 (Conda 4.0.8 works fine). ``` $ conda create -n root2 python=2 [123/1811] Fetching package metadata ....... Solving package specifications ............. Package plan for installation in environment /home/frol/.conda/envs/root2: The following NEW packages will be INSTALLED: openssl: 1.0.2h-1 (soft-link) pip: 8.1.2-py27_0 (soft-link) python: 2.7.11-0 (soft-link) readline: 6.2-2 (soft-link) setuptools: 23.0.0-py27_0 (soft-link) sqlite: 3.13.0-0 (soft-link) tk: 8.5.18-0 (soft-link) wheel: 0.29.0-py27_0 (soft-link) zlib: 1.2.8-3 (soft-link) Proceed ([y]/n)? Linking packages ... An unexpected error has occurred, please consider sending the following traceback to the conda GitHub issue tracker at: https://github.com/conda/conda/issues Include the output of the command 'conda info' in your report. Traceback (most recent call last): File "/usr/local/miniconda/bin/conda", line 6, in <module> sys.exit(main()) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 120, in main args_func(args, p) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 127, in args_func args.func(args, p) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 57, in execute install(args, parser, 'create') File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 407, in install execute_actions(actions, index, verbose=not args.quiet) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/plan.py", line 566, in execute_actions inst.execute_instructions(plan, index, verbose) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/instructions.py", line 137, in execute_instructions cmd(state, arg) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/instructions.py", line 80, in LINK_CMD link(state['prefix'], dist, lt, index=state['index'], shortcuts=shortcuts) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/install.py", line 1035, in link with Locked(prefix), Locked(pkgs_dir): File "/usr/local/miniconda/lib/python2.7/site-packages/conda/lock.py", line 60, in __enter__ os.makedirs(self.lock_path) File "/usr/local/miniconda/lib/python2.7/os.py", line 157, in makedirs mkdir(name, mode) OSError: [Errno 13] Permission denied: '/usr/local/miniconda/pkgs/.conda_lock-949' ``` `/usr/local/miniconda/` is a system-wide installation of miniconda, so obviously, users cannot create lock files there. P.S. I have a dream that updating conda software won't break things on every release...
It seems that I cannot even do `source activate ...` as a regular user now. It just hangs. Here is what I get when I interrupt it with `^C`: ``` $ source activate root2 ^CTraceback (most recent call last): File "/usr/local/miniconda/bin/conda", line 6, in <module> sys.exit(main()) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 48, in main activate.main() File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/activate.py", line 121, in main path = get_path(shelldict) File "/usr/local/miniconda/lib/python2.7/site-packages/conda/cli/activate.py", line 108, in get_path return run_in(shelldict["printpath"], shelldict)[0] File "/usr/local/miniconda/lib/python2.7/site-packages/conda/utils.py", line 174, in run_in stdout, stderr = p.communicate() File "/usr/local/miniconda/lib/python2.7/subprocess.py", line 799, in communicate return self._communicate(input) File "/usr/local/miniconda/lib/python2.7/subprocess.py", line 1409, in _communicate stdout, stderr = self._communicate_with_poll(input) File "/usr/local/miniconda/lib/python2.7/subprocess.py", line 1463, in _communicate_with_poll ready = poller.poll() KeyboardInterrupt ``` It also seems like there is no way to pin Conda version. Installing any package to the root env tries to update Conda to the latest version: ``` $ conda install python Using Anaconda Cloud api site https://api.anaconda.org Fetching package metadata: .... Solving package specifications: ......... Package plan for installation in environment /usr/local/miniconda: The following packages will be UPDATED: conda: 4.0.8-py27_0 --> 4.1.0-py27_0 conda-env: 2.4.5-py27_0 --> 2.5.0-py27_0 Proceed ([y]/n)? ``` You can now pin 4.0.9. ``` conda install conda=4.0.9 conda config --set auto_update_conda false ``` `auto_update_conda` setting will be added to 4.1.1 also, which is coming out tonight or tomorrow morning. Freaking lock on the whole package cache. We're going to get rid of that soon. I can't really tell what initially tripped here. What are the permissions on `/usr/local/miniconda/pkgs/.conda_lock-949`? Are they yours, or root, or does it even exist? Ohhh I think I get it, from your issue title. I actually thought this was already broken anyway. I plan on having 4.2 out in a couple weeks now, and should be a simple fix at that point. Ok with staying on 4.0.9 for this use case until then? @kalefranz It seems that I don't have options here but wait. I use Docker containers and this bug doesn't bother me that much. However, I would say that it is not a minor regression to postpone it to the next release. (From my experience, next release will break things in another way, so people will stuck with 4.0.*) In the last several months our coverage has gone from ~48% to almost 70%. The code base is still far more fragile and brittle than I'd like it to be, but we're making progress I think. Reverting #2320 fixed the regression. However, it seems that it just fails silently at locking, but at least it works in non-concurrent scenarios. cc @alanhdu @frol: Yeah, that's about right. Before #2320, conda would just swallow all `OsError`s (including `PermissionError`s). For now, we could add a `try ... except` around https://github.com/conda/conda/blob/master/conda/lock.py#L60, catch a `PermissionError` (or whatever the equivalent Python 2 error is), and try to do something smart (or just silently fail... depends how important that lock actually is). 
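For illustration, the guard proposed in the comment above could look roughly like the sketch below. This is a minimal sketch only, assuming the 4.1-era directory-based lock created with `os.makedirs`; the function name, the warn-and-continue behaviour, and the exact set of `errno` values are assumptions for the example, not conda's actual API (Python 2 has no `PermissionError`, so the portable form catches `OSError` and inspects `errno`):

```python
# Illustrative sketch, not conda's real lock.py: an advisory directory lock
# that is simply skipped when the install prefix is read-only.
import errno
import logging
import os

log = logging.getLogger(__name__)


def acquire_dir_lock(lock_path):
    """Try to create the lock directory; return False if permissions forbid it."""
    try:
        os.makedirs(lock_path)
        return True
    except OSError as e:
        if e.errno in (errno.EACCES, errno.EPERM, errno.EROFS):
            # Read-only root environment: warn and carry on without a lock.
            log.warning("could not create lock %s; proceeding unlocked", lock_path)
            return False
        raise  # any other failure (including an existing lock) is re-raised
```

Whether to warn or to fail silently was left open in the thread; the `lock.py` patch in the conda__conda-3326 row further down ends up taking the warn-and-continue route.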
I am seeing this issue as well @kalefranz After reading this, I thought I would be able to downgrade conda to 4.0.9. However after doing that, I am still unable to activate centrally administered conda environments as a user. Is there a prior version of conda you would reccommend? Or did the 4.1.3 version leave something in my miniconda install that is causing the problem? Do I need to re-install miniconda2 from scratch? Should I just wait for this to be fixed before proceeding with trying to build a central conda install for our users? @davidslac There are two options you may try: 1. Downgrade conda-env together with conda: ``` bash $ conda install conda=4.0.9 'conda-env<2.5' ``` 2. Patch (revert changes) conda lock in Conda 4.1.x: ``` bash $ curl -o "/usr/local/miniconda/lib/python"*"/site-packages/conda/lock.py" \ "https://raw.githubusercontent.com/conda/conda/9428ad0b76be55e8070e04dd577c96e7dab571e0/conda/lock.py" ``` Thanks! I tried both, but it sill did not work. It's probably something I'm overlooking on my part - after the first failure I deleted the lock.pyc, but still no luck. I'll just hope for a fix in the future. I suspect though, that central administration is not as standard of a use case, so one is more likely to run into problems. It may be we should just provide a channel of our packages to our users and let them administer their own software stacks, give them some environments we know work. best, David On 06/27/16 10:58, Vlad Frolov wrote: > @davidslac https://github.com/davidslac There are two options you > may try: > > 1. > > ``` > Downgrade conda-env: > > $ conda install'conda-env<2.5' > ``` > > 2. > > ``` > Patch (revert changes) conda lock: > > $ curl -o"/usr/local/miniconda/lib/python"*"/site-packages/conda/lock.py" \ > "https://raw.githubusercontent.com/conda/conda/9428ad0b76be55e8070e04dd577c96e7dab571e0/conda/lock.py" > ```
2016-06-29T17:16:25
-1.0
conda/conda
3041
conda__conda-3041
[ "3036" ]
0b4b690e6a3e1b5562307b4bda29f2f7cdbb4632
diff --git a/conda/cli/main_config.py b/conda/cli/main_config.py --- a/conda/cli/main_config.py +++ b/conda/cli/main_config.py @@ -151,21 +151,19 @@ def configure_parser(sub_parsers): choices=BoolOrListKey() ) action.add_argument( - "--add", + "--append", "--add", nargs=2, action="append", - help="""Add one configuration value to the beginning of a list key. - To add to the end of the list, use --append.""", + help="""Add one configuration value to the end of a list key.""", default=[], choices=ListKey(), metavar=('KEY', 'VALUE'), ) action.add_argument( - "--append", + "--prepend", nargs=2, action="append", - help="""Add one configuration value to a list key. The default - behavior is to prepend.""", + help="""Add one configuration value to the beginning of a list key.""", default=[], choices=ListKey(), metavar=('KEY', 'VALUE'), @@ -260,7 +258,7 @@ def execute_config(args, parser): # recreate the same file items = rc_config.get(key, []) numitems = len(items) - for q, item in enumerate(reversed(items)): + for q, item in enumerate(items): # Use repr so that it can be pasted back in to conda config --add if key == "channels" and q in (0, numitems-1): print("--add", key, repr(item), @@ -268,8 +266,8 @@ def execute_config(args, parser): else: print("--add", key, repr(item)) - # Add, append - for arg, prepend in zip((args.add, args.append), (True, False)): + # prepend, append, add + for arg, prepend in zip((args.prepend, args.append), (True, False)): for key, item in arg: if key == 'channels' and key not in rc_config: rc_config[key] = ['defaults'] @@ -287,7 +285,7 @@ def execute_config(args, parser): if item in arglist: # Right now, all list keys should not contain duplicates message = "Warning: '%s' already in '%s' list, moving to the %s" % ( - item, key, "front" if prepend else "back") + item, key, "top" if prepend else "bottom") arglist = rc_config[key] = [p for p in arglist if p != item] if not args.json: print(message, file=sys.stderr) diff --git a/conda/exceptions.py b/conda/exceptions.py --- a/conda/exceptions.py +++ b/conda/exceptions.py @@ -197,6 +197,12 @@ def __init__(self, message, *args, **kwargs): super(PackageNotFoundError, self).__init__(msg, *args, **kwargs) +class CondaHTTPError(CondaError): + def __init__(self, message, *args, **kwargs): + msg = 'HTTP Error: %s\n' % message + super(CondaHTTPError, self).__init__(msg, *args, **kwargs) + + class NoPackagesFoundError(CondaError, RuntimeError): '''An exception to report that requested packages are missing. @@ -352,7 +358,7 @@ def print_exception(exception): def get_info(): - from StringIO import StringIO + from conda.compat import StringIO from contextlib import contextmanager from conda.cli import conda_argparse from conda.cli.main_info import configure_parser diff --git a/conda/fetch.py b/conda/fetch.py --- a/conda/fetch.py +++ b/conda/fetch.py @@ -27,8 +27,8 @@ rm_rf, exp_backoff_fn) from .lock import Locked as Locked from .utils import memoized -from .exceptions import ProxyError, ChannelNotAllowed, CondaRuntimeError, CondaSignatureError - +from .exceptions import ProxyError, ChannelNotAllowed, CondaRuntimeError, CondaSignatureError, \ + CondaError, CondaHTTPError log = getLogger(__name__) dotlog = getLogger('dotupdate') @@ -159,7 +159,7 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None): msg = "HTTPError: %s: %s\n" % (e, remove_binstar_tokens(url)) log.debug(msg) - raise CondaRuntimeError(msg) + raise CondaHTTPError(msg) except requests.exceptions.SSLError as e: msg = "SSL Error: %s\n" % e
diff --git a/tests/test_config.py b/tests/test_config.py --- a/tests/test_config.py +++ b/tests/test_config.py @@ -303,19 +303,19 @@ def test_config_command_basics(): assert stdout == stderr == '' assert _read_test_condarc(rc) == """\ channels: - - test - defaults + - test """ with make_temp_condarc() as rc: # When defaults is explicitly given, it should not be added stdout, stderr = run_conda_command('config', '--file', rc, '--add', 'channels', 'test', '--add', 'channels', 'defaults') assert stdout == '' - assert stderr == "Warning: 'defaults' already in 'channels' list, moving to the front" + assert stderr == "Warning: 'defaults' already in 'channels' list, moving to the bottom" assert _read_test_condarc(rc) == """\ channels: - - defaults - test + - defaults """ # Duplicate keys should not be added twice with make_temp_condarc() as rc: @@ -325,11 +325,11 @@ def test_config_command_basics(): stdout, stderr = run_conda_command('config', '--file', rc, '--add', 'channels', 'test') assert stdout == '' - assert stderr == "Warning: 'test' already in 'channels' list, moving to the front" + assert stderr == "Warning: 'test' already in 'channels' list, moving to the bottom" assert _read_test_condarc(rc) == """\ channels: - - test - defaults + - test """ # Test append @@ -340,7 +340,7 @@ def test_config_command_basics(): stdout, stderr = run_conda_command('config', '--file', rc, '--append', 'channels', 'test') assert stdout == '' - assert stderr == "Warning: 'test' already in 'channels' list, moving to the back" + assert stderr == "Warning: 'test' already in 'channels' list, moving to the bottom" assert _read_test_condarc(rc) == """\ channels: - defaults @@ -394,10 +394,10 @@ def test_config_command_get(): --set always_yes True --set changeps1 False --set channel_alias http://alpha.conda.anaconda.org ---add channels 'defaults' # lowest priority ---add channels 'test' # highest priority ---add create_default_packages 'numpy' ---add create_default_packages 'ipython'\ +--add channels 'test' # lowest priority +--add channels 'defaults' # highest priority +--add create_default_packages 'ipython' +--add create_default_packages 'numpy'\ """ assert stderr == "unknown key invalid_key" @@ -405,8 +405,8 @@ def test_config_command_get(): '--get', 'channels') assert stdout == """\ ---add channels 'defaults' # lowest priority ---add channels 'test' # highest priority\ +--add channels 'test' # lowest priority +--add channels 'defaults' # highest priority\ """ assert stderr == "" @@ -423,8 +423,8 @@ def test_config_command_get(): assert stdout == """\ --set changeps1 False ---add channels 'defaults' # lowest priority ---add channels 'test' # highest priority\ +--add channels 'test' # lowest priority +--add channels 'defaults' # highest priority\ """ assert stderr == "" @@ -484,16 +484,23 @@ def test_config_command_parser(): assert stdout == """\ --set always_yes True --set changeps1 False ---add channels 'defaults' # lowest priority ---add channels 'test' # highest priority ---add create_default_packages 'numpy' ---add create_default_packages 'ipython'\ +--add channels 'test' # lowest priority +--add channels 'defaults' # highest priority +--add create_default_packages 'ipython' +--add create_default_packages 'numpy'\ """ + print(">>>>") + with open(rc, 'r') as fh: + print(fh.read()) - stdout, stderr = run_conda_command('config', '--file', rc, '--add', - 'channels', 'mychannel') + + + stdout, stderr = run_conda_command('config', '--file', rc, '--prepend', 'channels', 'mychannel') assert stdout == stderr == '' + 
with open(rc, 'r') as fh: + print(fh.read()) + assert _read_test_condarc(rc) == """\ channels: - mychannel
conda config needs --prepend; change behavior of --add to --append referencing https://github.com/conda/conda/issues/2841 - conda config needs `--prepend` - change behavior of `--add` to `--append` - un-reverse order of `conda config --get channels`
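For illustration, a minimal sketch of the list-update semantics the issue asks for: `--prepend` puts a value at the top of the list, while `--append` and the retargeted `--add` put it at the bottom. The helper name and the plain-dict config are stand-ins assumed for the example, not conda's internal `.condarc` handling:

```python
# Hypothetical helper mirroring the requested --prepend / --append behaviour.
def update_list_key(config, key, value, prepend=False):
    items = config.setdefault(key, [])
    if value in items:
        items.remove(value)  # move an existing entry rather than duplicating it
    if prepend:
        items.insert(0, value)   # conda config --prepend
    else:
        items.append(value)      # conda config --append (and now --add)
    return config


cfg = {"channels": ["defaults"]}
update_list_key(cfg, "channels", "conda-forge")              # ends up at the bottom
update_list_key(cfg, "channels", "mychannel", prepend=True)  # ends up at the top
assert cfg["channels"] == ["mychannel", "defaults", "conda-forge"]
```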
2016-07-11T22:03:41
-1.0
conda/conda
3326
conda__conda-3326
[ "3307", "3307" ]
550d679e447b02781caeb348aa89370a62ffa400
diff --git a/conda/lock.py b/conda/lock.py --- a/conda/lock.py +++ b/conda/lock.py @@ -45,8 +45,11 @@ def touch(file_name, times=None): Examples: touch("hello_world.py") """ - with open(file_name, 'a'): - os.utime(file_name, times) + try: + with open(file_name, 'a'): + os.utime(file_name, times) + except (OSError, IOError) as e: + log.warn("Failed to create lock, do not run conda in parallel process\n") class FileLock(object): @@ -111,11 +114,13 @@ def __init__(self, directory_path, retries=10): self.lock_file_path = "%s.pid{0}.%s" % (lock_path_pre, LOCK_EXTENSION) # e.g. if locking directory `/conda`, lock file will be `/conda/conda.pidXXXX.conda_lock` self.lock_file_glob_str = "%s.pid*.%s" % (lock_path_pre, LOCK_EXTENSION) + # make sure '/' exists assert isdir(dirname(self.directory_path)), "{0} doesn't exist".format(self.directory_path) if not isdir(self.directory_path): - os.makedirs(self.directory_path, exist_ok=True) - log.debug("forced to create %s", self.directory_path) - assert os.access(self.directory_path, os.W_OK), "%s not writable" % self.directory_path - + try: + os.makedirs(self.directory_path) + log.debug("forced to create %s", self.directory_path) + except (OSError, IOError) as e: + log.warn("Failed to create directory %s" % self.directory_path) Locked = DirectoryLock
diff --git a/tests/test_lock.py b/tests/test_lock.py --- a/tests/test_lock.py +++ b/tests/test_lock.py @@ -1,9 +1,12 @@ import pytest -from os.path import basename, join -from conda.lock import FileLock, LOCKSTR, LOCK_EXTENSION, LockError -from conda.install import on_win +from os.path import basename, join, exists, isfile +from conda.lock import FileLock, DirectoryLock, LockError + def test_filelock_passes(tmpdir): + """ + Normal test on file lock + """ package_name = "conda_file1" tmpfile = join(tmpdir.strpath, package_name) with FileLock(tmpfile) as lock: @@ -15,7 +18,10 @@ def test_filelock_passes(tmpdir): def test_filelock_locks(tmpdir): - + """ + Test on file lock, multiple lock on same file + Lock error should raised + """ package_name = "conda_file_2" tmpfile = join(tmpdir.strpath, package_name) with FileLock(tmpfile) as lock1: @@ -27,48 +33,42 @@ def test_filelock_locks(tmpdir): assert False # this should never happen assert lock2.path_to_lock == lock1.path_to_lock - if not on_win: - assert "LOCKERROR" in str(execinfo.value) - assert "conda is already doing something" in str(execinfo.value) assert tmpdir.join(path).exists() and tmpdir.join(path).isfile() # lock should clean up after itself assert not tmpdir.join(path).exists() -def test_filelock_folderlocks(tmpdir): - import os - package_name = "conda_file_2" +def test_folder_locks(tmpdir): + """ + Test on Directory lock + """ + package_name = "dir_1" tmpfile = join(tmpdir.strpath, package_name) - os.makedirs(tmpfile) - with FileLock(tmpfile) as lock1: - path = basename(lock1.lock_file_path) - assert tmpdir.join(path).exists() and tmpdir.join(path).isfile() + with DirectoryLock(tmpfile) as lock1: + + assert exists(lock1.lock_file_path) and isfile(lock1.lock_file_path) with pytest.raises(LockError) as execinfo: - with FileLock(tmpfile, retries=1) as lock2: + with DirectoryLock(tmpfile, retries=1) as lock2: assert False # this should never happen - assert lock2.path_to_lock == lock1.path_to_lock - - if not on_win: - assert "LOCKERROR" in str(execinfo.value) - assert "conda is already doing something" in str(execinfo.value) - assert lock1.path_to_lock in str(execinfo.value) - assert tmpdir.join(path).exists() and tmpdir.join(path).isfile() + assert exists(lock1.lock_file_path) and isfile(lock1.lock_file_path) # lock should clean up after itself - assert not tmpdir.join(path).exists() - - -def lock_thread(tmpdir, file_path): - with FileLock(file_path) as lock1: - path = basename(lock1.lock_file_path) - assert tmpdir.join(path).exists() and tmpdir.join(path).isfile() - assert not tmpdir.join(path).exists() + assert not exists(lock1.lock_file_path) def test_lock_thread(tmpdir): + """ + 2 thread want to lock a file + One thread will have LockError Raised + """ + def lock_thread(tmpdir, file_path): + with FileLock(file_path) as lock1: + path = basename(lock1.lock_file_path) + assert tmpdir.join(path).exists() and tmpdir.join(path).isfile() + assert not tmpdir.join(path).exists() from threading import Thread package_name = "conda_file_3" @@ -85,13 +85,17 @@ def test_lock_thread(tmpdir): assert not tmpdir.join(path).exists() -def lock_thread_retries(tmpdir, file_path): - with pytest.raises(LockError) as execinfo: - with FileLock(file_path, retries=0): - assert False # should never enter here, since max_tires is 0 - assert "LOCKERROR" in str(execinfo.value) - def test_lock_retries(tmpdir): + """ + 2 thread want to lock a same file + Lock has zero retries + One thread will have LockError raised + """ + def lock_thread_retries(tmpdir, 
file_path): + with pytest.raises(LockError) as execinfo: + with FileLock(file_path, retries=0): + assert False # should never enter here, since max_tires is 0 + assert "LOCKERROR" in str(execinfo.value) from threading import Thread package_name = "conda_file_3" @@ -106,3 +110,19 @@ def test_lock_retries(tmpdir): t.join() # lock should clean up after itself assert not tmpdir.join(path).exists() + + +def test_permission_file(): + """ + Test when lock cannot be created due to permission + Make sure no exception raised + """ + import tempfile + from conda.compat import text_type + with tempfile.NamedTemporaryFile(mode='r') as f: + if not isinstance(f.name, text_type): + return + with FileLock(f.name) as lock: + + path = basename(lock.lock_file_path) + assert not exists(join(f.name, path))
[Regression] Conda create environment fails on lock if root environment is not under user control This is "funny", but it seems that Conda managed to break this thing the second time in a month... #2681 was the previous one. This time, I get the following error: ``` $ conda create -n _root --yes --use-index-cache python=3 ... Traceback (most recent call last): File "/usr/local/miniconda/lib/python3.5/site-packages/conda/exceptions.py", line 442, in conda_exception_handler return_value = func(*args, **kwargs) File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/main.py", line 144, in _main exit_code = args.func(args, p) File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/main_create.py", line 66, in execute install(args, parser, 'create') File "/usr/local/miniconda/lib/python3.5/site-packages/conda/cli/install.py", line 399, in install execute_actions(actions, index, verbose=not args.quiet) File "/usr/local/miniconda/lib/python3.5/site-packages/conda/plan.py", line 640, in execute_actions inst.execute_instructions(plan, index, verbose) File "/usr/local/miniconda/lib/python3.5/site-packages/conda/instructions.py", line 132, in execute_instructions cmd(state, arg) File "/usr/local/miniconda/lib/python3.5/site-packages/conda/instructions.py", line 77, in LINK_CMD link(state['prefix'], dist, lt, index=state['index']) File "/usr/local/miniconda/lib/python3.5/site-packages/conda/install.py", line 1060, in link with DirectoryLock(prefix), FileLock(source_dir): File "/usr/local/miniconda/lib/python3.5/site-packages/conda/lock.py", line 86, in __enter__ touch(self.lock_file_path) File "/usr/local/miniconda/lib/python3.5/site-packages/conda/lock.py", line 48, in touch with open(file_name, 'a'): PermissionError: [Errno 13] Permission denied: '/usr/local/miniconda/pkgs/openssl-1.0.2h-1.pid34.conda_lock' ``` ``` Current conda install: platform : linux-64 conda version : 4.2.2 conda is private : False conda-env version : 2.6.0 conda-build version : 1.21.11+0.g5b44ab3.dirty python version : 3.5.2.final.0 requests version : 2.10.0 root environment : /usr/local/miniconda (read only) default environment : /usr/local/miniconda envs directories : /home/gitlab-ci/.conda/envs /usr/local/miniconda/envs package cache : /home/gitlab-ci/.conda/envs/.pkgs /usr/local/miniconda/pkgs channel URLs : defaults config file : None offline mode : False ``` /CC @kalefranz
@frol, First, **thanks for using canary**!! Do you know if a previous run of conda crashed, exited prematurely, or you sent a signal (i.e. ctrl-c) for it to exit early? Something made that lock file stick around... ``` conda clean --yes --lock ``` should take care of it for you. If it doesn't, that's definitely a bug. We're still working on tuning the locking in conda. It's (hopefully) better in 4.2 than it was in 4.1, and I know for sure it will be better yet in 4.3 with @HugoTian's work in #3197. @kalefranz The error message is quite informative: ``` File "/usr/local/miniconda/lib/python3.5/site-packages/conda/lock.py", line 48, in touch with open(file_name, 'a'): PermissionError: [Errno 13] Permission denied: '/usr/local/miniconda/pkgs/openssl-1.0.2h-1.pid34.conda_lock' ``` `/usr/local/miniconda` is a read-only directory, so there is no way `open(mode='a')` will ever succeed there. Ahh. This then is what I generally call "multi-user support." That is, install conda as one user, and use the executable as another but without write permissions to the conda executable's root environment. This used case has always been problematic, and it's s high priority to tackle and "get right" in the next several months. I think I just ran into this, created a env in a central install area, then from a user account tried to clone the environment, but it failed with something similar, doing `conda create --offline -n myrel --clone anarel-1.0.0 ` caused ``` ... WARNING conda.install:warn_failed_remove(178): Cannot remove, permission denied: /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs/psana-conda-1.0.0-py27_1/bin WARNING conda.install:warn_failed_remove(178): Cannot remove, permission denied: /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs/psana-conda-1.0.0-py27_1 Pruning fetched packages from the cache ... An unexpected error has occurred. 
Please consider posting the following information to the conda GitHub issue tracker at: https://github.com/conda/conda/issues Current conda install: platform : linux-64 conda version : 4.2.3 conda is private : False conda-env version : 2.6.0 conda-build version : 1.21.11+0.g5b44ab3.dirty python version : 2.7.12.final.0 requests version : 2.10.0 root environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7 (read only) default environment : /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/anarel-1.0.0 envs directories : /reg/neh/home/davidsch/.conda/envs /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs package cache : /reg/neh/home/davidsch/.conda/envs/.pkgs /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs channel URLs : file:///reg/g/psdm/sw/conda/channels/system-rhel7 file:///reg/g/psdm/sw/conda/channels/external-rhel7 defaults file:///reg/g/psdm/sw/conda/channels/psana-rhel7 scikit-beam file:///reg/g/psdm/sw/conda/channels/testing-rhel7 config file : /reg/neh/home/davidsch/.condarc offline mode : False `$ /reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/envs/anarel-1.0.0/bin/conda create --offline -n myrel --clone anarel-1.0.0` Traceback (most recent call last): File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/exceptions.py", line 442, in conda_exception_handler return_value = func(*args, **kwargs) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/main.py", line 144, in _main exit_code = args.func(args, p) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/main_create.py", line 66, in execute install(args, parser, 'create') File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/install.py", line 225, in install clone(args.clone, prefix, json=args.json, quiet=args.quiet, index_args=index_args) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/cli/install.py", line 90, in clone index_args=index_args) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/misc.py", line 388, in clone_env force_extract=False, index_args=index_args) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/misc.py", line 188, in explicit execute_actions(actions, index=index, verbose=verbose) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/plan.py", line 639, in execute_actions inst.execute_instructions(plan, index, verbose) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/instructions.py", line 132, in execute_instructions cmd(state, arg) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/instructions.py", line 66, in RM_FETCHED_CMD rm_fetched(arg) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/install.py", line 801, in rm_fetched with FileLock(fname): File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/lock.py", line 86, in __enter__ touch(self.lock_file_path) File "/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/lib/python2.7/site-packages/conda/lock.py", line 48, in touch with open(file_name, 'a'): IOError: [Errno 13] Permission denied: u'/reg/g/psdm/sw/conda/inst/miniconda2-dev-rhel7/pkgs/openmpi-1.10.3-lsf_verbs_100.tar.bz2.pid17825.conda_lock' ``` @HugoTian As part of the 4.2 release, I think we need a lock cleanup step in the exception handler that just 
attempts to clean up any lock files for the current pid before it ultimately exits. Do you agree? @kalefranz , Moreover, I think some how we need to bring back the try/except block when we create the lock file. Something like : ``` try: touch(****) except OSError: pass ```
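A sketch of the per-PID cleanup idea raised above, assuming lock files follow the `<name>.pid<PID>.conda_lock` pattern visible in the tracebacks; the function and its placement in a top-level exception handler are hypothetical, not conda's actual implementation:

```python
# Hypothetical best-effort cleanup of this process's leftover lock files,
# meant to run from an exception handler just before exiting.
import glob
import os

LOCK_EXTENSION = "conda_lock"


def cleanup_my_locks(*directories):
    pattern = "*.pid%d.%s" % (os.getpid(), LOCK_EXTENSION)
    for directory in directories:
        for path in glob.glob(os.path.join(directory, pattern)):
            try:
                os.unlink(path)
            except OSError:
                pass  # cleanup must never mask the original error
```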
2016-08-20T01:22:54
-1.0