Dataset columns: hash (string, 40 chars), repo (string, 9-36 chars), date (null in all rows), license (null in all rows), message (string, 74-349 chars), mods (list, 1-16 entries)
c27fd80c76b03bf187fa98029e2edb151644903a
burnysc2/python-sc2
null
null
Add different functions to print debug text. Add: debug box/line/sphere functions. Add: all debug requests are collected so the user can call send_debug() to send the queued debug draws as one batched request.
[ { "change_type": "MODIFY", "diff": "@@ -29,6 +29,10 @@ class Client(Protocol):\n super().__init__(ws)\n self._player_id = None\n self._game_result = None\n+ self._debug_texts = list()\n+ self._debug_lines = list()\n+ self._debug_boxes = list()\n+ self._debug_spheres = list()\n \n @property\n def in_game(self):\n@@ -212,7 +216,50 @@ class Client(Protocol):\n else:\n await self.debug_text([texts], [positions], color)\n \n- async def debug_create_unit(self, unit_type, amount_of_units, position, owner_id):\n+ def debug_text_simple(self, text, color=None):\n+ self._debug_texts.append(to_debug_message(text, color))\n+\n+ def debug_text_2d(self, text, pos, color=None, size=8):\n+ self._debug_texts.append(to_debug_message(text, color, pos, False, size))\n+\n+ def debug_text_3d(self, text, pos, color=None, size=8):\n+ self._debug_texts.append(to_debug_message(text, color, pos, True, size))\n+\n+ def debug_line_out(self, p0, p1, color=None):\n+ self._debug_lines.append(debug_pb.DebugLine(\n+ line=debug_pb.Line(p0=to_debug_point(p0), p1=to_debug_point(p1)),\n+ color=to_debug_color(color)))\n+\n+ def debug_box_out(self, p_min, p_max, color=None):\n+ self._debug_boxes.append(debug_pb.DebugBox(\n+ min=to_debug_point(p_min),\n+ max=to_debug_point(p_max),\n+ color=to_debug_color(color)\n+ ))\n+\t\t\n+ def debug_sphere_out(self, p, r, color=None):\n+ self._debug_spheres.append(debug_pb.DebugSphere(\n+ p=to_debug_point(p),\n+ r=r,\n+ color=to_debug_color(color)\n+ ))\n+\n+ async def send_debug(self):\n+\t\tawait self._execute(debug=sc_pb.RequestDebug(\n+ debug=[debug_pb.DebugCommand(draw=debug_pb.DebugDraw(\n+ text=self._debug_texts if len(self._debug_texts) > 0 else None,\n+ lines=self._debug_lines if len(self._debug_lines) > 0 else None,\n+ boxes=self._debug_boxes if len(self._debug_boxes) > 0 else None,\n+ spheres=self._debug_spheres if len(self._debug_spheres) > 0 else None\n+ ))]\n+\n+ ))\n+ self._debug_texts.clear()\n+ self._debug_lines.clear()\n+ self._debug_boxes.clear()\n+ self._debug_spheres.clear()\n+\t\t\n+\tasync def debug_create_unit(self, unit_type, amount_of_units, position, owner_id):\n # example:\n # await self._client.debug_create_unit(MARINE, 1, self._game_info.map_center, 1)\n assert isinstance(unit_type, UnitTypeId)\n@@ -228,14 +275,44 @@ class Client(Protocol):\n quantity=(amount_of_units)\n ))]\n ))\n- async def debug_text_simple(self, texts):\n- if not isinstance(texts, list):\n- texts = [texts]\n- await self._execute(debug=sc_pb.RequestDebug(\n- debug=[debug_pb.DebugCommand(draw=debug_pb.DebugDraw(\n- text=[debug_pb.DebugText(\n- text=text,\n- color=debug_pb.Color(r=1, g=1, b=1),\n- ) for text in texts]\n- ))]\n- ))\n+\t\t\n+ \n+\n+def to_debug_color(color):\n+ if color is None:\n+ return debug_pb.Color(r=255, g=255, b=255)\n+ else:\n+ r = getattr(color, \"r\", getattr(color, \"x\", 255))\n+ g = getattr(color, \"g\", getattr(color, \"y\", 255))\n+ b = getattr(color, \"b\", getattr(color, \"z\", 255))\n+ if r + g + b <= 3:\n+ r *= 255\n+ g *= 255\n+ b *= 255\n+\n+ return debug_pb.Color(r=int(r), g=int(g), b=int(b))\n+\n+\n+def to_debug_point(point):\n+ return common_pb.Point(x=point.x, y=point.y, z=getattr(point, \"z\", 0))\n+\n+\n+def to_debug_message(text, color=None, pos=None, is3d=False, size=8):\n+ text = text\n+ color = to_debug_color(color)\n+ size = size\n+ pt3d = None\n+ virtual_pos = None\n+\n+ if pos is not None:\n+ if is3d:\n+ pt3d = to_debug_point(pos)\n+ else:\n+ virtual_pos = to_debug_point(pos)\n+ return debug_pb.DebugText(\n+ color=color,\n+ text=text,\n+ 
virtual_pos=virtual_pos,\n+ world_pos=pt3d,\n+ size=size\n+ )\n", "new_path": "sc2/client.py", "old_path": "sc2/client.py" } ]
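As a usage illustration for the batched debug API above: a minimal sketch of one bot step, assuming a typical python-sc2 bot where `self._client` is this `Client`. `Pt` is a hypothetical stand-in for the library's point types; per `to_debug_point`, anything exposing `.x`/`.y` (and optionally `.z`) works.

```python
from collections import namedtuple

# Stand-in for python-sc2's point types; to_debug_point only reads .x/.y(/.z).
Pt = namedtuple("Pt", ["x", "y", "z"])

async def on_step(self, iteration):
    c = self._client
    c.debug_text_simple(f"iteration {iteration}")       # plain on-screen text
    c.debug_text_2d("screen-space", Pt(0.05, 0.05, 0))  # virtual (2D) position
    c.debug_text_3d("world-space", Pt(30, 30, 10))      # world position
    c.debug_line_out(Pt(20, 20, 10), Pt(40, 40, 10))
    c.debug_box_out(Pt(25, 25, 8), Pt(30, 30, 12))
    c.debug_sphere_out(Pt(32, 32, 10), r=4)
    await c.send_debug()  # flush everything queued above in one RequestDebug
```

Nothing is sent until `send_debug()` runs, which is the point of the change: draw calls are cheap accumulators, and the protocol round-trip happens once per step.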
7b2e711c738b864e4e0b7c35257225fb23cfa15d
rqlite/rqlite
null
null
Remove constraint check. It's too clever, and it was causing practical issues in the tests.
[ { "change_type": "MODIFY", "diff": "@@ -136,7 +136,7 @@ class Node(object):\n r.raise_for_status()\n return r.json()\n \n- def is_leader(self, constraint_check=True):\n+ def is_leader(self):\n '''\n is_leader returns whether this node is the cluster leader\n It also performs a check, to ensure the node nevers gives out\n@@ -144,28 +144,16 @@ class Node(object):\n '''\n \n try:\n- isLeaderRaft = self.status()['store']['raft']['state'] == 'Leader'\n- isLeaderNodes = self.nodes()[self.node_id]['leader'] is True\n+ return self.status()['store']['raft']['state'] == 'Leader'\n except requests.exceptions.ConnectionError:\n return False\n \n- if (isLeaderRaft != isLeaderNodes) and constraint_check:\n- raise AssertionError(\"conflicting states reported for leadership (raft: %s, nodes: %s)\"\n- % (isLeaderRaft, isLeaderNodes))\n- return isLeaderNodes\n-\n def is_follower(self):\n try:\n- isFollowerRaft = self.status()['store']['raft']['state'] == 'Follower'\n- isFollowersNodes = self.nodes()[self.node_id]['leader'] is False\n+ return self.status()['store']['raft']['state'] == 'Follower'\n except requests.exceptions.ConnectionError:\n return False\n \n- if isFollowerRaft != isFollowersNodes:\n- raise AssertionError(\"conflicting states reported for followership (raft: %s, nodes: %s)\"\n- % (isFollowerRaft, isFollowersNodes))\n- return isFollowersNodes\n-\n def wait_for_leader(self, timeout=TIMEOUT):\n lr = None\n t = 0\n@@ -289,6 +277,7 @@ class Node(object):\n \n def redirect_addr(self):\n r = requests.post(self._execute_url(redirect=True), data=json.dumps(['nonsense']), allow_redirects=False)\n+ r.raise_for_status()\n if r.status_code == 301:\n return \"%s://%s\" % (urlparse(r.headers['Location']).scheme, urlparse(r.headers['Location']).netloc)\n \n@@ -333,7 +322,7 @@ def deprovision_node(node):\n class Cluster(object):\n def __init__(self, nodes):\n self.nodes = nodes\n- def wait_for_leader(self, node_exc=None, timeout=TIMEOUT, constraint_check=True):\n+ def wait_for_leader(self, node_exc=None, timeout=TIMEOUT):\n t = 0\n while True:\n if t > timeout:\n@@ -341,7 +330,7 @@ class Cluster(object):\n for n in self.nodes:\n if node_exc is not None and n == node_exc:\n continue\n- if n.is_leader(constraint_check):\n+ if n.is_leader():\n return n\n time.sleep(1)\n t+=1\n@@ -682,10 +671,9 @@ class TestEndToEndNonVoterFollowsLeader(unittest.TestCase):\n j = n.query('SELECT * FROM foo')\n self.assertEqual(str(j), \"{u'results': [{u'values': [[1, u'fiona']], u'types': [u'integer', u'text'], u'columns': [u'id', u'name']}]}\")\n \n- # Kill leader, and then make more changes. Don't perform leader-constraint checks\n- # since the cluster is changing right now.\n- n0 = self.cluster.wait_for_leader(constraint_check=False).stop()\n- n1 = self.cluster.wait_for_leader(node_exc=n0, constraint_check=False)\n+ # Kill leader, and then make more changes.\n+ n0 = self.cluster.wait_for_leader().stop()\n+ n1 = self.cluster.wait_for_leader(node_exc=n0)\n n1.wait_for_all_applied()\n j = n1.query('SELECT * FROM foo')\n self.assertEqual(str(j), \"{u'results': [{u'values': [[1, u'fiona']], u'types': [u'integer', u'text'], u'columns': [u'id', u'name']}]}\")\n", "new_path": "system_test/full_system_test.py", "old_path": "system_test/full_system_test.py" } ]
dad51485282b6e05c4993b0733bd54aa3c0bacef
cupy/cupy
null
null
Use "import numpy as np" in the array_api submodule This avoids importing everything inside the individual functions, but still is preferred over importing the functions used explicitly, as most of them clash with the wrapper function names.
[ { "change_type": "MODIFY", "diff": "@@ -1,76 +1,67 @@\n+import numpy as np\n+\n def arange(start, /, *, stop=None, step=1, dtype=None, device=None):\n- from .. import arange\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return arange(start, stop=stop, step=step, dtype=dtype)\n+ return np.arange(start, stop=stop, step=step, dtype=dtype)\n \n def empty(shape, /, *, dtype=None, device=None):\n- from .. import empty\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return empty(shape, dtype=dtype)\n+ return np.empty(shape, dtype=dtype)\n \n def empty_like(x, /, *, dtype=None, device=None):\n- from .. import empty_like\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return empty_like(x, dtype=dtype)\n+ return np.empty_like(x, dtype=dtype)\n \n def eye(N, /, *, M=None, k=0, dtype=None, device=None):\n- from .. import eye\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return eye(N, M=M, k=k, dtype=dtype)\n+ return np.eye(N, M=M, k=k, dtype=dtype)\n \n def full(shape, fill_value, /, *, dtype=None, device=None):\n- from .. import full\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return full(shape, fill_value, dtype=dtype)\n+ return np.full(shape, fill_value, dtype=dtype)\n \n def full_like(x, fill_value, /, *, dtype=None, device=None):\n- from .. import full_like\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return full_like(x, fill_value, dtype=dtype)\n+ return np.full_like(x, fill_value, dtype=dtype)\n \n def linspace(start, stop, num, /, *, dtype=None, device=None, endpoint=True):\n- from .. import linspace\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return linspace(start, stop, num, dtype=dtype, endpoint=endpoint)\n+ return np.linspace(start, stop, num, dtype=dtype, endpoint=endpoint)\n \n def ones(shape, /, *, dtype=None, device=None):\n- from .. import ones\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return ones(shape, dtype=dtype)\n+ return np.ones(shape, dtype=dtype)\n \n def ones_like(x, /, *, dtype=None, device=None):\n- from .. import ones_like\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return ones_like(x, dtype=dtype)\n+ return np.ones_like(x, dtype=dtype)\n \n def zeros(shape, /, *, dtype=None, device=None):\n- from .. import zeros\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return zeros(shape, dtype=dtype)\n+ return np.zeros(shape, dtype=dtype)\n \n def zeros_like(x, /, *, dtype=None, device=None):\n- from .. 
import zeros_like\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return zeros_like(x, dtype=dtype)\n+ return np.zeros_like(x, dtype=dtype)\n", "new_path": "numpy/_array_api/_creation_functions.py", "old_path": "numpy/_array_api/_creation_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,230 +1,177 @@\n+import numpy as np\n+\n def abs(x, /):\n- from .. import abs\n- return abs(x)\n+ return np.abs(x)\n \n def acos(x, /):\n # Note: the function name is different here\n- from .. import arccos\n- return arccos(x)\n+ return np.arccos(x)\n \n def acosh(x, /):\n # Note: the function name is different here\n- from .. import arccosh\n- return arccosh(x)\n+ return np.arccosh(x)\n \n def add(x1, x2, /):\n- from .. import add\n- return add(x1, x2)\n+ return np.add(x1, x2)\n \n def asin(x, /):\n # Note: the function name is different here\n- from .. import arcsin\n- return arcsin(x)\n+ return np.arcsin(x)\n \n def asinh(x, /):\n # Note: the function name is different here\n- from .. import arcsinh\n- return arcsinh(x)\n+ return np.arcsinh(x)\n \n def atan(x, /):\n # Note: the function name is different here\n- from .. import arctan\n- return arctan(x)\n+ return np.arctan(x)\n \n def atan2(x1, x2, /):\n # Note: the function name is different here\n- from .. import arctan2\n- return arctan2(x1, x2)\n+ return np.arctan2(x1, x2)\n \n def atanh(x, /):\n # Note: the function name is different here\n- from .. import arctanh\n- return arctanh(x)\n+ return np.arctanh(x)\n \n def bitwise_and(x1, x2, /):\n- from .. import bitwise_and\n- return bitwise_and(x1, x2)\n+ return np.bitwise_and(x1, x2)\n \n def bitwise_left_shift(x1, x2, /):\n # Note: the function name is different here\n- from .. import left_shift\n- return left_shift(x1, x2)\n+ return np.left_shift(x1, x2)\n \n def bitwise_invert(x, /):\n # Note: the function name is different here\n- from .. import invert\n- return invert(x)\n+ return np.invert(x)\n \n def bitwise_or(x1, x2, /):\n- from .. import bitwise_or\n- return bitwise_or(x1, x2)\n+ return np.bitwise_or(x1, x2)\n \n def bitwise_right_shift(x1, x2, /):\n # Note: the function name is different here\n- from .. import right_shift\n- return right_shift(x1, x2)\n+ return np.right_shift(x1, x2)\n \n def bitwise_xor(x1, x2, /):\n- from .. import bitwise_xor\n- return bitwise_xor(x1, x2)\n+ return np.bitwise_xor(x1, x2)\n \n def ceil(x, /):\n- from .. import ceil\n- return ceil(x)\n+ return np.ceil(x)\n \n def cos(x, /):\n- from .. import cos\n- return cos(x)\n+ return np.cos(x)\n \n def cosh(x, /):\n- from .. import cosh\n- return cosh(x)\n+ return np.cosh(x)\n \n def divide(x1, x2, /):\n- from .. import divide\n- return divide(x1, x2)\n+ return np.divide(x1, x2)\n \n def equal(x1, x2, /):\n- from .. import equal\n- return equal(x1, x2)\n+ return np.equal(x1, x2)\n \n def exp(x, /):\n- from .. import exp\n- return exp(x)\n+ return np.exp(x)\n \n def expm1(x, /):\n- from .. import expm1\n- return expm1(x)\n+ return np.expm1(x)\n \n def floor(x, /):\n- from .. import floor\n- return floor(x)\n+ return np.floor(x)\n \n def floor_divide(x1, x2, /):\n- from .. import floor_divide\n- return floor_divide(x1, x2)\n+ return np.floor_divide(x1, x2)\n \n def greater(x1, x2, /):\n- from .. import greater\n- return greater(x1, x2)\n+ return np.greater(x1, x2)\n \n def greater_equal(x1, x2, /):\n- from .. 
import greater_equal\n- return greater_equal(x1, x2)\n+ return np.greater_equal(x1, x2)\n \n def isfinite(x, /):\n- from .. import isfinite\n- return isfinite(x)\n+ return np.isfinite(x)\n \n def isinf(x, /):\n- from .. import isinf\n- return isinf(x)\n+ return np.isinf(x)\n \n def isnan(x, /):\n- from .. import isnan\n- return isnan(x)\n+ return np.isnan(x)\n \n def less(x1, x2, /):\n- from .. import less\n- return less(x1, x2)\n+ return np.less(x1, x2)\n \n def less_equal(x1, x2, /):\n- from .. import less_equal\n- return less_equal(x1, x2)\n+ return np.less_equal(x1, x2)\n \n def log(x, /):\n- from .. import log\n- return log(x)\n+ return np.log(x)\n \n def log1p(x, /):\n- from .. import log1p\n- return log1p(x)\n+ return np.log1p(x)\n \n def log2(x, /):\n- from .. import log2\n- return log2(x)\n+ return np.log2(x)\n \n def log10(x, /):\n- from .. import log10\n- return log10(x)\n+ return np.log10(x)\n \n def logical_and(x1, x2, /):\n- from .. import logical_and\n- return logical_and(x1, x2)\n+ return np.logical_and(x1, x2)\n \n def logical_not(x, /):\n- from .. import logical_not\n- return logical_not(x)\n+ return np.logical_not(x)\n \n def logical_or(x1, x2, /):\n- from .. import logical_or\n- return logical_or(x1, x2)\n+ return np.logical_or(x1, x2)\n \n def logical_xor(x1, x2, /):\n- from .. import logical_xor\n- return logical_xor(x1, x2)\n+ return np.logical_xor(x1, x2)\n \n def multiply(x1, x2, /):\n- from .. import multiply\n- return multiply(x1, x2)\n+ return np.multiply(x1, x2)\n \n def negative(x, /):\n- from .. import negative\n- return negative(x)\n+ return np.negative(x)\n \n def not_equal(x1, x2, /):\n- from .. import not_equal\n- return not_equal(x1, x2)\n+ return np.not_equal(x1, x2)\n \n def positive(x, /):\n- from .. import positive\n- return positive(x)\n+ return np.positive(x)\n \n def pow(x1, x2, /):\n # Note: the function name is different here\n- from .. import power\n- return power(x1, x2)\n+ return np.power(x1, x2)\n \n def remainder(x1, x2, /):\n- from .. import remainder\n- return remainder(x1, x2)\n+ return np.remainder(x1, x2)\n \n def round(x, /):\n- from .. import round\n- return round(x)\n+ return np.round(x)\n \n def sign(x, /):\n- from .. import sign\n- return sign(x)\n+ return np.sign(x)\n \n def sin(x, /):\n- from .. import sin\n- return sin(x)\n+ return np.sin(x)\n \n def sinh(x, /):\n- from .. import sinh\n- return sinh(x)\n+ return np.sinh(x)\n \n def square(x, /):\n- from .. import square\n- return square(x)\n+ return np.square(x)\n \n def sqrt(x, /):\n- from .. import sqrt\n- return sqrt(x)\n+ return np.sqrt(x)\n \n def subtract(x1, x2, /):\n- from .. import subtract\n- return subtract(x1, x2)\n+ return np.subtract(x1, x2)\n \n def tan(x, /):\n- from .. import tan\n- return tan(x)\n+ return np.tan(x)\n \n def tanh(x, /):\n- from .. import tanh\n- return tanh(x)\n+ return np.tanh(x)\n \n def trunc(x, /):\n- from .. import trunc\n- return trunc(x)\n+ return np.trunc(x)\n", "new_path": "numpy/_array_api/_elementwise_functions.py", "old_path": "numpy/_array_api/_elementwise_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,93 +1,73 @@\n+import numpy as np\n+\n # def cholesky():\n-# from .. import cholesky\n-# return cholesky()\n+# return np.cholesky()\n \n def cross(x1, x2, /, *, axis=-1):\n- from .. 
import cross\n- return cross(x1, x2, axis=axis)\n+ return np.cross(x1, x2, axis=axis)\n \n def det(x, /):\n # Note: this function is being imported from a nondefault namespace\n- from ..linalg import det\n- return det(x)\n+ return np.det(x)\n \n def diagonal(x, /, *, axis1=0, axis2=1, offset=0):\n- from .. import diagonal\n- return diagonal(x, axis1=axis1, axis2=axis2, offset=offset)\n+ return np.diagonal(x, axis1=axis1, axis2=axis2, offset=offset)\n \n # def dot():\n-# from .. import dot\n-# return dot()\n+# return np.dot()\n #\n # def eig():\n-# from .. import eig\n-# return eig()\n+# return np.eig()\n #\n # def eigvalsh():\n-# from .. import eigvalsh\n-# return eigvalsh()\n+# return np.eigvalsh()\n #\n # def einsum():\n-# from .. import einsum\n-# return einsum()\n+# return np.einsum()\n \n def inv(x):\n # Note: this function is being imported from a nondefault namespace\n- from ..linalg import inv\n- return inv(x)\n+ return np.inv(x)\n \n # def lstsq():\n-# from .. import lstsq\n-# return lstsq()\n+# return np.lstsq()\n #\n # def matmul():\n-# from .. import matmul\n-# return matmul()\n+# return np.matmul()\n #\n # def matrix_power():\n-# from .. import matrix_power\n-# return matrix_power()\n+# return np.matrix_power()\n #\n # def matrix_rank():\n-# from .. import matrix_rank\n-# return matrix_rank()\n+# return np.matrix_rank()\n \n def norm(x, /, *, axis=None, keepdims=False, ord=None):\n # Note: this function is being imported from a nondefault namespace\n- from ..linalg import norm\n # Note: this is different from the default behavior\n if axis == None and x.ndim > 2:\n x = x.flatten()\n- return norm(x, axis=axis, keepdims=keepdims, ord=ord)\n+ return np.norm(x, axis=axis, keepdims=keepdims, ord=ord)\n \n def outer(x1, x2, /):\n- from .. import outer\n- return outer(x1, x2)\n+ return np.outer(x1, x2)\n \n # def pinv():\n-# from .. import pinv\n-# return pinv()\n+# return np.pinv()\n #\n # def qr():\n-# from .. import qr\n-# return qr()\n+# return np.qr()\n #\n # def slogdet():\n-# from .. import slogdet\n-# return slogdet()\n+# return np.slogdet()\n #\n # def solve():\n-# from .. import solve\n-# return solve()\n+# return np.solve()\n #\n # def svd():\n-# from .. import svd\n-# return svd()\n+# return np.svd()\n \n def trace(x, /, *, axis1=0, axis2=1, offset=0):\n- from .. import trace\n- return trace(x, axis1=axis1, axis2=axis2, offset=offset)\n+ return np.trace(x, axis1=axis1, axis2=axis2, offset=offset)\n \n def transpose(x, /, *, axes=None):\n- from .. import transpose\n- return transpose(x, axes=axes)\n+ return np.transpose(x, axes=axes)\n", "new_path": "numpy/_array_api/_linear_algebra_functions.py", "old_path": "numpy/_array_api/_linear_algebra_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,28 +1,23 @@\n+import numpy as np\n+\n def concat(arrays, /, *, axis=0):\n # Note: the function name is different here\n- from .. import concatenate\n- return concatenate(arrays, axis=axis)\n+ return np.concatenate(arrays, axis=axis)\n \n def expand_dims(x, axis, /):\n- from .. import expand_dims\n- return expand_dims(x, axis)\n+ return np.expand_dims(x, axis)\n \n def flip(x, /, *, axis=None):\n- from .. import flip\n- return flip(x, axis=axis)\n+ return np.flip(x, axis=axis)\n \n def reshape(x, shape, /):\n- from .. import reshape\n- return reshape(x, shape)\n+ return np.reshape(x, shape)\n \n def roll(x, shift, /, *, axis=None):\n- from .. import roll\n- return roll(x, shift, axis=axis)\n+ return np.roll(x, shift, axis=axis)\n \n def squeeze(x, /, *, axis=None):\n- from .. 
import squeeze\n- return squeeze(x, axis=axis)\n+ return np.squeeze(x, axis=axis)\n \n def stack(arrays, /, *, axis=0):\n- from .. import stack\n- return stack(arrays, axis=axis)\n+ return np.stack(arrays, axis=axis)\n", "new_path": "numpy/_array_api/_manipulation_functions.py", "old_path": "numpy/_array_api/_manipulation_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,15 +1,13 @@\n+import numpy as np\n+\n def argmax(x, /, *, axis=None, keepdims=False):\n- from .. import argmax\n- return argmax(x, axis=axis, keepdims=keepdims)\n+ return np.argmax(x, axis=axis, keepdims=keepdims)\n \n def argmin(x, /, *, axis=None, keepdims=False):\n- from .. import argmin\n- return argmin(x, axis=axis, keepdims=keepdims)\n+ return np.argmin(x, axis=axis, keepdims=keepdims)\n \n def nonzero(x, /):\n- from .. import nonzero\n- return nonzero(x)\n+ return np.nonzero(x)\n \n def where(condition, x1, x2, /):\n- from .. import where\n- return where(condition, x1, x2)\n+ return np.where(condition, x1, x2)\n", "new_path": "numpy/_array_api/_searching_functions.py", "old_path": "numpy/_array_api/_searching_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,3 +1,4 @@\n+import numpy as np\n+\n def unique(x, /, *, return_counts=False, return_index=False, return_inverse=False, sorted=True):\n- from .. import unique\n- return unique(x, return_counts=return_counts, return_index=return_index, return_inverse=return_inverse, sorted=sorted)\n+ return np.unique(x, return_counts=return_counts, return_index=return_index, return_inverse=return_inverse, sorted=sorted)\n", "new_path": "numpy/_array_api/_set_functions.py", "old_path": "numpy/_array_api/_set_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,19 +1,17 @@\n+import numpy as np\n+\n def argsort(x, /, *, axis=-1, descending=False, stable=True):\n- from .. import argsort\n- from .. import flip\n # Note: this keyword argument is different, and the default is different.\n kind = 'stable' if stable else 'quicksort'\n- res = argsort(x, axis=axis, kind=kind)\n+ res = np.argsort(x, axis=axis, kind=kind)\n if descending:\n- res = flip(res, axis=axis)\n+ res = np.flip(res, axis=axis)\n return res\n \n def sort(x, /, *, axis=-1, descending=False, stable=True):\n- from .. import sort\n- from .. import flip\n # Note: this keyword argument is different, and the default is different.\n kind = 'stable' if stable else 'quicksort'\n- res = sort(x, axis=axis, kind=kind)\n+ res = np.sort(x, axis=axis, kind=kind)\n if descending:\n- res = flip(res, axis=axis)\n+ res = np.flip(res, axis=axis)\n return res\n", "new_path": "numpy/_array_api/_sorting_functions.py", "old_path": "numpy/_array_api/_sorting_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,29 +1,24 @@\n+import numpy as np\n+\n def max(x, /, *, axis=None, keepdims=False):\n- from .. import max\n- return max(x, axis=axis, keepdims=keepdims)\n+ return np.max(x, axis=axis, keepdims=keepdims)\n \n def mean(x, /, *, axis=None, keepdims=False):\n- from .. import mean\n- return mean(x, axis=axis, keepdims=keepdims)\n+ return np.mean(x, axis=axis, keepdims=keepdims)\n \n def min(x, /, *, axis=None, keepdims=False):\n- from .. import min\n- return min(x, axis=axis, keepdims=keepdims)\n+ return np.min(x, axis=axis, keepdims=keepdims)\n \n def prod(x, /, *, axis=None, keepdims=False):\n- from .. import prod\n- return prod(x, axis=axis, keepdims=keepdims)\n+ return np.prod(x, axis=axis, keepdims=keepdims)\n \n def std(x, /, *, axis=None, correction=0.0, keepdims=False):\n- from .. 
import std\n # Note: the keyword argument correction is different here\n- return std(x, axis=axis, ddof=correction, keepdims=keepdims)\n+ return np.std(x, axis=axis, ddof=correction, keepdims=keepdims)\n \n def sum(x, /, *, axis=None, keepdims=False):\n- from .. import sum\n- return sum(x, axis=axis, keepdims=keepdims)\n+ return np.sum(x, axis=axis, keepdims=keepdims)\n \n def var(x, /, *, axis=None, correction=0.0, keepdims=False):\n- from .. import var\n # Note: the keyword argument correction is different here\n- return var(x, axis=axis, ddof=correction, keepdims=keepdims)\n+ return np.var(x, axis=axis, ddof=correction, keepdims=keepdims)\n", "new_path": "numpy/_array_api/_statistical_functions.py", "old_path": "numpy/_array_api/_statistical_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -1,7 +1,7 @@\n+import numpy as np\n+\n def all(x, /, *, axis=None, keepdims=False):\n- from .. import all\n- return all(x, axis=axis, keepdims=keepdims)\n+ return np.all(x, axis=axis, keepdims=keepdims)\n \n def any(x, /, *, axis=None, keepdims=False):\n- from .. import any\n- return any(x, axis=axis, keepdims=keepdims)\n+ return np.any(x, axis=axis, keepdims=keepdims)\n", "new_path": "numpy/_array_api/_utility_functions.py", "old_path": "numpy/_array_api/_utility_functions.py" } ]
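The shape of the change, in miniature (a sketch, not lines from the diff): one module-level import, with each spec-named wrapper delegating through the `np` namespace precisely because a `from numpy import ...` of the same name would be shadowed by the wrapper itself.

```python
import numpy as np

def sum(x, /, *, axis=None, keepdims=False):
    # `from numpy import sum` would be shadowed by this very definition;
    # delegating via np.sum sidesteps the name clash.
    return np.sum(x, axis=axis, keepdims=keepdims)

def pow(x1, x2, /):
    # Note: the spec name differs from NumPy's (power).
    return np.power(x1, x2)
```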
0da4b09082036d2c62a638d751b660a75e543bc9
cupy/cupy
null
null
Add an explanatory docstring to _array_api/__init__.py. This is mostly aimed at any potential reviewers of the module for now.
[ { "change_type": "MODIFY", "diff": "@@ -1,3 +1,69 @@\n+\"\"\"\n+A NumPy sub-namespace that conforms to the Python array API standard.\n+\n+This is a proof-of-concept namespace that wraps the corresponding NumPy\n+functions to give a conforming implementation of the Python array API standard\n+(https://data-apis.github.io/array-api/latest/). The standard is currently in\n+an RFC phase and comments on it are both welcome and encouraged. Comments\n+should be made either at https://github.com/data-apis/array-api or at\n+https://github.com/data-apis/consortium-feedback/discussions.\n+\n+This submodule will be accompanied with a NEP (not yet written) proposing its\n+inclusion in NumPy.\n+\n+NumPy already follows the proposed spec for the most part, so this module\n+serves mostly as a thin wrapper around it. However, NumPy also implements a\n+lot of behavior that is not included in the spec, so this serves as a\n+restricted subset of the API. Only those functions that are part of the spec\n+are included in this namespace, and all functions are given with the exact\n+signature given in the spec, including the use of position-only arguments, and\n+omitting any extra keyword arguments implemented by NumPy but not part of the\n+spec. Note that the array object itself is unchanged, as implementing a\n+restricted subclass of ndarray seems unnecessarily complex for the purposes of\n+this namespace, so the API of array methods and other behaviors of the array\n+object will include things that are not part of the spec.\n+\n+The spec is designed as a \"minimal API subset\" and explicitly allows libraries\n+to include behaviors not specified by it. But users of this module that intend\n+to write portable code should be aware that only those behaviors that are\n+listed in the spec are guaranteed to be implemented across libraries.\n+\n+A few notes about the current state of this submodule:\n+\n+- There is a test suite that tests modules against the array API standard at\n+ https://github.com/data-apis/array-api-tests. The test suite is still a work\n+ in progress, but the existing tests pass on this module, with a few\n+ exceptions:\n+\n+ - Device support is not yet implemented in NumPy\n+ (https://data-apis.github.io/array-api/latest/design_topics/device_support.html).\n+ As a result, the `device` attribute of the array object is missing, and\n+ array creation functions that take the `device` keyword argument will fail\n+ with NotImplementedError.\n+\n+ - DLPack support (see https://github.com/data-apis/array-api/pull/106) is\n+ not included here, as it requires a full implementation in NumPy proper\n+ first.\n+\n+ - np.argmin and np.argmax do not implement the keepdims keyword argument.\n+\n+ - Some linear algebra functions in the spec are still a work in progress (to\n+ be added soon). These will be updated once the spec is.\n+\n+ - Some tests in the test suite are still not fully correct in that they test\n+ all datatypes whereas certain functions are only defined for a subset of\n+ datatypes.\n+\n+ The test suite is yet complete, and even the tests that exist are not\n+ guaranteed to give a comprehensive coverage of the spec. Therefore, those\n+ reviewing this submodule should refer to the standard documents themselves.\n+\n+- All places where the implementations in this submodule are known to deviate\n+ from their corresponding functions in NumPy are marked with \"# Note\"\n+ comments. 
Reviewers should make note of these comments.\n+\n+\"\"\"\n+\n __all__ = []\n \n from ._constants import e, inf, nan, pi\n", "new_path": "numpy/_array_api/__init__.py", "old_path": "numpy/_array_api/__init__.py" } ]
76eb888612183768d9e1b0c818fcf5416c5f28c7
cupy/cupy
null
null
Use _implementation on all functions that have it in the array API submodule. That way they only work on actual ndarray inputs, not array-likes, which is more in line with the spec.
[ { "change_type": "MODIFY", "diff": "@@ -35,7 +35,7 @@ def empty_like(x: array, /, *, dtype: Optional[dtype] = None, device: Optional[d\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return np.empty_like(x, dtype=dtype)\n+ return np.empty_like._implementation(x, dtype=dtype)\n \n def eye(N: int, /, *, M: Optional[int] = None, k: Optional[int] = 0, dtype: Optional[dtype] = None, device: Optional[device] = None) -> array:\n \"\"\"\n@@ -68,7 +68,7 @@ def full_like(x: array, fill_value: Union[int, float], /, *, dtype: Optional[dty\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return np.full_like(x, fill_value, dtype=dtype)\n+ return np.full_like._implementation(x, fill_value, dtype=dtype)\n \n def linspace(start: Union[int, float], stop: Union[int, float], num: int, /, *, dtype: Optional[dtype] = None, device: Optional[device] = None, endpoint: bool = True) -> array:\n \"\"\"\n@@ -101,7 +101,7 @@ def ones_like(x: array, /, *, dtype: Optional[dtype] = None, device: Optional[de\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return np.ones_like(x, dtype=dtype)\n+ return np.ones_like._implementation(x, dtype=dtype)\n \n def zeros(shape: Union[int, Tuple[int, ...]], /, *, dtype: Optional[dtype] = None, device: Optional[device] = None) -> array:\n \"\"\"\n@@ -123,4 +123,4 @@ def zeros_like(x: array, /, *, dtype: Optional[dtype] = None, device: Optional[d\n if device is not None:\n # Note: Device support is not yet implemented on ndarray\n raise NotImplementedError(\"Device support is not yet implemented\")\n- return np.zeros_like(x, dtype=dtype)\n+ return np.zeros_like._implementation(x, dtype=dtype)\n", "new_path": "numpy/_array_api/_creation_functions.py", "old_path": "numpy/_array_api/_creation_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -381,7 +381,7 @@ def round(x: array, /) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.round(x)\n+ return np.round._implementation(x)\n \n def sign(x: array, /) -> array:\n \"\"\"\n", "new_path": "numpy/_array_api/_elementwise_functions.py", "old_path": "numpy/_array_api/_elementwise_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -18,7 +18,7 @@ def cross(x1: array, x2: array, /, *, axis: int = -1) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.cross(x1, x2, axis=axis)\n+ return np.cross._implementation(x1, x2, axis=axis)\n \n def det(x: array, /) -> array:\n \"\"\"\n@@ -35,7 +35,7 @@ def diagonal(x: array, /, *, axis1: int = 0, axis2: int = 1, offset: int = 0) ->\n \n See its docstring for more information.\n \"\"\"\n- return np.diagonal(x, axis1=axis1, axis2=axis2, offset=offset)\n+ return np.diagonal._implementation(x, axis1=axis1, axis2=axis2, offset=offset)\n \n # def dot():\n # \"\"\"\n@@ -128,7 +128,7 @@ def outer(x1: array, x2: array, /) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.outer(x1, x2)\n+ return np.outer._implementation(x1, x2)\n \n # def pinv():\n # \"\"\"\n@@ -176,7 +176,7 @@ def trace(x: array, /, *, axis1: int = 0, axis2: int = 1, offset: int = 0) -> ar\n \n See its docstring for more information.\n \"\"\"\n- return np.asarray(np.trace(x, axis1=axis1, axis2=axis2, offset=offset))\n+ return 
np.asarray(np.trace._implementation(x, axis1=axis1, axis2=axis2, offset=offset))\n \n def transpose(x: array, /, *, axes: Optional[Tuple[int, ...]] = None) -> array:\n \"\"\"\n@@ -184,4 +184,4 @@ def transpose(x: array, /, *, axes: Optional[Tuple[int, ...]] = None) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.transpose(x, axes=axes)\n+ return np.transpose._implementation(x, axes=axes)\n", "new_path": "numpy/_array_api/_linear_algebra_functions.py", "old_path": "numpy/_array_api/_linear_algebra_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -19,7 +19,7 @@ def expand_dims(x: array, axis: int, /) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.expand_dims(x, axis)\n+ return np.expand_dims._implementation(x, axis)\n \n def flip(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None) -> array:\n \"\"\"\n@@ -27,7 +27,7 @@ def flip(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None) ->\n \n See its docstring for more information.\n \"\"\"\n- return np.flip(x, axis=axis)\n+ return np.flip._implementation(x, axis=axis)\n \n def reshape(x: array, shape: Tuple[int, ...], /) -> array:\n \"\"\"\n@@ -35,7 +35,7 @@ def reshape(x: array, shape: Tuple[int, ...], /) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.reshape(x, shape)\n+ return np.reshape._implementation(x, shape)\n \n def roll(x: array, shift: Union[int, Tuple[int, ...]], /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None) -> array:\n \"\"\"\n@@ -43,7 +43,7 @@ def roll(x: array, shift: Union[int, Tuple[int, ...]], /, *, axis: Optional[Unio\n \n See its docstring for more information.\n \"\"\"\n- return np.roll(x, shift, axis=axis)\n+ return np.roll._implementation(x, shift, axis=axis)\n \n def squeeze(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None) -> array:\n \"\"\"\n@@ -51,7 +51,7 @@ def squeeze(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None)\n \n See its docstring for more information.\n \"\"\"\n- return np.squeeze(x, axis=axis)\n+ return np.squeeze._implementation(x, axis=axis)\n \n def stack(arrays: Tuple[array], /, *, axis: int = 0) -> array:\n \"\"\"\n@@ -59,4 +59,4 @@ def stack(arrays: Tuple[array], /, *, axis: int = 0) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.stack(arrays, axis=axis)\n+ return np.stack._implementation(arrays, axis=axis)\n", "new_path": "numpy/_array_api/_manipulation_functions.py", "old_path": "numpy/_array_api/_manipulation_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -11,7 +11,7 @@ def argmax(x: array, /, *, axis: int = None, keepdims: bool = False) -> array:\n See its docstring for more information.\n \"\"\"\n # Note: this currently fails as np.argmax does not implement keepdims\n- return np.asarray(np.argmax(x, axis=axis, keepdims=keepdims))\n+ return np.asarray(np.argmax._implementation(x, axis=axis, keepdims=keepdims))\n \n def argmin(x: array, /, *, axis: int = None, keepdims: bool = False) -> array:\n \"\"\"\n@@ -20,7 +20,7 @@ def argmin(x: array, /, *, axis: int = None, keepdims: bool = False) -> array:\n See its docstring for more information.\n \"\"\"\n # Note: this currently fails as np.argmin does not implement keepdims\n- return np.asarray(np.argmin(x, axis=axis, keepdims=keepdims))\n+ return np.asarray(np.argmin._implementation(x, axis=axis, keepdims=keepdims))\n \n def nonzero(x: array, /) -> Tuple[array, ...]:\n \"\"\"\n@@ -28,7 +28,7 @@ def nonzero(x: array, /) -> Tuple[array, ...]:\n 
\n See its docstring for more information.\n \"\"\"\n- return np.nonzero(x)\n+ return np.nonzero._implementation(x)\n \n def where(condition: array, x1: array, x2: array, /) -> array:\n \"\"\"\n@@ -36,4 +36,4 @@ def where(condition: array, x1: array, x2: array, /) -> array:\n \n See its docstring for more information.\n \"\"\"\n- return np.where(condition, x1, x2)\n+ return np.where._implementation(condition, x1, x2)\n", "new_path": "numpy/_array_api/_searching_functions.py", "old_path": "numpy/_array_api/_searching_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -10,4 +10,4 @@ def unique(x: array, /, *, return_counts: bool = False, return_index: bool = Fal\n \n See its docstring for more information.\n \"\"\"\n- return np.unique(x, return_counts=return_counts, return_index=return_index, return_inverse=return_inverse, sorted=sorted)\n+ return np.unique._implementation(x, return_counts=return_counts, return_index=return_index, return_inverse=return_inverse, sorted=sorted)\n", "new_path": "numpy/_array_api/_set_functions.py", "old_path": "numpy/_array_api/_set_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -12,7 +12,7 @@ def argsort(x: array, /, *, axis: int = -1, descending: bool = False, stable: bo\n \"\"\"\n # Note: this keyword argument is different, and the default is different.\n kind = 'stable' if stable else 'quicksort'\n- res = np.argsort(x, axis=axis, kind=kind)\n+ res = np.argsort._implementation(x, axis=axis, kind=kind)\n if descending:\n res = np.flip(res, axis=axis)\n return res\n@@ -25,7 +25,7 @@ def sort(x: array, /, *, axis: int = -1, descending: bool = False, stable: bool\n \"\"\"\n # Note: this keyword argument is different, and the default is different.\n kind = 'stable' if stable else 'quicksort'\n- res = np.sort(x, axis=axis, kind=kind)\n+ res = np.sort._implementation(x, axis=axis, kind=kind)\n if descending:\n res = np.flip(res, axis=axis)\n return res\n", "new_path": "numpy/_array_api/_sorting_functions.py", "old_path": "numpy/_array_api/_sorting_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -5,24 +5,24 @@ from ._types import Optional, Tuple, Union, array\n import numpy as np\n \n def max(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False) -> array:\n- return np.max(x, axis=axis, keepdims=keepdims)\n+ return np.max._implementation(x, axis=axis, keepdims=keepdims)\n \n def mean(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False) -> array:\n- return np.asarray(np.mean(x, axis=axis, keepdims=keepdims))\n+ return np.asarray(np.mean._implementation(x, axis=axis, keepdims=keepdims))\n \n def min(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False) -> array:\n- return np.min(x, axis=axis, keepdims=keepdims)\n+ return np.min._implementation(x, axis=axis, keepdims=keepdims)\n \n def prod(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False) -> array:\n- return np.asarray(np.prod(x, axis=axis, keepdims=keepdims))\n+ return np.asarray(np.prod._implementation(x, axis=axis, keepdims=keepdims))\n \n def std(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, correction: Union[int, float] = 0.0, keepdims: bool = False) -> array:\n # Note: the keyword argument correction is different here\n- return np.asarray(np.std(x, axis=axis, ddof=correction, keepdims=keepdims))\n+ return np.asarray(np.std._implementation(x, axis=axis, ddof=correction, keepdims=keepdims))\n \n def sum(x: array, /, 
*, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False) -> array:\n- return np.asarray(np.sum(x, axis=axis, keepdims=keepdims))\n+ return np.asarray(np.sum._implementation(x, axis=axis, keepdims=keepdims))\n \n def var(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, correction: Union[int, float] = 0.0, keepdims: bool = False) -> array:\n # Note: the keyword argument correction is different here\n- return np.asarray(np.var(x, axis=axis, ddof=correction, keepdims=keepdims))\n+ return np.asarray(np.var._implementation(x, axis=axis, ddof=correction, keepdims=keepdims))\n", "new_path": "numpy/_array_api/_statistical_functions.py", "old_path": "numpy/_array_api/_statistical_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -10,7 +10,7 @@ def all(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, keep\n \n See its docstring for more information.\n \"\"\"\n- return np.asarray(np.all(x, axis=axis, keepdims=keepdims))\n+ return np.asarray(np.all._implementation(x, axis=axis, keepdims=keepdims))\n \n def any(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, keepdims: bool = False) -> array:\n \"\"\"\n@@ -18,4 +18,4 @@ def any(x: array, /, *, axis: Optional[Union[int, Tuple[int, ...]]] = None, keep\n \n See its docstring for more information.\n \"\"\"\n- return np.asarray(np.any(x, axis=axis, keepdims=keepdims))\n+ return np.asarray(np.any._implementation(x, axis=axis, keepdims=keepdims))\n", "new_path": "numpy/_array_api/_utility_functions.py", "old_path": "numpy/_array_api/_utility_functions.py" } ]
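Why `_implementation` works, hedged as my reading of NumPy internals: the `array_function_dispatch` decorator attaches the undecorated function to the public wrapper as `_implementation`, so calling it skips the `__array_function__` override protocol and with it any duck-array dispatch.

```python
import numpy as np

class Duck:
    # An object that hooks the __array_function__ protocol.
    def __array_function__(self, func, types, args, kwargs):
        return "intercepted"

print(np.concatenate([Duck()]))                       # "intercepted": protocol fires
print(np.concatenate._implementation([np.ones(2)]))  # raw path: protocol skipped
```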
994ce07595026d5de54f52ef5748b578f9fae1bc
cupy/cupy
null
null
Use better type signatures in the array API module. This includes returning custom dataclasses for finfo and iinfo that contain only the properties required by the array API specification.
[ { "change_type": "MODIFY", "diff": "@@ -396,7 +396,8 @@ class Array:\n res = self._array.__le__(other._array)\n return self.__class__._new(res)\n \n- def __len__(self, /):\n+ # Note: __len__ may end up being removed from the array API spec.\n+ def __len__(self, /) -> int:\n \"\"\"\n Performs the operation __len__.\n \"\"\"\n@@ -843,7 +844,7 @@ class Array:\n return self.__class__._new(res)\n \n @property\n- def dtype(self):\n+ def dtype(self) -> Dtype:\n \"\"\"\n Array API compatible wrapper for :py:meth:`np.ndaray.dtype <numpy.ndarray.dtype>`.\n \n@@ -852,7 +853,7 @@ class Array:\n return self._array.dtype\n \n @property\n- def device(self):\n+ def device(self) -> Device:\n \"\"\"\n Array API compatible wrapper for :py:meth:`np.ndaray.device <numpy.ndarray.device>`.\n \n@@ -862,7 +863,7 @@ class Array:\n raise NotImplementedError(\"The device attribute is not yet implemented\")\n \n @property\n- def ndim(self):\n+ def ndim(self) -> int:\n \"\"\"\n Array API compatible wrapper for :py:meth:`np.ndaray.ndim <numpy.ndarray.ndim>`.\n \n@@ -871,7 +872,7 @@ class Array:\n return self._array.ndim\n \n @property\n- def shape(self):\n+ def shape(self) -> Tuple[int, ...]:\n \"\"\"\n Array API compatible wrapper for :py:meth:`np.ndaray.shape <numpy.ndarray.shape>`.\n \n@@ -880,7 +881,7 @@ class Array:\n return self._array.shape\n \n @property\n- def size(self):\n+ def size(self) -> int:\n \"\"\"\n Array API compatible wrapper for :py:meth:`np.ndaray.size <numpy.ndarray.size>`.\n \n@@ -889,7 +890,7 @@ class Array:\n return self._array.size\n \n @property\n- def T(self):\n+ def T(self) -> Array:\n \"\"\"\n Array API compatible wrapper for :py:meth:`np.ndaray.T <numpy.ndarray.T>`.\n \n", "new_path": "numpy/_array_api/_array_object.py", "old_path": "numpy/_array_api/_array_object.py" }, { "change_type": "MODIFY", "diff": "@@ -10,7 +10,7 @@ from ._dtypes import _all_dtypes\n \n import numpy as np\n \n-def asarray(obj: Union[float, NestedSequence[bool|int|float], SupportsDLPack, SupportsBufferProtocol], /, *, dtype: Optional[Dtype] = None, device: Optional[Device] = None, copy: Optional[bool] = None) -> Array:\n+def asarray(obj: Union[Array, float, NestedSequence[bool|int|float], SupportsDLPack, SupportsBufferProtocol], /, *, dtype: Optional[Dtype] = None, device: Optional[Device] = None, copy: Optional[bool] = None) -> Array:\n \"\"\"\n Array API compatible wrapper for :py:func:`np.asarray <numpy.asarray>`.\n \n", "new_path": "numpy/_array_api/_creation_functions.py", "old_path": "numpy/_array_api/_creation_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -2,6 +2,7 @@ from __future__ import annotations\n \n from ._array_object import Array\n \n+from dataclasses import dataclass\n from typing import TYPE_CHECKING\n if TYPE_CHECKING:\n from ._types import List, Tuple, Union, Dtype\n@@ -38,13 +39,44 @@ def can_cast(from_: Union[Dtype, Array], to: Dtype, /) -> bool:\n from_ = from_._array\n return np.can_cast(from_, to)\n \n+# These are internal objects for the return types of finfo and iinfo, since\n+# the NumPy versions contain extra data that isn't part of the spec.\n+@dataclass\n+class finfo_object:\n+ bits: int\n+ # Note: The types of the float data here are float, whereas in NumPy they\n+ # are scalars of the corresponding float dtype.\n+ eps: float\n+ max: float\n+ min: float\n+ # Note: smallest_normal is part of the array API spec, but cannot be used\n+ # until https://github.com/numpy/numpy/pull/18536 is merged.\n+\n+ # smallest_normal: float\n+\n+@dataclass\n+class iinfo_object:\n+ bits: 
int\n+ max: int\n+ min: int\n+\n def finfo(type: Union[Dtype, Array], /) -> finfo_object:\n \"\"\"\n Array API compatible wrapper for :py:func:`np.finfo <numpy.finfo>`.\n \n See its docstring for more information.\n \"\"\"\n- return np.finfo(type)\n+ fi = np.finfo(type)\n+ # Note: The types of the float data here are float, whereas in NumPy they\n+ # are scalars of the corresponding float dtype.\n+ return finfo_object(\n+ fi.bits,\n+ float(fi.eps),\n+ float(fi.max),\n+ float(fi.min),\n+ # TODO: Uncomment this when #18536 is merged.\n+ # float(fi.smallest_normal),\n+ )\n \n def iinfo(type: Union[Dtype, Array], /) -> iinfo_object:\n \"\"\"\n@@ -52,7 +84,8 @@ def iinfo(type: Union[Dtype, Array], /) -> iinfo_object:\n \n See its docstring for more information.\n \"\"\"\n- return np.iinfo(type)\n+ ii = np.iinfo(type)\n+ return iinfo_object(ii.bits, ii.max, ii.min)\n \n def result_type(*arrays_and_dtypes: Sequence[Union[Array, Dtype]]) -> Dtype:\n \"\"\"\n", "new_path": "numpy/_array_api/_data_type_functions.py", "old_path": "numpy/_array_api/_data_type_functions.py" }, { "change_type": "MODIFY", "diff": "@@ -7,7 +7,7 @@ from typing import List, Optional, Tuple, Union\n import numpy as np\n \n # Note: the function name is different here\n-def concat(arrays: Tuple[Array, ...], /, *, axis: Optional[int] = 0) -> Array:\n+def concat(arrays: Union[Tuple[Array, ...], List[Array]], /, *, axis: Optional[int] = 0) -> Array:\n \"\"\"\n Array API compatible wrapper for :py:func:`np.concatenate <numpy.concatenate>`.\n \n@@ -56,7 +56,7 @@ def squeeze(x: Array, /, axis: Optional[Union[int, Tuple[int, ...]]] = None) ->\n \"\"\"\n return Array._new(np.squeeze(x._array, axis=axis))\n \n-def stack(arrays: Tuple[Array, ...], /, *, axis: int = 0) -> Array:\n+def stack(arrays: Union[Tuple[Array, ...], List[Array]], /, *, axis: int = 0) -> Array:\n \"\"\"\n Array API compatible wrapper for :py:func:`np.stack <numpy.stack>`.\n \n", "new_path": "numpy/_array_api/_manipulation_functions.py", "old_path": "numpy/_array_api/_manipulation_functions.py" } ]
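A trimmed, self-contained version of the `finfo` portion of this diff, runnable on its own, showing the spec-only dataclass being returned in place of NumPy's richer object:

```python
from dataclasses import dataclass
import numpy as np

@dataclass
class finfo_object:
    bits: int
    eps: float   # plain Python floats, not NumPy scalars, per the spec
    max: float
    min: float

def finfo(type, /) -> finfo_object:
    fi = np.finfo(type)
    return finfo_object(fi.bits, float(fi.eps), float(fi.max), float(fi.min))

print(finfo(np.float64))  # e.g. finfo_object(bits=64, eps=2.22e-16, ...)
```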
4877478d275959f746dab4f7b91bfe68956f26f1
netflix/security_monkey
null
null
Fix for orphaned items that may develop from a failed watcher event. Also added optional (but on by default) silencing of verbose and useless botocore logs.
[ { "change_type": "MODIFY", "diff": "@@ -95,7 +95,6 @@ def create_item(item, technology, account):\n )\n \n \n-\n def detect_change(item, account, technology, complete_hash, durable_hash):\n \"\"\"\n Checks the database to see if the latest revision of the specified\n", "new_path": "security_monkey/datastore_utils.py", "old_path": "security_monkey/datastore_utils.py" }, { "change_type": "MODIFY", "diff": "@@ -12,7 +12,7 @@ import traceback\n \n from security_monkey import app, db, jirasync, sentry\n from security_monkey.alerter import Alerter\n-from security_monkey.datastore import store_exception, clear_old_exceptions\n+from security_monkey.datastore import store_exception, clear_old_exceptions, Technology, Account, Item, ItemRevision\n from security_monkey.monitors import get_monitors, get_monitors_and_dependencies\n from security_monkey.reporter import Reporter\n from security_monkey.task_scheduler.util import CELERY, setup\n@@ -70,9 +70,57 @@ def clear_expired_exceptions():\n app.logger.info(\"[-] Completed clearing out exceptions that have an expired TTL.\")\n \n \n+def fix_orphaned_deletions(account_name, technology_name):\n+ \"\"\"\n+ Possible issue with orphaned items. This will check if there are any, and will assume that the item\n+ was deleted. This will create a deletion change record to it.\n+\n+ :param account_name:\n+ :param technology_name:\n+ :return:\n+ \"\"\"\n+ # If technology doesn't exist, then create it:\n+ technology = Technology.query.filter(Technology.name == technology_name).first()\n+ if not technology:\n+ technology = Technology(name=technology_name)\n+ db.session.add(technology)\n+ db.session.commit()\n+ app.logger.info(\"Technology: {} did not exist... created it...\".format(technology_name))\n+\n+ account = Account.query.filter(Account.name == account_name).one()\n+\n+ # Query for orphaned items of the given technology/account pair:\n+ orphaned_items = Item.query.filter(Item.account_id == account.id, Item.tech_id == technology.id,\n+ Item.latest_revision_id == None).all() # noqa\n+\n+ if not orphaned_items:\n+ app.logger.info(\"[@] No orphaned items have been found. (This is good)\")\n+ return\n+\n+ # Fix the orphaned items:\n+ for oi in orphaned_items:\n+ app.logger.error(\"[?] Found an orphaned item: {}. Creating a deletion record for it\".format(oi.name))\n+ revision = ItemRevision(active=False, config={})\n+ oi.revisions.append(revision)\n+ db.session.add(revision)\n+ db.session.add(oi)\n+ db.session.commit()\n+\n+ # Update the latest revision id:\n+ db.session.refresh(revision)\n+ oi.latest_revision_id = revision.id\n+ db.session.add(oi)\n+\n+ db.session.commit()\n+ app.logger.info(\"[-] Created deletion record for item: {}.\".format(oi.name))\n+\n+\n def reporter_logic(account_name, technology_name):\n \"\"\"Logic for the run change reporter\"\"\"\n try:\n+ # Before doing anything... Look for orphaned items for this given technology. If they exist, then delete them:\n+ fix_orphaned_deletions(account_name, technology_name)\n+\n # Watch and Audit:\n monitors = find_changes(account_name, technology_name)\n \n@@ -140,6 +188,9 @@ def find_changes(account_name, monitor_name, debug=True):\n Runs the watcher and stores the result, re-audits all types to account\n for downstream dependencies.\n \"\"\"\n+ # Before doing anything... Look for orphaned items for this given technology. 
If they exist, then delete them:\n+ fix_orphaned_deletions(account_name, monitor_name)\n+\n monitors = get_monitors(account_name, [monitor_name], debug)\n for mon in monitors:\n cw = mon.watcher\n", "new_path": "security_monkey/task_scheduler/tasks.py", "old_path": "security_monkey/task_scheduler/tasks.py" }, { "change_type": "MODIFY", "diff": "@@ -84,7 +84,8 @@ class CelerySchedulerTestCase(SecurityMonkeyTestCase):\n \n db.session.commit()\n \n- def test_find_batch_changes(self):\n+ @patch(\"security_monkey.task_scheduler.tasks.fix_orphaned_deletions\")\n+ def test_find_batch_changes(self, mock_fix_orphaned):\n \"\"\"\n Runs through a full find job via the IAMRole watcher, as that supports batching.\n \n@@ -92,7 +93,7 @@ class CelerySchedulerTestCase(SecurityMonkeyTestCase):\n not going to do any boto work and that will instead be mocked out.\n :return:\n \"\"\"\n- from security_monkey.task_scheduler.tasks import manual_run_change_finder, setup\n+ from security_monkey.task_scheduler.tasks import manual_run_change_finder\n from security_monkey.monitors import Monitor\n from security_monkey.watchers.iam.iam_role import IAMRole\n from security_monkey.auditors.iam.iam_role import IAMRoleAuditor\n@@ -142,6 +143,7 @@ class CelerySchedulerTestCase(SecurityMonkeyTestCase):\n watcher.slurp = mock_slurp\n \n manual_run_change_finder([test_account.name], [watcher.index])\n+ assert mock_fix_orphaned.called\n \n # Check that all items were added to the DB:\n assert len(Item.query.all()) == 11\n@@ -271,8 +273,9 @@ class CelerySchedulerTestCase(SecurityMonkeyTestCase):\n client.put_role_policy(RoleName=\"roleNumber{}\".format(x), PolicyName=\"testpolicy\",\n PolicyDocument=json.dumps(OPEN_POLICY, indent=4))\n \n- def test_report_batch_changes(self):\n- from security_monkey.task_scheduler.tasks import manual_run_change_reporter, setup\n+ @patch(\"security_monkey.task_scheduler.tasks.fix_orphaned_deletions\")\n+ def test_report_batch_changes(self, mock_fix_orphaned):\n+ from security_monkey.task_scheduler.tasks import manual_run_change_reporter\n from security_monkey.datastore import Item, ItemRevision, ItemAudit\n from security_monkey.monitors import Monitor\n from security_monkey.watchers.iam.iam_role import IAMRole\n@@ -327,6 +330,8 @@ class CelerySchedulerTestCase(SecurityMonkeyTestCase):\n \n manual_run_change_reporter([test_account.name])\n \n+ assert mock_fix_orphaned.called\n+\n # Check that all items were added to the DB:\n assert len(Item.query.all()) == 11\n \n@@ -348,6 +353,32 @@ class CelerySchedulerTestCase(SecurityMonkeyTestCase):\n purge_it()\n assert mock.control.purge.called\n \n+ def test_fix_orphaned_deletions(self):\n+ test_account = Account.query.filter(Account.name == \"TEST_ACCOUNT1\").one()\n+ technology = Technology(name=\"orphaned\")\n+\n+ db.session.add(technology)\n+ db.session.commit()\n+\n+ orphaned_item = Item(name=\"orphaned\", region=\"us-east-1\", tech_id=technology.id, account_id=test_account.id)\n+ db.session.add(orphaned_item)\n+ db.session.commit()\n+\n+ assert not orphaned_item.latest_revision_id\n+ assert not orphaned_item.revisions.count()\n+ assert len(Item.query.filter(Item.account_id == test_account.id, Item.tech_id == technology.id,\n+ Item.latest_revision_id == None).all()) == 1 # noqa\n+\n+ from security_monkey.task_scheduler.tasks import fix_orphaned_deletions\n+ fix_orphaned_deletions(test_account.name, technology.name)\n+\n+ assert not Item.query.filter(Item.account_id == test_account.id, Item.tech_id == technology.id,\n+ Item.latest_revision_id == 
None).all() # noqa\n+\n+ assert orphaned_item.latest_revision_id\n+ assert orphaned_item.revisions.count() == 1\n+ assert orphaned_item.latest_config == {}\n+\n @patch(\"security_monkey.task_scheduler.beat.setup\")\n @patch(\"security_monkey.task_scheduler.beat.purge_it\")\n @patch(\"security_monkey.task_scheduler.tasks.task_account_tech\")\n", "new_path": "security_monkey/tests/scheduling/test_celery_scheduler.py", "old_path": "security_monkey/tests/scheduling/test_celery_scheduler.py" }, { "change_type": "MODIFY", "diff": "@@ -26,10 +26,17 @@ from copy import deepcopy\n import dpath.util\n from dpath.exceptions import PathNotFound\n \n+import logging\n+\n watcher_registry = {}\n abstract_classes = set(['Watcher', 'CloudAuxWatcher', 'CloudAuxBatchedWatcher'])\n \n \n+if not app.config.get(\"DONT_IGNORE_BOTO_VERBOSE_LOGGERS\"):\n+ logging.getLogger('botocore.vendored.requests.packages.urllib3').setLevel(logging.WARNING)\n+ logging.getLogger('botocore.credentials').setLevel(logging.WARNING)\n+\n+\n class WatcherType(type):\n def __init__(cls, name, bases, attrs):\n super(WatcherType, cls).__init__(name, bases, attrs)\n", "new_path": "security_monkey/watcher.py", "old_path": "security_monkey/watcher.py" }, { "change_type": "MODIFY", "diff": "@@ -67,10 +67,15 @@ class SQS(CloudAuxBatchedWatcher):\n \n # Offset by the existing items in the list (from other regions)\n offset = len(self.corresponding_items)\n+ queue_count = -1\n \n- for i in range(0, len(queues)):\n- items.append({\"Url\": queues[i], \"Region\": kwargs[\"region\"]})\n- self.corresponding_items[queues[i]] = i + offset\n+ for item_count in range(0, len(queues)):\n+ if self.corresponding_items.get(queues[item_count]):\n+ app.logger.error(\"[?] Received a duplicate item in the SQS list: {}. Skipping it.\".format(queues[item_count]))\n+ continue\n+ queue_count += 1\n+ items.append({\"Url\": queues[item_count], \"Region\": kwargs[\"region\"]})\n+ self.corresponding_items[queues[item_count]] = queue_count + offset\n \n return items\n \n", "new_path": "security_monkey/watchers/sqs.py", "old_path": "security_monkey/watchers/sqs.py" } ]
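The repair logic above, condensed into a hedged sketch (model names and `db.session` usage taken from the diff; a Flask-SQLAlchemy app context is assumed): any `Item` left without a `latest_revision_id` is treated as deleted by appending an empty, inactive revision.

```python
orphaned = Item.query.filter(
    Item.account_id == account.id,
    Item.tech_id == technology.id,
    Item.latest_revision_id == None,  # noqa: E711 -- SQLAlchemy "IS NULL"
).all()

for oi in orphaned:
    revision = ItemRevision(active=False, config={})  # empty config == deleted
    oi.revisions.append(revision)
    db.session.add_all([revision, oi])
    db.session.commit()
    db.session.refresh(revision)       # obtain the generated revision id
    oi.latest_revision_id = revision.id
    db.session.add(oi)
    db.session.commit()
```

The botocore silencing from the same commit is the standard `logging.getLogger(name).setLevel(logging.WARNING)` guard, gated behind the `DONT_IGNORE_BOTO_VERBOSE_LOGGERS` config flag.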
84fd14194ddaa5b890e4479def071ce53a93b9d4
netflix/security_monkey
null
null
Add options to post metrics to queue. This commit adds an option to Security Monkey to post metrics to CloudWatch. Metric data will be posted whenever scan queue items are added or removed.
[ { "change_type": "MODIFY", "diff": "@@ -5,6 +5,7 @@ This document outlines how to configure Security Monkey to:\n \n 1. Automatically run the API\n 1. Automatically scan for changes in your environment.\n+1. Configure Security Monkey to send scanning performance metrics\n \n Each section is important, please read them thoroughly.\n \n@@ -180,6 +181,11 @@ Supervisor will run the Celery `worker` command, which is:\n so keep the supervisor configurations on these instances separate.\n \n \n+Configure Security Monkey to send scanning performance metrics\n+--------------------------------------------------------------\n+Security Monkey can be configured to send metrics when objects are added or removed from the scanning queue. This allows operators to check Security Monkey performance and ensure that items are being processed from the queue in a timely manner. To do so set `METRICS_ENABLED` to `True`. You will need `cloudwatch:PutMetricData` permission. Metrics will be posted with the namespace `securitymonkey` unless configured using the variable `METRICS_NAMESPACE`. You will also want to set `METRICS_POST_REGION` with the region you want to post CloudWatch Metrics to (default: `us-east-1`).\n+\n+\n Deployment Strategies\n --------------------\n A typical deployment strategy is:\n", "new_path": "docs/autostarting.md", "old_path": "docs/autostarting.md" }, { "change_type": "MODIFY", "diff": "@@ -26,6 +26,7 @@ from security_monkey.datastore import store_exception, clear_old_exceptions, Tec\n from security_monkey.monitors import get_monitors, get_monitors_and_dependencies\n from security_monkey.reporter import Reporter\n from security_monkey.task_scheduler.util import CELERY, setup\n+import boto3\n from sqlalchemy.exc import OperationalError, InvalidRequestError, StatementError\n \n \n@@ -216,6 +217,8 @@ def find_changes(account_name, monitor_name, debug=True):\n fix_orphaned_deletions(account_name, monitor_name)\n \n monitors = get_monitors(account_name, [monitor_name], debug)\n+\n+ items = []\n for mon in monitors:\n cw = mon.watcher\n app.logger.info(\"[-->] Looking for changes in account: {}, technology: {}\".format(account_name, cw.index))\n@@ -224,17 +227,26 @@ def find_changes(account_name, monitor_name, debug=True):\n else:\n # Just fetch normally...\n (items, exception_map) = cw.slurp()\n+\n+ _post_metric(\n+ 'queue_items_added',\n+ len(items),\n+ account_name=account_name,\n+ tech=cw.i_am_singular\n+ )\n+\n cw.find_changes(current=items, exception_map=exception_map)\n+\n cw.save()\n \n # Batched monitors have already been monitored, and they will be skipped over.\n- audit_changes([account_name], [monitor_name], False, debug)\n+ audit_changes([account_name], [monitor_name], False, debug, items_count=len(items))\n db.session.close()\n \n return monitors\n \n \n-def audit_changes(accounts, monitor_names, send_report, debug=True, skip_batch=True):\n+def audit_changes(accounts, monitor_names, send_report, debug=True, skip_batch=True, items_count=None):\n \"\"\"\n Audits changes in the accounts\n :param accounts:\n@@ -254,6 +266,13 @@ def audit_changes(accounts, monitor_names, send_report, debug=True, skip_batch=T\n app.logger.debug(\"[-->] Auditing account: {}, technology: {}\".format(account, monitor.watcher.index))\n _audit_changes(account, monitor.auditors, send_report, debug)\n \n+ _post_metric(\n+ 'queue_items_completed',\n+ items_count,\n+ account_name=account,\n+ tech=monitor.watcher.i_am_singular\n+ )\n+\n \n def batch_logic(monitor, current_watcher, account_name, debug):\n 
\"\"\"\n@@ -293,9 +312,23 @@ def batch_logic(monitor, current_watcher, account_name, debug):\n ))\n (items, exception_map) = current_watcher.slurp()\n \n+ _post_metric(\n+ 'queue_items_added',\n+ len(items),\n+ account_name=account_name,\n+ tech=current_watcher.i_am_singular\n+ )\n+\n audit_items = current_watcher.find_changes(current=items, exception_map=exception_map)\n _audit_specific_changes(monitor, audit_items, False, debug)\n \n+ _post_metric(\n+ 'queue_items_completed',\n+ len(items),\n+ account_name=account_name,\n+ tech=current_watcher.i_am_singular\n+ )\n+\n # Delete the items that no longer exist:\n app.logger.debug(\"[-->] Deleting all items for {technology}/{account} that no longer exist.\".format(\n technology=current_watcher.i_am_plural, account=account_name\n@@ -349,3 +382,31 @@ def _audit_specific_changes(monitor, audit_items, send_report, debug=True):\n monitor.watcher.accounts[0])\n db.session.remove()\n store_exception(\"scheduler-audit-changes\", None, e)\n+\n+\n+def _post_metric(event_type, amount, account_name=None, tech=None):\n+ if not app.config.get('METRICS_ENABLED', False):\n+ return\n+\n+ cw_client = boto3.client('cloudwatch', region_name=app.config.get('METRICS_POST_REGION', 'us-east-1'))\n+ cw_client.put_metric_data(\n+ Namespace=app.config.get('METRICS_NAMESPACE', 'securitymonkey'),\n+ MetricData=[\n+ {\n+ 'MetricName': event_type,\n+ 'Timestamp': int(time.time()),\n+ 'Value': amount,\n+ 'Unit': 'Count',\n+ 'Dimensions': [\n+ {\n+ 'Name': 'tech',\n+ 'Value': tech\n+ },\n+ {\n+ 'Name': 'account_number',\n+ 'Value': Account.query.filter(Account.name == account_name).first().identifier\n+ }\n+ ]\n+ }\n+ ]\n+ )\n", "new_path": "security_monkey/task_scheduler/tasks.py", "old_path": "security_monkey/task_scheduler/tasks.py" } ]
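Illustrative aside: a self-contained sketch of the CloudWatch call that `_post_metric` wraps in the diff above. It assumes AWS credentials with `cloudwatch:PutMetricData`; the dimension names and config defaults mirror the diff, while the standalone function signature is a hypothetical simplification.

```python
import time
import boto3

# Hedged sketch of _post_metric's core call; assumes valid AWS credentials.
def post_metric(event_type, amount, tech, account_number,
                namespace='securitymonkey', region='us-east-1'):
    cw_client = boto3.client('cloudwatch', region_name=region)
    cw_client.put_metric_data(
        Namespace=namespace,
        MetricData=[{
            'MetricName': event_type,       # e.g. 'queue_items_added'
            'Timestamp': int(time.time()),
            'Value': amount,
            'Unit': 'Count',
            'Dimensions': [
                {'Name': 'tech', 'Value': tech},
                {'Name': 'account_number', 'Value': account_number},
            ],
        }],
    )
```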
9ef1a06814c1aedb30dd4a932eb15cfb4a9a5e06
geopy/geopy
null
null
Drop Python 3.5 support + remove async_generator. async_generator is not compatible with the latest pytest.
[ { "change_type": "MODIFY", "diff": "@@ -40,7 +40,7 @@ jobs:\n strategy:\n fail-fast: false\n matrix: # &test-matrix\n- python-version: [3.5, 3.6, 3.7, 3.8, 3.9, 'pypy3']\n+ python-version: [3.6, 3.7, 3.8, 3.9, 'pypy3']\n experimental: [false]\n include:\n - python-version: '3.10-dev'\n@@ -64,7 +64,7 @@ jobs:\n strategy:\n fail-fast: false\n matrix: # *test-matrix https://github.community/t/support-for-yaml-anchors/16128\n- python-version: [3.5, 3.6, 3.7, 3.8, 3.9, 'pypy3']\n+ python-version: [3.6, 3.7, 3.8, 3.9, 'pypy3']\n experimental: [false]\n include:\n - python-version: '3.10-dev'\n", "new_path": ".github/workflows/ci.yml", "old_path": ".github/workflows/ci.yml" }, { "change_type": "MODIFY", "diff": "@@ -5,7 +5,7 @@ geopy makes it easy for Python developers to locate the coordinates of\n addresses, cities, countries, and landmarks across the globe using third-party\n geocoders and other data sources.\n \n-geopy is tested against CPython (versions 3.5, 3.6, 3.7, 3.8, 3.9)\n+geopy is tested against CPython (versions 3.6, 3.7, 3.8, 3.9)\n and PyPy3. geopy 1.x line also supported CPython 2.7, 3.4 and PyPy2.\n \"\"\"\n \n", "new_path": "geopy/__init__.py", "old_path": "geopy/__init__.py" }, { "change_type": "MODIFY", "diff": "@@ -1,29 +1,14 @@\n #!/usr/bin/env python\n-\"\"\"\n-geopy\n-\"\"\"\n-\n-import sys\n \n from setuptools import find_packages, setup\n \n-if sys.version_info < (3, 5):\n- raise RuntimeError(\n- \"geopy 2 supports Python 3.5 and above. \"\n- \"Use geopy 1.x if you need Python 2.7 or 3.4 support.\"\n- )\n-\n-# This import must be below the above `sys.version_info` check,\n-# because the code being imported here is not compatible with the older\n-# versions of Python.\n-from geopy import __version__ as version # noqa # isort:skip\n+from geopy import __version__ as version\n \n INSTALL_REQUIRES = [\n 'geographiclib<3,>=1.49',\n ]\n \n EXTRAS_DEV_TESTFILES_COMMON = [\n- \"async_generator\",\n ]\n \n EXTRAS_DEV_LINT = [\n@@ -85,7 +70,7 @@ setup(\n },\n license='MIT',\n keywords='geocode geocoding gis geographical maps earth distance',\n- python_requires=\">=3.5\",\n+ python_requires=\">=3.6\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Intended Audience :: Developers\",\n@@ -97,7 +82,6 @@ setup(\n \"Topic :: Software Development :: Libraries :: Python Modules\",\n \"Programming Language :: Python :: 3 :: Only\",\n \"Programming Language :: Python :: 3\",\n- \"Programming Language :: Python :: 3.5\",\n \"Programming Language :: Python :: 3.6\",\n \"Programming Language :: Python :: 3.7\",\n \"Programming Language :: Python :: 3.8\",\n", "new_path": "setup.py", "old_path": "setup.py" }, { "change_type": "MODIFY", "diff": "@@ -1,3 +1,4 @@\n+import contextlib\n import os\n import ssl\n from unittest.mock import patch\n@@ -5,7 +6,6 @@ from urllib.parse import urljoin\n from urllib.request import getproxies, urlopen\n \n import pytest\n-from async_generator import async_generator, asynccontextmanager, yield_\n \n import geopy.geocoders\n from geopy.adapters import (\n@@ -134,14 +134,13 @@ def adapter_factory(request):\n yield adapter_factory\n \n \n-@asynccontextmanager\n-@async_generator\n+@contextlib.asynccontextmanager\n async def make_dummy_async_geocoder(**kwargs):\n geocoder = DummyGeocoder(**kwargs)\n run_async = isinstance(geocoder.adapter, BaseAsyncAdapter)\n if run_async:\n async with geocoder:\n- await yield_(geocoder)\n+ yield geocoder\n else:\n orig_geocode = geocoder.geocode\n \n@@ -149,7 +148,7 @@ async def 
make_dummy_async_geocoder(**kwargs):\n return orig_geocode(*args, **kwargs)\n \n geocoder.geocode = geocode\n- await yield_(geocoder)\n+ yield geocoder\n \n \n @pytest.mark.parametrize(\"adapter_cls\", NOT_AVAILABLE_ADAPTERS)\n", "new_path": "test/adapters/each_adapter.py", "old_path": "test/adapters/each_adapter.py" }, { "change_type": "MODIFY", "diff": "@@ -2,7 +2,6 @@ import asyncio\n import atexit\n import contextlib\n import importlib\n-import inspect\n import os\n import types\n from collections import defaultdict\n@@ -19,35 +18,6 @@ import geopy.geocoders\n from geopy.adapters import AdapterHTTPError, BaseAsyncAdapter, BaseSyncAdapter\n from geopy.geocoders.base import _DEFAULT_ADAPTER_CLASS\n \n-# pytest-aiohttp calls `inspect.isasyncgenfunction` to detect\n-# async generators in fixtures.\n-# To support Python 3.5 we use `async_generator` library.\n-# However:\n-# - Since Python 3.6 there is a native implementation of\n-# `inspect.isasyncgenfunction`, but it returns False\n-# for `async_generator`'s functions.\n-# - The stock `async_generator.isasyncgenfunction` doesn't detect\n-# generators wrapped in `@pytest.fixture`.\n-#\n-# Thus we resort to monkey-patching it (for now).\n-if getattr(inspect, \"isasyncgenfunction\", None) is not None:\n- # >=py36\n- original_isasyncgenfunction = inspect.isasyncgenfunction\n-else:\n- # ==py35\n- original_isasyncgenfunction = lambda func: False # noqa\n-\n-\n-def isasyncgenfunction(obj):\n- if original_isasyncgenfunction(obj):\n- return True\n- # Detect async_generator function, possibly wrapped in `@pytest.fixture`:\n- # See https://github.com/python-trio/async_generator/blob/v1.10/async_generator/_impl.py#L451-L455 # noqa\n- return bool(getattr(obj, \"_async_gen_function\", None))\n-\n-\n-inspect.isasyncgenfunction = isasyncgenfunction\n-\n \n def load_adapter_cls(adapter_ref):\n actual_adapter_class = _DEFAULT_ADAPTER_CLASS\n", "new_path": "test/conftest.py", "old_path": "test/conftest.py" }, { "change_type": "MODIFY", "diff": "@@ -1,5 +1,4 @@\n import pytest\n-from async_generator import async_generator, yield_\n \n from geopy.exc import ConfigurationError, GeocoderQueryError\n from geopy.geocoders import IGNFrance\n@@ -254,14 +253,13 @@ class TestIGNFranceUsernameAuthProxy(BaseTestGeocoder):\n )\n \n @pytest.fixture(scope='class', autouse=True)\n- @async_generator\n async def start_proxy(_, request, class_geocoder):\n cls = request.cls\n cls.proxy_server = ProxyServerThread(timeout=cls.proxy_timeout)\n cls.proxy_server.start()\n cls.proxy_url = cls.proxy_server.get_proxy_url()\n async with cls.inject_geocoder(cls.make_geocoder(proxies=cls.proxy_url)):\n- await yield_()\n+ yield\n cls.proxy_server.stop()\n cls.proxy_server.join()\n \n", "new_path": "test/geocoders/ignfrance.py", "old_path": "test/geocoders/ignfrance.py" }, { "change_type": "MODIFY", "diff": "@@ -1,10 +1,10 @@\n+import contextlib\n import json\n import os\n from abc import ABC, abstractmethod\n from unittest.mock import ANY, patch\n \n import pytest\n-from async_generator import async_generator, asynccontextmanager, yield_\n \n from geopy import exc\n from geopy.adapters import BaseAsyncAdapter\n@@ -47,7 +47,6 @@ class BaseTestGeocoder(ABC):\n delta = 0.5\n \n @pytest.fixture(scope='class', autouse=True)\n- @async_generator\n async def class_geocoder(_, request, patch_adapter, is_internet_access_allowed):\n \"\"\"Prepare a class-level Geocoder instance.\"\"\"\n cls = request.cls\n@@ -59,13 +58,12 @@ class BaseTestGeocoder(ABC):\n run_async = isinstance(geocoder.adapter, 
BaseAsyncAdapter)\n if run_async:\n async with geocoder:\n- await yield_(geocoder)\n+ yield geocoder\n else:\n- await yield_(geocoder)\n+ yield geocoder\n \n @classmethod\n- @asynccontextmanager\n- @async_generator\n+ @contextlib.asynccontextmanager\n async def inject_geocoder(cls, geocoder):\n \"\"\"An async context manager allowing to inject a custom\n geocoder instance in a single test method which will\n@@ -75,9 +73,9 @@ class BaseTestGeocoder(ABC):\n run_async = isinstance(geocoder.adapter, BaseAsyncAdapter)\n if run_async:\n async with geocoder:\n- await yield_(geocoder)\n+ yield geocoder\n else:\n- await yield_(geocoder)\n+ yield geocoder\n \n @pytest.fixture(autouse=True)\n def ensure_no_geocoder_assignment(self):\n", "new_path": "test/geocoders/util.py", "old_path": "test/geocoders/util.py" }, { "change_type": "MODIFY", "diff": "@@ -1,7 +1,7 @@\n [tox]\n envlist=\n- py{35,36,37,38,39,310,py3},\n- py35{-async,-noextras},\n+ py36{-async,-noextras},\n+ py{37,38,39,310,py3},\n lint,\n rst,\n \n@@ -15,19 +15,18 @@ passenv = *\n whitelist_externals = make\n commands = make {env:GEOPY_TOX_TARGET:test}\n \n-[testenv:py35-async]\n+[testenv:py36-async]\n # Run a single job with asyncio adapter:\n # (not the whole matrix, to avoid spending extra quota)\n setenv = GEOPY_TEST_ADAPTER=geopy.adapters.AioHTTPAdapter\n \n-[testenv:py35-noextras]\n+[testenv:py36-noextras]\n # Ensure `pip install geopy` without any non-test extras doesn't break.\n extras =\n dev-test\n \n [gh-actions]\n python =\n- 3.5: py35\n 3.6: py36\n 3.7: py37\n 3.8: py38\n", "new_path": "tox.ini", "old_path": "tox.ini" } ]
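Illustrative aside: the mechanical core of the migration above is that `async_generator`'s `@asynccontextmanager` plus `await yield_(x)` becomes the stdlib `contextlib.asynccontextmanager` with a plain `yield x`. A minimal sketch follows; note the stdlib decorator arrived in Python 3.7, so its availability on the 3.6 builds the commit still targets is an assumption here.

```python
import asyncio
import contextlib

@contextlib.asynccontextmanager
async def make_resource():
    resource = {'ready': True}      # setup; may await things here
    try:
        yield resource              # plain yield replaces await yield_(resource)
    finally:
        resource['ready'] = False   # teardown runs on exit from `async with`

async def main():
    async with make_resource() as r:
        print(r['ready'])           # True

asyncio.run(main())
```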
0b2146c8f794d5642a0a4feb9152916b49fd4be8
mesonbuild/meson
null
null
Use named fields for command_template when generating the ninja command. The command template becomes easier to read with named fields.
[ { "change_type": "MODIFY", "diff": "@@ -1232,15 +1232,16 @@ int dummy;\n return\n rule = 'rule STATIC%s_LINKER\\n' % crstr\n if mesonlib.is_windows():\n- command_templ = ''' command = %s @$out.rsp\n+ command_template = ''' command = {executable} @$out.rsp\n rspfile = $out.rsp\n- rspfile_content = $LINK_ARGS %s $in\n+ rspfile_content = $LINK_ARGS {output_args} $in\n '''\n else:\n- command_templ = ' command = %s $LINK_ARGS %s $in\\n'\n- command = command_templ % (\n- ' '.join(static_linker.get_exelist()),\n- ' '.join(static_linker.get_output_args('$out')))\n+ command_template = ' command = {executable} $LINK_ARGS {output_args} $in\\n'\n+ command = command_template.format(\n+ executable=' '.join(static_linker.get_exelist()),\n+ output_args=' '.join(static_linker.get_output_args('$out'))\n+ )\n description = ' description = Static linking library $out\\n\\n'\n outfile.write(rule)\n outfile.write(command)\n@@ -1273,16 +1274,17 @@ int dummy;\n pass\n rule = 'rule %s%s_LINKER\\n' % (langname, crstr)\n if mesonlib.is_windows():\n- command_template = ''' command = %s @$out.rsp\n+ command_template = ''' command = {executable} @$out.rsp\n rspfile = $out.rsp\n- rspfile_content = %s $ARGS %s $in $LINK_ARGS $aliasing\n+ rspfile_content = {cross_args} $ARGS {output_args} $in $LINK_ARGS $aliasing\n '''\n else:\n- command_template = ' command = %s %s $ARGS %s $in $LINK_ARGS $aliasing\\n'\n- command = command_template % (\n- ' '.join(compiler.get_linker_exelist()),\n- ' '.join(cross_args),\n- ' '.join(compiler.get_linker_output_args('$out')))\n+ command_template = ' command = {executable} {cross_args} $ARGS {output_args} $in $LINK_ARGS $aliasing\\n'\n+ command = command_template.format(\n+ executable=' '.join(compiler.get_linker_exelist()),\n+ cross_args=' '.join(cross_args),\n+ output_args=' '.join(compiler.get_linker_output_args('$out'))\n+ )\n description = ' description = Linking target $out'\n outfile.write(rule)\n outfile.write(command)\n@@ -1386,17 +1388,18 @@ rule FORTRAN_DEP_HACK\n if getattr(self, 'created_llvm_ir_rule', False):\n return\n rule = 'rule llvm_ir{}_COMPILER\\n'.format('_CROSS' if is_cross else '')\n- args = [' '.join([ninja_quote(i) for i in compiler.get_exelist()]),\n- ' '.join(self.get_cross_info_lang_args(compiler.language, is_cross)),\n- ' '.join(compiler.get_output_args('$out')),\n- ' '.join(compiler.get_compile_only_args())]\n if mesonlib.is_windows():\n- command_template = ' command = {} @$out.rsp\\n' \\\n+ command_template = ' command = {executable} @$out.rsp\\n' \\\n ' rspfile = $out.rsp\\n' \\\n- ' rspfile_content = {} $ARGS {} {} $in\\n'\n+ ' rspfile_content = {cross_args} $ARGS {output_args} {compile_only_args} $in\\n'\n else:\n- command_template = ' command = {} {} $ARGS {} {} $in\\n'\n- command = command_template.format(*args)\n+ command_template = ' command = {executable} {cross_args} $ARGS {output_args} {compile_only_args} $in\\n'\n+ command = command_template.format(\n+ executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),\n+ cross_args=' '.join(self.get_cross_info_lang_args(compiler.language, is_cross)),\n+ output_args=' '.join(compiler.get_output_args('$out')),\n+ compile_only_args=' '.join(compiler.get_compile_only_args())\n+ )\n description = ' description = Compiling LLVM IR object $in.\\n'\n outfile.write(rule)\n outfile.write(command)\n@@ -1448,18 +1451,19 @@ rule FORTRAN_DEP_HACK\n quoted_depargs.append(d)\n cross_args = self.get_cross_info_lang_args(langname, is_cross)\n if mesonlib.is_windows():\n- command_template = ''' command = %s 
@$out.rsp\n+ command_template = ''' command = {executable} @$out.rsp\n rspfile = $out.rsp\n- rspfile_content = %s $ARGS %s %s %s $in\n+ rspfile_content = {cross_args} $ARGS {dep_args} {output_args} {compile_only_args} $in\n '''\n else:\n- command_template = ' command = %s %s $ARGS %s %s %s $in\\n'\n- command = command_template % (\n- ' '.join([ninja_quote(i) for i in compiler.get_exelist()]),\n- ' '.join(cross_args),\n- ' '.join(quoted_depargs),\n- ' '.join(compiler.get_output_args('$out')),\n- ' '.join(compiler.get_compile_only_args()))\n+ command_template = ' command = {executable} {cross_args} $ARGS {dep_args} {output_args} {compile_only_args} $in\\n'\n+ command = command_template.format(\n+ executable=' '.join([ninja_quote(i) for i in compiler.get_exelist()]),\n+ cross_args=' '.join(cross_args),\n+ dep_args=' '.join(quoted_depargs),\n+ output_args=' '.join(compiler.get_output_args('$out')),\n+ compile_only_args=' '.join(compiler.get_compile_only_args())\n+ )\n description = ' description = Compiling %s object $out\\n' % langname\n if compiler.get_id() == 'msvc':\n deps = ' deps = msvc\\n'\n@@ -1497,12 +1501,13 @@ rule FORTRAN_DEP_HACK\n output = ''\n else:\n output = ' '.join(compiler.get_output_args('$out'))\n- command = \" command = %s %s $ARGS %s %s %s $in\\n\" % (\n- ' '.join(compiler.get_exelist()),\n- ' '.join(cross_args),\n- ' '.join(quoted_depargs),\n- output,\n- ' '.join(compiler.get_compile_only_args()))\n+ command = \" command = {executable} {cross_args} $ARGS {dep_args} {output_args} {compile_only_args} $in\\n\".format(\n+ executable=' '.join(compiler.get_exelist()),\n+ cross_args=' '.join(cross_args),\n+ dep_args=' '.join(quoted_depargs),\n+ output_args=output,\n+ compile_only_args=' '.join(compiler.get_compile_only_args())\n+ )\n description = ' description = Precompiling header %s\\n' % '$in'\n if compiler.get_id() == 'msvc':\n deps = ' deps = msvc\\n'\n", "new_path": "mesonbuild/backend/ninjabackend.py", "old_path": "mesonbuild/backend/ninjabackend.py" } ]
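Illustrative aside: the whole change reduces to swapping positional `%` substitution for named `str.format` fields. A tiny self-checking sketch with placeholder values (not a real linker invocation):

```python
executable = 'ar'      # placeholder value
output_args = '$out'   # placeholder value

# Before: positional slots; argument order is easy to get wrong.
old_style = ' command = %s $LINK_ARGS %s $in\n' % (executable, output_args)

# After: each slot is self-describing and safe to reorder.
command_template = ' command = {executable} $LINK_ARGS {output_args} $in\n'
new_style = command_template.format(executable=executable,
                                    output_args=output_args)

assert old_style == new_style   # same output, clearer template
```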
003e0a0610582020d1b213e0c8d16fe63bc6eabe
mesonbuild/meson
null
null
Use the same function for detection of C and C++ compilers. The mechanism is identical, which means there's a high likelihood of unintended divergence. In fact, a slight divergence was already there.
[ { "change_type": "MODIFY", "diff": "@@ -400,9 +400,9 @@ class Environment:\n errmsg += '\\nRunning \"{0}\" gave \"{1}\"'.format(c, e)\n raise EnvironmentException(errmsg)\n \n- def detect_c_compiler(self, want_cross):\n+ def _detect_c_or_cpp_compiler(self, lang, evar, want_cross):\n popen_exceptions = {}\n- compilers, ccache, is_cross, exe_wrap = self._get_compilers('c', 'CC', want_cross)\n+ compilers, ccache, is_cross, exe_wrap = self._get_compilers(lang, evar, want_cross)\n for compiler in compilers:\n if isinstance(compiler, str):\n compiler = [compiler]\n@@ -424,24 +424,34 @@ class Environment:\n continue\n gtype = self.get_gnu_compiler_type(defines)\n version = self.get_gnu_version_from_defines(defines)\n- return GnuCCompiler(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)\n+ cls = GnuCCompiler if lang == 'c' else GnuCPPCompiler\n+ return cls(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)\n if 'clang' in out:\n if 'Apple' in out or for_darwin(want_cross, self):\n cltype = CLANG_OSX\n else:\n cltype = CLANG_STANDARD\n- return ClangCCompiler(ccache + compiler, version, cltype, is_cross, exe_wrap)\n+ cls = ClangCCompiler if lang == 'c' else ClangCPPCompiler\n+ return cls(ccache + compiler, version, cltype, is_cross, exe_wrap)\n if 'Microsoft' in out or 'Microsoft' in err:\n # Visual Studio prints version number to stderr but\n # everything else to stdout. Why? Lord only knows.\n version = search_version(err)\n- return VisualStudioCCompiler(compiler, version, is_cross, exe_wrap)\n+ cls = VisualStudioCCompiler if lang == 'c' else VisualStudioCPPCompiler\n+ return cls(compiler, version, is_cross, exe_wrap)\n if '(ICC)' in out:\n # TODO: add microsoft add check OSX\n inteltype = ICC_STANDARD\n- return IntelCCompiler(ccache + compiler, version, inteltype, is_cross, exe_wrap)\n+ cls = IntelCCompiler if lang == 'c' else IntelCPPCompiler\n+ return cls(ccache + compiler, version, inteltype, is_cross, exe_wrap)\n self._handle_compiler_exceptions(popen_exceptions, compilers)\n \n+ def detect_c_compiler(self, want_cross):\n+ return self._detect_c_or_cpp_compiler('c', 'CC', want_cross)\n+\n+ def detect_cpp_compiler(self, want_cross):\n+ return self._detect_c_or_cpp_compiler('cpp', 'CXX', want_cross)\n+\n def detect_fortran_compiler(self, want_cross):\n popen_exceptions = {}\n compilers, ccache, is_cross, exe_wrap = self._get_compilers('fortran', 'FC', want_cross)\n@@ -496,46 +506,6 @@ class Environment:\n path = os.path.split(__file__)[0]\n return os.path.join(path, 'depfixer.py')\n \n- def detect_cpp_compiler(self, want_cross):\n- popen_exceptions = {}\n- compilers, ccache, is_cross, exe_wrap = self._get_compilers('cpp', 'CXX', want_cross)\n- for compiler in compilers:\n- if isinstance(compiler, str):\n- compiler = [compiler]\n- basename = os.path.basename(compiler[-1]).lower()\n- if basename == 'cl' or basename == 'cl.exe':\n- arg = '/?'\n- else:\n- arg = '--version'\n- try:\n- p, out, err = Popen_safe(compiler + [arg])\n- except OSError as e:\n- popen_exceptions[' '.join(compiler + [arg])] = e\n- continue\n- version = search_version(out)\n- if 'Free Software Foundation' in out:\n- defines = self.get_gnu_compiler_defines(compiler)\n- if not defines:\n- popen_exceptions[compiler] = 'no pre-processor defines'\n- continue\n- gtype = self.get_gnu_compiler_type(defines)\n- version = self.get_gnu_version_from_defines(defines)\n- return GnuCPPCompiler(ccache + compiler, version, gtype, is_cross, exe_wrap, defines)\n- if 'clang' in out:\n- if 'Apple' in out:\n- cltype = 
CLANG_OSX\n- else:\n- cltype = CLANG_STANDARD\n- return ClangCPPCompiler(ccache + compiler, version, cltype, is_cross, exe_wrap)\n- if 'Microsoft' in out or 'Microsoft' in err:\n- version = search_version(err)\n- return VisualStudioCPPCompiler(compiler, version, is_cross, exe_wrap)\n- if '(ICC)' in out:\n- # TODO: add microsoft add check OSX\n- inteltype = ICC_STANDARD\n- return IntelCPPCompiler(ccache + compiler, version, inteltype, is_cross, exe_wrap)\n- self._handle_compiler_exceptions(popen_exceptions, compilers)\n-\n def detect_objc_compiler(self, want_cross):\n popen_exceptions = {}\n compilers, ccache, is_cross, exe_wrap = self._get_compilers('objc', 'OBJC', want_cross)\n", "new_path": "mesonbuild/environment.py", "old_path": "mesonbuild/environment.py" } ]
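Illustrative aside: a stripped-down sketch of the shape `_detect_c_or_cpp_compiler` takes, with one shared routine and only the compiler class branching on the language. The empty classes are stand-ins, not the real meson classes:

```python
class GnuCCompiler: pass
class GnuCPPCompiler: pass
class ClangCCompiler: pass
class ClangCPPCompiler: pass

def pick_compiler_class(lang, version_output):
    # The detection heuristics are shared; only the returned class differs.
    if 'Free Software Foundation' in version_output:
        return GnuCCompiler if lang == 'c' else GnuCPPCompiler
    if 'clang' in version_output:
        return ClangCCompiler if lang == 'c' else ClangCPPCompiler
    raise RuntimeError('unknown compiler')

print(pick_compiler_class('cpp', 'clang version 5.0.0').__name__)
# -> ClangCPPCompiler
```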
1fbf6300c5d38b12a4347a9327e54a9a315ef8de
mesonbuild/meson
null
null
Use an enum instead of strings for method names. If a non-string value is passed as a method, reject this explicitly with a clear error message rather than trying to match with it and failing.
[ { "change_type": "MODIFY", "diff": "@@ -24,6 +24,7 @@ import sys\n import os, stat, glob, shutil\n import subprocess\n import sysconfig\n+from enum import Enum\n from collections import OrderedDict\n from . mesonlib import MesonException, version_compare, version_compare_many, Popen_safe\n from . import mlog\n@@ -33,21 +34,35 @@ from .environment import detect_cpu_family, for_windows\n class DependencyException(MesonException):\n '''Exceptions raised while trying to find dependencies'''\n \n+class DependencyMethods(Enum):\n+ # Auto means to use whatever dependency checking mechanisms in whatever order meson thinks is best.\n+ AUTO = 'auto'\n+ PKGCONFIG = 'pkg-config'\n+ QMAKE = 'qmake'\n+ # Just specify the standard link arguments, assuming the operating system provides the library.\n+ SYSTEM = 'system'\n+ # Detect using sdl2-config\n+ SDLCONFIG = 'sdlconfig'\n+ # This is only supported on OSX - search the frameworks directory by name.\n+ EXTRAFRAMEWORK = 'extraframework'\n+ # Detect using the sysconfig module.\n+ SYSCONFIG = 'sysconfig'\n+\n class Dependency:\n def __init__(self, type_name, kwargs):\n self.name = \"null\"\n self.is_found = False\n self.type_name = type_name\n- method = kwargs.get('method', 'auto')\n+ method = DependencyMethods(kwargs.get('method', 'auto'))\n \n # Set the detection method. If the method is set to auto, use any available method.\n # If method is set to a specific string, allow only that detection method.\n- if method == \"auto\":\n+ if method == DependencyMethods.AUTO:\n self.methods = self.get_methods()\n elif method in self.get_methods():\n self.methods = [method]\n else:\n- raise MesonException('Unsupported detection method: {}, allowed methods are {}'.format(method, mlog.format_list([\"auto\"] + self.get_methods())))\n+ raise MesonException('Unsupported detection method: {}, allowed methods are {}'.format(method.value, mlog.format_list(map(lambda x: x.value, [DependencyMethods.AUTO] + self.get_methods()))))\n \n def __repr__(self):\n s = '<{0} {1}: {2}>'\n@@ -68,7 +83,7 @@ class Dependency:\n return []\n \n def get_methods(self):\n- return ['auto']\n+ return [DependencyMethods.AUTO]\n \n def get_name(self):\n return self.name\n@@ -268,7 +283,7 @@ class PkgConfigDependency(Dependency):\n return self.libs\n \n def get_methods(self):\n- return ['pkg-config']\n+ return [DependencyMethods.PKGCONFIG]\n \n def check_pkgconfig(self):\n evar = 'PKG_CONFIG'\n@@ -985,10 +1000,10 @@ class QtBaseDependency(Dependency):\n # Keep track of the detection methods used, for logging purposes.\n methods = []\n # Prefer pkg-config, then fallback to `qmake -query`\n- if 'pkg-config' in self.methods:\n+ if DependencyMethods.PKGCONFIG in self.methods:\n self._pkgconfig_detect(mods, env, kwargs)\n methods.append('pkgconfig')\n- if not self.is_found and 'qmake' in self.methods:\n+ if not self.is_found and DependencyMethods.QMAKE in self.methods:\n from_text = self._qmake_detect(mods, env, kwargs)\n methods.append('qmake-' + self.name)\n methods.append('qmake')\n@@ -1137,7 +1152,7 @@ class QtBaseDependency(Dependency):\n return self.largs\n \n def get_methods(self):\n- return ['pkg-config', 'qmake']\n+ return [DependencyMethods.PKGCONFIG, DependencyMethods.QMAKE]\n \n def found(self):\n return self.is_found\n@@ -1301,7 +1316,7 @@ class GLDependency(Dependency):\n self.is_found = False\n self.cargs = []\n self.linkargs = []\n- if 'pkg-config' in self.methods:\n+ if DependencyMethods.PKGCONFIG in self.methods:\n try:\n pcdep = PkgConfigDependency('gl', environment, kwargs)\n if 
pcdep.found():\n@@ -1313,7 +1328,7 @@ class GLDependency(Dependency):\n return\n except Exception:\n pass\n- if 'system' in self.methods:\n+ if DependencyMethods.SYSTEM in self.methods:\n if mesonlib.is_osx():\n self.is_found = True\n self.linkargs = ['-framework', 'OpenGL']\n@@ -1333,9 +1348,9 @@ class GLDependency(Dependency):\n \n def get_methods(self):\n if mesonlib.is_osx() or mesonlib.is_windows():\n- return ['pkg-config', 'system']\n+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM]\n else:\n- return ['pkg-config']\n+ return [DependencyMethods.PKGCONFIG]\n \n # There are three different ways of depending on SDL2:\n # sdl2-config, pkg-config and OSX framework\n@@ -1345,7 +1360,7 @@ class SDL2Dependency(Dependency):\n self.is_found = False\n self.cargs = []\n self.linkargs = []\n- if 'pkg-config' in self.methods:\n+ if DependencyMethods.PKGCONFIG in self.methods:\n try:\n pcdep = PkgConfigDependency('sdl2', environment, kwargs)\n if pcdep.found():\n@@ -1358,7 +1373,7 @@ class SDL2Dependency(Dependency):\n except Exception as e:\n mlog.debug('SDL 2 not found via pkgconfig. Trying next, error was:', str(e))\n pass\n- if 'sdlconfig' in self.methods:\n+ if DependencyMethods.SDLCONFIG in self.methods:\n sdlconf = shutil.which('sdl2-config')\n if sdlconf:\n stdo = Popen_safe(['sdl2-config', '--cflags'])[1]\n@@ -1372,7 +1387,7 @@ class SDL2Dependency(Dependency):\n self.version, '(%s)' % sdlconf)\n return\n mlog.debug('Could not find sdl2-config binary, trying next.')\n- if 'extraframework' in self.methods:\n+ if DependencyMethods.EXTRAFRAMEWORK in self.methods:\n if mesonlib.is_osx():\n fwdep = ExtraFrameworkDependency('sdl2', kwargs.get('required', True), None, kwargs)\n if fwdep.found():\n@@ -1397,9 +1412,9 @@ class SDL2Dependency(Dependency):\n \n def get_methods(self):\n if mesonlib.is_osx():\n- return ['pkg-config', 'sdlconfig', 'extraframework']\n+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SDLCONFIG, DependencyMethods.EXTRAFRAMEWORK]\n else:\n- return ['pkg-config', 'sdlconfig']\n+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SDLCONFIG]\n \n class ExtraFrameworkDependency(Dependency):\n def __init__(self, name, required, path, kwargs):\n@@ -1465,7 +1480,7 @@ class Python3Dependency(Dependency):\n self.is_found = False\n # We can only be sure that it is Python 3 at this point\n self.version = '3'\n- if 'pkg-config' in self.methods:\n+ if DependencyMethods.PKGCONFIG in self.methods:\n try:\n pkgdep = PkgConfigDependency('python3', environment, kwargs)\n if pkgdep.found():\n@@ -1477,9 +1492,9 @@ class Python3Dependency(Dependency):\n except Exception:\n pass\n if not self.is_found:\n- if mesonlib.is_windows() and 'sysconfig' in self.methods:\n+ if mesonlib.is_windows() and DependencyMethods.SYSCONFIG in self.methods:\n self._find_libpy3_windows(environment)\n- elif mesonlib.is_osx() and 'extraframework' in self.methods:\n+ elif mesonlib.is_osx() and DependencyMethods.EXTRAFRAMEWORK in self.methods:\n # In OSX the Python 3 framework does not have a version\n # number in its name.\n fw = ExtraFrameworkDependency('python', False, None, kwargs)\n@@ -1536,11 +1551,11 @@ class Python3Dependency(Dependency):\n \n def get_methods(self):\n if mesonlib.is_windows():\n- return ['pkg-config', 'sysconfig']\n+ return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG]\n elif mesonlib.is_osx():\n- return ['pkg-config', 'extraframework']\n+ return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK]\n else:\n- return ['pkg-config']\n+ return 
[DependencyMethods.PKGCONFIG]\n \n def get_version(self):\n return self.version\n@@ -1574,6 +1589,8 @@ def find_external_dependency(name, environment, kwargs):\n required = kwargs.get('required', True)\n if not isinstance(required, bool):\n raise DependencyException('Keyword \"required\" must be a boolean.')\n+ if not isinstance(kwargs.get('method', ''), str):\n+ raise DependencyException('Keyword \"method\" must be a string.')\n lname = name.lower()\n if lname in packages:\n dep = packages[lname](environment, kwargs)\n", "new_path": "mesonbuild/dependencies.py", "old_path": "mesonbuild/dependencies.py" } ]
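Illustrative aside: a condensed, runnable sketch of the validation flow the enum enables. Unknown names fail at `DependencyMethods(...)` construction and non-strings are rejected up front; `resolve_methods` is a hypothetical distillation of the constructor logic in the diff.

```python
from enum import Enum

class DependencyMethods(Enum):
    AUTO = 'auto'
    PKGCONFIG = 'pkg-config'
    QMAKE = 'qmake'

def resolve_methods(method_str, supported):
    if not isinstance(method_str, str):
        raise TypeError('Keyword "method" must be a string.')
    method = DependencyMethods(method_str)   # ValueError on unknown names
    if method == DependencyMethods.AUTO:
        return supported
    if method in supported:
        return [method]
    allowed = [m.value for m in [DependencyMethods.AUTO] + supported]
    raise ValueError('Unsupported detection method: {}, allowed methods '
                     'are {}'.format(method.value, allowed))

print(resolve_methods('qmake', [DependencyMethods.PKGCONFIG,
                                DependencyMethods.QMAKE]))
```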
30645ed54b4e08611ae4883137d774a4c02b0278
mesonbuild/meson
null
null
Remove extra casts on InterpreterBase.evaluate_statement. The result of this method is always a "native" object, and code coverage (plus a manual inspection) shows that this conversion is never done.
[ { "change_type": "MODIFY", "diff": "@@ -2625,11 +2625,10 @@ different subdirectory.\n raise InterpreterException('Tried to add non-existing source file %s.' % s)\n \n def format_string(self, templ, args):\n- templ = self.to_native(templ)\n if isinstance(args, mparser.ArgumentNode):\n args = args.arguments\n for (i, arg) in enumerate(args):\n- arg = self.to_native(self.evaluate_statement(arg))\n+ arg = self.evaluate_statement(arg)\n if isinstance(arg, bool): # Python boolean is upper case.\n arg = str(arg).lower()\n templ = templ.replace('@{}@'.format(i), str(arg))\n", "new_path": "mesonbuild/interpreter.py", "old_path": "mesonbuild/interpreter.py" }, { "change_type": "MODIFY", "diff": "@@ -198,8 +198,6 @@ class InterpreterBase:\n \n def evaluate_notstatement(self, cur):\n v = self.evaluate_statement(cur.value)\n- if isinstance(v, mparser.BooleanNode):\n- v = v.value\n if not isinstance(v, bool):\n raise InterpreterException('Argument to \"not\" is not a boolean.')\n return not v\n@@ -217,16 +215,8 @@ class InterpreterBase:\n self.evaluate_codeblock(node.elseblock)\n \n def evaluate_comparison(self, node):\n- v1 = self.evaluate_statement(node.left)\n- v2 = self.evaluate_statement(node.right)\n- if self.is_elementary_type(v1):\n- val1 = v1\n- else:\n- val1 = v1.value\n- if self.is_elementary_type(v2):\n- val2 = v2\n- else:\n- val2 = v2.value\n+ val1 = self.evaluate_statement(node.left)\n+ val2 = self.evaluate_statement(node.right)\n if node.ctype == '==':\n return val1 == val2\n elif node.ctype == '!=':\n@@ -244,45 +234,35 @@ class InterpreterBase:\n \n def evaluate_andstatement(self, cur):\n l = self.evaluate_statement(cur.left)\n- if isinstance(l, mparser.BooleanNode):\n- l = l.value\n if not isinstance(l, bool):\n raise InterpreterException('First argument to \"and\" is not a boolean.')\n if not l:\n return False\n r = self.evaluate_statement(cur.right)\n- if isinstance(r, mparser.BooleanNode):\n- r = r.value\n if not isinstance(r, bool):\n raise InterpreterException('Second argument to \"and\" is not a boolean.')\n return r\n \n def evaluate_orstatement(self, cur):\n l = self.evaluate_statement(cur.left)\n- if isinstance(l, mparser.BooleanNode):\n- l = l.get_value()\n if not isinstance(l, bool):\n raise InterpreterException('First argument to \"or\" is not a boolean.')\n if l:\n return True\n r = self.evaluate_statement(cur.right)\n- if isinstance(r, mparser.BooleanNode):\n- r = r.get_value()\n if not isinstance(r, bool):\n raise InterpreterException('Second argument to \"or\" is not a boolean.')\n return r\n \n def evaluate_uminusstatement(self, cur):\n v = self.evaluate_statement(cur.value)\n- if isinstance(v, mparser.NumberNode):\n- v = v.value\n if not isinstance(v, int):\n raise InterpreterException('Argument to negation is not an integer.')\n return -v\n \n def evaluate_arithmeticstatement(self, cur):\n- l = self.to_native(self.evaluate_statement(cur.left))\n- r = self.to_native(self.evaluate_statement(cur.right))\n+ l = self.evaluate_statement(cur.left)\n+ r = self.evaluate_statement(cur.right)\n \n if cur.operation == 'add':\n try:\n@@ -382,8 +362,6 @@ class InterpreterBase:\n obj = self.evaluate_statement(invokable)\n method_name = node.name\n args = node.args\n- if isinstance(obj, mparser.StringNode):\n- obj = obj.get_value()\n if isinstance(obj, str):\n return self.string_method_call(obj, method_name, args)\n if isinstance(obj, bool):\n@@ -402,7 +380,6 @@ class InterpreterBase:\n return obj.method_call(method_name, self.flatten(args), kwargs)\n \n def bool_method_call(self, 
obj, method_name, args):\n- obj = self.to_native(obj)\n (posargs, _) = self.reduce_arguments(args)\n if method_name == 'to_string':\n if not posargs:\n@@ -426,7 +403,6 @@ class InterpreterBase:\n raise InterpreterException('Unknown method \"%s\" for a boolean.' % method_name)\n \n def int_method_call(self, obj, method_name, args):\n- obj = self.to_native(obj)\n (posargs, _) = self.reduce_arguments(args)\n if method_name == 'is_even':\n if not posargs:\n@@ -442,7 +418,6 @@ class InterpreterBase:\n raise InterpreterException('Unknown method \"%s\" for an integer.' % method_name)\n \n def string_method_call(self, obj, method_name, args):\n- obj = self.to_native(obj)\n (posargs, _) = self.reduce_arguments(args)\n if method_name == 'strip':\n return obj.strip()\n@@ -534,8 +509,6 @@ class InterpreterBase:\n raise InvalidArguments('Keyword argument name is not a string.')\n a = args.kwargs[key]\n reduced_kw[key] = self.evaluate_statement(a)\n- if not isinstance(reduced_pos, list):\n- reduced_pos = [reduced_pos]\n self.argument_depth -= 1\n return reduced_pos, reduced_kw\n \n@@ -564,7 +537,6 @@ To specify a keyword argument, use : instead of =.''')\n if not isinstance(var_name, str):\n raise InvalidArguments('Tried to assign value to a non-variable.')\n value = self.evaluate_statement(node.value)\n- value = self.to_native(value)\n if not self.is_assignable(value):\n raise InvalidCode('Tried to assign an invalid value to variable.')\n # For mutable objects we need to make a copy on assignment\n@@ -593,12 +565,6 @@ To specify a keyword argument, use : instead of =.''')\n return self.variables[varname]\n raise InvalidCode('Unknown variable \"%s\".' % varname)\n \n- def to_native(self, arg):\n- if isinstance(arg, (mparser.StringNode, mparser.NumberNode,\n- mparser.BooleanNode)):\n- return arg.value\n- return arg\n-\n def is_assignable(self, value):\n return isinstance(value, (InterpreterObject, dependencies.Dependency,\n str, int, list, mesonlib.File))\n@@ -624,7 +590,7 @@ To specify a keyword argument, use : instead of =.''')\n if len(args) != 2:\n raise InvalidCode('Set_variable takes two arguments.')\n varname = args[0]\n- value = self.to_native(args[1])\n+ value = args[1]\n self.set_variable(varname, value)\n \n # @noKwargs\n", "new_path": "mesonbuild/interpreterbase.py", "old_path": "mesonbuild/interpreterbase.py" } ]
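Illustrative aside: the invariant this cleanup leans on, in miniature. Once evaluators hand back native Python values, the old AST-node unwrapping branches are unreachable and each check collapses to a plain type test:

```python
# Before, each evaluator defensively unwrapped possible BooleanNode values;
# with native values guaranteed, only the type check remains.
def evaluate_not(value):
    if not isinstance(value, bool):
        raise TypeError('Argument to "not" is not a boolean.')
    return not value

print(evaluate_not(True))   # False
```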
a681348b057e3ef8df3d0e35fd146ec75a880b4a
mesonbuild/meson
null
null
Add some colour to test output. Bold the section names and colourize errors & skips.
[ { "change_type": "MODIFY", "diff": "@@ -250,6 +250,23 @@ def log_text_file(logfile, testdir, stdo, stde):\n executor.shutdown()\n raise StopException()\n \n+\n+def bold(text):\n+ return mlog.bold(text).get_text(mlog.colorize_console)\n+\n+\n+def green(text):\n+ return mlog.green(text).get_text(mlog.colorize_console)\n+\n+\n+def red(text):\n+ return mlog.red(text).get_text(mlog.colorize_console)\n+\n+\n+def yellow(text):\n+ return mlog.yellow(text).get_text(mlog.colorize_console)\n+\n+\n def run_test_inprocess(testdir):\n old_stdout = sys.stdout\n sys.stdout = mystdout = StringIO()\n@@ -475,10 +492,12 @@ def run_tests(all_tests, log_name_base, extra_args):\n \n for name, test_cases, skipped in all_tests:\n current_suite = ET.SubElement(junit_root, 'testsuite', {'name': name, 'tests': str(len(test_cases))})\n+ print()\n if skipped:\n- print('\\nNot running %s tests.\\n' % name)\n+ print(bold('Not running %s tests.' % name))\n else:\n- print('\\nRunning %s tests.\\n' % name)\n+ print(bold('Running %s tests.' % name))\n+ print()\n futures = []\n for t in test_cases:\n # Jenkins screws us over by automatically sorting test cases by name\n@@ -494,7 +513,7 @@ def run_tests(all_tests, log_name_base, extra_args):\n sys.stdout.flush()\n result = result.result()\n if result is None or 'MESON_SKIP_TEST' in result.stdo:\n- print('Skipping:', t)\n+ print(yellow('Skipping:'), t)\n current_test = ET.SubElement(current_suite, 'testcase', {'name': testname,\n 'classname': name})\n ET.SubElement(current_test, 'skipped', {})\n@@ -502,7 +521,7 @@ def run_tests(all_tests, log_name_base, extra_args):\n else:\n without_install = \"\" if len(install_commands) > 0 else \" (without install)\"\n if result.msg != '':\n- print('Failed test{} during {}: {!r}'.format(without_install, result.step.name, t))\n+ print(red('Failed test{} during {}: {!r}'.format(without_install, result.step.name, t)))\n print('Reason:', result.msg)\n failing_tests += 1\n if result.step == BuildStep.configure and result.mlog != no_meson_log_msg:\n@@ -648,9 +667,9 @@ if __name__ == '__main__':\n pass\n for f in pbfiles:\n os.unlink(f)\n- print('\\nTotal passed tests:', passing_tests)\n- print('Total failed tests:', failing_tests)\n- print('Total skipped tests:', skipped_tests)\n+ print('\\nTotal passed tests:', green(str(passing_tests)))\n+ print('Total failed tests:', red(str(failing_tests)))\n+ print('Total skipped tests:', yellow(str(skipped_tests)))\n if failing_tests > 0:\n print('\\nMesonlogs of failing tests\\n')\n for l in failing_logs:\n", "new_path": "run_project_tests.py", "old_path": "run_project_tests.py" }, { "change_type": "MODIFY", "diff": "@@ -23,6 +23,7 @@ import tempfile\n import platform\n from mesonbuild import mesonlib\n from mesonbuild import mesonmain\n+from mesonbuild import mlog\n from mesonbuild.environment import detect_ninja\n from io import StringIO\n from enum import Enum\n@@ -177,7 +178,8 @@ if __name__ == '__main__':\n if 'APPVEYOR' in os.environ and os.environ['arch'] == 'x86':\n os.environ.pop('platform')\n # Run tests\n- print('Running unittests.\\n')\n+ print(mlog.bold('Running unittests.').get_text(mlog.colorize_console))\n+ print()\n units = ['InternalTests', 'AllPlatformTests', 'FailureTests']\n if mesonlib.is_linux():\n units += ['LinuxlikeTests']\n@@ -200,7 +202,8 @@ if __name__ == '__main__':\n returncode += subprocess.call([sys.executable, 'run_unittests.py', '-v'] + units, env=env)\n # Ubuntu packages do not have a binary without -6 suffix.\n if should_run_linux_cross_tests():\n- print('Running 
cross compilation tests.\\n')\n+ print(mlog.bold('Running cross compilation tests.').get_text(mlog.colorize_console))\n+ print()\n returncode += subprocess.call([sys.executable, 'run_cross_test.py', 'cross/ubuntu-armhf.txt'], env=env)\n returncode += subprocess.call([sys.executable, 'run_project_tests.py'] + sys.argv[1:], env=env)\n sys.exit(returncode)\n", "new_path": "run_tests.py", "old_path": "run_tests.py" } ]
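Illustrative aside: the helpers above delegate to `mesonbuild.mlog`, which disables colour on non-ANSI consoles. A dependency-free approximation of what they print on a capable terminal looks like this; the raw escape codes are an assumption, not mlog's API:

```python
def _ansi(code, text):
    return '\033[{}m{}\033[0m'.format(code, text)

def bold(text):   return _ansi('1', text)
def green(text):  return _ansi('32', text)
def red(text):    return _ansi('31', text)
def yellow(text): return _ansi('33', text)

print(bold('Running unit tests.'))
print('Total passed tests:', green('12'))
print('Total failed tests:', red('1'))
print('Total skipped tests:', yellow('3'))
```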
fab5634916191816ddecf1a2a958fa7ed2eac1ec
mesonbuild/meson
null
null
Add 'Compiler.get_display_language'. Use this when we print language-related information to the console and via the Ninja backend.
[ { "change_type": "MODIFY", "diff": "@@ -1606,7 +1606,7 @@ rule FORTRAN_DEP_HACK\n output_args=' '.join(compiler.get_output_args('$out')),\n compile_only_args=' '.join(compiler.get_compile_only_args())\n )\n- description = ' description = Compiling %s object $out.\\n' % langname.title()\n+ description = ' description = Compiling %s object $out.\\n' % compiler.get_display_language()\n if compiler.get_id() == 'msvc':\n deps = ' deps = msvc\\n'\n else:\n", "new_path": "mesonbuild/backend/ninjabackend.py", "old_path": "mesonbuild/backend/ninjabackend.py" }, { "change_type": "MODIFY", "diff": "@@ -179,7 +179,7 @@ class CCompiler(Compiler):\n return ['-Wl,--out-implib=' + implibname]\n \n def sanity_check_impl(self, work_dir, environment, sname, code):\n- mlog.debug('Sanity testing ' + self.language + ' compiler:', ' '.join(self.exelist))\n+ mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist))\n mlog.debug('Is cross compiler: %s.' % str(self.is_cross))\n \n extra_flags = []\n", "new_path": "mesonbuild/compilers/c.py", "old_path": "mesonbuild/compilers/c.py" }, { "change_type": "MODIFY", "diff": "@@ -584,6 +584,9 @@ class Compiler:\n def get_language(self):\n return self.language\n \n+ def get_display_language(self):\n+ return self.language.capitalize()\n+\n def get_default_suffix(self):\n return self.default_suffix\n \n", "new_path": "mesonbuild/compilers/compilers.py", "old_path": "mesonbuild/compilers/compilers.py" }, { "change_type": "MODIFY", "diff": "@@ -32,6 +32,9 @@ class CPPCompiler(CCompiler):\n self.language = 'cpp'\n CCompiler.__init__(self, exelist, version, is_cross, exe_wrap)\n \n+ def get_display_language(self):\n+ return 'C++'\n+\n def get_no_stdinc_args(self):\n return ['-nostdinc++']\n \n", "new_path": "mesonbuild/compilers/cpp.py", "old_path": "mesonbuild/compilers/cpp.py" }, { "change_type": "MODIFY", "diff": "@@ -25,6 +25,9 @@ class MonoCompiler(Compiler):\n self.id = 'mono'\n self.monorunner = 'mono'\n \n+ def get_display_language(self):\n+ return 'C#'\n+\n def get_output_args(self, fname):\n return ['-out:' + fname]\n \n", "new_path": "mesonbuild/compilers/cs.py", "old_path": "mesonbuild/compilers/cs.py" }, { "change_type": "MODIFY", "diff": "@@ -24,6 +24,9 @@ class ObjCCompiler(CCompiler):\n self.language = 'objc'\n CCompiler.__init__(self, exelist, version, is_cross, exe_wrap)\n \n+ def get_display_language(self):\n+ return 'Objective-C'\n+\n def sanity_check(self, work_dir, environment):\n # TODO try to use sanity_check_impl instead of duplicated code\n source_name = os.path.join(work_dir, 'sanitycheckobjc.m')\n", "new_path": "mesonbuild/compilers/objc.py", "old_path": "mesonbuild/compilers/objc.py" }, { "change_type": "MODIFY", "diff": "@@ -24,6 +24,9 @@ class ObjCPPCompiler(CPPCompiler):\n self.language = 'objcpp'\n CPPCompiler.__init__(self, exelist, version, is_cross, exe_wrap)\n \n+ def get_display_language(self):\n+ return 'Objective-C++'\n+\n def sanity_check(self, work_dir, environment):\n # TODO try to use sanity_check_impl instead of duplicated code\n source_name = os.path.join(work_dir, 'sanitycheckobjcpp.mm')\n", "new_path": "mesonbuild/compilers/objcpp.py", "old_path": "mesonbuild/compilers/objcpp.py" }, { "change_type": "MODIFY", "diff": "@@ -741,7 +741,7 @@ class CompilerHolder(InterpreterObject):\n def unittest_args_method(self, args, kwargs):\n # At time, only D compilers have this feature.\n if not hasattr(self.compiler, 'get_unittest_args'):\n- raise InterpreterException('This {} compiler has no unittest 
arguments.'.format(self.compiler.language))\n+ raise InterpreterException('This {} compiler has no unittest arguments.'.format(self.compiler.get_display_language()))\n return self.compiler.get_unittest_args()\n \n def has_member_method(self, args, kwargs):\n@@ -971,8 +971,7 @@ class CompilerHolder(InterpreterObject):\n raise InvalidCode('Search directory %s is not an absolute path.' % i)\n linkargs = self.compiler.find_library(libname, self.environment, search_dirs)\n if required and not linkargs:\n- l = self.compiler.language.capitalize()\n- raise InterpreterException('{} library {!r} not found'.format(l, libname))\n+ raise InterpreterException('{} library {!r} not found'.format(self.compiler.get_display_language(), libname))\n lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,\n self.compiler.language)\n return ExternalLibraryHolder(lib)\n@@ -986,7 +985,7 @@ class CompilerHolder(InterpreterObject):\n h = mlog.green('YES')\n else:\n h = mlog.red('NO')\n- mlog.log('Compiler for {} supports argument {}:'.format(self.compiler.language, args[0]), h)\n+ mlog.log('Compiler for {} supports argument {}:'.format(self.compiler.get_display_language(), args[0]), h)\n return result\n \n def has_multi_arguments_method(self, args, kwargs):\n@@ -998,7 +997,7 @@ class CompilerHolder(InterpreterObject):\n h = mlog.red('NO')\n mlog.log(\n 'Compiler for {} supports arguments {}:'.format(\n- self.compiler.language, ' '.join(args)),\n+ self.compiler.get_display_language(), ' '.join(args)),\n h)\n return result\n \n@@ -1794,7 +1793,7 @@ class Interpreter(InterpreterBase):\n continue\n else:\n raise\n- mlog.log('Native %s compiler: ' % lang, mlog.bold(' '.join(comp.get_exelist())), ' (%s %s)' % (comp.id, comp.version), sep='')\n+ mlog.log('Native %s compiler: ' % comp.get_display_language(), mlog.bold(' '.join(comp.get_exelist())), ' (%s %s)' % (comp.id, comp.version), sep='')\n if not comp.get_language() in self.coredata.external_args:\n (preproc_args, compile_args, link_args) = environment.get_args_from_envvars(comp)\n self.coredata.external_preprocess_args[comp.get_language()] = preproc_args\n@@ -1802,7 +1801,7 @@ class Interpreter(InterpreterBase):\n self.coredata.external_link_args[comp.get_language()] = link_args\n self.build.add_compiler(comp)\n if need_cross_compiler:\n- mlog.log('Cross %s compiler: ' % lang, mlog.bold(' '.join(cross_comp.get_exelist())), ' (%s %s)' % (cross_comp.id, cross_comp.version), sep='')\n+ mlog.log('Cross %s compiler: ' % cross_comp.get_display_language(), mlog.bold(' '.join(cross_comp.get_exelist())), ' (%s %s)' % (cross_comp.id, cross_comp.version), sep='')\n self.build.add_cross_compiler(cross_comp)\n if self.environment.is_cross_build() and not need_cross_compiler:\n self.build.add_cross_compiler(comp)\n", "new_path": "mesonbuild/interpreter.py", "old_path": "mesonbuild/interpreter.py" } ]
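Illustrative aside: the pattern in miniature, with a capitalizing default on the base class that is overridden only where `str.capitalize()` would produce the wrong display name:

```python
class Compiler:
    language = 'c'
    def get_display_language(self):
        return self.language.capitalize()   # 'c' -> 'C'

class CPPCompiler(Compiler):
    language = 'cpp'
    def get_display_language(self):
        return 'C++'                        # capitalize() would give 'Cpp'

print(Compiler().get_display_language(), CPPCompiler().get_display_language())
# -> C C++
```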
bb0e18b73885de374f8461c0e4f3c911fded1e46
mesonbuild/meson
null
null
Use listify and extract_as_list everywhere. They now flatten by default and unhold objects if required. Includes unit tests.
[ { "change_type": "MODIFY", "diff": "@@ -20,7 +20,7 @@ from . import environment\n from . import dependencies\n from . import mlog\n from .mesonlib import File, MesonException, listify, extract_as_list\n-from .mesonlib import flatten, typeslistify, stringlistify, classify_unity_sources\n+from .mesonlib import typeslistify, stringlistify, classify_unity_sources\n from .mesonlib import get_filenames_templates_dict, substitute_values\n from .environment import for_windows, for_darwin, for_cygwin\n from .compilers import is_object, clike_langs, sort_clike, lang_suffixes\n@@ -682,7 +682,7 @@ class BuildTarget(Target):\n if 'd' in self.compilers:\n self.add_compiler_args('d', self.compilers['d'].get_feature_args(dfeatures))\n \n- self.link_args = flatten(kwargs.get('link_args', []))\n+ self.link_args = extract_as_list(kwargs, 'link_args')\n for i in self.link_args:\n if not isinstance(i, str):\n raise InvalidArguments('Link_args arguments must be strings.')\n@@ -856,9 +856,7 @@ You probably should put it in link_with instead.''')\n return self.external_deps\n \n def link(self, target):\n- for t in flatten(target):\n- if hasattr(t, 'held_object'):\n- t = t.held_object\n+ for t in listify(target, unholder=True):\n if not t.is_linkable_target():\n raise InvalidArguments('Link target {!r} is not linkable.'.format(t))\n if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:\n@@ -870,9 +868,7 @@ You probably should put it in link_with instead.''')\n self.link_targets.append(t)\n \n def link_whole(self, target):\n- for t in flatten(target):\n- if hasattr(t, 'held_object'):\n- t = t.held_object\n+ for t in listify(target, unholder=True):\n if not isinstance(t, StaticLibrary):\n raise InvalidArguments('{!r} is not a static library.'.format(t))\n if isinstance(self, SharedLibrary) and not t.pic:\n@@ -915,7 +911,7 @@ You probably should put it in link_with instead.''')\n self.include_dirs += ids\n \n def add_compiler_args(self, language, args):\n- args = flatten(args)\n+ args = listify(args)\n for a in args:\n if not isinstance(a, (str, File)):\n raise InvalidArguments('A non-string passed to compiler args.')\n@@ -1546,11 +1542,9 @@ class CustomTarget(Target):\n return deps\n \n def flatten_command(self, cmd):\n- cmd = listify(cmd)\n+ cmd = listify(cmd, unholder=True)\n final_cmd = []\n for c in cmd:\n- if hasattr(c, 'held_object'):\n- c = c.held_object\n if isinstance(c, str):\n final_cmd.append(c)\n elif isinstance(c, File):\n@@ -1573,12 +1567,7 @@ class CustomTarget(Target):\n \n def process_kwargs(self, kwargs):\n super().process_kwargs(kwargs)\n- sources = flatten(kwargs.get('input', []))\n- self.sources = []\n- for s in sources:\n- if hasattr(s, 'held_object'):\n- s = s.held_object\n- self.sources.append(s)\n+ self.sources = extract_as_list(kwargs, 'input', unholder=True)\n if 'output' not in kwargs:\n raise InvalidArguments('Missing keyword argument \"output\".')\n self.outputs = listify(kwargs['output'])\n", "new_path": "mesonbuild/build.py", "old_path": "mesonbuild/build.py" }, { "change_type": "MODIFY", "diff": "@@ -23,7 +23,7 @@ from enum import Enum\n \n from .. import mlog\n from .. 

🏟️ Long Code Arena (Code Editing)

This is the benchmark for the Code Editing task, part of the 🏟️ Long Code Arena benchmark.

How-to

  1. List all the available configs via datasets.get_dataset_config_names and choose an appropriate one.

    Current configs: commitchronicle-py-long, commitchronicle-py-long-labels
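
    A minimal sketch of this step (the repository ID matches the one used in step 2):

    from datasets import get_dataset_config_names

    configs = get_dataset_config_names("JetBrains-Research/lca-code-editing")
    print(configs)  # should list the configs named above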

  2. Load the data via load_dataset:

    from datasets import load_dataset
    
    configuration = "TODO"  # select a configuration
    dataset = load_dataset("JetBrains-Research/lca-code-editing", configuration, split="test")
    

    Note that all the data we have is considered to be in the test split.

Note 1. Working with git repositories under the repos directory is not supported via 🤗 Datasets. Download and extract the contents of each repository manually. We provide a full list of files in paths.json.

Note 2. Working with vector stores under the vector_store directory is not supported via 🤗 Datasets. Download the data for each repository manually. We provide a full list of files in paths.json.
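
Both directories can be fetched manually with huggingface_hub, for example (a minimal sketch; the allow_patterns values assume the directory names mentioned above):

from huggingface_hub import snapshot_download

# fetch only the git archives, the vector stores and the file listing
local_dir = snapshot_download(
    repo_id="JetBrains-Research/lca-code-editing",
    repo_type="dataset",
    allow_patterns=["repos/*", "vector_store/*", "paths.json"],
)
print(local_dir)  # local path containing the requested files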

Dataset Structure

This dataset contains four kinds of data:

  • full data about each commit (including modifications)
  • metadata with quality labels
  • compressed git repositories
  • precalculated faiss vector store for each datapoint

Full data

This section concerns the configuration with full data about each commit (the one without the -labels suffix).

Each example has the following fields:

Field    Description
repo     Commit repository.
hash     Commit hash.
date     Commit date.
license  Commit repository's license.
message  Commit message.
mods     List of file modifications from a commit.

Each file modification has the following fields:

Field        Description
change_type  Type of change to current file. One of: ADD, COPY, RENAME, DELETE, MODIFY or UNKNOWN.
old_path     Path to file before change (might be empty).
new_path     Path to file after change (might be empty).
diff         git diff for current file.

Data point example:

{'hash': 'f6347ae47c872b40339d9565a9cb29da5bca8716',
 'repo': 'mycroftai/mycroft-core',
 'date': None,
 'license': None,
 'message': 'Replace hashed meta with skill_gid as identifier\nThis also removes the notion of an owner skill and all skills may update settings on the server.',
 'mods': [{'change_type': 'MODIFY',
    'new_path': 'mycroft/skills/settings.py',
    'old_path': 'mycroft/skills/settings.py',
    'diff': '@@ -216,32 +216,10 @@ class SkillSettings(dict):<...>'}]}
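
A minimal sketch of reading these fields from a loaded example (field and config names as documented above):

from datasets import load_dataset

dataset = load_dataset("JetBrains-Research/lca-code-editing", "commitchronicle-py-long", split="test")
commit = dataset[0]
print(commit["repo"], commit["hash"])
print(commit["message"].splitlines()[0])  # first line of the commit message
for mod in commit["mods"]:
    print(mod["change_type"], mod["old_path"], "->", mod["new_path"])
    print(mod["diff"][:100])  # raw git diff, truncated for display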

Labels

This section concerns the configuration with metadata and labels (the one with the -labels suffix).

Each example has the following fields:

Field    Description
repo     Commit repository.
hash     Commit hash.
date     Commit date.
license  Commit repository's license.
message  Commit message.
label    Label of the current commit as a target for the code editing task.
comment  Comment accompanying the label (optional, might be empty).

Labels are on a 1-5 scale, where:

  • 1 – strong no
  • 2 – weak no
  • 3 – unsure
  • 4 – weak yes
  • 5 – strong yes

Data point example:

{'hash': 'b9747bc011e9e9830ab147327d7aeaa8447ad2d7',
 'repo': 'apache/libcloud',
 'date': '20.02.2020 00:11:58',
 'license': 'Apache License 2.0',
 'message': 'Add new storage API methods for downloading part of an object (range\ndownload) and implement it for the S3 and local storage drivers.',
 'label': 4.0,
 'comment': 'might be an interesting use-case (and also quite complicated)'}
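
A minimal sketch of using these labels to select promising code-editing targets (the threshold of 4, i.e. "weak yes" and above, is one possible choice):

from datasets import load_dataset

labels = load_dataset("JetBrains-Research/lca-code-editing", "commitchronicle-py-long-labels", split="test")
# keep commits rated "weak yes" or "strong yes"
good = labels.filter(lambda ex: ex["label"] >= 4)
print(f"kept {len(good)} of {len(labels)} commits")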

Git Repositories

This section concerns the repos directory, which stores compressed Git repositories for all the commits in this benchmark. After you download and extract them, you can work with each repository either via Git or via Python libraries like GitPython or PyDriller.
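
For example, a minimal PyDriller sketch (the repos/example__repo path is a placeholder; see paths.json for the actual layout):

from pydriller import Repository

# walk the history of one extracted repository
for commit in Repository("repos/example__repo").traverse_commits():
    print(commit.hash, commit.msg.splitlines()[0])
    for f in commit.modified_files:
        print("  ", f.change_type.name, f.new_path)
    break  # inspect only the first commit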

Vector stores

This section concerns the vector_store directory, which stores precalculated faiss vector stores for code retrieval. After you download them, you can work with the databases in the following way:

from langchain.indexes import SQLRecordManager, index
from langchain_community.vectorstores.faiss import FAISS
from langchain_openai import OpenAIEmbeddings

# Namespace for the base commit
namespace = "apache__libcloud__9a7c47b31d513fc262fb1e5537f15d2335df3279"

# Setup the langchain vectorstore
embeddings = OpenAIEmbeddings(model="text-embedding-3-small")
db = FAISS.load_local("vector_store", embeddings, index_name=namespace)

# Retrieve closest documents
new_docs = db.similarity_search("main", 3)

# Indexing. See: https://python.langchain.com/docs/modules/data_connection/indexing
record_manager_path = f"vector_store/{namespace}.sqlite"
record_manager = SQLRecordManager(namespace, db_url=f"sqlite:///{record_manager_path}")
record_manager.create_schema()  # create the bookkeeping tables on first use

# Update the vector store (index expects an iterable of Documents)
index(new_docs, record_manager, db, cleanup=None)