| id (string, lengths 30-32) | content (string, lengths 139-2.8k) |
|---|---|
codereview_new_python_data_12580
|
def visit_name_expr(self, o: NameExpr) -> None:
self.tracker.record_definition(o.name)
elif self.tracker.is_defined_in_different_branch(o.name):
# A variable is defined in one branch but used in a different branch.
- if len(self.loops) > 0:
self.msg.variable_may_be_undefined(o.name, o)
else:
self.msg.var_used_before_def(o.name, o)
You can simply write it like this; empty lists are falsy.
```suggestion
if self.loops:
```
def visit_name_expr(self, o: NameExpr) -> None:
self.tracker.record_definition(o.name)
elif self.tracker.is_defined_in_different_branch(o.name):
# A variable is defined in one branch but used in a different branch.
+ if self.loops:
self.msg.variable_may_be_undefined(o.name, o)
else:
self.msg.var_used_before_def(o.name, o)
|
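A minimal standalone sketch of the truthiness rule the review above leans on; `loops` here is a hypothetical stand-in for the checker's loop stack, not mypy's actual attribute.
```python
# Empty containers are falsy, so `if loops:` is the idiomatic
# equivalent of `if len(loops) > 0:`.
loops: list[str] = []

assert not loops and len(loops) == 0   # empty list -> falsy

loops.append("while")
assert loops                           # non-empty list -> truthy
print("inside a loop" if loops else "at top level")
```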
codereview_new_python_data_12581
|
def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) ->
env=env,
)
if sys.version_info >= (3, 12):
proc.wait(timeout=30)
output = proc.communicate()[0].decode("utf8")
outlines = output.splitlines()
```suggestion
if sys.version_info >= (3, 12):
# TODO: testDecorators1 hangs on 3.12, remove this once fixed
proc.wait(timeout=30)
```
def run_case_step(self, testcase: DataDrivenTestCase, incremental_step: int) ->
env=env,
)
if sys.version_info >= (3, 12):
+ # TODO: testDecorators1 hangs on 3.12, remove this once fixed
proc.wait(timeout=30)
output = proc.communicate()[0].decode("utf8")
outlines = output.splitlines()
|
codereview_new_python_data_12582
|
def add_invertible_flag(
"--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS
)
parser.add_argument(
- "--_disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS
)
parser.add_argument(
- "--_disable-memoryview-promotion", action="store_true", help=argparse.SUPPRESS
)
# options specifying code to check
I feel like we don't need the underscore if we're already suppressing the flag.
def add_invertible_flag(
"--enable-incomplete-features", action="store_true", help=argparse.SUPPRESS
)
parser.add_argument(
+ "--disable-bytearray-promotion", action="store_true", help=argparse.SUPPRESS
)
parser.add_argument(
+ "--disable-memoryview-promotion", action="store_true", help=argparse.SUPPRESS
)
# options specifying code to check
|
codereview_new_python_data_12583
|
class BuildType:
}
# Features that are currently incomplete/experimental
-TYPE_VAR_TUPLE = "TypeVarTuple"
-INCOMPLETE_FEATURES = {TYPE_VAR_TUPLE}
class Options:
```suggestion
TYPE_VAR_TUPLE: Final = "TypeVarTuple"
INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE,))
```
class BuildType:
}
# Features that are currently incomplete/experimental
+TYPE_VAR_TUPLE: Final = "TypeVarTuple"
+INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE,))
class Options:
|
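A small sketch of the pattern the suggestion asks for: `typing.Final` tells the type checker the names are constants, and `frozenset` makes the collection immutable at runtime. The names are taken from the record above.
```python
from typing import Final

TYPE_VAR_TUPLE: Final = "TypeVarTuple"
INCOMPLETE_FEATURES: Final = frozenset((TYPE_VAR_TUPLE,))

# Membership tests work as with a normal set...
assert TYPE_VAR_TUPLE in INCOMPLETE_FEATURES

# ...but mutation is impossible: frozenset has no add()/remove().
assert not hasattr(INCOMPLETE_FEATURES, "add")
```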
codereview_new_python_data_12584
|
def visit_assert_type_expr(self, expr: AssertTypeExpr) -> Type:
if not is_same_type(source_type, target_type):
if not self.chk.in_checked_function():
self.msg.note(
- "'assert_type' always outputs 'Any' in unchecked functions", expr.expr
)
self.msg.assert_type_fail(source_type, target_type, expr)
return source_type
```suggestion
'"assert_type" expects everything to be "Any" in unchecked functions', expr.expr
```
def visit_assert_type_expr(self, expr: AssertTypeExpr) -> Type:
if not is_same_type(source_type, target_type):
if not self.chk.in_checked_function():
self.msg.note(
+ '"assert_type" expects everything to be "Any" in unchecked functions',
+ expr.expr,
)
self.msg.assert_type_fail(source_type, target_type, expr)
return source_type
|
codereview_new_python_data_12585
|
def add_invertible_flag(
help="Make arguments prepended via Concatenate be truly positional-only",
group=strictness_group,
)
- # Experiment flag to detect undefined variables being used.
- add_invertible_flag("--disallow-undefined-vars", default=False, help=argparse.SUPPRESS)
strict_help = "Strict mode; enables the following flags: {}".format(
", ".join(strict_flag_names)
An alternative would be an error code which is disabled by default. Take a look at `mypy/errorcodes.py` and `TRUTHY_BOOL` as an example.
def add_invertible_flag(
help="Make arguments prepended via Concatenate be truly positional-only",
group=strictness_group,
)
strict_help = "Strict mode; enables the following flags: {}".format(
", ".join(strict_flag_names)
|
codereview_new_python_data_12586
|
def analyze_function_body(self, defn: FuncItem) -> None:
self.function_stack.pop()
def check_classvar_in_signature(self, typ: ProperType) -> None:
if isinstance(typ, Overloaded):
- t: ProperType
for t in typ.items:
self.check_classvar_in_signature(t)
return
I think we can remove this one, `_items` is `list[CallableType]`, which is a subtype of `ProperType`: https://github.com/python/mypy/blob/551f8f4064c2158d47917b418726a01a2a797a7d/mypy/types.py#L1979
def analyze_function_body(self, defn: FuncItem) -> None:
self.function_stack.pop()
def check_classvar_in_signature(self, typ: ProperType) -> None:
+ t: ProperType
if isinstance(typ, Overloaded):
for t in typ.items:
self.check_classvar_in_signature(t)
return
|
codereview_new_python_data_12587
|
def __init__(self) -> None:
# A comma-separated list packages for mypy to type check
self.packages: list[str] | None = None
- # A comma-separated list modules for mypy to type check
self.modules: list[str] | None = None
# Write junit.xml to given file
```suggestion
# A comma-separated list of modules for mypy to type check
```
def __init__(self) -> None:
# A comma-separated list packages for mypy to type check
self.packages: list[str] | None = None
+ # A comma-separated list of modules for mypy to type check
self.modules: list[str] | None = None
# Write junit.xml to given file
|
codereview_new_python_data_12588
|
def visit_none_type(self, left: NoneType) -> bool:
# None is compatible with Hashable (and other similar protocols). This is
# slightly sloppy since we don't check the signature of "__hash__".
# None is also compatible with `SupportsStr` protocol.
- supported_members = frozenset(("__hash__", "__str__"))
return not members or all(member in supported_members for member in members)
return False
else:
```suggestion
return not members or all(member in ("__hash__", "__str__") for member in members)
```
def visit_none_type(self, left: NoneType) -> bool:
# None is compatible with Hashable (and other similar protocols). This is
# slightly sloppy since we don't check the signature of "__hash__".
# None is also compatible with `SupportsStr` protocol.
return not members or all(member in supported_members for member in members)
return False
else:
|
codereview_new_python_data_12589
|
def verify_typealias(
):
runtime_module = "typing"
runtime_fullname = f"{runtime_module}.{runtime_name}"
- if re.match(fr"_?{stub_target.type.fullname}", runtime_fullname):
# Okay, we're probably fine.
return
```suggestion
if re.match(fr"_?{re.escape(stub_target.type.fullname)}", runtime_fullname):
```
def verify_typealias(
):
runtime_module = "typing"
runtime_fullname = f"{runtime_module}.{runtime_name}"
+ if re.match(fr"_?{re.escape(stub_target.type.fullname)}", runtime_fullname):
# Okay, we're probably fine.
return
|
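Why `re.escape` matters in the record above, as a standalone sketch: a dotted fullname used verbatim as a pattern lets `.` match any character. The sample names are hypothetical.
```python
import re

fullname = "typing.List"   # '.' is a regex metacharacter
impostor = "typingXList"   # should not match

# Unescaped: the '.' in the pattern happily matches the 'X'.
assert re.match(rf"_?{fullname}", impostor)

# Escaped: only a literal '.' matches.
assert re.match(rf"_?{re.escape(fullname)}", impostor) is None
assert re.match(rf"_?{re.escape(fullname)}", "_typing.List")
```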
codereview_new_python_data_12593
|
def validate_transform(transform_id):
output_values_coder.component_coder_ids[0] !=
input_coder.component_coder_ids[1]):
raise ValueError(
- "Incompatable input and output coders for %s: %s" %
(transform_id, input_coder, output_coder))
for t in transform_proto.subtransforms:
Fix the typo in the string substitution to get tests past a trivial issue.
```suggestion
"Incompatible input coder %s and output coder %s for transform %s" %
```
def validate_transform(transform_id):
output_values_coder.component_coder_ids[0] !=
input_coder.component_coder_ids[1]):
raise ValueError(
+ "Incompatible input coder %s and output coder %s for transform %s" %
(transform_id, input_coder, output_coder))
for t in transform_proto.subtransforms:
|
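The "trivial issue" fixed above is a placeholder/argument count mismatch in `%`-formatting; a minimal reproduction with dummy values:
```python
transform_id, input_coder, output_coder = "t1", "coder-a", "coder-b"

# Two %s placeholders but three arguments: raises at format time.
try:
    "Incompatable input and output coders for %s: %s" % (
        transform_id, input_coder, output_coder)
except TypeError as exc:
    print("format failed:", exc)

# Three placeholders for three arguments: formats cleanly.
print("Incompatible input coder %s and output coder %s for transform %s" % (
    input_coder, output_coder, transform_id))
```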
codereview_new_python_data_12594
|
def test_predict_keyed_numpy(self):
for actual, expected in zip(inferences, expected_predictions):
self.assertTrue(_compare_prediction_result(actual[1], expected[1]))
- @pytest.mark.uses_tf
def test_predict_keyed_tensor(self):
fake_model = FakeTFTensorModel()
inference_runner = KeyedModelHandler(
nit: remove the marker since it is already present on the class def
def test_predict_keyed_numpy(self):
for actual, expected in zip(inferences, expected_predictions):
self.assertTrue(_compare_prediction_result(actual[1], expected[1]))
def test_predict_keyed_tensor(self):
fake_model = FakeTFTensorModel()
inference_runner = KeyedModelHandler(
|
codereview_new_python_data_12597
|
def login():
args = frappe.form_dict
ldap: LDAPSettings = frappe.get_doc("LDAP Settings")
- user = ldap.authenticate(frappe.as_unicode(args.usr), frappe.as_unicode(args.pop("pwd", None)))
frappe.local.login_manager.user = user.name
if should_run_2fa(user.name):
authenticate_for_2factor(user.name)
if not confirm_otp_token(frappe.local.login_manager):
return False
frappe.local.login_manager.post_login()
# because of a GET request!
```suggestion
password = args.pop("pwd", None)
user = ldap.authenticate(frappe.as_unicode(args.usr), frappe.as_unicode(password))
frappe.local.login_manager.user = user.name
if should_run_2fa(user.name):
frappe.form_dict["pwd"] = password if password else "dummy_string"
```
def login():
args = frappe.form_dict
ldap: LDAPSettings = frappe.get_doc("LDAP Settings")
+ user = ldap.authenticate(frappe.as_unicode(args.usr), frappe.as_unicode(args.pwd))
frappe.local.login_manager.user = user.name
if should_run_2fa(user.name):
authenticate_for_2factor(user.name)
if not confirm_otp_token(frappe.local.login_manager):
return False
+
+ frappe.form_dict.pop("pwd", None)
frappe.local.login_manager.post_login()
# because of a GET request!
|
codereview_new_python_data_12598
|
def login():
args = frappe.form_dict
ldap: LDAPSettings = frappe.get_doc("LDAP Settings")
- user = ldap.authenticate(frappe.as_unicode(args.usr), frappe.as_unicode(args.pop("pwd", None)))
frappe.local.login_manager.user = user.name
if should_run_2fa(user.name):
authenticate_for_2factor(user.name)
if not confirm_otp_token(frappe.local.login_manager):
return False
frappe.local.login_manager.post_login()
# because of a GET request!
Password is deliberately popped from the form dict here. Adding it back reverses this:
https://github.com/frappe/frappe/pull/17872
def login():
args = frappe.form_dict
ldap: LDAPSettings = frappe.get_doc("LDAP Settings")
+ user = ldap.authenticate(frappe.as_unicode(args.usr), frappe.as_unicode(args.pwd))
frappe.local.login_manager.user = user.name
if should_run_2fa(user.name):
authenticate_for_2factor(user.name)
if not confirm_otp_token(frappe.local.login_manager):
return False
+
+ frappe.form_dict.pop("pwd", None)
frappe.local.login_manager.post_login()
# because of a GET request!
|
codereview_new_python_data_12599
|
def import_controller(doctype):
from frappe.model.document import Document
from frappe.utils.nestedset import NestedSet
if doctype not in DOCTYPES_FOR_DOCTYPE:
meta = frappe.get_meta(doctype)
if meta.custom:
return NestedSet if meta.get("is_tree") else Document
module_name = meta.module
- else:
- module_name = "Core"
-
module_path = None
class_overrides = frappe.get_hooks("override_doctype_class")
if class_overrides and class_overrides.get(doctype):
we can also remove this branch by just putting `module_name = "Core"` on top of the `if` branch - though it's a very minor nit - can be ignored :)
def import_controller(doctype):
from frappe.model.document import Document
from frappe.utils.nestedset import NestedSet
+ module_name = "Core"
if doctype not in DOCTYPES_FOR_DOCTYPE:
meta = frappe.get_meta(doctype)
if meta.custom:
return NestedSet if meta.get("is_tree") else Document
module_name = meta.module
module_path = None
class_overrides = frappe.get_hooks("override_doctype_class")
if class_overrides and class_overrides.get(doctype):
|
codereview_new_python_data_12600
|
def get_url_to_report(name, report_type=None, doctype=None):
def get_url_to_report_with_filters(name, filters, report_type=None, doctype=None):
if report_type == "Report Builder":
-<<<<<<< HEAD
- return get_url(uri="/app/{0}/view/report?{1}".format(quoted(doctype), filters))
else:
return get_url(uri="/app/query-report/{0}?{1}".format(quoted(name), filters))
-=======
- return get_url(uri=f"/app/{quoted(slug(doctype))}/view/report?{filters}")
-
- return get_url(uri=f"/app/query-report/{quoted(name)}?{filters}")
->>>>>>> 27a2689678 (fix: slug doctype when building url to report with filters (#19581))
operator_map = {
```suggestion
return get_url(uri="/app/{0}/view/report?{1}".format(quoted(slug(doctype)), filters))
else:
return get_url(uri="/app/query-report/{0}?{1}".format(quoted(name), filters))
```
def get_url_to_report(name, report_type=None, doctype=None):
def get_url_to_report_with_filters(name, filters, report_type=None, doctype=None):
if report_type == "Report Builder":
+ return get_url(uri="/app/{0}/view/report?{1}".format(quoted(slug(doctype)), filters))
else:
return get_url(uri="/app/query-report/{0}?{1}".format(quoted(name), filters))
operator_map = {
|
codereview_new_python_data_12601
|
def get_high_permlevel_fields(self):
return self.high_permlevel_fields
- def get_permlevel_read_fields(self, parenttype=None, *, user=None):
- """Build list of fields with read perm level and all the higher perm levels defined."""
- if not hasattr(self, "permlevel_read_fields"):
- self.permlevel_read_fields = []
permlevel_access = set(self.get_permlevel_access("read", parenttype, user=user))
for df in self.get_fieldnames_with_value(with_field_meta=True):
if df.permlevel in permlevel_access:
- self.permlevel_read_fields.append(df)
- return self.permlevel_read_fields
def get_permlevel_access(self, permission_type="read", parenttype=None, *, user=None):
has_access_to = []
Isn't this basically `get_readable_fields`? `get_permlevel_read_fields` makes it sound like it's a permlevel-wise dict of fields.
I would rename the variable below as well.
def get_high_permlevel_fields(self):
return self.high_permlevel_fields
+ def get_permitted_fieldnames(self, parenttype=None, *, user=None):
+ """Build list of `fieldname` with read perm level and all the higher perm levels defined."""
+ if not hasattr(self, "permitted_fieldnames"):
+ self.permitted_fieldnames = []
permlevel_access = set(self.get_permlevel_access("read", parenttype, user=user))
for df in self.get_fieldnames_with_value(with_field_meta=True):
if df.permlevel in permlevel_access:
+ self.permitted_fieldnames.append(df.fieldname)
+ return self.permitted_fieldnames
def get_permlevel_access(self, permission_type="read", parenttype=None, *, user=None):
has_access_to = []
|
codereview_new_python_data_12602
|
def get_value(*args, **kwargs):
return db.get_value(*args, **kwargs)
-def as_json(obj: dict | list, indent=1, separators=None) -> str:
from frappe.utils.response import json_handler
if separators is None:
separators = (",", ": ")
try:
return json.dumps(
- obj, indent=indent, sort_keys=True, default=json_handler, separators=separators, ensure_ascii=False
)
except TypeError:
# this would break in case the keys are not all os "str" type - as defined in the JSON
# adding this to ensure keys are sorted (expected behaviour)
sorted_obj = dict(sorted(obj.items(), key=lambda kv: str(kv[0])))
- return json.dumps(sorted_obj, indent=indent, default=json_handler, separators=separators, ensure_ascii=False)
def are_emails_muted():
Don't just enable this globally on all `frappe.as_json` calls? Add a flag which defaults to current behaviour and change it where it's required.
def get_value(*args, **kwargs):
return db.get_value(*args, **kwargs)
+def as_json(obj: dict | list, indent=1, separators=None, ensure_ascii=True) -> str:
from frappe.utils.response import json_handler
if separators is None:
separators = (",", ": ")
try:
return json.dumps(
+ obj, indent=indent, sort_keys=True, default=json_handler, separators=separators, ensure_ascii=ensure_ascii
)
except TypeError:
# this would break in case the keys are not all os "str" type - as defined in the JSON
# adding this to ensure keys are sorted (expected behaviour)
sorted_obj = dict(sorted(obj.items(), key=lambda kv: str(kv[0])))
+ return json.dumps(sorted_obj, indent=indent, default=json_handler, separators=separators, ensure_ascii=ensure_ascii)
def are_emails_muted():
|
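A standalone sketch of the flag being threaded through in the record above: `json.dumps` defaults to `ensure_ascii=True`, escaping non-ASCII characters, and the reviewer asks to keep that default and opt out only where needed.
```python
import json

data = {"name": "Müller"}

# Default: non-ASCII escaped to \uXXXX sequences.
assert json.dumps(data) == '{"name": "M\\u00fcller"}'

# Opt-in via the new parameter: raw UTF-8 in the output.
assert json.dumps(data, ensure_ascii=False) == '{"name": "Müller"}'
```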
codereview_new_python_data_12603
|
def is_pypika_function_object(field: str) -> bool:
def get_doctype_name(table_name: str) -> str:
- if "tab" in table_name:
- table_name = table_name.replace("tab", "")
table_name = table_name.replace("`", "")
table_name = table_name.replace('"', "")
return table_name
`tab` can be part of a doctype name too.
def is_pypika_function_object(field: str) -> bool:
def get_doctype_name(table_name: str) -> str:
+ if table_name.startswith(("tab", "`tab", '"tab')):
+ table_name = table_name.replace("tab", "", 1)
table_name = table_name.replace("`", "")
table_name = table_name.replace('"', "")
return table_name
|
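A sketch of the failure mode the reviewer points out: a bare `replace("tab", "")` also strips "tab" occurring inside the doctype name, while anchoring on the prefix and passing `count=1` removes only the table prefix. "Timetable" is a hypothetical doctype.
```python
def get_doctype_name(table_name: str) -> str:
    # Strip only the leading "tab" prefix (possibly after a quote char).
    if table_name.startswith(("tab", "`tab", '"tab')):
        table_name = table_name.replace("tab", "", 1)
    return table_name.replace("`", "").replace('"', "")

# Naive global replace mangles names containing "tab":
assert "tabTimetable".replace("tab", "") == "Timele"
# Prefix-anchored, count-limited replace keeps the name intact:
assert get_doctype_name("`tabTimetable`") == "Timetable"
```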
codereview_new_python_data_12605
|
def handle_exception(e):
# usually.
frappe.session.user = "Guest"
- if isinstance(e, TypeError):
- http_status_code = 417
-
if respond_as_json:
# handle ajax responses first
# if the request is ajax, send back the trace or error message
This will convert all TypeErrors to 417, some of which are genuinely not the user's mistake (Python and dynamic code :shrug: :woozy_face:).
Is it feasible to wrap pydantic's type error in a specific exception? Then we can specify it and its HTTP code in `frappe.exceptions` directly, without having to hardcode it here.
def handle_exception(e):
# usually.
frappe.session.user = "Guest"
if respond_as_json:
# handle ajax responses first
# if the request is ajax, send back the trace or error message
|
codereview_new_python_data_12607
|
def delete(self):
def serialize_worker(worker: Worker) -> frappe._dict:
- queue = ", ".join(worker.queue_names())
- queue_types = ",".join(q.rsplit(":", 1)[1] for q in worker.queue_names())
return frappe._dict(
name=worker.pid,
queue=queue,
maybe store `worker.queue_names()` in a separate variable?
def delete(self):
def serialize_worker(worker: Worker) -> frappe._dict:
+ queue_names = worker.queue_names()
+
+ queue = ", ".join(queue_names)
+ queue_types = ",".join(q.rsplit(":", 1)[1] for q in queue_names)
return frappe._dict(
name=worker.pid,
queue=queue,
|
codereview_new_python_data_12610
|
def run(self, site: str):
try:
self.pre_schema_updates()
self.run_schema_updates()
finally:
- try:
- self.post_schema_updates()
- finally:
- self.tearDown()
- frappe.destroy()
Can move this into the original try block. It's effectively the same without nesting.
def run(self, site: str):
try:
self.pre_schema_updates()
self.run_schema_updates()
+ self.post_schema_updates()
finally:
+ self.tearDown()
+ frappe.destroy()
|
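A hedged sketch of why the flattened form above is equivalent: any step that raises skips the rest of the `try` body, and the `finally` block still runs exactly once.
```python
def run(steps, cleanup):
    # Same shape as the flattened patch: sequential steps, one finally.
    try:
        for step in steps:
            step()
    finally:
        cleanup()

def boom():
    raise RuntimeError("schema update failed")

log = []
try:
    run([lambda: log.append("pre"), boom, lambda: log.append("post")],
        cleanup=lambda: log.append("teardown"))
except RuntimeError:
    pass
assert log == ["pre", "teardown"]  # "post" skipped, teardown still ran
```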
codereview_new_python_data_12611
|
def execute():
"""Set full name for all contacts"""
for name, first, middle, last, company in frappe.get_all(
"Contact",
fields=["name", "first_name", "middle_name", "last_name", "company_name"],
as_list=True,
):
- frappe.db.set_value("Contact", name, "full_name", get_full_name(first, middle, last, company))
Enable auto-commit. I've seen sites with millions of contacts :smiling_face_with_tear:
Converting to a raw SQL update might also be a better option.
def execute():
"""Set full name for all contacts"""
+ frappe.db.auto_commit_on_many_writes = 1
+
for name, first, middle, last, company in frappe.get_all(
"Contact",
fields=["name", "first_name", "middle_name", "last_name", "company_name"],
as_list=True,
):
+ frappe.db.set_value(
+ "Contact",
+ name,
+ "full_name",
+ get_full_name(first, middle, last, company),
+ update_modified=False,
+ )
|
codereview_new_python_data_12612
|
def set_naming_from_document_naming_rule(doc):
if doc.doctype in log_types:
return
- def _get_document_naming_rule():
- # ignore_ddl if naming is not yet bootstrapped
-
- return frappe.get_all(
- "Document Naming Rule",
- {"document_type": doc.doctype, "disabled": 0},
- order_by="priority desc",
- ignore_ddl=True,
- )
-
- document_naming_rules = frappe.cache().hget(
- "document_naming_rule", doc.doctype, _get_document_naming_rule
)
for d in document_naming_rules:
There should be cache eviction for this whenever a doc naming rule is updated/inserted.
def set_naming_from_document_naming_rule(doc):
if doc.doctype in log_types:
return
+ document_naming_rules = frappe.cache_manager.get_doctype_map(
+ "Document Naming Rule",
+ doc.doctype,
+ filters={"document_type": doc.doctype, "disabled": 0},
+ order_by="priority desc",
)
for d in document_naming_rules:
|
codereview_new_python_data_12615
|
from frappe.model import no_value_fields
from frappe.utils import cint, cstr, duration_to_seconds, flt, update_progress_bar
from frappe.utils.csvutils import get_csv_content_from_google_sheets, read_csv_content
-from frappe.utils.file_manager import is_safe_path
from frappe.utils.xlsxutils import (
read_xls_file_from_attached_file,
read_xlsx_file_from_attached_file,
```suggestion
```
I guess this import is no longer required?
from frappe.model import no_value_fields
from frappe.utils import cint, cstr, duration_to_seconds, flt, update_progress_bar
from frappe.utils.csvutils import get_csv_content_from_google_sheets, read_csv_content
from frappe.utils.xlsxutils import (
read_xls_file_from_attached_file,
read_xlsx_file_from_attached_file,
|
codereview_new_python_data_12616
|
def execute():
if frappe.db.exists("Navbar Item", {"item_label": "Manage Subscriptions"}):
return
- for navbar_item in navbar_settings.settings_dropdown[5:]:
- navbar_item.idx = navbar_item.idx + 1
navbar_settings.append(
"settings_dropdown",
Keep patches simple. Just reset the idx after inserting a row in the middle instead of doing index arithmetic :smile:
```python
for idx, row in enumerate(child_table, start):
row.idx = idx
```
def execute():
if frappe.db.exists("Navbar Item", {"item_label": "Manage Subscriptions"}):
return
+ for idx, row in enumerate(navbar_settings.settings_dropdown, 2):
+ row.idx = idx
navbar_settings.append(
"settings_dropdown",
|
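A minimal sketch of the idx-reset idiom from the suggestion, with `SimpleNamespace` objects standing in for Frappe child-table rows (hypothetical data):
```python
from types import SimpleNamespace

rows = [SimpleNamespace(label=label) for label in ("Profile", "Settings", "Logout")]

# A new row was inserted at idx 1, so renumber the existing rows
# starting at 2 instead of doing per-row index arithmetic.
for idx, row in enumerate(rows, 2):
    row.idx = idx

assert [row.idx for row in rows] == [2, 3, 4]
```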
codereview_new_python_data_12617
|
def savedocs(doc, action):
# action
doc.docstatus = {"Save": 0, "Submit": 1, "Update": 1, "Cancel": 2}[action]
if doc.docstatus == 1:
- if doc.meta.submit_in_background and doc.meta.is_submittable:
doc.queue_action("submit", timeout=4000)
else:
doc.submit()
```suggestion
if doc.meta.submit_in_background:
```
Probably no need to check `is_submittable`... If docstatus is 1, it'll be one of two cases:
a) the document is being submitted
b) the submitted document is being updated
def savedocs(doc, action):
# action
doc.docstatus = {"Save": 0, "Submit": 1, "Update": 1, "Cancel": 2}[action]
if doc.docstatus == 1:
+ if doc.meta.submit_in_background:
doc.queue_action("submit", timeout=4000)
else:
doc.submit()
|
codereview_new_python_data_12618
|
def notify(self, submission_status: str, action: str):
def unlock_doc(self):
if self.is_locked:
try:
- Job(self.job_id, connection=get_redis_conn())
- frappe.msgprint(_("Document already exists in queue!"))
except NoSuchJobError:
self.to_be_queued_doc.unlock()
- self.status = "Failed"
- self.save()
frappe.msgprint(_("Unlocked document as no such document exists in queue"))
else:
- # failed, completed don't know at this point
- self.status = "Failed"
- self.save()
frappe.msgprint(_("Document is already unlocked"))
```suggestion
try:
Job.fetch(self.job_id, connection=get_redis_conn())
frappe.msgprint(_("Job for submission of the reference document exists in queue"), _("Failed to unlock"))
except NoSuchJobError:
self.unlock()
frappe.msgprint(_("Reference Document Unlocked"))
```
TBH not really sure if the status can be considered `Failed` in the except block, since the job can also be killed/timed out after the whole submission scenario happens... best to leave it in the `Queued` state?
def notify(self, submission_status: str, action: str):
def unlock_doc(self):
if self.is_locked:
try:
+ job = Job(self.job_id, connection=get_redis_conn())
+ if not job.get_status(refresh=True):
+ raise NoSuchJobError
except NoSuchJobError:
self.to_be_queued_doc.unlock()
frappe.msgprint(_("Unlocked document as no such document exists in queue"))
else:
frappe.msgprint(_("Document is already unlocked"))
|
codereview_new_python_data_12619
|
EmptyQueryValues = object()
FallBackDateTimeStr = "0001-01-01 00:00:00.000000"
-NESTED_SET_HIERARCHY = (
"ancestors of",
"descendants of",
"not ancestors of",
```suggestion
NestedSetHierarchy = (
```
EmptyQueryValues = object()
FallBackDateTimeStr = "0001-01-01 00:00:00.000000"
+NestedSetHierarchy = (
"ancestors of",
"descendants of",
"not ancestors of",
|
codereview_new_python_data_12621
|
def candidates() -> Generator[Symbol, None, None]:
def _symbol_lookup(
self,
nestedName: ASTNestedName,
- onMissingQualifiedSymbol: Callable[["Symbol", ASTIdentifier], Symbol | None],
ancestorLookupType: str | None,
matchSelf: bool,
recurseInAnon: bool,
```suggestion
onMissingQualifiedSymbol: Callable[[Symbol, ASTIdentifier], Symbol | None],
```
def candidates() -> Generator[Symbol, None, None]:
def _symbol_lookup(
self,
nestedName: ASTNestedName,
+ onMissingQualifiedSymbol: Callable[[Symbol, ASTIdentifier], Symbol | None],
ancestorLookupType: str | None,
matchSelf: bool,
recurseInAnon: bool,
|
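Context for dropping the quotes around `Symbol`: with `from __future__ import annotations` (PEP 563), annotations are stored as strings and evaluated lazily, so an unquoted reference to a not-yet-defined name is legal. A standalone sketch, assuming Python 3.10+ for the `|` union syntax:
```python
from __future__ import annotations

import typing


class Symbol:
    # Unquoted self-reference: fine under PEP 563, since the
    # annotation is not evaluated at class-creation time.
    def parent(self) -> Symbol | None:
        return None


hints = typing.get_type_hints(Symbol.parent)
assert hints["return"] == typing.Optional[Symbol]
```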
codereview_new_python_data_12622
|
def setlocale(category: int, value: Union[str, Iterable[str], None] = None) -> N
pass
-LOCALE_DIR = path.abspath(path.dirname(__file__))
def init_console(
- locale_dir: str = LOCALE_DIR,
catalog: str = 'sphinx',
) -> Tuple[NullTranslations, bool]:
"""Initialize locale for console.
```suggestion
_LOCALE_DIR = path.abspath(path.dirname(__file__))
```
def setlocale(category: int, value: Union[str, Iterable[str], None] = None) -> N
pass
+_LOCALE_DIR = path.abspath(path.dirname(__file__))
def init_console(
+ locale_dir: str = _LOCALE_DIR,
catalog: str = 'sphinx',
) -> Tuple[NullTranslations, bool]:
"""Initialize locale for console.
|
codereview_new_python_data_12623
|
def setlocale(category: int, value: Union[str, Iterable[str], None] = None) -> N
pass
-LOCALE_DIR = path.abspath(path.dirname(__file__))
def init_console(
- locale_dir: str = LOCALE_DIR,
catalog: str = 'sphinx',
) -> Tuple[NullTranslations, bool]:
"""Initialize locale for console.
```suggestion
locale_dir: str = _LOCALE_DIR,
```
def setlocale(category: int, value: Union[str, Iterable[str], None] = None) -> N
pass
+_LOCALE_DIR = path.abspath(path.dirname(__file__))
def init_console(
+ locale_dir: str = _LOCALE_DIR,
catalog: str = 'sphinx',
) -> Tuple[NullTranslations, bool]:
"""Initialize locale for console.
|
codereview_new_python_data_12624
|
def is_ignored_uri(self, uri: str) -> bool:
class HyperlinkAvailabilityCheckWorker(Thread):
"""A worker class for checking the availability of hyperlinks."""
- def __init__(self, env: BuildEnvironment, config: Config, rqueue: "Queue[CheckResult]",
- wqueue: "Queue[CheckRequest]", rate_limits: Dict[str, RateLimit]) -> None:
self.config = config
self.env = env
self.rate_limits = rate_limits
```suggestion
def __init__(self, env: BuildEnvironment, config: Config, rqueue: 'Queue[CheckResult]',
wqueue: 'Queue[CheckRequest]', rate_limits: Dict[str, RateLimit]) -> None:
```
def is_ignored_uri(self, uri: str) -> bool:
class HyperlinkAvailabilityCheckWorker(Thread):
"""A worker class for checking the availability of hyperlinks."""
+ def __init__(self, env: BuildEnvironment, config: Config, rqueue: 'Queue[CheckResult]',
+ wqueue: 'Queue[CheckRequest]', rate_limits: Dict[str, RateLimit]) -> None:
self.config = config
self.env = env
self.rate_limits = rate_limits
|
codereview_new_python_data_12625
|
def terminal_safe(s: str) -> str:
def get_terminal_width() -> int:
- """Get number of columns of the terminal."""
return shutil.get_terminal_size().columns - 1
```suggestion
"""Return the width of the terminal in columns."""
```
def terminal_safe(s: str) -> str:
def get_terminal_width() -> int:
+ """Return the width of the terminal in columns."""
return shutil.get_terminal_size().columns - 1
|
codereview_new_python_data_12626
|
def __init__(self, nproc: int) -> None:
# task arguments
self._args: Dict[int, Optional[List[Any]]] = {}
# list of subprocesses (both started and waiting)
- self._procs: Dict[int, "ForkProcess"] = {}
# list of receiving pipe connections of running subprocesses
self._precvs: Dict[int, Any] = {}
# list of receiving pipe connections of waiting subprocesses
Is this quotation needed?
def __init__(self, nproc: int) -> None:
# task arguments
self._args: Dict[int, Optional[List[Any]]] = {}
# list of subprocesses (both started and waiting)
+ self._procs: Dict[int, ForkProcess] = {}
# list of receiving pipe connections of running subprocesses
self._precvs: Dict[int, Any] = {}
# list of receiving pipe connections of waiting subprocesses
|
codereview_new_python_data_12674
|
def test_handling_first_fetch_and_old_integration_context(mocker,
def test_auto_detect_indicator_type_from_cs(indicator: dict, expected_results: str | None):
from CrowdStrikeIndicatorFeed import auto_detect_indicator_type_from_cs
- type_ = auto_detect_indicator_type_from_cs(indicator['indicator'], indicator['type'])
- assert type_ == expected_results
```suggestion
assert auto_detect_indicator_type_from_cs(indicator['indicator'], indicator['type']) == expected_results
```
def test_handling_first_fetch_and_old_integration_context(mocker,
def test_auto_detect_indicator_type_from_cs(indicator: dict, expected_results: str | None):
from CrowdStrikeIndicatorFeed import auto_detect_indicator_type_from_cs
+ assert auto_detect_indicator_type_from_cs(indicator['indicator'], indicator['type']) == expected_results
|
codereview_new_python_data_12675
|
def normalize_scan_data(scan_data: dict) -> dict:
include_none=True,
)
- if "duration" in scan_data:
result["TotalTime"] = readable_duration_time(scan_data["duration"])
else:
@bziser I've checked and debugged the old integration by checking out commit 6d2266fd29997611a64988f9362faa349e8ab4c5 (last version bump before the rewrite), and it seems like previously, if the `duration` key was missing, the value of `TotalTime` ended up being `"0 minutes"`.
I thought returning a message (since its value is a string anyway) saying it couldn't be found would be more accurate, but I guess it could be considered as breaking BC.
Please let me know what you think.
(see PR's description for more context about the bug and why I believe it happened)
def normalize_scan_data(scan_data: dict) -> dict:
include_none=True,
)
+ if scan_data.get("duration"):
result["TotalTime"] = readable_duration_time(scan_data["duration"])
else:
|
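The subtle behaviour change in the record above, as a minimal sketch: `"duration" in scan_data` is true even for falsy values, while `scan_data.get("duration")` treats `0`, `None`, and a missing key the same way.
```python
scan_data = {"duration": 0}

# Key-presence check: passes even though the value is falsy.
assert "duration" in scan_data

# Truthiness check: 0 / None / "" behave like a missing key.
assert not scan_data.get("duration")
assert not {}.get("duration")
```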
codereview_new_python_data_12676
|
def get_agent():
within XSOAR (both on-prem and cloud).
"""
platform = get_demisto_version().get('platform')
- if platform == 'x2': # XSIAM
- return 'xdr'
- else: # XSOAR (on-prem or cloud)
- return 'xsoartim'
def get_file_report(self, file_hash: str):
return self._http_request(
Simpler, not mandatory though:
```suggestion
return 'xdr' if platform == 'x2' else 'xsoartim'  # 'xdr' for XSIAM
```
def get_agent():
within XSOAR (both on-prem and cloud).
"""
platform = get_demisto_version().get('platform')
+ return 'xdr' if platform == 'x2' else 'xsoartim'
def get_file_report(self, file_hash: str):
return self._http_request(
|
codereview_new_python_data_12677
|
def get_script_execution_result_files(self, action_id: str, endpoint_id: str) ->
link = response.get('reply', {}).get('DATA')
# If the link is None, the API call will result in a 'Connection Timeout Error', so we raise an exception
if not link:
- demisto.debug(f'Failed getting response from /scripts/get_script_execution_results_files, {action_id=},'
- f' {endpoint_id=}')
- raise DemistoException('File not found.')
return self._http_request(
method='GET',
full_url=link,
```suggestion
raise DemistoException(f'Failed getting response files for {action_id=}, {endpoint_id=}')
```
def get_script_execution_result_files(self, action_id: str, endpoint_id: str) ->
link = response.get('reply', {}).get('DATA')
# If the link is None, the API call will result in a 'Connection Timeout Error', so we raise an exception
if not link:
+ raise DemistoException(f'Failed getting response files for {action_id=}, {endpoint_id=}')
return self._http_request(
method='GET',
full_url=link,
|
codereview_new_python_data_12678
|
def set_marketplace_url(servers, branch_name, ci_build_number, marketplace_name=
except Exception as e:
logging.error(f'Filed to sync marketplace. Error: {e}')
logging.info('Finished copying successfully.')
- logging.info('sleeping for 120 seconds')
sleep(120)
def concurrently_run_function_on_servers(self, function=None, pack_path=None, service_account=None):
```suggestion
sleep_time = 120
logging.info(f'sleeping for {sleep_time} seconds')
sleep(sleep_time)
```
def set_marketplace_url(servers, branch_name, ci_build_number, marketplace_name=
except Exception as e:
logging.error(f'Filed to sync marketplace. Error: {e}')
logging.info('Finished copying successfully.')
+ sleep_time = 120
+ logging.info(f'sleeping for {sleep_time} seconds')
+ sleep(sleep_time)
def concurrently_run_function_on_servers(self, function=None, pack_path=None, service_account=None):
|
codereview_new_python_data_12713
|
def spyder_safeimport(path, forceload=0, cache={}):
pydoc.safeimport = spyder_safeimport
except Exception:
pass
# Needed to prevent showing a warning message regarding debugging
# See spyder-ide/spyder#20390
if is_pynsist():
```suggestion
# Needed to prevent showing a warning message regarding debugging
```
def spyder_safeimport(path, forceload=0, cache={}):
pydoc.safeimport = spyder_safeimport
except Exception:
pass
+
# Needed to prevent showing a warning message regarding debugging
# See spyder-ide/spyder#20390
if is_pynsist():
|
codereview_new_python_data_12714
|
def show_syspath(self):
self.dialog_manager.show(editor)
@Slot()
- def run_script(self, filename=None, silent=False,
- args=None):
"""
Run a Python script.
"""
```suggestion
def run_script(self, filename=None, silent=False, args=None):
```
def show_syspath(self):
self.dialog_manager.show(editor)
@Slot()
+ def run_script(self, filename=None, silent=False, args=None):
"""
Run a Python script.
"""
|
codereview_new_python_data_12715
|
"""Tests for gotoline.py"""
# Third party imports
-from qtpy.QtWidgets import QDialogButtonBox, QPushButton, QTableWidget, QLineEdit
# Local imports
from spyder.plugins.editor.widgets.gotoline import GoToLineDialog
def test_gotolinedialog_has_cancel_button(codeeditor, qtbot, tmpdir):
"""
Test that GoToLineDialog has a Cancel button.
I think the `QPushButton` and `QTableWidget` imports are not being used here, so please remove them.
"""Tests for gotoline.py"""
# Third party imports
+from qtpy.QtWidgets import QDialogButtonBox, QLineEdit
# Local imports
from spyder.plugins.editor.widgets.gotoline import GoToLineDialog
+
def test_gotolinedialog_has_cancel_button(codeeditor, qtbot, tmpdir):
"""
Test that GoToLineDialog has a Cancel button.
|
codereview_new_python_data_12716
|
def get_skiprows(self):
"""Return number of lines to be skipped"""
skip_rows = to_text_string(self.skiprows_edt.text())
# QIntValidator does not handle '+' sign
- # See Spyder PR #20070
if skip_rows and skip_rows != '+':
return int(skip_rows)
else:
Let's change this to:
```suggestion
# See spyder-ide/spyder#20070
```
To follow the comment link format supported by Spyder.
def get_skiprows(self):
"""Return number of lines to be skipped"""
skip_rows = to_text_string(self.skiprows_edt.text())
# QIntValidator does not handle '+' sign
+ # See spyder-ide/spyder#20070
if skip_rows and skip_rows != '+':
return int(skip_rows)
else:
|
codereview_new_python_data_12717
|
class SpyderKernelsCondaPkg(BuildCondaPkg):
Build conda packages to local channel.
This module builds conda packages for Spyder and external-deps for
- inclusion in the conda-based installer. The Following classes are
provided for each package:
SpyderCondaPkg
PylspCondaPkg
```suggestion
inclusion in the conda-based installer. The following classes are
```
class SpyderKernelsCondaPkg(BuildCondaPkg):
Build conda packages to local channel.
This module builds conda packages for Spyder and external-deps for
+ inclusion in the conda-based installer. The following classes are
provided for each package:
SpyderCondaPkg
PylspCondaPkg
|
codereview_new_python_data_12718
|
def start_installation(self, latest_release):
def set_download_progress(self, current_value, total):
percentage_progress = 0
if total > 0:
- percentage_progress = int((current_value/total) * 100)
- self.custom_widget.setText(f"{percentage_progress} %")
def set_status_pending(self, latest_release):
self.set_value(PENDING)
```suggestion
self.custom_widget.setText(f"{percentage_progress}%")
```
No space between number and percent sign (at least in English)
def start_installation(self, latest_release):
def set_download_progress(self, current_value, total):
percentage_progress = 0
if total > 0:
+ percentage_progress = round((current_value/total) * 100)
+ self.custom_widget.setText(f"{percentage_progress}%")
def set_status_pending(self, latest_release):
self.set_value(PENDING)
|
codereview_new_python_data_12719
|
def remote_call(self, interrupt=False, blocking=False, callback=None,
def on_incoming_call(self, call_dict):
"""A call was received"""
super().on_incoming_call(call_dict)
- # Just in case the call was not recieved
self._comm_ready()
# ---- Private -----
```suggestion
# Just in case the call was not received
```
def remote_call(self, interrupt=False, blocking=False, callback=None,
def on_incoming_call(self, call_dict):
"""A call was received"""
super().on_incoming_call(call_dict)
+ # Just in case the call was not received
self._comm_ready()
# ---- Private -----
|
codereview_new_python_data_12720
|
def update_pdb_state(self, state, filename, line_number):
):
self.debugger_panel.start_clean()
self.debugger_panel.set_current_line_arrow(line_number)
return
self.debugger_panel.stop_clean()
```suggestion
return
```
def update_pdb_state(self, state, filename, line_number):
):
self.debugger_panel.start_clean()
self.debugger_panel.set_current_line_arrow(line_number)
+
return
self.debugger_panel.stop_clean()
|
codereview_new_python_data_12721
|
def _update_codeeditor(self, codeeditor):
pdb_state, filename, lineno)
@Slot(bool)
- def _update_current_codeeditor_pdb_state(
- self, pdb_state):
"""
The pdb state has changed.
"""
```suggestion
def _update_current_codeeditor_pdb_state(self, pdb_state):
```
def _update_codeeditor(self, codeeditor):
pdb_state, filename, lineno)
@Slot(bool)
+ def _update_current_codeeditor_pdb_state(self, pdb_state):
"""
The pdb state has changed.
"""
|
codereview_new_python_data_12722
|
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
This module contains the editor panels.
```suggestion
# (see spyder/__init__.py for details)
```
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
+
"""
This module contains the editor panels.
|
codereview_new_python_data_12723
|
class DebuggerWidget(ShellConnectMainWidget):
"""This signal is emitted to request the current file to be debugged."""
sig_debug_cell = Signal()
"""This signal is emitted to request the current cell to be debugged."""
sig_breakpoints_saved = Signal()
"""Breakpoints have been saved"""
sig_toggle_breakpoints = Signal()
"""Add or remove a breakpoint on the current line."""
sig_toggle_conditional_breakpoints = Signal()
"""Add or remove a conditional breakpoint on the current line."""
sig_clear_all_breakpoints = Signal()
"""Clear all breakpoints in all files."""
sig_pdb_state_changed = Signal(bool, dict)
"""Pdb state changed"""
```suggestion
sig_breakpoints_saved = Signal()
"""Breakpoints have been saved"""
sig_toggle_breakpoints = Signal()
"""Add or remove a breakpoint on the current line."""
sig_toggle_conditional_breakpoints = Signal()
"""Add or remove a conditional breakpoint on the current line."""
sig_clear_all_breakpoints = Signal()
"""Clear all breakpoints in all files."""
sig_pdb_state_changed = Signal(bool, dict)
"""Pdb state changed"""
```
class DebuggerWidget(ShellConnectMainWidget):
"""This signal is emitted to request the current file to be debugged."""
sig_debug_cell = Signal()
"""This signal is emitted to request the current cell to be debugged."""
+
sig_breakpoints_saved = Signal()
"""Breakpoints have been saved"""
+
sig_toggle_breakpoints = Signal()
"""Add or remove a breakpoint on the current line."""
+
sig_toggle_conditional_breakpoints = Signal()
"""Add or remove a conditional breakpoint on the current line."""
+
sig_clear_all_breakpoints = Signal()
"""Clear all breakpoints in all files."""
+
sig_pdb_state_changed = Signal(bool, dict)
"""Pdb state changed"""
|
codereview_new_python_data_12724
|
# or if you want to *rename* options, then you need to do a MAJOR update in
# version, e.g. from 3.0.0 to 4.0.0
# 3. You don't need to touch this value if you're just adding a new option
-CONF_VERSION = '72.1.0'
```suggestion
CONF_VERSION = '73.0.0'
```
This is because you're renaming options, i.e. removing some and adding them with a different name.
# or if you want to *rename* options, then you need to do a MAJOR update in
# version, e.g. from 3.0.0 to 4.0.0
# 3. You don't need to touch this value if you're just adding a new option
+CONF_VERSION = '73.0.0'
|
codereview_new_python_data_12725
|
def __init__(self, editor):
editor.panels.register(self.debugger_panel)
self.debugger_panel.order_in_zone = -1
self.update_panel_visibility()
- # load breakpoints
self.load_breakpoints()
# Update breakpoints if the number of lines in the file changes
```suggestion
# Load breakpoints
```
def __init__(self, editor):
editor.panels.register(self.debugger_panel)
self.debugger_panel.order_in_zone = -1
self.update_panel_visibility()
+
+ # Load breakpoints
self.load_breakpoints()
# Update breakpoints if the number of lines in the file changes
|
codereview_new_python_data_12726
|
def setup_page(self):
showcodefolding_box = newcb(_("Show code folding"), 'code_folding')
linenumbers_box = newcb(_("Show line numbers"), 'line_numbers')
breakpoints_box = newcb(_("Show breakpoints"), 'breakpoints_panel',
- section='debugger', default=True)
blanks_box = newcb(_("Show blank spaces"), 'blank_spaces')
currentline_box = newcb(_("Highlight current line"),
'highlight_current_line')
```suggestion
section='debugger')
```
This is incorrect because (almost always) options need to have defaults when they are transient (e.g. when they depend on the user filesystem). Instead, please add this new option to `debugger` section in `spyder/config/main.py`.
def setup_page(self):
showcodefolding_box = newcb(_("Show code folding"), 'code_folding')
linenumbers_box = newcb(_("Show line numbers"), 'line_numbers')
breakpoints_box = newcb(_("Show breakpoints"), 'breakpoints_panel',
+ section='debugger')
blanks_box = newcb(_("Show blank spaces"), 'blank_spaces')
currentline_box = newcb(_("Highlight current line"),
'highlight_current_line')
|
codereview_new_python_data_12727
|
def emit_fault_text(self, fault):
self.sig_fault.emit(fault)
def fault_filename(self):
- """get fault filename"""
if not self._fault_args:
return
return self._fault_args[0]
```suggestion
"""Get fault filename."""
```
def emit_fault_text(self, fault):
self.sig_fault.emit(fault)
def fault_filename(self):
+ """Get fault filename."""
if not self._fault_args:
return
return self._fault_args[0]
|
codereview_new_python_data_12728
|
def run_terminal_thread():
raise NotImplementedError
-def check_version_range(module_version, version):
"""
- Check version string of a module against a required version.
"""
if ';' in version:
versions = version.split(';')
```suggestion
def check_version_range(module_version, version_range):
```
I think it's clearer if we rename `version` to `version_range` here.
def run_terminal_thread():
raise NotImplementedError
+def check_version_range(module_version, version_range):
"""
+ Check if a module's version lies in `version_range`.
"""
if ';' in version_range:
versions = version_range.split(';')
|
codereview_new_python_data_12729
|
def test_print_frames(main_window, qtbot, tmpdir, thread):
@pytest.mark.slow
@flaky(max_runs=3)
-def test_debugger(main_window, qtbot):
- """Test debugger"""
# Wait until the window is fully up
shell = main_window.ipyconsole.get_current_shellwidget()
qtbot.waitUntil(
```suggestion
def test_debugger_plugin(main_window, qtbot):
"""Test debugger plugin."""
```
def test_print_frames(main_window, qtbot, tmpdir, thread):
@pytest.mark.slow
@flaky(max_runs=3)
+def test_debugger_plugin(main_window, qtbot):
+ """Test debugger plugin."""
# Wait until the window is fully up
shell = main_window.ipyconsole.get_current_shellwidget()
qtbot.waitUntil(
|
codereview_new_python_data_12730
|
class ShellWidget(NamepaceBrowserWidget, HelpWidget, DebuggingWidget,
sig_kernel_started = Signal()
sig_kernel_reset = Signal()
- # Request configuration from plugins
- sig_configure_requested = Signal()
@classmethod
def prune_shutdown_thread_list(cls):
```suggestion
# Request plugins to send additional configuration to the kernel
sig_config_kernel_requested = Signal()
```
This follows my suggestion above.
class ShellWidget(NamepaceBrowserWidget, HelpWidget, DebuggingWidget,
sig_kernel_started = Signal()
sig_kernel_reset = Signal()
+ # Request plugins to send additional configuration to the kernel
+ sig_config_kernel_requested = Signal()
@classmethod
def prune_shutdown_thread_list(cls):
|
codereview_new_python_data_12731
|
def close_file(self, index=None, force=False):
editor.setParent(None)
editor.completion_widget.setParent(None)
if self.parent():
- # Can be false in tests
self.get_plugin().unregister_widget_shortcuts(editor)
# We pass self object ID as a QString, because otherwise it would
```suggestion
# Can be None in tests
```
I think that's really the case.
def close_file(self, index=None, force=False):
editor.setParent(None)
editor.completion_widget.setParent(None)
if self.parent():
+ # Can be None in tests
self.get_plugin().unregister_widget_shortcuts(editor)
# We pass self object ID as a QString, because otherwise it would
|
codereview_new_python_data_12732
|
def delete_plugin(self, plugin_name: str, teardown: bool = True,
True if the teardown notification to other plugins should be sent
when deleting the plugin, False otherwise.
check_can_delete: bool
- True if the plugin should validate if can be closed in the moment,
- False otherwise.
Returns
-------
```suggestion
True if the plugin should validate if it can be closed when this
method is called, False otherwise.
```
I'm not so sure if this is exactly what you mean here, but it seems like it. If not, please edit my suggestion.
def delete_plugin(self, plugin_name: str, teardown: bool = True,
True if the teardown notification to other plugins should be sent
when deleting the plugin, False otherwise.
check_can_delete: bool
+ True if the plugin should validate if it can be closed when this
+ method is called, False otherwise.
Returns
-------
|
codereview_new_python_data_12733
|
# (see spyder/__init__.py for details)
"""
-Status widget for Kite completions.
"""
# Standard library imports
```suggestion
Status widget for Spyder updates.
```
# (see spyder/__init__.py for details)
"""
+Status widget for Spyder updates.
"""
# Standard library imports
|
codereview_new_python_data_12734
|
# Update installation process statuses
NO_STATUS = __version__
-DOWNLOADING_INSTALLER = _("Downloading installer")
-INSTALLING = _("Installing")
FINISHED = _("Installation finished")
-PENDING = _("Pending update")
CHECKING = _("Checking for updates")
CANCELLED = _("Cancelled")
Let's change these values following the suggestions from @CAM-Gerlach:
```suggestion
DOWNLOADING_INSTALLER = _("Downloading update")
INSTALLING = _("Installing update")
FINISHED = _("Installation finished")
PENDING = _("Update available")
```
# Update installation process statuses
NO_STATUS = __version__
+DOWNLOADING_INSTALLER = _("Downloading update")
+INSTALLING = _("Installing update")
FINISHED = _("Installation finished")
+PENDING = _("Update available")
CHECKING = _("Checking for updates")
CANCELLED = _("Cancelled")
|
codereview_new_python_data_12735
|
def get_user_environment_variables():
Returns
-------
env_var : dict
- Key-value pairs of environment variables
-
"""
if os.name == 'nt':
cmd = "set"
```suggestion
Key-value pairs of environment variables.
```
def get_user_environment_variables():
Returns
-------
env_var : dict
+ Key-value pairs of environment variables.
"""
if os.name == 'nt':
cmd = "set"
|
codereview_new_python_data_12736
|
def setup(self, data, title='', readonly=False, remote=False,
if icon is None:
self.setWindowIcon(ima.icon('dictedit'))
- self.setWindowFlags(Qt.Window)
@Slot()
def save_and_close_enable(self):
Why did you remove this? This affects any dialog that inherits from `CollectionsEditor`, which includes the ones we display for collections in the Variable Explorer.
def setup(self, data, title='', readonly=False, remote=False,
if icon is None:
self.setWindowIcon(ima.icon('dictedit'))
+ if sys.platform == 'darwin':
+ # See spyder-ide/spyder#9051
+ self.setWindowFlags(Qt.Tool)
+ else:
+ # Make the dialog act as a window
+ self.setWindowFlags(Qt.Window)
@Slot()
def save_and_close_enable(self):
|
codereview_new_python_data_12737
|
def create_window(self):
def close_window(self, save_undocked=False):
"""
Close QMainWindow instance that contains this widget.
"""
logger.debug("Docking plugin back to the main window")
Please document the new `save_undocked` kwarg in the docstring of this method.
def create_window(self):
def close_window(self, save_undocked=False):
"""
Close QMainWindow instance that contains this widget.
+
+ Parameters
+ ----------
+ save_undocked : bool, optional
+ True if the undocked state needs to be saved. The default is False.
+
+ Returns
+ -------
+ None.
"""
logger.debug("Docking plugin back to the main window")
|
codereview_new_python_data_12785
|
def write_metadata(self) -> None:
with open(self.metadata_filename, "w") as f:
# Write provenance header
- f.write(f"// This file is generated by {THIS} --metadata\n")
f.write(self.from_source_files())
f.write(f"// Do not edit!\n")
```suggestion
f.write(f"// This file is generated by {THIS}n")
```
def write_metadata(self) -> None:
with open(self.metadata_filename, "w") as f:
# Write provenance header
+ f.write(f"// This file is generated by {THIS}n")
f.write(self.from_source_files())
f.write(f"// Do not edit!\n")
|
codereview_new_python_data_12786
|
def write_metadata(self) -> None:
with open(self.metadata_filename, "w") as f:
# Write provenance header
- f.write(f"// This file is generated by {THIS}n")
f.write(self.from_source_files())
f.write(f"// Do not edit!\n")
```suggestion
f.write(f"// This file is generated by {THIS}\n")
```
Whoops.
def write_metadata(self) -> None:
with open(self.metadata_filename, "w") as f:
# Write provenance header
+ f.write(f"// This file is generated by {THIS}\n")
f.write(self.from_source_files())
f.write(f"// Do not edit!\n")
|
codereview_new_python_data_12789
|
def spam():
return NO_MORE
spam.need_reentrance = True
spam.iterator = iter(spam, NO_MORE)
- next(spam.iterator)
# Test exception propagation through function iterator
def test_exception_function(self):
What's the expected behaviour here? `StopIteration`?
def spam():
return NO_MORE
spam.need_reentrance = True
spam.iterator = iter(spam, NO_MORE)
+ with self.assertRaises(StopIteration):
+ next(spam.iterator)
# Test exception propagation through function iterator
def test_exception_function(self):
|
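Background for the `StopIteration` question above: the two-argument form `iter(callable, sentinel)` calls the function until it returns the sentinel and then raises `StopIteration`. A standalone sketch:
```python
NO_MORE = object()

def spam():
    # Always returns the sentinel, so the iterator is exhausted
    # on the very first next().
    return NO_MORE

it = iter(spam, NO_MORE)
try:
    next(it)
except StopIteration:
    print("exhausted immediately, as expected")
```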
codereview_new_python_data_12790
|
def from_decimal(cls, dec):
return cls(*dec.as_integer_ratio())
@classmethod
- def _from_pair(cls, num, den):
obj = super(Fraction, cls).__new__(cls)
- obj._numerator = num
- obj._denominator = den
return obj
def is_integer(self):
Let's use `numerator` and `denominator` - there's no reason to introduce a naming inconsistency with the `__new__` method. It would also be good to have a docstring for the benefit of code readers - that docstring should explain that the purpose here is not to store an unnormalized fraction, but to bypass the normalization in situations where it's not needed. It might also be good to emphasize that it's an internal implementation detail that shouldn't be depended on by external users.
def from_decimal(cls, dec):
return cls(*dec.as_integer_ratio())
@classmethod
+ def _from_pair(cls, numerator, denominator, /):
+ """Convert a pair of int's to a rational number.
+
+ The ratio of integers should be in lowest terms and
+ the denominator is positive.
+ """
obj = super(Fraction, cls).__new__(cls)
+ obj._numerator = numerator
+ obj._denominator = denominator
return obj
def is_integer(self):
|
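A sketch of the `/` marker adopted in `_from_pair` above: parameters before `/` are positional-only (Python 3.8+), so callers cannot bind them by keyword, which keeps an internal helper free to rename its parameters later.
```python
def _from_pair(numerator, denominator, /):
    # Positional-only toy version; returns the pair unchanged.
    return (numerator, denominator)

assert _from_pair(3, 4) == (3, 4)

try:
    _from_pair(numerator=3, denominator=4)
except TypeError as exc:
    print("keyword call rejected:", exc)
```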
codereview_new_python_data_12793
|
def test_fast_paths_in_use(self):
# There are fast paths of these functions implemented in posixmodule.c.
# Confirm that they are being used, and not the Python fallbacks in
# genericpath.py.
- self.assertTrue(os.path.isdir is nt._isdir)
self.assertFalse(inspect.isfunction(os.path.isdir))
- self.assertTrue(os.path.isfile is nt._isfile)
self.assertFalse(inspect.isfunction(os.path.isfile))
- self.assertTrue(os.path.islink is nt._islink)
self.assertFalse(inspect.isfunction(os.path.islink))
- self.assertTrue(os.path.exists is nt._exists)
self.assertFalse(inspect.isfunction(os.path.exists))
```suggestion
self.assertTrue(os.path.isdir is nt._path_isdir)
self.assertFalse(inspect.isfunction(os.path.isdir))
self.assertTrue(os.path.isfile is nt._path_isfile)
self.assertFalse(inspect.isfunction(os.path.isfile))
self.assertTrue(os.path.islink is nt._path_islink)
self.assertFalse(inspect.isfunction(os.path.islink))
self.assertTrue(os.path.exists is nt._path_exists)
```
def test_fast_paths_in_use(self):
# There are fast paths of these functions implemented in posixmodule.c.
# Confirm that they are being used, and not the Python fallbacks in
# genericpath.py.
+ self.assertTrue(os.path.isdir is nt._path_isdir)
self.assertFalse(inspect.isfunction(os.path.isdir))
+ self.assertTrue(os.path.isfile is nt._path_isfile)
self.assertFalse(inspect.isfunction(os.path.isfile))
+ self.assertTrue(os.path.islink is nt._path_islink)
self.assertFalse(inspect.isfunction(os.path.islink))
+ self.assertTrue(os.path.exists is nt._path_exists)
self.assertFalse(inspect.isfunction(os.path.exists))
|
codereview_new_python_data_12795
|
def _execute_child(self, args, executable, preexec_fn, close_fds,
system_root = os.environ.get('SystemRoot', '')
comspec = os.path.join(system_root, 'System32', 'cmd.exe')
if not os.path.isabs(comspec):
- raise FileNotFoundError(
- 'shell not found: neither %ComSpec% nor %SystemRoot% is set')
if os.path.isabs(comspec):
executable = comspec
```suggestion
raise FileNotFoundError('shell not found: neither %ComSpec% nor %SystemRoot% is set')
```
This is better all on one line. (I know PEP 8 says to split it, but it's okay to ignore it this time - there's no important code in or after the message, and it's more readable this way)
def _execute_child(self, args, executable, preexec_fn, close_fds,
system_root = os.environ.get('SystemRoot', '')
comspec = os.path.join(system_root, 'System32', 'cmd.exe')
if not os.path.isabs(comspec):
+ raise FileNotFoundError('shell not found: neither %ComSpec% nor %SystemRoot% is set')
if os.path.isabs(comspec):
executable = comspec
|
codereview_new_python_data_12796
|
def call_exception_handler(self, context):
def _add_callback(self, handle):
"""Add a Handle to _ready."""
- assert isinstance(handle, events.Handle), 'A Handle is required here'
if not handle._cancelled:
self._ready.append(handle)
I wonder if we should drop this assertion as well? It's still going to be the slowest part of the method.
def call_exception_handler(self, context):
def _add_callback(self, handle):
"""Add a Handle to _ready."""
if not handle._cancelled:
self._ready.append(handle)
|
codereview_new_python_data_12797
|
def stem(self):
def filename(self):
return pathlib.Path(self.root.filename).joinpath(self.at)
- def read_text(self, encoding=None, *args, **kwargs):
- encoding = io.text_encoding(encoding)
- with self.open('r', *args, encoding=encoding, **kwargs) as strm:
return strm.read()
def read_bytes(self):
nitpick: I try to avoid mutating variables, though I guess mutating `locals()['encoding']` is not much worse than mutating `kwargs['encoding']`.
def stem(self):
def filename(self):
return pathlib.Path(self.root.filename).joinpath(self.at)
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
return strm.read()
def read_bytes(self):
|
codereview_new_python_data_12798
|
def splitroot(p):
splitroot('//server/share/') == ('//server/share', '/', '')
splitroot('C:/Users/Barney') == ('C:', '/', 'Users/Barney')
- splitroot('C:///spam///ham') == ('C:', '/', '//spam///egg')
splitroot('Windows/notepad') == ('', '', 'Windows/notepad')
"""
p = os.fspath(p)
```suggestion
splitroot('C:///spam///ham') == ('C:', '/', '//spam///ham')
```
def splitroot(p):
splitroot('//server/share/') == ('//server/share', '/', '')
splitroot('C:/Users/Barney') == ('C:', '/', 'Users/Barney')
+ splitroot('C:///spam///ham') == ('C:', '/', '//spam///ham')
splitroot('Windows/notepad') == ('', '', 'Windows/notepad')
"""
p = os.fspath(p)
|
codereview_new_python_data_12799
|
def commonpath(paths):
drivesplits = [splitroot(p.replace(altsep, sep).lower()) for p in paths]
split_paths = [p.split(sep) for d, r, p in drivesplits]
- if len(set(r for d, r, p in drivesplits)) != 1:
raise ValueError("Can't mix absolute and relative paths")
# Check that all drive letters or UNC paths match. The check is made only
# now otherwise type errors for mixing strings and bytes would not be
# caught.
- if len(set(d for d, r, p in drivesplits)) != 1:
raise ValueError("Paths don't have the same drive")
drive, root, path = splitroot(paths[0].replace(altsep, sep))
```suggestion
if len({d for d, r, p in drivesplits}) != 1:
```
def commonpath(paths):
drivesplits = [splitroot(p.replace(altsep, sep).lower()) for p in paths]
split_paths = [p.split(sep) for d, r, p in drivesplits]
+ if len({r for d, r, p in drivesplits}) != 1:
raise ValueError("Can't mix absolute and relative paths")
# Check that all drive letters or UNC paths match. The check is made only
# now otherwise type errors for mixing strings and bytes would not be
# caught.
+ if len({d for d, r, p in drivesplits}) != 1:
raise ValueError("Paths don't have the same drive")
drive, root, path = splitroot(paths[0].replace(altsep, sep))
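The two spellings are equivalent; a tiny illustration with made-up split results:
```python
# A set comprehension builds the set directly, instead of feeding a
# generator object to the set() constructor.
drivesplits = [("c:", "\\", "users\\a"), ("c:", "\\", "temp")]
assert {d for d, r, p in drivesplits} == set(d for d, r, p in drivesplits)
assert len({r for d, r, p in drivesplits}) == 1  # single root: no mixing
```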
|
codereview_new_python_data_12800
|
def pseudo_op(name, op, real_ops):
def_op('DELETE_DEREF', 139)
hasfree.append(139)
jrel_op('JUMP_BACKWARD', 140) # Number of words to skip (backwards)
-def_op('COMPARE_AND_BRANCH', 141) # Comparison and jump
hascompare.append(141)
def_op('CALL_FUNCTION_EX', 142) # Flags
def_op('EXTENDED_ARG', 144)
This opcode needs to be in hasjrel as well.
def pseudo_op(name, op, real_ops):
def_op('DELETE_DEREF', 139)
hasfree.append(139)
jrel_op('JUMP_BACKWARD', 140) # Number of words to skip (backwards)
+jrel_op('COMPARE_AND_BRANCH', 141) # Comparison and jump
hascompare.append(141)
+
def_op('CALL_FUNCTION_EX', 142) # Flags
def_op('EXTENDED_ARG', 144)
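Why `hasjrel` membership matters: `dis` and other bytecode tools resolve an instruction's jump target only for opcodes registered in `dis.hasjrel` (relative) or `dis.hasjabs` (absolute). A quick sanity check, using `JUMP_BACKWARD` as a stand-in since `COMPARE_AND_BRANCH` only exists on the 3.12 development branch:
```python
import dis

op = dis.opmap["JUMP_BACKWARD"]  # requires CPython 3.11+
assert op in dis.hasjrel  # relative-jump opcodes must be listed here
```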
|
codereview_new_python_data_12802
|
def _write_atomic(path, data, mode=0o666):
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
-MAGIC_NUMBER = (3512).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
```suggestion
MAGIC_NUMBER = (3515).to_bytes(2, 'little') + b'\r\n'
```
def _write_atomic(path, data, mode=0o666):
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
+MAGIC_NUMBER = (3515).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
|
codereview_new_python_data_12803
|
def test_copy(self):
def test_deepcopy(self):
s = slice(1, 10)
c = copy.deepcopy(s)
- self.assertIsNot(s, c)
self.assertEqual(s, c)
s = slice(1, 10, 2)
c = copy.deepcopy(s)
- self.assertIsNot(s, c)
self.assertEqual(s, c)
# Corner case for mutable indices:
Same as above
```suggestion
```
def test_copy(self):
def test_deepcopy(self):
s = slice(1, 10)
c = copy.deepcopy(s)
self.assertEqual(s, c)
s = slice(1, 10, 2)
c = copy.deepcopy(s)
self.assertEqual(s, c)
# Corner case for mutable indices:
|
codereview_new_python_data_12804
|
def _write_atomic(path, data, mode=0o666):
# Python 3.12a1 3510 (FOR_ITER leaves iterator on the stack)
# Python 3.12a1 3511 (Add STOPITERATION_ERROR instruction)
# Python 3.12a1 3512 (Remove all unused consts from code objects)
-# Python 3.12a1 3513 (Add CALL_INTRINSIC_1 instruction)
# Python 3.13 will start with 3550
```suggestion
# Python 3.12a1 3513 (Add CALL_INTRINSIC_1 instruction, removed STOPITERATION_ERROR, PRINT_EXPR, IMPORT_STAR)
```
def _write_atomic(path, data, mode=0o666):
# Python 3.12a1 3510 (FOR_ITER leaves iterator on the stack)
# Python 3.12a1 3511 (Add STOPITERATION_ERROR instruction)
# Python 3.12a1 3512 (Remove all unused consts from code objects)
+# Python 3.12a1 3513 (Add CALL_INTRINSIC_1 instruction, removed STOPITERATION_ERROR, PRINT_EXPR, IMPORT_STAR)
# Python 3.13 will start with 3550
|
codereview_new_python_data_12805
|
def test_mock_unsafe(self):
m.aseert_foo_call()
m.assrt_foo_call()
#Issue21262
def test_assert_not_called(self):
m = Mock()
I would suggest also testing a name like `assertSome`, because the `_` might be important.
def test_mock_unsafe(self):
m.aseert_foo_call()
m.assrt_foo_call()
+ # gh-100739
+ def test_mock_safe_with_spec(self):
+ class Foo(object):
+ def assert_bar(self):
+ pass
+
+ def assertSome(self):
+ pass
+
+ m = Mock(spec=Foo)
+ m.assert_bar()
+ m.assertSome()
+
+ m.assert_bar.assert_called_once()
+ m.assertSome.assert_called_once()
+
#Issue21262
def test_assert_not_called(self):
m = Mock()
|
codereview_new_python_data_12809
|
def _io_wrapper(file, mode='r', *args, **kwargs):
elif mode == 'rb':
return file
raise ValueError(
- "Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
)
This looks like something got reverted. Gotta track it down and backport it.
def _io_wrapper(file, mode='r', *args, **kwargs):
elif mode == 'rb':
return file
raise ValueError(
+ f"Invalid mode value '{mode}', only 'r' and 'rb' are supported"
)
|
codereview_new_python_data_12811
|
def __exit__(self, t, v, tb):
@classmethod
def cwd(cls):
- """Return a new path pointing to the current working directory
- (as returned by os.getcwd()).
- """
return cls().absolute()
@classmethod
Same remark as other PR about feeling weird about a straightforward implementation matching docstring being replaced by an indirect implementation.
Could you add a note in `absolute` to mark that `cwd` depends on `absolute` using `os.getcwd`?
def __exit__(self, t, v, tb):
@classmethod
def cwd(cls):
+ """Return a new path pointing to the current working directory."""
+ # We call 'absolute()' rather than using 'os.getcwd()' directly to
+ # enable users to replace the implementation of 'absolute()' in a
+ # subclass and benefit from the new behaviour here. This works because
+ # os.path.abspath('.') == os.getcwd().
return cls().absolute()
@classmethod
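A quick check of the equivalence the new comment relies on; it holds because `abspath('.')` is `normpath(join(os.getcwd(), '.'))`, and `os.getcwd()` already returns a normalized path on CPython's supported platforms.
```python
import os

assert os.path.abspath('.') == os.getcwd()
```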
|
codereview_new_python_data_12812
|
def test_normpath(self):
tester("ntpath.normpath('//server/share/../../')", '\\\\server\\share\\')
# gh-96290: don't normalize partial/invalid UNC drives as rooted paths
- tester("ntpath.normpath('\\\\foo\\bar')", '\\\\foo\\bar')
tester("ntpath.normpath('\\\\foo\\')", '\\\\foo\\')
tester("ntpath.normpath('\\\\foo')", '\\\\foo')
tester("ntpath.normpath('\\\\')", '\\\\')
This is a valid UNC path. Maybe include a commented-out test case that depends on fixing `nt._path_normpath()`.
```suggestion
# BUGBUG: nt._path_normpath() needs to be fixed to match
# ntpath.splitdrive() for an empty share. Skip this for now.
# tester("ntpath.normpath('\\\\foo\\\\')", '\\\\foo\\')
```
def test_normpath(self):
tester("ntpath.normpath('//server/share/../../')", '\\\\server\\share\\')
# gh-96290: don't normalize partial/invalid UNC drives as rooted paths
+ # BUGBUG: nt._path_normpath() needs to be fixed to match
+ # ntpath.splitdrive() for an empty share. Skip this for now.
+ # tester("ntpath.normpath('\\\\foo\\\\')", '\\\\foo\\')
tester("ntpath.normpath('\\\\foo\\')", '\\\\foo\\')
tester("ntpath.normpath('\\\\foo')", '\\\\foo')
tester("ntpath.normpath('\\\\')", '\\\\')
|
codereview_new_python_data_12813
|
def _process_exited(self, returncode):
# object. On Python 3.6, it is required to avoid a ResourceWarning.
self._proc.returncode = returncode
self._call(self._protocol.process_exited)
- # See https://github.com/python/cpython/issues/100133
- # The pipes should not be closed otherwise some data may be lost.
- # If the pipe is closed here then _UnixReadPipeTransport will remove the
- # reader prematurely and the data will be lost, instead of doing that
- # the pipe will be closed when the process is finished via _pipe_connection_lost
- # followed by _try_finish.
- # for p in self._pipes.values():
- # if p is not None:
- # p.pipe.close()
self._try_finish()
I'd remove the commented-out code. Not sure if the whole comment is even necessary; perhaps you can shorten it?
def _process_exited(self, returncode):
# object. On Python 3.6, it is required to avoid a ResourceWarning.
self._proc.returncode = returncode
self._call(self._protocol.process_exited)
self._try_finish()
|
codereview_new_python_data_12814
|
async def get_command_stdout(cmd, *args):
async def main():
outputs = [f'foo{i}' for i in range(10)]
res = await asyncio.gather(*[get_command_stdout(sys.executable, '-c',
- f'import sys; print({out!r})') for out in outputs])
self.assertEqual(res, outputs)
self.loop.run_until_complete(main())
Do we need 10? The repro from the issue had three repetitions and it always diagnosed the problem. Or is there a timing issue here where, if the subprocesses are too fast, the test may flake-succeed?
async def get_command_stdout(cmd, *args):
async def main():
outputs = [f'foo{i}' for i in range(10)]
res = await asyncio.gather(*[get_command_stdout(sys.executable, '-c',
+ f'print({out!r})') for out in outputs])
self.assertEqual(res, outputs)
self.loop.run_until_complete(main())
|
codereview_new_python_data_12823
|
def hash_composite_id(keys: Sequence[str]) -> str:
unchanged for the existing composite object if they use TRANSPARENT_ID on the
new keyed field.
- The composite id is compuated in the follow steps:
1. Index each key with its position in the list from 0.
2. Remove any key == TRANSPARENT_ID
3. Get the SHA256 hex digest of "0-key_0:1-key_1:..."
```suggestion
The composite id is computed in the following steps:
```
def hash_composite_id(keys: Sequence[str]) -> str:
unchanged for the existing composite object if they use TRANSPARENT_ID on the
new keyed field.
+ The composite id is computed in the following steps:
1. Index each key with its position in the list from 0.
2. Remove any key == TRANSPARENT_ID
3. Get the SHA256 hex digest of "0-key_0:1-key_1:..."
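A minimal sketch of the three documented steps; the actual `TRANSPARENT_ID` value is an assumption here. Because indexing happens before removal, appending a `TRANSPARENT_ID` key leaves every other key's position, and hence the digest, unchanged, which is the backward-compatibility property the docstring describes.
```python
import hashlib
from typing import Sequence

TRANSPARENT_ID = "__TRANSPARENT__"  # hypothetical sentinel value

def hash_composite_id(keys: Sequence[str]) -> str:
    # Step 1: index each key by position; step 2: drop TRANSPARENT_ID keys.
    parts = [f"{i}-{k}" for i, k in enumerate(keys) if k != TRANSPARENT_ID]
    # Step 3: SHA256 hex digest of "0-key_0:1-key_1:...".
    return hashlib.sha256(":".join(parts).encode("utf-8")).hexdigest()

assert hash_composite_id(["a", "b"]) == hash_composite_id(["a", "b", TRANSPARENT_ID])
```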
|
codereview_new_python_data_12824
|
def generate_rules() -> List[str]:
runner_args = run_module_utils.build_run_flags_for_model(
model=model,
model_input_data=test_config.input_data) + test_config.extra_test_flags
- # TODO(#11136): We should pick up the execution args from
- # `build_run_flags_for_execution_config`. But currently the DRIVER needs to
- # be populated separately in the CMake rule.
- runner_args.append("--device_allocator=caching")
cmake_rule = cmake_builder.rules.build_iree_benchmark_suite_module_test(
target_name=test_config.name,
model=f"{model.id}_{model.name}",
Wat. This seems not great and I don't understand why we need to hardcode this
def generate_rules() -> List[str]:
runner_args = run_module_utils.build_run_flags_for_model(
model=model,
model_input_data=test_config.input_data) + test_config.extra_test_flags
+ # TODO(#11136): Currently the DRIVER is a separate field in the CMake rule (
+ # and has effect on test labels). Generates the flags without the driver.
+ runner_args += run_module_utils.build_run_flags_for_execution_config(
+ test_config.execution_config, without_driver=True)
cmake_rule = cmake_builder.rules.build_iree_benchmark_suite_module_test(
target_name=test_config.name,
model=f"{model.id}_{model.name}",
|
codereview_new_python_data_12825
|
def build_run_flags_for_execution_config(
List of flags.
"""
- run_flags = list(module_execution_config.extra_flags)
if with_driver:
driver = module_execution_config.driver
if driver == RuntimeDriver.CUDA:
You're doing this to copy it, right? I think `list.copy` would be clearer
```suggestion
run_flags = module_execution_config.extra_flags.copy()
```
https://docs.python.org/3/tutorial/datastructures.html#:~:text=list%20in%20place.-,list.copy(),-Return%20a%20shallow
def build_run_flags_for_execution_config(
List of flags.
"""
+ run_flags = module_execution_config.extra_flags.copy()
if with_driver:
driver = module_execution_config.driver
if driver == RuntimeDriver.CUDA:
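A tiny illustration of why the copy matters (the flag strings are made up): the per-run list is extended later, and that must not leak back into the shared execution config.
```python
extra_flags = ["--some_shared_flag"]          # stands in for the config's list
run_flags = extra_flags.copy()                # same result as list(extra_flags)
run_flags.append("--device=cuda")             # per-run addition
assert extra_flags == ["--some_shared_flag"]  # shared config untouched
```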
|
codereview_new_python_data_12826
|
def generate_rules() -> List[str]:
model=model,
model_input_data=test_config.input_data) + test_config.extra_test_flags
# TODO(#11136): Currently the DRIVER is a separate field in the CMake rule (
- # and has effect on test labels). Generates the flags without the driver.
runner_args += run_module_utils.build_run_flags_for_execution_config(
test_config.execution_config, with_driver=False)
cmake_rule = cmake_builder.rules.build_iree_benchmark_suite_module_test(
Not really clear what the TODO is here. It seems like this is already done? What would you want to change?
def generate_rules() -> List[str]:
model=model,
model_input_data=test_config.input_data) + test_config.extra_test_flags
# TODO(#11136): Currently the DRIVER is a separate field in the CMake rule (
+ # and has effect on test labels). Rules should be generated in another way
+ # to avoid that. Generates the flags without the driver for now.
runner_args += run_module_utils.build_run_flags_for_execution_config(
test_config.execution_config, with_driver=False)
cmake_rule = cmake_builder.rules.build_iree_benchmark_suite_module_test(
|
codereview_new_python_data_12831
|
class _MLIRDialectPair(object):
# Next ID: 4
class MLIRDialectType(_MLIRDialectPair, Enum):
"""Imported MLIR dialect type."""
- LINALG = (1, "linalg")
TOSA = (2, "tosa")
MHLO = (3, "mhlo")
MODEL_SOURCE_TO_DIALECT_TYPE_MAP = {
common_definitions.ModelSourceType.EXPORTED_LINALG_MLIR:
- MLIRDialectType.LINALG,
common_definitions.ModelSourceType.EXPORTED_TFLITE:
MLIRDialectType.TOSA,
common_definitions.ModelSourceType.EXPORTED_TF:
This list is a little strange to me. I'd expect this to correspond to the `--iree-input-type` flag, but "linalg" is included. There's more to IREE's input than `linalg`, and linalg can be used together with TOSA and MHLO. https://github.com/iree-org/iree/blob/5016a28120c296d7b9a9774245c2759699bf3d33/compiler/src/iree/compiler/Pipelines/Options.cpp#L27-L55
(or the `IREE_INPUT_` options, though those are more internal to the project vs something visible from the distributed compiler packages)
https://github.com/iree-org/iree/blob/5016a28120c296d7b9a9774245c2759699bf3d33/CMakeLists.txt#L272-L287
(maybe something for a separate PR...? since that would reach into code that you aren't touching right now)
class _MLIRDialectPair(object):
# Next ID: 4
class MLIRDialectType(_MLIRDialectPair, Enum):
"""Imported MLIR dialect type."""
+ NONE = (1, "none")
TOSA = (2, "tosa")
MHLO = (3, "mhlo")
MODEL_SOURCE_TO_DIALECT_TYPE_MAP = {
common_definitions.ModelSourceType.EXPORTED_LINALG_MLIR:
+ MLIRDialectType.NONE,
common_definitions.ModelSourceType.EXPORTED_TFLITE:
MLIRDialectType.TOSA,
common_definitions.ModelSourceType.EXPORTED_TF:
|
codereview_new_python_data_12832
|
def convert_directory(directory_path, write_files, allow_partial_conversion,
# Read the Bazel BUILD file and interpret it.
with open(build_file_path, "rt") as build_file:
build_file_contents = build_file.read()
- if build_file_contents.count("# SKIP: bazel_to_cmake") > 0:
return Status.SKIPPED
build_file_code = compile(build_file_contents, build_file_path, "exec")
try:
Wouldn't `in` be more natural?
```suggestion
if "# SKIP: bazel_to_cmake" in build_file_contents:
```
But actually, hard-coding the comment seems a bit weird. I guess it ensures that it isn't appearing in a string literal, but that doesn't seem particularly likely. Also, usually these sorts of comments list the tool first, so how about:
```suggestion
if "bazel-to-cmake: skip" in build_file_contents:
```
def convert_directory(directory_path, write_files, allow_partial_conversion,
# Read the Bazel BUILD file and interpret it.
with open(build_file_path, "rt") as build_file:
build_file_contents = build_file.read()
+ if "bazel-to-cmake: skip" in build_file_contents:
return Status.SKIPPED
build_file_code = compile(build_file_contents, build_file_path, "exec")
try:
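A small equivalence check: `in` answers "occurs at least once?" and stops at the first hit, while `.count()` keeps scanning to the end of the string.
```python
contents = "# bazel-to-cmake: skip\npackage(...)"  # illustrative BUILD text
assert ("bazel-to-cmake: skip" in contents) == (contents.count("bazel-to-cmake: skip") > 0)
```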
|
codereview_new_python_data_12833
|
def check_dir_path(path_str: str) -> pathlib.Path:
def check_file_path(path_str: str) -> pathlib.Path:
path = pathlib.Path(path_str)
- if path.is_file():
- return path
- else:
raise argparse.ArgumentTypeError(f"{path} is not a file.")
parser = argparse.ArgumentParser()
parser.add_argument("--output",
Nit: the `else` isn't really necessary here since the other branch terminates.
```suggestion
if path.is_file():
return path
raise argparse.ArgumentTypeError(f"{path} is not a file.")
```
or if you think a guard statement is clearer
```suggestion
if not path.is_file():
raise argparse.ArgumentTypeError(f"{path} is not a file.")
return path
```
I think I actually like option two better
def check_dir_path(path_str: str) -> pathlib.Path:
def check_file_path(path_str: str) -> pathlib.Path:
path = pathlib.Path(path_str)
+ if not path.is_file():
raise argparse.ArgumentTypeError(f"{path} is not a file.")
+ return path
parser = argparse.ArgumentParser()
parser.add_argument("--output",
|
codereview_new_python_data_12835
|
def main(args: argparse.Namespace):
device_name not in target_device_names):
continue
if (device_spec_matchers is not None and
- all(not matcher(run_config.target_device_spec)
for matcher in device_spec_matchers)):
continue
grouped_run_config_map[device_name].append(run_config)
I think `not any` would be clearer here, personally:
```suggestion
not any(matcher(run_config.target_device_spec)
for matcher in device_spec_matchers)):
```
def main(args: argparse.Namespace):
device_name not in target_device_names):
continue
if (device_spec_matchers is not None and
+ not any(matcher(run_config.target_device_spec)
for matcher in device_spec_matchers)):
continue
grouped_run_config_map[device_name].append(run_config)
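The rewrite is De Morgan's law: `not any(p(x))` equals `all(not p(x))`, and the former states "no matcher matched" directly. A quick check with stand-in matchers:
```python
matchers = [str.isdigit, str.isupper]
value = "abc"
assert (not any(m(value) for m in matchers)) == all(not m(value) for m in matchers)
```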
|
codereview_new_python_data_12836
|
# Add build_tools python dir to the search path.
sys.path.insert(0, str(pathlib.Path(__file__).parent.with_name("python")))
-from typing import Callable, Dict, List
import argparse
import collections
import dataclasses
import json
from benchmark_suites.iree import benchmark_collections
from e2e_test_framework.definitions import common_definitions, iree_definitions
Nit: this should be alphabetical
```suggestion
import argparse
import collections
import dataclasses
import json
from typing import Callable, Dict, List
```
# Add build_tools python dir to the search path.
sys.path.insert(0, str(pathlib.Path(__file__).parent.with_name("python")))
import argparse
import collections
import dataclasses
import json
+from typing import Callable, Dict, List
from benchmark_suites.iree import benchmark_collections
from e2e_test_framework.definitions import common_definitions, iree_definitions
|
codereview_new_python_data_12837
|
def skip_path(path: str) -> bool:
def set_output(d: Mapping[str, str]):
print(f"Setting outputs: {d}")
- for k, v in d.items():
- print(f"::set-output name={k}::{v}")
def get_trailers() -> Mapping[str, str]:
Please don't undo this :-)
def skip_path(path: str) -> bool:
def set_output(d: Mapping[str, str]):
print(f"Setting outputs: {d}")
+ step_output_file = os.environ["GITHUB_OUTPUT"]
+ with open(step_output_file, "a") as f:
+ f.writelines(f"{k}={v}" "\n" for k, v in d.items())
def get_trailers() -> Mapping[str, str]:
|
codereview_new_python_data_12839
|
def main(args: argparse.Namespace):
artifacts_root = (
e2e_test_artifacts.artifacts.generate_default_artifacts_root())
- root_path = pathlib.PurePath(f"${{{ROOT_ARTIFACTS_DIR_CMAKE_VARIABLE}}}")
package_name = f"${{{PACKAGE_NAME_CMAKE_VARIABLE}}}"
model_rule_map = model_rule_generator.generate_model_rule_map(
root_path=root_path, artifacts_root=artifacts_root.model_artifacts_root)
output_dir = pathlib.Path(args.output_dir)
fetch_models_cmake_file = output_dir / GENERATED_E2E_TEST_FETCH_MODELS_CMAKE_FILENAMAE
- with fetch_models_cmake_file.open("w") as output_file:
- cmake_rules = itertools.chain.from_iterable(
- rule.cmake_rules for rule in model_rule_map.values())
- output_file.write("\n".join(cmake_rules))
iree_cmake_rules = iree_rule_generator.generate_rules(
package_name=package_name,
root_path=root_path,
artifacts_root=artifacts_root.iree_artifacts_root,
model_rule_map=model_rule_map)
- iree_artifacts_cmake_file = output_dir / GENERATED_E2E_TEST_IREE_ARTIFACTS_CMAKE_FILENAME
- with iree_artifacts_cmake_file.open("w") as output_file:
- output_file.write("\n".join(iree_cmake_rules))
if __name__ == "__main__":
```suggestion
(output_dir / GENERATED_E2E_TEST_IREE_ARTIFACTS_CMAKE_FILENAME).write_text("\n".join(iree_cmake_rules))
```
def main(args: argparse.Namespace):
artifacts_root = (
e2e_test_artifacts.artifacts.generate_default_artifacts_root())
+ root_path = pathlib.PurePath("${%s}" % ROOT_ARTIFACTS_DIR_CMAKE_VARIABLE)
package_name = f"${{{PACKAGE_NAME_CMAKE_VARIABLE}}}"
model_rule_map = model_rule_generator.generate_model_rule_map(
root_path=root_path, artifacts_root=artifacts_root.model_artifacts_root)
output_dir = pathlib.Path(args.output_dir)
fetch_models_cmake_file = output_dir / GENERATED_E2E_TEST_FETCH_MODELS_CMAKE_FILENAMAE
+ cmake_rules = itertools.chain.from_iterable(
+ rule.cmake_rules for rule in model_rule_map.values())
+ fetch_models_cmake_file.write_text("\n".join(cmake_rules))
iree_cmake_rules = iree_rule_generator.generate_rules(
package_name=package_name,
root_path=root_path,
artifacts_root=artifacts_root.iree_artifacts_root,
model_rule_map=model_rule_map)
+ (output_dir / GENERATED_E2E_TEST_IREE_ARTIFACTS_CMAKE_FILENAME).write_text(
+ "\n".join(iree_cmake_rules))
if __name__ == "__main__":
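The refactor leans on `pathlib.Path.write_text`, which folds the open/with/write dance into one call. A minimal demonstration in a temporary directory (the file name is illustrative):
```python
import pathlib
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    out = pathlib.Path(tmp) / "example.cmake"
    out.write_text("\n".join(["rule_a()", "rule_b()"]))
    assert out.read_text() == "rule_a()\nrule_b()"
```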
|
codereview_new_python_data_12840
|
def check_exe_path(path):
parser.add_argument("--run_config",
type=check_file_path,
default=None,
- help="JSON file of the run configs")
return parser
I'm confused by the plural here. Is it one run configuration or multiple??
def check_exe_path(path):
parser.add_argument("--run_config",
type=check_file_path,
default=None,
+ help="JSON file of the run config")
return parser
|
codereview_new_python_data_12845
|
def test_query_device_specs(self):
architecture=common_definitions.DeviceArchitecture.ARMV9_A_GENERIC,
platform=common_definitions.DevicePlatform.GENERIC_ANDROID,
device_parameters={"big-cores"})
self.assertEqual(linux_x86_devices, [linux_x86_device_spec])
self.assertEqual(android_x86_devices, [android_x86_device_spec])
self.assertEqual(little_cores_devices, [little_cores_device_spec])
self.assertEqual(big_cores_devices, [big_cores_device_spec])
if __name__ == "__main__":
If you're going to test this, it's probably good to have at least one test that returns 0 results and at least one that returns more than one.
def test_query_device_specs(self):
architecture=common_definitions.DeviceArchitecture.ARMV9_A_GENERIC,
platform=common_definitions.DevicePlatform.GENERIC_ANDROID,
device_parameters={"big-cores"})
+ all_arm_devices = devices.query_device_specs(
+ architecture=common_definitions.DeviceArchitecture.ARMV9_A_GENERIC,
+ platform=common_definitions.DevicePlatform.GENERIC_ANDROID)
+ no_matched_device = devices.query_device_specs(
+ architecture=common_definitions.DeviceArchitecture.ARMV9_A_GENERIC,
+ platform=common_definitions.DevicePlatform.GENERIC_LINUX)
self.assertEqual(linux_x86_devices, [linux_x86_device_spec])
self.assertEqual(android_x86_devices, [android_x86_device_spec])
self.assertEqual(little_cores_devices, [little_cores_device_spec])
self.assertEqual(big_cores_devices, [big_cores_device_spec])
+ self.assertEqual(all_arm_devices,
+ [little_cores_device_spec, big_cores_device_spec])
+ self.assertEqual(no_matched_device, [])
if __name__ == "__main__":
|
codereview_new_python_data_12873
|
BOARD_NAMES = {
"odroid-c2": "Hardkernel ODROID-C2",
"odroid-c4": "Hardkernel ODROID-C4",
- "odroid-m1": "Hardkernel ODROID-C4",
"odroid-n2": "Home Assistant Blue / Hardkernel ODROID-N2/N2+",
"odroid-xu4": "Hardkernel ODROID-XU4",
}
```suggestion
"odroid-m1": "Hardkernel ODROID-M1",
```
BOARD_NAMES = {
"odroid-c2": "Hardkernel ODROID-C2",
"odroid-c4": "Hardkernel ODROID-C4",
+ "odroid-m1": "Hardkernel ODROID-M1",
"odroid-n2": "Home Assistant Blue / Hardkernel ODROID-N2/N2+",
"odroid-xu4": "Hardkernel ODROID-XU4",
}
|
codereview_new_python_data_12874
|
async def websocket_info(
"type": "otbr/create_network",
}
)
@websocket_api.async_response
async def websocket_create_network(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
This one too
async def websocket_info(
"type": "otbr/create_network",
}
)
+@websocket_api.require_admin
@websocket_api.async_response
async def websocket_create_network(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict
|