Dataset schema (per-column statistics):

| column | dtype | values |
| --- | --- | --- |
| title | string | lengths 2 to 169 |
| diff | string | lengths 235 to 19.5k |
| body | string | lengths 0 to 30.5k |
| url | string | lengths 48 to 84 |
| created_at | string | length 20 (fixed) |
| closed_at | string | length 20 (fixed) |
| merged_at | string | length 20 (fixed) |
| updated_at | string | length 20 (fixed) |
| diff_len | float64 | 101 to 3.99k |
| repo_name | string | 83 distinct values |
| `__index_level_0__` | int64 | 15 to 52.7k |
Add Ustream channel support
diff --git a/youtube_dl/extractor/__init__.py b/youtube_dl/extractor/__init__.py index 26cf2493547..a7cddef733b 100644 --- a/youtube_dl/extractor/__init__.py +++ b/youtube_dl/extractor/__init__.py @@ -96,7 +96,7 @@ from .tumblr import TumblrIE from .tutv import TutvIE from .unistra import UnistraIE -from .ustream import UstreamIE +from .ustream import UstreamIE, UstreamChannelIE from .vbox7 import Vbox7IE from .veehd import VeeHDIE from .veoh import VeohIE diff --git a/youtube_dl/extractor/ustream.py b/youtube_dl/extractor/ustream.py index 5f423870abb..16cdcc76592 100644 --- a/youtube_dl/extractor/ustream.py +++ b/youtube_dl/extractor/ustream.py @@ -1,4 +1,7 @@ +from HTMLParser import HTMLParser +import json import re +from urlparse import urljoin from .common import InfoExtractor @@ -43,3 +46,70 @@ def _real_extract(self, url): 'thumbnail': thumbnail, } return info + +# More robust than regular expressions + +class ChannelParser(HTMLParser): + """ + <meta name="ustream:channel_id" content="1234"> + """ + channel_id = None + + def handle_starttag(self, tag, attrs): + if tag != 'meta': + return + values = dict(attrs) + if values.get('name') != 'ustream:channel_id': + return + value = values.get('content', '') + if value.isdigit(): + self.channel_id = value + +class SocialstreamParser(HTMLParser): + """ + <li class="content123 video" data-content-id="123" data-length="1452" + data-href="/recorded/123" data-og-url="/recorded/123"> + """ + def __init__(self): + HTMLParser.__init__(self) + self.content_ids = [] + + def handle_starttag(self, tag, attrs): + if tag != 'li': + return + for (attr, value) in attrs: + if attr == 'data-content-id' and value.isdigit(): + self.content_ids.append(value) + +class UstreamChannelIE(InfoExtractor): + _VALID_URL = r'https?://www\.ustream\.tv/channel/(?P<slug>.+)' + IE_NAME = u'ustream:channel' + + def _real_extract(self, url): + m = re.match(self._VALID_URL, url) + slug = m.group('slug') + # Slugs can be non-ascii, but youtube-dl can't handle non-ascii command lines, + # so if we got this far it's probably percent encoded and we needn't worry. + + p = ChannelParser() + p.feed(self._download_webpage(url, slug)) + p.close() + channel_id = p.channel_id + + p = SocialstreamParser() + BASE = 'http://www.ustream.tv' + next_url = '/ajax/socialstream/videos/%s/1.json' % channel_id + while next_url: + reply = json.loads(self._download_webpage(urljoin(BASE, next_url), channel_id)) + p.feed(reply['data']) + next_url = reply['nextUrl'] + p.close() + video_ids = p.content_ids + + # From YoutubeChannelIE + + self._downloader.to_screen(u'[ustream] Channel %s: Found %i videos' % (channel_id, len(video_ids))) + + urls = ['http://www.ustream.tv/recorded/' + vid for vid in video_ids] + url_entries = [self.url_result(eurl, 'Ustream') for eurl in urls] + return [self.playlist_result(url_entries, channel_id)]
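The diff above collects a channel's videos by walking a paginated AJAX endpoint until `nextUrl` comes back empty. A minimal Python 3 sketch of that loop, assuming the same JSON shape shown in the diff (`data` carrying an HTML fragment, `nextUrl` pointing at the next page); the regex stands in for the `SocialstreamParser`, and everything outside the diff's `BASE` and endpoint path is illustrative:

```python
# Sketch of the socialstream pagination loop in modern Python 3. Assumes the
# JSON shape ({"data": ..., "nextUrl": ...}) shown in the diff; the HTML
# parsing is reduced to a regex for brevity.
import json
import re
from urllib.parse import urljoin
from urllib.request import urlopen

BASE = 'http://www.ustream.tv'

def fetch_video_ids(channel_id):
    video_ids = []
    next_url = '/ajax/socialstream/videos/%s/1.json' % channel_id
    while next_url:
        with urlopen(urljoin(BASE, next_url)) as resp:
            reply = json.loads(resp.read().decode('utf-8'))
        # Each reply carries an HTML fragment with data-content-id attributes.
        video_ids += re.findall(r'data-content-id="(\d+)"', reply['data'])
        next_url = reply['nextUrl']  # falsy once the last page is reached
    return video_ids
```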
https://api.github.com/repos/ytdl-org/youtube-dl/pulls/1413
2013-09-12T10:31:57Z
2013-09-13T20:13:16Z
2013-09-13T20:13:16Z
2013-09-15T20:05:54Z
927
ytdl-org/youtube-dl
50,601
Make additional methods of google_assistant.AbstractConfig abstract
diff --git a/homeassistant/components/google_assistant/helpers.py b/homeassistant/components/google_assistant/helpers.py index 7d431f8c94cb..65782c9ec245 100644 --- a/homeassistant/components/google_assistant/helpers.py +++ b/homeassistant/components/google_assistant/helpers.py @@ -129,19 +129,19 @@ def async_deinitialize(self) -> None: self._on_deinitialize.pop()() @property + @abstractmethod def enabled(self): """Return if Google is enabled.""" - return False @property + @abstractmethod def entity_config(self): """Return entity config.""" - return {} @property + @abstractmethod def secure_devices_pin(self): """Return entity config.""" - return None @property def is_reporting_state(self): @@ -154,9 +154,9 @@ def is_local_sdk_active(self): return self._local_sdk_active @property + @abstractmethod def should_report_state(self): """Return if states should be proactively reported.""" - return False @property def is_local_connected(self) -> bool: @@ -167,6 +167,7 @@ def is_local_connected(self) -> bool: and self._local_last_active > utcnow() - timedelta(seconds=70) ) + @abstractmethod def get_local_user_id(self, webhook_id): """Map webhook ID to a Home Assistant user ID. @@ -175,21 +176,10 @@ def get_local_user_id(self, webhook_id): Return None if no user id is found for the webhook_id. """ - # Note: The manually setup Google Assistant currently returns the Google agent - # user ID instead of a valid Home Assistant user ID - found_agent_user_id = None - for agent_user_id, agent_user_data in self._store.agent_user_ids.items(): - if agent_user_data[STORE_GOOGLE_LOCAL_WEBHOOK_ID] == webhook_id: - found_agent_user_id = agent_user_id - break - - return found_agent_user_id + @abstractmethod def get_local_webhook_id(self, agent_user_id): """Return the webhook ID to be used for actions for a given agent user id via the local SDK.""" - if data := self._store.agent_user_ids.get(agent_user_id): - return data[STORE_GOOGLE_LOCAL_WEBHOOK_ID] - return None @abstractmethod def get_agent_user_id(self, context): @@ -199,15 +189,15 @@ def get_agent_user_id(self, context): def should_expose(self, state) -> bool: """Return if entity should be exposed.""" + @abstractmethod def should_2fa(self, state): """If an entity should have 2FA checked.""" - return True + @abstractmethod async def async_report_state( self, message: dict[str, Any], agent_user_id: str, event_id: str | None = None ) -> HTTPStatus | None: """Send a state report to Google.""" - raise NotImplementedError async def async_report_state_all(self, message): """Send a state report to Google for all previously synced users.""" diff --git a/homeassistant/components/google_assistant/http.py b/homeassistant/components/google_assistant/http.py index 226c37fb7175..9207f917458f 100644 --- a/homeassistant/components/google_assistant/http.py +++ b/homeassistant/components/google_assistant/http.py @@ -36,6 +36,7 @@ REPORT_STATE_BASE_URL, REQUEST_SYNC_BASE_URL, SOURCE_CLOUD, + STORE_GOOGLE_LOCAL_WEBHOOK_ID, ) from .helpers import AbstractConfig from .smart_home import async_handle_message @@ -110,6 +111,34 @@ def should_report_state(self): """Return if states should be proactively reported.""" return self._config.get(CONF_REPORT_STATE) + def get_local_user_id(self, webhook_id): + """Map webhook ID to a Home Assistant user ID. + + Any action inititated by Google Assistant via the local SDK will be attributed + to the returned user ID. + + Return None if no user id is found for the webhook_id. 
+ """ + # Note: The manually setup Google Assistant currently returns the Google agent + # user ID instead of a valid Home Assistant user ID + found_agent_user_id = None + for agent_user_id, agent_user_data in self._store.agent_user_ids.items(): + if agent_user_data[STORE_GOOGLE_LOCAL_WEBHOOK_ID] == webhook_id: + found_agent_user_id = agent_user_id + break + + return found_agent_user_id + + def get_local_webhook_id(self, agent_user_id): + """Return the webhook ID to be used for actions for a given agent user id via the local SDK.""" + if data := self._store.agent_user_ids.get(agent_user_id): + return data[STORE_GOOGLE_LOCAL_WEBHOOK_ID] + return None + + def get_agent_user_id(self, context): + """Get agent user ID making request.""" + return context.user_id + def should_expose(self, state) -> bool: """Return if entity should be exposed.""" expose_by_default = self._config.get(CONF_EXPOSE_BY_DEFAULT) @@ -149,10 +178,6 @@ def should_expose(self, state) -> bool: return is_default_exposed or explicit_expose - def get_agent_user_id(self, context): - """Get agent user ID making request.""" - return context.user_id - def should_2fa(self, state): """If an entity should have 2FA checked.""" return True diff --git a/tests/components/google_assistant/__init__.py b/tests/components/google_assistant/__init__.py index 931f4d255225..b7d329575c9f 100644 --- a/tests/components/google_assistant/__init__.py +++ b/tests/components/google_assistant/__init__.py @@ -1,7 +1,7 @@ """Tests for the Google Assistant integration.""" from unittest.mock import MagicMock -from homeassistant.components.google_assistant import helpers +from homeassistant.components.google_assistant import helpers, http def mock_google_config_store(agent_user_ids=None): @@ -14,7 +14,7 @@ def mock_google_config_store(agent_user_ids=None): return store -class MockConfig(helpers.AbstractConfig): +class MockConfig(http.GoogleConfig): """Fake config that always exposes everything.""" def __init__( @@ -30,7 +30,7 @@ def __init__( should_report_state=False, ): """Initialize config.""" - super().__init__(hass) + helpers.AbstractConfig.__init__(self, hass) self._enabled = enabled self._entity_config = entity_config or {} self._secure_devices_pin = secure_devices_pin
<!-- You are amazing! Thanks for contributing to our project! Please, DO NOT DELETE ANY TEXT from this template! (unless instructed). --> ## Proposed change <!-- Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request. If it fixes a bug or resolves a feature request, be sure to link to that issue in the additional information section. --> Make additional methods of `google_assistant.AbstractConfig` abstract. This doesn't change the implementation; it's only intended to make the code easier to follow. ## Type of change <!-- What type of change does your PR introduce to Home Assistant? NOTE: Please, check only 1! box! If your PR requires multiple boxes to be checked, you'll most likely need to split it into multiple PRs. This makes things easier and faster to code review. --> - [ ] Dependency upgrade - [ ] Bugfix (non-breaking change which fixes an issue) - [ ] New integration (thank you!) - [ ] New feature (which adds functionality to an existing integration) - [ ] Deprecation (breaking change to happen in the future) - [ ] Breaking change (fix/feature causing existing functionality to break) - [ ] Code quality improvements to existing code or addition of tests ## Additional information <!-- Details are important, and help maintainers processing your PR. Please be sure to fill out additional details, if applicable. --> - This PR fixes or closes issue: fixes # - This PR is related to issue: - Link to documentation pull request: ## Checklist <!-- Put an `x` in the boxes that apply. You can also fill these out after creating the PR. If you're unsure about any of them, don't hesitate to ask. We're here to help! This is simply a reminder of what we are going to look for before merging your code. --> - [ ] The code change is tested and works locally. - [ ] Local tests pass. **Your PR cannot be merged unless tests pass** - [ ] There is no commented out code in this PR. - [ ] I have followed the [development checklist][dev-checklist] - [ ] I have followed the [perfect PR recommendations][perfect-pr] - [ ] The code has been formatted using Ruff (`ruff format homeassistant tests`) - [ ] Tests have been added to verify that the new code works. If user exposed functionality or configuration variables are added/changed: - [ ] Documentation added/updated for [www.home-assistant.io][docs-repository] If the code communicates with devices, web services, or third-party tools: - [ ] The [manifest file][manifest-docs] has all fields filled out correctly. Updated and included derived files by running: `python3 -m script.hassfest`. - [ ] New or updated dependencies have been added to `requirements_all.txt`. Updated by running `python3 -m script.gen_requirements_all`. - [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description. - [ ] Untested files have been added to `.coveragerc`. <!-- This project is very active and we have a high turnover of pull requests. Unfortunately, the number of incoming pull requests is higher than what our reviewers can review and merge so there is a long backlog of pull requests waiting for review. You can help here! By reviewing another pull request, you will help raise the code quality of that pull request and the final review will be faster. This way the general pace of pull request reviews will go up and your wait time will go down. When picking a pull request to review, try to choose one that hasn't yet been reviewed.
Thanks for helping out! --> To help with the load of incoming pull requests: - [ ] I have reviewed two other [open pull requests][prs] in this repository. [prs]: https://github.com/home-assistant/core/pulls?q=is%3Aopen+is%3Apr+-author%3A%40me+-draft%3Atrue+-label%3Awaiting-for-upstream+sort%3Acreated-desc+review%3Anone+-status%3Afailure <!-- Thank you for contributing <3 Below, some useful links you could explore: --> [dev-checklist]: https://developers.home-assistant.io/docs/development_checklist/ [manifest-docs]: https://developers.home-assistant.io/docs/creating_integration_manifest/ [quality-scale]: https://developers.home-assistant.io/docs/integration_quality_scale_index/ [docs-repository]: https://github.com/home-assistant/home-assistant.io [perfect-pr]: https://developers.home-assistant.io/docs/review-process/#creating-the-perfect-pr
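The entire change reduces to one idiom: stacking `@abstractmethod` beneath `@property`, so the base class stops supplying a default and every concrete subclass must implement the value. A self-contained sketch of that pattern, with class bodies chosen for illustration rather than taken from Home Assistant:

```python
from abc import ABC, abstractmethod

class AbstractConfig(ABC):
    """Base config; subclasses must decide whether they are enabled."""

    @property
    @abstractmethod
    def enabled(self) -> bool:
        """Return if the integration is enabled."""

class HttpConfig(AbstractConfig):
    """Concrete config: forced to provide the property."""

    @property
    def enabled(self) -> bool:
        return True

assert HttpConfig().enabled
# AbstractConfig() itself raises TypeError: abstract members block instantiation.
```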
https://api.github.com/repos/home-assistant/core/pulls/109811
2024-02-06T15:46:32Z
2024-02-06T18:14:13Z
2024-02-06T18:14:13Z
2024-02-07T19:01:34Z
1,521
home-assistant/core
39,407
Fix extension parameters not being saved to last used parameters
diff --git a/modules/processing.py b/modules/processing.py index f04a0e1e411..ae04cab7839 100644 --- a/modules/processing.py +++ b/modules/processing.py @@ -531,16 +531,16 @@ def process_images_inner(p: StableDiffusionProcessing) -> Processed: def infotext(iteration=0, position_in_batch=0): return create_infotext(p, p.all_prompts, p.all_seeds, p.all_subseeds, comments, iteration, position_in_batch) - with open(os.path.join(shared.script_path, "params.txt"), "w", encoding="utf8") as file: - processed = Processed(p, [], p.seed, "") - file.write(processed.infotext(p, 0)) - if os.path.exists(cmd_opts.embeddings_dir) and not p.do_not_reload_embeddings: model_hijack.embedding_db.load_textual_inversion_embeddings() if p.scripts is not None: p.scripts.process(p) + with open(os.path.join(shared.script_path, "params.txt"), "w", encoding="utf8") as file: + processed = Processed(p, [], p.seed, "") + file.write(processed.infotext(p, 0)) + infotexts = [] output_images = []
**Describe what this pull request is trying to achieve.** Extension parameters are not restored when the paste button is pressed with no prompt. This PR fixes that by writing `params.txt` only after scripts have finished processing, so parameters set by extensions are included in the saved state. **Environment this was tested in** - OS: Windows - Browser: Chrome - Graphics card: RTX 3090
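The fix is purely about ordering: take the parameters snapshot only after every script has had its chance to mutate `p`. A hedged sketch of that principle in isolation; the function and parameter names here are illustrative, not the webui's actual API:

```python
# Ordering sketch: serialize the "last used parameters" only after all
# extension scripts have run, so their mutations end up in the snapshot.
import json

def run_pipeline(params, scripts, path="params.txt"):
    for script in scripts:
        script(params)  # extensions may add or change parameters here
    with open(path, "w", encoding="utf8") as f:
        f.write(json.dumps(params))  # snapshot now includes script changes

run_pipeline({"prompt": "a cat"}, [lambda p: p.setdefault("cfg_scale", 7)])
```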
https://api.github.com/repos/AUTOMATIC1111/stable-diffusion-webui/pulls/6684
2023-01-12T21:47:20Z
2023-01-13T11:44:40Z
2023-01-13T11:44:39Z
2023-01-13T11:44:40Z
295
AUTOMATIC1111/stable-diffusion-webui
40,158
hires button, fix seeds
diff --git a/modules/txt2img.py b/modules/txt2img.py index c4cc12d2f6d..d22a1f319e3 100644 --- a/modules/txt2img.py +++ b/modules/txt2img.py @@ -67,13 +67,16 @@ def txt2img_upscale(id_task: str, request: gr.Request, gallery, gallery_index, g geninfo = json.loads(generation_info) all_seeds = geninfo["all_seeds"] + all_subseeds = geninfo["all_subseeds"] image_info = gallery[gallery_index] if 0 <= gallery_index < len(gallery) else gallery[0] p.firstpass_image = infotext_utils.image_from_url_text(image_info) gallery_index_from_end = len(gallery) - gallery_index seed = all_seeds[-gallery_index_from_end if gallery_index_from_end < len(all_seeds) + 1 else 0] - p.script_args = modules.scripts.scripts_txt2img.set_named_arg(p.script_args, 'ScriptSeed', 'seed', seed) + subseed = all_subseeds[-gallery_index_from_end if gallery_index_from_end < len(all_seeds) + 1 else 0] + p.seed = seed + p.subseed = subseed with closing(p): processed = modules.scripts.scripts_txt2img.run(p, *p.script_args)
## Description Follow-up to https://github.com/AUTOMATIC1111/stable-diffusion-webui/commit/15ec54dd969d6dc3fea7790ca5cce5badcfda426, which had the upscale button use the same seed as hires fix. That doesn't actually work: `seed` is set by the seed script in `setup()`, too late to modify it using `modules.scripts.scripts_txt2img.set_named_arg`, so I have to modify `p.seed` directly. The earlier commit also forgot about subseeds, which this PR restores as well. I've been having trouble getting all 3 outputs (normal hires fix via the old input accordion, manual hires fix via txt2img -> upscale -> image, and 2-stage hires fix via this new button) to match pixel-perfectly. > At this point I am not sure if I messed up or whether something has changed ## Checklist: - [x] I have read [contributing wiki page](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Contributing) - [x] I have performed a self-review of my own code - [x] My code follows the [style guidelines](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Contributing#code-style) - [x] My code passes [tests](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Tests)
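The trickiest part of the diff is the index arithmetic that maps a gallery selection back to its seed by counting from the end (the gallery can contain extra leading images such as a grid). A standalone sketch of just that selection, with the `+ 1` bound copied from the diff as-is:

```python
# Pick the seed for the selected gallery image. Seeds exist only for the
# individual images, so indexing is done from the end of the gallery.
def pick_seed(all_seeds, gallery_len, gallery_index):
    index_from_end = gallery_len - gallery_index
    if index_from_end < len(all_seeds) + 1:  # bound copied from the diff
        return all_seeds[-index_from_end]
    return all_seeds[0]  # out of range (e.g. the grid): fall back to first

# Gallery of 3 where image 0 is the grid; two real images with two seeds.
assert pick_seed([111, 222], gallery_len=3, gallery_index=2) == 222
assert pick_seed([111, 222], gallery_len=3, gallery_index=1) == 111
assert pick_seed([111, 222], gallery_len=3, gallery_index=0) == 111
```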
https://api.github.com/repos/AUTOMATIC1111/stable-diffusion-webui/pulls/14598
2024-01-09T16:58:09Z
2024-01-09T17:05:10Z
2024-01-09T17:05:10Z
2024-01-09T17:19:33Z
321
AUTOMATIC1111/stable-diffusion-webui
39,851
parseBalance debt support
diff --git a/js/base/Exchange.js b/js/base/Exchange.js index bb40418e647f..d517b11260e9 100644 --- a/js/base/Exchange.js +++ b/js/base/Exchange.js @@ -943,7 +943,6 @@ module.exports = class Exchange { if ((used === undefined) && (total !== undefined) && (free !== undefined)) { used = Precise.stringSub (total, free); } - balance[code]['debt'] = this.parseNumber (debt); balance[code]['free'] = this.parseNumber (free); balance[code]['used'] = this.parseNumber (used); balance[code]['total'] = this.parseNumber (total); @@ -951,10 +950,13 @@ module.exports = class Exchange { balance['used'][code] = balance[code]['used']; balance['total'][code] = balance[code]['total']; if (debt !== undefined) { + balance[code]['debt'] = this.parseNumber (debt); debtBalance[code] = balance[code]['debt']; } } - if (debtBalance.length) { + const debtBalanceArray = Object.keys (debtBalance); + const length = debtBalanceArray.length; + if (length) { balance['debt'] = debtBalance; } return balance;
https://api.github.com/repos/ccxt/ccxt/pulls/15757
2022-11-21T03:25:14Z
2022-11-21T03:27:45Z
2022-11-21T03:27:45Z
2022-11-21T03:27:45Z
297
ccxt/ccxt
13,552
Check for VPC existence for hostedzone
diff --git a/localstack/services/route53/provider.py b/localstack/services/route53/provider.py index bfe2a2d7be0b8..d858fe7914ae8 100644 --- a/localstack/services/route53/provider.py +++ b/localstack/services/route53/provider.py @@ -2,6 +2,7 @@ from typing import Optional import moto.route53.models as route53_models +from botocore.exceptions import ClientError from moto.route53.models import route53_backends from localstack.aws.api import RequestContext @@ -17,11 +18,13 @@ HealthCheck, HealthCheckId, HostedZoneConfig, + InvalidVPCId, Nonce, NoSuchHealthCheck, ResourceId, Route53Api, ) +from localstack.aws.connect import connect_to from localstack.services.moto import call_moto from localstack.services.plugins import ServiceLifecycleHook @@ -37,6 +40,24 @@ def create_hosted_zone( delegation_set_id: ResourceId = None, **kwargs, ) -> CreateHostedZoneResponse: + # private hosted zones cannot be created in a VPC that does not exist + # check that the VPC exists + if vpc: + vpc_id = vpc.get("VPCId") + vpc_region = vpc.get("VPCRegion") + if not vpc_id or not vpc_region: + raise Exception( + "VPCId and VPCRegion must be specified when creating a private hosted zone" + ) + try: + connect_to( + aws_access_key_id=context.account_id, region_name=vpc_region + ).ec2.describe_vpcs(VpcIds=[vpc_id]) + except ClientError as e: + if e.response.get("Error", {}).get("Code") == "InvalidVpcID.NotFound": + raise InvalidVPCId("The VPC ID is invalid.", sender_fault=True) from e + raise e + response = call_moto(context) # moto does not populate the VPC struct of the response if creating a private hosted zone diff --git a/tests/aws/services/route53/test_route53.py b/tests/aws/services/route53/test_route53.py index 160a772c6f5f7..d7a62b4eaa07a 100644 --- a/tests/aws/services/route53/test_route53.py +++ b/tests/aws/services/route53/test_route53.py @@ -49,7 +49,13 @@ def test_crud_health_check(self, aws_client): assert "NoSuchHealthCheck" in str(ctx.value) @markers.aws.validated - @markers.snapshot.skip_snapshot_verify(paths=["$..HostedZone.CallerReference"]) + @markers.snapshot.skip_snapshot_verify( + paths=[ + "$..HostedZone.CallerReference", + # moto does not return MaxItems for list_hosted_zones_by_vpc + "$..MaxItems", + ] + ) def test_create_private_hosted_zone( self, region_name, aws_client, cleanups, snapshot, hosted_zone ): @@ -76,6 +82,13 @@ def test_create_private_hosted_zone( response = aws_client.route53.get_hosted_zone(Id=zone_id) snapshot.match("get_hosted_zone", response) + response = aws_client.route53.list_hosted_zones_by_vpc(VPCId=vpc_id, VPCRegion=region_name) + snapshot.match("list_hosted_zones_by_vpc", response) + + response = aws_client.route53.list_hosted_zones() + zones = [zone for zone in response["HostedZones"] if name in zone["Name"]] + snapshot.match("list_hosted_zones", zones) + @markers.aws.unknown def test_associate_vpc_with_hosted_zone( self, cleanups, hosted_zone, aws_client, account_id, region_name @@ -150,6 +163,16 @@ def test_associate_vpc_with_hosted_zone( VPC={"VPCRegion": vpc_region, "VPCId": vpc2_id}, ) + @markers.aws.validated + def test_create_hosted_zone_in_non_existent_vpc( + self, aws_client, hosted_zone, snapshot, region_name + ): + vpc = {"VPCId": "non-existent", "VPCRegion": region_name} + with pytest.raises(aws_client.route53.exceptions.InvalidVPCId) as exc_info: + hosted_zone(Name=f"zone-{short_uid()}.com", VPC=vpc) + + snapshot.match("failure-response", exc_info.value.response) + @markers.aws.unknown def test_reusable_delegation_sets(self, aws_client): client = 
aws_client.route53 diff --git a/tests/aws/services/route53/test_route53.snapshot.json b/tests/aws/services/route53/test_route53.snapshot.json index 181a7ab22f6c5..e82bcf05108b1 100644 --- a/tests/aws/services/route53/test_route53.snapshot.json +++ b/tests/aws/services/route53/test_route53.snapshot.json @@ -47,7 +47,7 @@ } }, "tests/aws/services/route53/test_route53.py::TestRoute53::test_create_private_hosted_zone": { - "recorded-date": "15-12-2023, 15:20:07", + "recorded-date": "11-04-2024, 14:03:14", "recorded-content": { "create-hosted-zone-response": { "ChangeInfo": { @@ -96,6 +96,50 @@ "HTTPHeaders": {}, "HTTPStatusCode": 200 } + }, + "list_hosted_zones_by_vpc": { + "HostedZoneSummaries": [ + { + "HostedZoneId": "<zone-id:1>", + "Name": "<zone_name:1>", + "Owner": { + "OwningAccount": "111111111111" + } + } + ], + "MaxItems": "100", + "ResponseMetadata": { + "HTTPHeaders": {}, + "HTTPStatusCode": 200 + } + }, + "list_hosted_zones": [ + { + "Id": "/hostedzone/<zone-id:1>", + "Name": "<zone_name:1>", + "CallerReference": "<caller-reference:1>", + "Config": { + "Comment": "test", + "PrivateZone": true + }, + "ResourceRecordSetCount": 2 + } + ] + } + }, + "tests/aws/services/route53/test_route53.py::TestRoute53::test_create_hosted_zone_in_non_existent_vpc": { + "recorded-date": "10-04-2024, 15:47:22", + "recorded-content": { + "failure-response": { + "Error": { + "Code": "InvalidVPCId", + "Message": "The VPC ID is invalid.", + "Type": "Sender" + }, + "ResponseMetadata": { + "HTTPHeaders": {}, + "HTTPStatusCode": 400 + } } } } diff --git a/tests/aws/services/route53/test_route53.validation.json b/tests/aws/services/route53/test_route53.validation.json index 60db1366275ea..dedc896d977d7 100644 --- a/tests/aws/services/route53/test_route53.validation.json +++ b/tests/aws/services/route53/test_route53.validation.json @@ -2,7 +2,10 @@ "tests/aws/services/route53/test_route53.py::TestRoute53::test_create_hosted_zone": { "last_validated_date": "2023-11-02T11:59:59+00:00" }, + "tests/aws/services/route53/test_route53.py::TestRoute53::test_create_hosted_zone_in_non_existent_vpc": { + "last_validated_date": "2024-04-10T15:47:22+00:00" + }, "tests/aws/services/route53/test_route53.py::TestRoute53::test_create_private_hosted_zone": { - "last_validated_date": "2023-12-15T14:20:07+00:00" + "last_validated_date": "2024-04-11T14:03:14+00:00" } }
<!-- Please refer to the contribution guidelines before raising a PR: https://github.com/localstack/localstack/blob/master/CONTRIBUTING.md --> <!-- Why am I raising this PR? Add context such as related issues, PRs, or documentation. --> ## Motivation If a hosted zone is to be created in a VPC, `moto` does not check whether that VPC exists, whereas AWS does. <!-- What notable changes does this PR make? --> ## Changes * If the VPC details are specified by the caller of `create_hosted_zone`, look up the VPC and raise an error if it does not exist. * Update `test_create_private_hosted_zone` to call `list_hosted_zones_by_vpc` and snapshot the result <!-- The following sections are optional, but can be useful! ## Testing Description of how to test the changes ## TODO What's left to do: - [ ] ... - [ ] ... -->
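The lookup itself is a small piece of EC2 plumbing; here is a minimal sketch using a plain boto3 client instead of LocalStack's internal `connect_to` factory (the error-code string is the one the diff checks for, the rest is illustrative):

```python
import boto3
from botocore.exceptions import ClientError

def ensure_vpc_exists(vpc_id: str, region: str) -> None:
    """Raise if the given VPC does not exist in the given region."""
    ec2 = boto3.client("ec2", region_name=region)
    try:
        ec2.describe_vpcs(VpcIds=[vpc_id])
    except ClientError as e:
        # AWS reports a missing VPC with this specific error code.
        if e.response.get("Error", {}).get("Code") == "InvalidVpcID.NotFound":
            raise ValueError("The VPC ID is invalid.") from e
        raise
```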
https://api.github.com/repos/localstack/localstack/pulls/10634
2024-04-10T15:55:22Z
2024-04-12T10:14:10Z
2024-04-12T10:14:10Z
2024-04-12T10:14:11Z
1,910
localstack/localstack
28,507
Fix generate shortcut bug and add interrupt shortcut
diff --git a/javascript/script.js b/javascript/script.js index 05a8c5901..593d8c36c 100644 --- a/javascript/script.js +++ b/javascript/script.js @@ -125,18 +125,23 @@ document.addEventListener("DOMContentLoaded", function() { * Add a ctrl+enter as a shortcut to start a generation */ document.addEventListener('keydown', function(e) { - var handled = false; - if (e.key !== undefined) { - if ((e.key == "Enter" && (e.metaKey || e.ctrlKey || e.altKey))) handled = true; - } else if (e.keyCode !== undefined) { - if ((e.keyCode == 13 && (e.metaKey || e.ctrlKey || e.altKey))) handled = true; - } - if (handled) { - var button = gradioApp().querySelector('button[id=generate_button]'); - if (button) { - button.click(); + const isModifierKey = (e.metaKey || e.ctrlKey || e.altKey); + const isEnterKey = (e.key == "Enter" || e.keyCode == 13); + + if(isModifierKey && isEnterKey) { + const generateButton = gradioApp().querySelector('button:not(.hidden)[id=generate_button]'); + if (generateButton) { + generateButton.click(); + e.preventDefault(); + return; + } + + const stopButton = gradioApp().querySelector('button:not(.hidden)[id=stop_button]') + if(stopButton) { + stopButton.click(); + e.preventDefault(); + return; } - e.preventDefault(); } });
I noticed that the ctrl+enter quick-start shortcut caused an issue if hit again while generation had already started: it would click the (now hidden) generate button again in the background. Once the first generation finished, the second would start, but the UI would not recognise it. This PR solves that by only clicking the button when it is visible, and also makes the ctrl+enter shortcut stop the current generation, letting people update their prompts faster if they don't like the direction the previews are showing, or if they made a mistake.
https://api.github.com/repos/lllyasviel/Fooocus/pulls/1408
2023-12-14T17:01:12Z
2023-12-14T17:21:50Z
2023-12-14T17:21:50Z
2023-12-14T18:37:27Z
369
lllyasviel/Fooocus
7,139
Parameters to Request() in wrong order
diff --git a/docs/topics/request-response.rst b/docs/topics/request-response.rst index f83ef98b770..0aaf7d5d669 100644 --- a/docs/topics/request-response.rst +++ b/docs/topics/request-response.rst @@ -24,7 +24,7 @@ below in :ref:`topics-request-response-ref-request-subclasses` and Request objects =============== -.. class:: Request(url[, method='GET', body, headers, cookies, meta, encoding='utf-8', priority=0, dont_filter=False, callback, errback]) +.. class:: Request(url[, callback, method='GET', headers, body, cookies, meta, encoding='utf-8', priority=0, dont_filter=False, errback]) A :class:`Request` object represents an HTTP request, which is usually generated in the Spider and executed by the Downloader, and thus generating @@ -33,6 +33,15 @@ Request objects :param url: the URL of this request :type url: string + :param callback: the function that will be called with the response of this + request (once its downloaded) as its first parameter. For more information + see :ref:`topics-request-response-ref-request-callback-arguments` below. + If a Request doesn't specify a callback, the spider's + :meth:`~scrapy.spider.BaseSpider.parse` method will be used. + Note that if exceptions are raised during processing, errback is called instead. + + :type callback: callable + :param method: the HTTP method of this request. Defaults to ``'GET'``. :type method: string @@ -101,13 +110,6 @@ Request objects care, or you will get into crawling loops. Default to ``False``. :type dont_filter: boolean - :param callback: the function that will be called with the response of this - request (once its downloaded) as its first parameter. For more information - see :ref:`topics-request-response-ref-request-callback-arguments` below. - If a Request doesn't specify a callback, the spider's - :meth:`~scrapy.spider.BaseSpider.parse` method will be used. - :type callback: callable - :param errback: a function that will be called if any exception was raised while processing the request. This includes pages that failed with 404 HTTP errors and such. It receives a `Twisted Failure`_ instance
The old signature implied that `callback` wasn't the first optional positional parameter of `Request()`, when it actually is.
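The ordering matters because `callback` is conventionally passed positionally right after `url`; a two-line example of the idiom the corrected signature documents:

```python
# callback is the first optional positional parameter after url:
from scrapy import Request

def parse_item(response):
    yield {"url": response.url}

request = Request("http://example.com/items", parse_item)  # callback, unnamed
```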
https://api.github.com/repos/scrapy/scrapy/pulls/226
2013-01-15T10:37:56Z
2013-10-16T20:15:52Z
2013-10-16T20:15:52Z
2014-06-12T16:08:03Z
552
scrapy/scrapy
34,236
Add option to set notification sound volume
diff --git a/javascript/notification.js b/javascript/notification.js index 6d79956125c..3ee972ae166 100644 --- a/javascript/notification.js +++ b/javascript/notification.js @@ -26,7 +26,11 @@ onAfterUiUpdate(function() { lastHeadImg = headImg; // play notification sound if available - gradioApp().querySelector('#audio_notification audio')?.play(); + const notificationAudio = gradioApp().querySelector('#audio_notification audio'); + if (notificationAudio) { + notificationAudio.volume = opts.notification_volume / 100.0 || 1.0; + notificationAudio.play(); + } if (document.hasFocus()) return; diff --git a/modules/shared_options.py b/modules/shared_options.py index a9964fcbbfe..d40db5306ee 100644 --- a/modules/shared_options.py +++ b/modules/shared_options.py @@ -64,6 +64,7 @@ "save_incomplete_images": OptionInfo(False, "Save incomplete images").info("save images that has been interrupted in mid-generation; even if not saved, they will still show up in webui output."), "notification_audio": OptionInfo(True, "Play notification sound after image generation").info("notification.mp3 should be present in the root directory").needs_reload_ui(), + "notification_volume": OptionInfo(100, "Notification sound volume", gr.Slider, {"minimum": 0, "maximum": 100, "step": 1}).info("in %"), })) options_templates.update(options_section(('saving-paths', "Paths for saving"), {
## Description * a simple description of what you're trying to accomplish * This PR adds a setting for changing the volume of the optional notification sound that can be played after image generation. This is useful when the notification sound should be quieter without modifying the sound file itself. * a summary of changes in code * An option for the volume value, in percent, was added to the shared options * Before the notification sound is played, its volume is adjusted based on the value set in the options ## Screenshots/videos: ![grafik](https://github.com/AUTOMATIC1111/stable-diffusion-webui/assets/118578287/82b79dbd-e131-4b44-9c0a-ead230c3e7d0) ## Checklist: - [x] I have read [contributing wiki page](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Contributing) - [x] I have performed a self-review of my own code - [x] My code follows the [style guidelines](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Contributing#code-style) - [x] My code passes [tests](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Tests)
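The only logic involved is mapping the 0-100 slider to the 0.0-1.0 volume the audio element expects. A Python sketch of that mapping with an explicit missing-value check; note the JS in the diff uses `|| 1.0`, a falsy fallback that would also fire for a stored volume of 0:

```python
# Map a 0-100 settings slider to a 0.0-1.0 playback volume. The explicit
# None check avoids the falsy-zero pitfall of a bare `value || 1.0`.
def notification_volume(option_value):
    if option_value is None:  # option not set: keep full volume
        return 1.0
    return max(0, min(100, option_value)) / 100.0

assert notification_volume(None) == 1.0
assert notification_volume(0) == 0.0   # muted stays muted
assert notification_volume(50) == 0.5
```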
https://api.github.com/repos/AUTOMATIC1111/stable-diffusion-webui/pulls/13884
2023-11-07T02:24:37Z
2023-11-07T05:29:07Z
2023-11-07T05:29:07Z
2023-11-07T17:10:22Z
353
AUTOMATIC1111/stable-diffusion-webui
39,694
Port .travis.yml to GitHub Actions
diff --git a/.github/workflows/tests-ubuntu.yml b/.github/workflows/tests-ubuntu.yml index b0360554..270e64e2 100644 --- a/.github/workflows/tests-ubuntu.yml +++ b/.github/workflows/tests-ubuntu.yml @@ -23,5 +23,16 @@ jobs: docker: runs-on: ubuntu-20.04 steps: - - run: docker-compose build + - uses: actions/checkout@v2 + - run: docker-compose build + - run: - docker images + - run: | + docker-compose -f docker-compose.yml up -d + # docker-compose -f docker-compose.yml -f docker-compose.debug.yml up -d + docker-compose ps + # wait until the web server is up + wget --timeout 3 --tries=5 --spider localhost:8002 2>&1 | grep -i http + docker-compose logs --no-color + - run: CHEATSH_TEST_STANDALONE=NO bash tests/run-tests.sh +
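The `wget --spider` line in the workflow is a wait-until-up health check with retries; the same step, sketched in Python for clarity (the URL, retry count, and timeout mirror the workflow, while the helper itself is illustrative):

```python
# Poll the web server until it answers successfully, then let the test
# suite proceed; a Python analogue of the workflow's wget retry line.
import time
import urllib.request

def wait_for_http(url="http://localhost:8002", tries=5, timeout=3):
    for _ in range(tries):
        try:
            urllib.request.urlopen(url, timeout=timeout)  # 2xx/3xx succeeds
            return True
        except OSError:  # connection refused, timeout, HTTP error, ...
            time.sleep(timeout)
    return False

if not wait_for_http():
    raise SystemExit("web server never came up")
```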
https://api.github.com/repos/chubin/cheat.sh/pulls/253
2020-11-13T12:37:49Z
2020-11-13T12:39:38Z
2020-11-13T12:39:38Z
2020-11-13T12:45:25Z
244
chubin/cheat.sh
15,236
[ie/radiko] Add metadata info (performer, duration)
diff --git a/yt_dlp/extractor/radiko.py b/yt_dlp/extractor/radiko.py index c363d9ba5f7..2b640599952 100644 --- a/yt_dlp/extractor/radiko.py +++ b/yt_dlp/extractor/radiko.py @@ -1,5 +1,6 @@ import base64 import random +import re import urllib.parse from .common import InfoExtractor @@ -11,6 +12,7 @@ unified_timestamp, update_url_query, ) +from ..utils.traversal import traverse_obj class RadikoBaseIE(InfoExtractor): @@ -159,6 +161,12 @@ def _extract_formats(self, video_id, station, is_onair, ft, cursor, auth_token, return formats + def _extract_performers(self, prog): + performers = traverse_obj(prog, ( + 'pfm/text()', ..., {lambda x: re.split(r'[//、 ,,]', x)}, ..., {str.strip})) + # TODO: change 'artist' fields to 'artists' and return traversal list instead of str + return ', '.join(performers) or None + class RadikoIE(RadikoBaseIE): _VALID_URL = r'https?://(?:www\.)?radiko\.jp/#!/ts/(?P<station>[A-Z0-9-]+)/(?P<id>\d+)' @@ -186,10 +194,12 @@ def _real_extract(self, url): return { 'id': video_id, 'title': try_call(lambda: prog.find('title').text), + 'artist': self._extract_performers(prog), 'description': clean_html(try_call(lambda: prog.find('info').text)), 'uploader': try_call(lambda: station_program.find('.//name').text), 'uploader_id': station, 'timestamp': vid_int, + 'duration': try_call(lambda: unified_timestamp(radio_end, False) - unified_timestamp(radio_begin, False)), 'is_live': True, 'formats': self._extract_formats( video_id=video_id, station=station, is_onair=False, @@ -243,6 +253,7 @@ def _real_extract(self, url): return { 'id': station, 'title': title, + 'artist': self._extract_performers(prog), 'description': description, 'uploader': station_name, 'uploader_id': station,
**IMPORTANT**: PRs without the template will be CLOSED ### Description of your *pull request* and other information <!-- Explanation of your *pull request* in arbitrary form goes here. Please **make sure the description explains the purpose and effect** of your *pull request* and is worded well enough to be understood. Provide as much **context and examples** as possible --> Adds the following information as metadata that can be obtained from radiko.jp for the target program: - Performers - Duration <details open><summary>Template</summary> <!-- OPEN is intentional --> <!-- # PLEASE FOLLOW THE GUIDE BELOW - You will be asked some questions, please read them **carefully** and answer honestly - Put an `x` into all the boxes `[ ]` relevant to your *pull request* (like [x]) - Use *Preview* tab to see how your *pull request* will actually look like --> ### Before submitting a *pull request* make sure you have: - [x] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions) - [x] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests - [x] Checked the code with [flake8](https://pypi.python.org/pypi/flake8) and [ran relevant tests](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) ### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check all of the following options that apply: - [ ] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/) - [x] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence) ### What is the purpose of your *pull request*? - [x] Fix or improvement to an extractor (Make sure to add/update tests) - [ ] New extractor ([Piracy websites will not be accepted](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#is-the-website-primarily-used-for-piracy)) - [ ] Core bug fix/improvement - [ ] New feature (It is strongly [recommended to open an issue first](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#adding-new-feature-or-making-overarching-changes)) </details>
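The core of `_extract_performers` is splitting one credit string on a mix of ASCII and fullwidth Japanese delimiters. That piece, isolated into a self-contained sketch; the character class approximates the one in the diff:

```python
import re

def split_performers(text):
    # Split on slash, comma, and the Japanese enumeration comma, in both
    # ASCII and fullwidth forms, then drop empty fragments.
    parts = re.split(r'[/／、,，]', text)
    return [p.strip() for p in parts if p.strip()]

assert split_performers('山田太郎／鈴木花子, John Doe') == \
    ['山田太郎', '鈴木花子', 'John Doe']
```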
https://api.github.com/repos/yt-dlp/yt-dlp/pulls/9115
2024-01-31T08:28:12Z
2024-02-03T18:44:18Z
2024-02-03T18:44:18Z
2024-02-03T18:44:18Z
554
yt-dlp/yt-dlp
7,629
Mention the usage of Python's builtin readline module in the documentation
diff --git a/docs/source/conf.py b/docs/source/conf.py index 774871e3e..34258eb49 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -43,6 +43,7 @@ "sphinx.ext.autodoc", "sphinx.ext.viewcode", "sphinx.ext.napoleon", + "sphinx.ext.intersphinx", "sphinx.ext.autosectionlabel", "sphinx_copybutton", ] diff --git a/docs/source/console.rst b/docs/source/console.rst index 6ea6e0926..2b26e3104 100644 --- a/docs/source/console.rst +++ b/docs/source/console.rst @@ -238,12 +238,14 @@ The :meth:`~rich.console.Console.print` method has a boolean ``crop`` argument. Input ----- -The console class has an :meth:`~rich.console.Console.input` which works in the same way as Python's builtin ``input()`` method, but can use anything that Rich can print as a prompt. For example, here's a colorful prompt with an emoji:: +The console class has an :meth:`~rich.console.Console.input` method which works in the same way as Python's builtin :func:`input` function, but can use anything that Rich can print as a prompt. For example, here's a colorful prompt with an emoji:: from rich.console import Console console = Console() console.input("What is [i]your[/i] [bold red]name[/]? :smiley: ") +If Python's builtin :mod:`readline` module is previously loaded, elaborate line editing and history features will be available. + Exporting --------- diff --git a/docs/source/prompt.rst b/docs/source/prompt.rst index fa256a0f6..088aa8e74 100644 --- a/docs/source/prompt.rst +++ b/docs/source/prompt.rst @@ -1,7 +1,7 @@ Prompt ====== -Rich has a number of :class:`~rich.prompt.Prompt` classes which ask a user for input and loop until a valid response is received. Here's a simple example:: +Rich has a number of :class:`~rich.prompt.Prompt` classes which ask a user for input and loop until a valid response is received (they all use the :ref:`Console API<Input>` internally). Here's a simple example:: >>> from rich.prompt import Prompt >>> name = Prompt.ask("Enter your name") diff --git a/rich/console.py b/rich/console.py index 7c4be7fcf..de0a01b03 100644 --- a/rich/console.py +++ b/rich/console.py @@ -1919,6 +1919,8 @@ def input( ) -> str: """Displays a prompt and waits for input from the user. The prompt may contain color / style. + It works in the same way as Python's builtin :func:`input` function and provides elaborate line editing and history features if Python's builtin :mod:`readline` module is previously loaded. + Args: prompt (Union[str, Text]): Text to render in the prompt. markup (bool, optional): Enable console markup (requires a str prompt). Defaults to True.
## Type of changes - [ ] Bug fix - [ ] New feature - [x] Documentation / docstrings - [ ] Tests - [ ] Other ## Checklist - [x] I've run the latest [black](https://github.com/psf/black) with default args on new code. - [ ] I've updated CHANGELOG.md and CONTRIBUTORS.md where appropriate. - [x] ~I've added tests for new code.~ No new code. - [x] I accept that @willmcgugan may be pedantic in the code review. ## Description This attempts to resolve #1633.
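The documented behavior is easy to demonstrate: `Console.input` delegates to the builtin `input()`, so a bare side-effect import of the stdlib `readline` module (on platforms that ship it) is all it takes to get line editing and history. The prompt string below is the one from the docs:

```python
import readline  # noqa: F401  side-effect import; not available on Windows

from rich.console import Console

console = Console()
# Arrow keys, Ctrl-A/Ctrl-E editing, and history now work in this prompt.
name = console.input("What is [i]your[/i] [bold red]name[/]? :smiley: ")
console.print(f"Hello, {name}!")
```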
https://api.github.com/repos/Textualize/rich/pulls/1634
2021-10-31T16:07:21Z
2021-11-07T10:24:01Z
2021-11-07T10:24:01Z
2021-11-07T10:24:01Z
716
Textualize/rich
48,046
[Bug fix] Add rope_theta for llama config
diff --git a/csrc/transformer/inference/csrc/apply_rotary_pos_emb.cu b/csrc/transformer/inference/csrc/apply_rotary_pos_emb.cu index e743ffc3f64f..e326c762c0f3 100644 --- a/csrc/transformer/inference/csrc/apply_rotary_pos_emb.cu +++ b/csrc/transformer/inference/csrc/apply_rotary_pos_emb.cu @@ -32,6 +32,7 @@ __global__ void apply_rotary_pos_half(T* mixed_query, unsigned num_heads, unsigned head_size, unsigned total_count, + float rope_theta, int max_out_tokens) { constexpr int T_per_thread = granularity / sizeof(T); @@ -61,7 +62,7 @@ __global__ void apply_rotary_pos_half(T* mixed_query, const int neuron_idx = base_neuron_idx + i; if (neuron_idx < rotary_dim) { float inv_freq = (float)((neuron_idx % half_dim) * 2) / (float)rotary_dim; - inv_freq = 1.0 / powf(10000.0, inv_freq) * (float)seq_idx; + inv_freq = 1.0 / powf(rope_theta, inv_freq) * (float)seq_idx; float rotary_sign = (neuron_idx > (half_dim - 1) ? -1.0 : 1.0); float q_rot = conversion::to<float>(q[i]) * rotary_sign; @@ -95,6 +96,7 @@ __global__ void apply_rotary_pos_half(T* mixed_query, num_heads, \ head_size, \ total_count, \ + rope_theta, \ max_out_tokens); #ifdef __HIP_PLATFORM_HCC__ @@ -136,6 +138,7 @@ void launch_apply_rotary_pos_emb(T* mixed_query, unsigned offset, unsigned num_heads, unsigned batch, + float rope_theta, cudaStream_t stream, int max_out_tokens) { @@ -176,9 +179,18 @@ void launch_apply_rotary_pos_emb(T* mixed_query, } } -#define INSTANTIATE_LAUNCH_ROTARY_POS_EMB(T) \ - template void launch_apply_rotary_pos_emb<T>( \ - T*, T*, unsigned, unsigned, unsigned, unsigned, unsigned, unsigned, cudaStream_t, int); +#define INSTANTIATE_LAUNCH_ROTARY_POS_EMB(T) \ + template void launch_apply_rotary_pos_emb<T>(T*, \ + T*, \ + unsigned, \ + unsigned, \ + unsigned, \ + unsigned, \ + unsigned, \ + unsigned, \ + float, \ + cudaStream_t, \ + int); INSTANTIATE_LAUNCH_ROTARY_POS_EMB(float); #ifdef BF16_AVAILABLE diff --git a/csrc/transformer/inference/csrc/pt_binding.cpp b/csrc/transformer/inference/csrc/pt_binding.cpp index 634b6e3adbbb..4fd64112e148 100644 --- a/csrc/transformer/inference/csrc/pt_binding.cpp +++ b/csrc/transformer/inference/csrc/pt_binding.cpp @@ -445,7 +445,8 @@ std::vector<at::Tensor> ds_softmax_context(at::Tensor& query_key_value, bool no_masking, unsigned layer_id, unsigned num_layers, - at::Tensor& alibi) + at::Tensor& alibi, + float rope_theta) { unsigned bsz = query_key_value.size(0); unsigned seq_len = query_key_value.size(1); @@ -493,7 +494,8 @@ std::vector<at::Tensor> ds_softmax_context(at::Tensor& query_key_value, rotate_every_two, InferenceContext::Instance().GetCurrentStream(), 3, - InferenceContext::Instance().GetMaxTokenLength()); + InferenceContext::Instance().GetMaxTokenLength(), + rope_theta); if (rotary_dim > 0 && rotate_half) launch_apply_rotary_pos_emb(query_cont, kv_cache, @@ -503,6 +505,7 @@ std::vector<at::Tensor> ds_softmax_context(at::Tensor& query_key_value, (is_prompt ? 
0 : soft_len - 1), heads, bsz, + rope_theta, InferenceContext::Instance().GetCurrentStream(), InferenceContext::Instance().GetMaxTokenLength()); @@ -1100,7 +1103,8 @@ at::Tensor ds_linear_layer(at::Tensor& input, bool add_bias, bool do_flash_attn, int num_heads, - bool transposed_mode) + bool transposed_mode, + float rope_theta) { auto input_cont = input.contiguous(); auto options = at::TensorOptions() @@ -1174,7 +1178,8 @@ at::Tensor ds_linear_layer(at::Tensor& input, false, InferenceContext::Instance().GetCurrentStream(), 3, - input.size(1)); + input.size(1), + rope_theta); return at::from_blob(final_output, {3, input.size(0), num_heads, input.size(1), padded_head_size}, options); @@ -1200,7 +1205,8 @@ at::Tensor ds_linear_layer(at::Tensor& input, false, InferenceContext::Instance().GetCurrentStream(), 3, - input.size(1)); + input.size(1), + rope_theta); return at::from_blob( final_output, {3, input.size(0), num_heads, input.size(1), head_size}, options); // return at::from_blob(workspace, {input.size(0) * input.size(1), 3, num_heads, @@ -1847,7 +1853,8 @@ std::vector<at::Tensor> apply_rotary_pos_emb(at::Tensor& mixed_query, unsigned rotary_dim, unsigned offset, unsigned num_heads, - bool rotate_half) + bool rotate_half, + float rope_theta) { auto query_cont = mixed_query.contiguous(); auto key_cont = key_layer.contiguous(); @@ -1865,6 +1872,7 @@ std::vector<at::Tensor> apply_rotary_pos_emb(at::Tensor& mixed_query, offset, num_heads, bsz, + rope_theta, InferenceContext::Instance().GetCurrentStream(), InferenceContext::Instance().GetMaxTokenLength()); else @@ -1876,6 +1884,7 @@ std::vector<at::Tensor> apply_rotary_pos_emb(at::Tensor& mixed_query, offset, num_heads, bsz, + rope_theta, InferenceContext::Instance().GetCurrentStream(), InferenceContext::Instance().GetMaxTokenLength()); return {query_cont, key_cont}; diff --git a/csrc/transformer/inference/csrc/transform.cu b/csrc/transformer/inference/csrc/transform.cu index 0b8bffa643c6..06b29647ab2a 100644 --- a/csrc/transformer/inference/csrc/transform.cu +++ b/csrc/transformer/inference/csrc/transform.cu @@ -32,7 +32,8 @@ __global__ void bias_add_transform_0213(float* output, bool rotate_half, bool rotate_every_two, int head_ext, - int max_out_tokens) + int max_out_tokens, + float rope_theta) { int d0_stride = hidden_dim * seq_length; int d1_stride = hidden_dim; @@ -70,7 +71,7 @@ __global__ void bias_add_transform_0213(float* output, #pragma unroll for (int o = 0; o < 2; o++) { float inv_freq = (float)(((d3 << 1) + o) * 2) / (float)(rotary_dim << 2); - inv_freq = 1.0 / powf(10000.0, inv_freq) * (float)seq_id; + inv_freq = 1.0 / powf(rope_theta, inv_freq) * (float)seq_id; q_f[o].x = (-1.0 * q_f[o].y * sinf(inv_freq) + q_f[o].x * cosf(inv_freq)); q_f[o].y = (q_f[o].x * sinf(inv_freq) + q_f[o].y * cosf(inv_freq)); } @@ -100,7 +101,8 @@ __global__ void bias_add_transform_0213(T* output, // q bool rotate_half, bool rotate_every_two, int head_ext, - int max_out_tokens) + int max_out_tokens, + float rope_theta) { using T2 = typename std::conditional<std::is_same<T, __half>::value, __half2, __nv_bfloat162>::type; @@ -147,7 +149,7 @@ __global__ void bias_add_transform_0213(T* output, // q #pragma unroll for (int o = 0; o < 4; o++) { float inv_freq = (float)(((d3 << 2) + o) * 2) / (float)(rotary_dim << 3); - inv_freq = 1.0 / powf(10000.0, inv_freq) * (float)seq_id; + inv_freq = 1.0 / powf(rope_theta, inv_freq) * (float)seq_id; float q_data[2]; q_data[0] = conversion::to<float>(q_h[o].x); q_data[1] = conversion::to<float>(q_h[o].y); @@ -181,7 +183,8 
@@ void launch_bias_add_transform_0213<float>(float* output, bool rotate_every_two, cudaStream_t stream, int trans_count, - int max_out_tokens) + int max_out_tokens, + float rope_theta) { hidden_dim >>= 2; int head_ext = (hidden_dim - 1) / MAX_THREADS + 1; @@ -204,7 +207,8 @@ void launch_bias_add_transform_0213<float>(float* output, rotate_half, rotate_every_two, head_ext, - max_out_tokens); + max_out_tokens, + rope_theta); } template <typename T> @@ -225,7 +229,8 @@ void launch_bias_add_transform_0213(T* output, bool rotate_every_two, cudaStream_t stream, int trans_count, - int max_out_tokens) + int max_out_tokens, + float rope_theta) { hidden_dim >>= 3; int head_ext = 1; // (hidden_dim - 1) / MAX_THREADS + 1; @@ -247,7 +252,8 @@ void launch_bias_add_transform_0213(T* output, rotate_half, rotate_every_two, head_ext, - max_out_tokens); + max_out_tokens, + rope_theta); } #define INSTANTIATE_LAUNCH_BIAS_ADD_TRANSFORM_0213(T) \ @@ -268,7 +274,8 @@ void launch_bias_add_transform_0213(T* output, bool, \ cudaStream_t, \ int, \ - int) + int, \ + float) #ifdef BF16_AVAILABLE INSTANTIATE_LAUNCH_BIAS_ADD_TRANSFORM_0213(__nv_bfloat16); diff --git a/csrc/transformer/inference/includes/inference_cuda_layers.h b/csrc/transformer/inference/includes/inference_cuda_layers.h index 5240ebb1d524..dcc020483687 100644 --- a/csrc/transformer/inference/includes/inference_cuda_layers.h +++ b/csrc/transformer/inference/includes/inference_cuda_layers.h @@ -168,6 +168,7 @@ void launch_apply_rotary_pos_emb(T* mixed_query, unsigned offset, unsigned num_heads, unsigned batch, + float rope_theta, cudaStream_t stream, int max_out_tokens); @@ -207,7 +208,8 @@ void launch_bias_add_transform_0213(T* outputs, bool rotate_every_two, cudaStream_t stream, int trans_count, - int max_out_tokens); + int max_out_tokens, + float rope_theta); template <typename T> void pad_data(T* padded_output, T* output, diff --git a/deepspeed/module_inject/containers/llama.py b/deepspeed/module_inject/containers/llama.py index af99d658017c..f6157e5cdfed 100644 --- a/deepspeed/module_inject/containers/llama.py +++ b/deepspeed/module_inject/containers/llama.py @@ -34,6 +34,7 @@ def create_module(self, config=None): _config.rotate_half = True _config.rotate_every_two = False _config.rotary_dim = self.hidden_size // self.num_attention_heads + _config.rope_theta = self.policy.client_module.self_attn.rope_theta self.module = DeepSpeedGPTInference(_config, mp_group=self.mp_group) return self.module diff --git a/deepspeed/ops/transformer/inference/config.py b/deepspeed/ops/transformer/inference/config.py index 4e29a2137c64..d5aff4f541f7 100644 --- a/deepspeed/ops/transformer/inference/config.py +++ b/deepspeed/ops/transformer/inference/config.py @@ -79,7 +79,8 @@ def __init__(self, transposed_mode=False, use_triton=False, triton_autotune=False, - num_kv=-1): + num_kv=-1, + rope_theta=10000): super(DeepSpeedInferenceConfig, self).__init__(hidden_size, (intermediate_size if intermediate_size > 0 else 4 * hidden_size), heads, num_hidden_layers) @@ -114,6 +115,7 @@ def __init__(self, self.use_triton = use_triton self.triton_autotune = triton_autotune self.num_kv = num_kv + self.rope_theta = rope_theta @classmethod def from_dict(cls, json_object): diff --git a/deepspeed/ops/transformer/inference/op_binding/linear.py b/deepspeed/ops/transformer/inference/op_binding/linear.py index e970b562c6d6..b8decb6dc5ea 100644 --- a/deepspeed/ops/transformer/inference/op_binding/linear.py +++ b/deepspeed/ops/transformer/inference/op_binding/linear.py @@ -31,7 +31,7 @@ def 
__init__(self, config: DeepSpeedInferenceConfig): except AttributeError: self.linear_func = self.linear_fallback - def linear_fallback(self, input, weight, bias, add_bias, do_flash_attn, num_heads, transpose): + def linear_fallback(self, input, weight, bias, add_bias, do_flash_attn, num_heads, transpose, rope_theta): raise NotImplementedError def forward(self, @@ -44,7 +44,7 @@ def forward(self, external_cache: bool = None, num_layers: int = None): qkv_out = self.linear_func(input, weight, bias, add_bias, do_flash_attn, num_heads, - self.config.transposed_mode) + self.config.transposed_mode, self.config.rope_theta) return qkv_out @staticmethod diff --git a/deepspeed/ops/transformer/inference/op_binding/softmax_context.py b/deepspeed/ops/transformer/inference/op_binding/softmax_context.py index 012399ea1ef3..0dc4e08a3633 100644 --- a/deepspeed/ops/transformer/inference/op_binding/softmax_context.py +++ b/deepspeed/ops/transformer/inference/op_binding/softmax_context.py @@ -23,9 +23,9 @@ def __init__(self, config: DeepSpeedInferenceConfig): except AttributeError: self.softmax_context_func = self.softmax_context_fallback - def softmax_context_fallback(self, query_key_value, attn_mask, rotary_dim, rotate_half, roteate_every_two, heads, - norm_factor, triangular_masking, local_attention, window_size, no_masking, layer_id, - num_layers, alibi): + def softmax_context_fallback(self, query_key_value, attn_mask, rotary_dim, rotate_half, rotate_every_two, heads, + num_kv, norm_factor, triangular_masking, local_attention, window_size, no_masking, + layer_id, num_layers, alibi, rope_theta): raise NotImplementedError def forward(self, query_key_value: torch.Tensor, attn_mask: torch.Tensor, heads: int, num_kv: int, @@ -41,6 +41,7 @@ def forward(self, query_key_value: torch.Tensor, attn_mask: torch.Tensor, heads: output = self.softmax_context_func(query_key_value, attn_mask, self.config.rotary_dim, self.config.rotate_half, self.config.rotate_every_two, heads, num_kv, norm_factor, self.config.triangular_masking, self.config.local_attention, - self.config.window_size, no_masking, layer_id, num_layers, alibi) + self.config.window_size, no_masking, layer_id, num_layers, alibi, + self.config.rope_theta) return output
Fixes a bug with CodeLlama; the bug is described in https://github.com/microsoft/DeepSpeed/issues/4442. DeepSpeed now uses the `rope_theta` value from the transformers config instead of a hard-coded 10000.
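`rope_theta` is the base of the rotary-embedding frequency schedule that the CUDA kernels above compute per channel pair; a Python rendering of that formula, matching the kernel's expression:

```python
# angle = seq_idx / rope_theta ** (2 * (i % half_dim) / rotary_dim),
# as in apply_rotary_pos_half above. CodeLlama ships rope_theta = 1e6,
# so hard-coding 10000 produced wrong rotation angles for it.
def rope_angles(seq_idx, rotary_dim, rope_theta=10000.0):
    half_dim = rotary_dim // 2
    return [
        seq_idx / rope_theta ** (2 * (i % half_dim) / rotary_dim)
        for i in range(rotary_dim)
    ]

assert rope_angles(1, 4)[0] == 1.0  # channel 0 always has exponent 0
```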
https://api.github.com/repos/microsoft/DeepSpeed/pulls/4480
2023-10-09T15:29:15Z
2023-10-19T18:12:46Z
2023-10-19T18:12:46Z
2023-11-09T13:20:16Z
3,928
microsoft/DeepSpeed
10,625
Added US census data API
diff --git a/README.md b/README.md index 25c2eaba64..0f2621fb91 100644 --- a/README.md +++ b/README.md @@ -528,8 +528,10 @@ API | Description | Auth | HTTPS | CORS | | [Open Government, Thailand](https://data.go.th/) | Thailand Government Open Data | `apiKey` | Yes | Unknown | | [Open Government, USA](https://www.data.gov/) | United States Government Open Data | No | Yes | Unknown | | [Represent by Open North](https://represent.opennorth.ca/) | Find Canadian Government Representatives | No | Yes | Unknown | +| [US Census Bureau](https://www.census.gov/data/developers/data-sets.html) | U.S. Census data sets and Geolocation | No | Yes | Unknown | | [USAspending.gov](https://api.usaspending.gov/) | US federal spending data | No | Yes | Unknown | + **[⬆ Back to Index](#index)** ### Health API | Description | Auth | HTTPS | CORS |
<!-- Thank you for taking the time to work on a Pull Request for this project! --> <!-- To ensure your PR is dealt with swiftly please check the following: --> - [x] My submission is formatted according to the guidelines in the [contributing guide](CONTRIBUTING.md) - [x] My addition is ordered alphabetically - [x] My submission has a useful description - [x] The description does not end with punctuation - [x] Each table column is padded with one space on either side - [x] I have searched the repository for any relevant issues or pull requests - [x] Any category I am creating has the minimum requirement of 3 items - [x] All changes have been [squashed][squash-link] into a single commit [squash-link]: <https://github.com/todotxt/todo.txt-android/wiki/Squash-All-Commits-Related-to-a-Single-Issue-into-a-Single-Commit>
https://api.github.com/repos/public-apis/public-apis/pulls/1610
2021-03-31T19:19:55Z
2021-04-02T02:08:24Z
2021-04-02T02:08:24Z
2021-04-02T15:16:48Z
236
public-apis/public-apis
35,663
Add Xwin-LM V0.1, V0.2 support
diff --git a/docs/model_support.md b/docs/model_support.md index 780e11110d..8d801fafcc 100644 --- a/docs/model_support.md +++ b/docs/model_support.md @@ -46,6 +46,7 @@ - [WizardLM/WizardCoder-15B-V1.0](https://huggingface.co/WizardLM/WizardCoder-15B-V1.0) - [HuggingFaceH4/starchat-beta](https://huggingface.co/HuggingFaceH4/starchat-beta) - [HuggingFaceH4/zephyr-7b-alpha](https://huggingface.co/HuggingFaceH4/zephyr-7b-alpha) +- [Xwin-LM/Xwin-LM-7B-V0.1](https://huggingface.co/Xwin-LM/Xwin-LM-70B-V0.1) - Any [EleutherAI](https://huggingface.co/EleutherAI) pythia model such as [pythia-6.9b](https://huggingface.co/EleutherAI/pythia-6.9b) - Any [Peft](https://github.com/huggingface/peft) adapter trained on top of a model above. To activate, must have `peft` in the model path. Note: If diff --git a/fastchat/model/model_adapter.py b/fastchat/model/model_adapter.py index f33d5232d7..72ebfbf640 100644 --- a/fastchat/model/model_adapter.py +++ b/fastchat/model/model_adapter.py @@ -1672,6 +1672,18 @@ def get_default_conv_template(self, model_path: str) -> Conversation: return get_conv_template("zephyr") +class XwinLMAdapter(BaseModelAdapter): + """The model adapter for Xwin-LM V0.1 and V0.2 series of models(e.g., Xwin-LM/Xwin-LM-70B-V0.1)""" + + # use_fast_tokenizer = False + + def match(self, model_path: str): + return "xwin-lm" in model_path.lower() + + def get_default_conv_template(self, model_path: str) -> Conversation: + return get_conv_template("vicuna_v1.1") + + # Note: the registration order matters. # The one registered earlier has a higher matching priority. register_model_adapter(PeftModelAdapter) @@ -1733,6 +1745,7 @@ def get_default_conv_template(self, model_path: str) -> Conversation: register_model_adapter(CodeLlamaAdapter) register_model_adapter(Llama2ChangAdapter) register_model_adapter(ZephyrAdapter) +register_model_adapter(XwinLMAdapter) # After all adapters, try the default base adapter. register_model_adapter(BaseModelAdapter) diff --git a/fastchat/model/model_registry.py b/fastchat/model/model_registry.py index 9f562b846b..22d3013a1e 100644 --- a/fastchat/model/model_registry.py +++ b/fastchat/model/model_registry.py @@ -325,3 +325,15 @@ def get_model_info(name: str) -> ModelInfo: "https://huggingface.co/HuggingFaceH4/zephyr-7b-alpha", "a chatbot fine-tuned from Mistral by Hugging Face", ) +register_model_info( + [ + "Xwin-LM-7B-V0.1", + "Xwin-LM-13B-V0.1", + "Xwin-LM-70B-V0.1", + "Xwin-LM-7B-V0.2", + "Xwin-LM-13B-V0.2", + ], + "Xwin-LM", + "https://github.com/Xwin-LM/Xwin-LM", + "Chat models developed by Xwin-LM team", +)
<!-- Thank you for your contribution! --> <!-- Please add a reviewer to the assignee section when you create a PR. If you don't have the access to it, we will shortly find a reviewer and assign them to your PR. --> ## Why are these changes needed? <!-- Please give a short summary of the change and the problem this solves. --> Add support for Xwin-LM V0.1, V0.2 series of models. ## Related issue number (if applicable) None <!-- For example: "Closes #1234" --> ## Checks - [x] I've run `format.sh` to lint the changes in this PR. - [x] I've included any doc changes needed. - [x] I've made sure the relevant tests are passing (if applicable).
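For readers adding similar models: a minimal sketch of the adapter pattern this PR uses, built only from names that appear in the diff (`BaseModelAdapter`, `get_conv_template`, `register_model_adapter`); the import paths follow the files the diff touches, so treat them as assumptions if your FastChat version differs. ```python # Minimal FastChat adapter sketch, mirroring the XwinLMAdapter in the diff. # Assumes fastchat is installed; import paths match the files the diff edits. from fastchat.conversation import get_conv_template from fastchat.model.model_adapter import BaseModelAdapter, register_model_adapter class MyXwinLMAdapter(BaseModelAdapter): """Adapter for Xwin-LM V0.1/V0.2 models (e.g., Xwin-LM/Xwin-LM-70B-V0.1).""" def match(self, model_path: str): # Case-insensitive substring match on the model path. return "xwin-lm" in model_path.lower() def get_default_conv_template(self, model_path: str): # Xwin-LM reuses the Vicuna v1.1 conversation template. return get_conv_template("vicuna_v1.1") # Registration order matters: adapters registered earlier match first. register_model_adapter(MyXwinLMAdapter) ```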
https://api.github.com/repos/lm-sys/FastChat/pulls/2566
2023-10-15T13:51:25Z
2023-10-15T19:37:17Z
2023-10-15T19:37:17Z
2023-10-15T19:37:17Z
859
lm-sys/FastChat
41,499
Check dependencies in Ebook gen script
diff --git a/generate-epub.sh b/generate-epub.sh index d7c21241b8..18690fbb52 100755 --- a/generate-epub.sh +++ b/generate-epub.sh @@ -1,4 +1,4 @@ -#! /usr/bin/env sh +#! /usr/bin/env bash generate_from_stdin() { outfile=$1 @@ -34,6 +34,20 @@ generate () { cat $name.md | generate_from_stdin $name.epub $language } +# Check if depencies exist +check_dependencies () { + for dependency in "${dependencies[@]}" + do + if ! [ -x "$(command -v $dependency)" ]; then + echo "Error: $dependency is not installed." >&2 + exit 1 + fi + done +} + +dependencies=("pandoc") + +check_dependencies generate_with_solutions generate README-ja ja generate README-zh-Hans zh-Hans
Fixes https://github.com/donnemartin/system-design-primer/issues/339 @donnemartin @Skn0tt Please check
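For comparison only, the same guard is easy to express portably with the Python standard library's `shutil.which`; this is a hypothetical alternative, not part of the PR. ```python # Hypothetical Python equivalent of the PR's shell dependency check. import shutil import sys DEPENDENCIES = ["pandoc"] def check_dependencies(dependencies): """Exit with an error if any required executable is missing from PATH.""" for dependency in dependencies: if shutil.which(dependency) is None: print(f"Error: {dependency} is not installed.", file=sys.stderr) sys.exit(1) if __name__ == "__main__": check_dependencies(DEPENDENCIES) ```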
https://api.github.com/repos/donnemartin/system-design-primer/pulls/406
2020-04-25T07:19:24Z
2020-07-04T01:24:24Z
2020-07-04T01:24:24Z
2020-07-04T01:24:34Z
231
donnemartin/system-design-primer
36,831
Add scikit-multiflow - stream learning in Python
diff --git a/README.md b/README.md index da7fb015..35621d62 100644 --- a/README.md +++ b/README.md @@ -1068,6 +1068,7 @@ be * [fast.ai](https://github.com/fastai/fastaihttps://github.com/fastai/fastai) - A library simplifies training fast and accurate neural nets using modern best practices and already supports vision, text, tabular, and collab (collaborative filtering) models "out of the box" * [Catalyst](https://github.com/catalyst-team/catalyst) - High-level utils for PyTorch DL & RL research. It was developed with a focus on reproducibility, fast experimentation and code/ideas reusing. Being able to research/develop something new, rather than write another regular train loop. * [Fastai](https://github.com/fastai/fastai) - High-level wrapper built on the top of Pytorch which supports vision, text, tabular data and collaborative filtering. +* [scikit-multiflow](https://github.com/scikit-multiflow/scikit-multiflow) - A machine learning framework for multi-output/multi-label and stream data. <a name="python-data-analysis"></a> #### Data Analysis / Data Visualization
https://api.github.com/repos/josephmisiti/awesome-machine-learning/pulls/655
2019-12-05T00:51:05Z
2020-01-27T14:55:36Z
2020-01-27T14:55:36Z
2020-01-27T14:55:36Z
280
josephmisiti/awesome-machine-learning
51,787
CI Adds skipping to azure pipelines with commit message
diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 1a42c533fb2ee..870c5f0e1d313 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -8,17 +8,11 @@ schedules: always: true jobs: -- job: linting - displayName: Linting +- job: git_commit + displayName: Get Git Commit pool: vmImage: ubuntu-18.04 steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.9' - - bash: | - pip install flake8 mypy==0.782 - displayName: Install linters - bash: | set -ex if [[ $BUILD_REASON == "PullRequest" ]]; then @@ -26,48 +20,53 @@ jobs: # which has a "Merge ID into ID" as a commit message. The latest commit # message is the second to last commit COMMIT_ID=$(echo $BUILD_SOURCEVERSIONMESSAGE | awk '{print $2}') - COMMIT_MESSAGE=$(git log $COMMIT_ID -1 --pretty=%B) + message=$(git log $COMMIT_ID -1 --pretty=%B) else - COMMIT_MESSAGE=$BUILD_SOURCEVERSIONMESSAGE + message=$BUILD_SOURCEVERSIONMESSAGE fi - echo "##vso[task.setvariable variable=COMMIT_MESSAGE]$COMMIT_MESSAGE" + echo "##vso[task.setvariable variable=message;isOutput=true]$message" + name: commit displayName: Get source version message + +- job: linting + dependsOn: [git_commit] + condition: | + and( + succeeded(), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[lint skip]')), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')) + ) + displayName: Linting + pool: + vmImage: ubuntu-18.04 + steps: + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.9' - bash: | - set -ex - if [[ "$COMMIT_MESSAGE" =~ "[lint skip]" ]]; then - # skip linting - echo "Skipping flake8 linting" - exit 0 - else - ./build_tools/circle/linting.sh - fi + pip install flake8 mypy==0.782 + displayName: Install linters + - bash: | + ./build_tools/circle/linting.sh displayName: Run linting - bash: | - set -ex - if [[ "$COMMIT_MESSAGE" =~ "[lint skip]" ]]; then - # skip linting - echo "Skipping mypy linting" - exit 0 - else - mypy sklearn/ - fi + mypy sklearn/ displayName: Run mypy - - bash: | - if [[ "$COMMIT_MESSAGE" =~ "[scipy-dev]" ]] || [[ $BUILD_REASON == "Schedule" ]]; then - echo "Running scipy-dev" - echo "##vso[task.setvariable variable=runScipyDev;isOutput=true]true" - else - echo "##vso[task.setvariable variable=runScipyDev;isOutput=true]false" - fi - name: gitCommitMessage - displayName: Determine to run scipy-dev - template: build_tools/azure/posix.yml parameters: name: Linux_Nightly vmImage: ubuntu-18.04 - dependsOn: [linting] - condition: eq(dependencies['linting']['outputs']['gitCommitMessage.runScipyDev'], 'true') + dependsOn: [git_commit, linting] + condition: | + and( + succeeded(), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), + or(eq(variables['Build.Reason'], 'Schedule'), + contains(dependencies['git_commit']['outputs']['commit.message'], '[scipy-dev]' + ) + ) + ) matrix: pylatest_pip_scipy_dev: DISTRIB: 'conda-pip-scipy-dev' @@ -84,6 +83,12 @@ jobs: parameters: name: Linux_Runs vmImage: ubuntu-18.04 + dependsOn: [git_commit] + condition: | + and( + succeeded(), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')) + ) matrix: pylatest_conda_mkl: DISTRIB: 'conda' @@ -95,8 +100,13 @@ jobs: parameters: name: Linux vmImage: ubuntu-18.04 - dependsOn: [linting] - condition: and(ne(variables['Build.Reason'], 'Schedule'), succeeded('linting')) + dependsOn: [linting, git_commit] + condition: | + and( + succeeded(), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), + 
ne(variables['Build.Reason'], 'Schedule') + ) matrix: # Linux environment to test that scikit-learn can be built against # versions of numpy, scipy with ATLAS that comes with Ubuntu Bionic 18.04 @@ -139,8 +149,13 @@ jobs: parameters: name: Linux32 vmImage: ubuntu-18.04 - dependsOn: [linting] - condition: and(ne(variables['Build.Reason'], 'Schedule'), succeeded('linting')) + dependsOn: [linting, git_commit] + condition: | + and( + succeeded(), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), + ne(variables['Build.Reason'], 'Schedule') + ) matrix: py36_ubuntu_atlas_32bit: DISTRIB: 'ubuntu-32' @@ -157,8 +172,13 @@ jobs: parameters: name: macOS vmImage: macOS-10.14 - dependsOn: [linting] - condition: and(ne(variables['Build.Reason'], 'Schedule'), succeeded('linting')) + dependsOn: [linting, git_commit] + condition: | + and( + succeeded(), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), + ne(variables['Build.Reason'], 'Schedule') + ) matrix: pylatest_conda_forge_mkl: DISTRIB: 'conda' @@ -174,8 +194,13 @@ jobs: parameters: name: Windows vmImage: vs2017-win2016 - dependsOn: [linting] - condition: and(ne(variables['Build.Reason'], 'Schedule'), succeeded('linting')) + dependsOn: [linting, git_commit] + condition: | + and( + succeeded(), + not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), + ne(variables['Build.Reason'], 'Schedule') + ) matrix: py37_conda_mkl: PYTHON_VERSION: '3.7'
This PR adds the ability to use `[ci skip]` in a commit message to skip azure pipelines in pull requests.
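The YAML conditions reduce to substring checks on the commit message; a rough Python model of that logic (the `[ci skip]`/`[lint skip]` markers come from the diff, the function itself is illustrative). ```python # Illustrative model of the skip logic; the real checks are Azure Pipelines # YAML conditions on the git_commit job's `commit.message` output variable. def should_run_job(commit_message: str, job: str) -> bool: if "[ci skip]" in commit_message: return False # "[ci skip]" disables every job if job == "linting" and "[lint skip]" in commit_message: return False # "[lint skip]" disables only the linting job return True assert not should_run_job("DOC fix typo [ci skip]", "Linux") assert not should_run_job("MNT refactor [lint skip]", "linting") assert should_run_job("MNT refactor [lint skip]", "Linux") ```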
https://api.github.com/repos/scikit-learn/scikit-learn/pulls/19134
2021-01-08T02:37:01Z
2021-01-13T08:23:27Z
2021-01-13T08:23:27Z
2021-01-13T08:23:47Z
1,679
scikit-learn/scikit-learn
46,209
Add album art support in the mpd component
diff --git a/homeassistant/components/mpd/media_player.py b/homeassistant/components/mpd/media_player.py index 1e16675f7b62..1273b720dd8e 100644 --- a/homeassistant/components/mpd/media_player.py +++ b/homeassistant/components/mpd/media_player.py @@ -1,5 +1,6 @@ """Support to interact with a Music Player Daemon.""" from datetime import timedelta +import hashlib import logging import os @@ -107,6 +108,7 @@ def __init__(self, server, port, password, name): self._muted_volume = 0 self._media_position_updated_at = None self._media_position = None + self._commands = None # set up MPD client self._client = MPDClient() @@ -163,6 +165,7 @@ async def async_update(self): try: if not self._is_connected: await self._connect() + self._commands = list(await self._client.commands()) await self._fetch_status() except (mpd.ConnectionError, OSError, BrokenPipeError, ValueError) as error: @@ -252,6 +255,56 @@ def media_album_name(self): """Return the album of current playing media (Music track only).""" return self._currentsong.get("album") + @property + def media_image_hash(self): + """Hash value for media image.""" + file = self._currentsong.get("file") + if file: + return hashlib.sha256(file.encode("utf-8")).hexdigest()[:16] + + return None + + async def async_get_media_image(self): + """Fetch media image of current playing track.""" + file = self._currentsong.get("file") + if not file: + return None, None + + # not all MPD implementations and versions support the `albumart` and `fetchpicture` commands + can_albumart = "albumart" in self._commands + can_readpicture = "readpicture" in self._commands + + response = None + + # read artwork embedded into the media file + if can_readpicture: + try: + response = await self._client.readpicture(file) + except mpd.CommandError as error: + _LOGGER.warning( + "Retrieving artwork through `readpicture` command failed: %s", + error, + ) + + # read artwork contained in the media directory (cover.{jpg,png,tiff,bmp}) if none is embedded + if can_albumart and not response: + try: + response = await self._client.albumart(file) + except mpd.CommandError as error: + _LOGGER.warning( + "Retrieving artwork through `albumart` command failed: %s", + error, + ) + + if not response: + return None, None + + image = bytes(response.get("binary")) + mime = response.get( + "type", "image/png" + ) # readpicture has type, albumart does not + return (image, mime) + @property def volume_level(self): """Return the volume level."""
<!-- You are amazing! Thanks for contributing to our project! Please, DO NOT DELETE ANY TEXT from this template! (unless instructed). --> ## Breaking change <!-- If your PR contains a breaking change for existing users, it is important to tell them what breaks, how to make it work again and why we did this. This piece of text is published with the release notes, so it helps if you write it towards our users, not us. Note: Remove this section if this PR is NOT a breaking change. --> None ## Proposed change <!-- Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request. If it fixes a bug or resolves a feature request, be sure to link to that issue in the additional information section. --> Uses `readpicture` to retrieve embedded artwork and `albumart` to acquire cover art located in the media directory. As the mpd component supports multiple different implementations (think mopidy, PI MusicBox, etc.) we check for the availability of each command before using them. Tested against mpd 0.22.3, which includes support for both. ## Type of change <!-- What type of change does your PR introduce to Home Assistant? NOTE: Please, check only 1! box! If your PR requires multiple boxes to be checked, you'll most likely need to split it into multiple PRs. This makes things easier and faster to code review. --> - [ ] Dependency upgrade - [ ] Bugfix (non-breaking change which fixes an issue) - [ ] New integration (thank you!) - [x] New feature (which adds functionality to an existing integration) - [ ] Breaking change (fix/feature causing existing functionality to break) - [ ] Code quality improvements to existing code or addition of tests ## Example entry for `configuration.yaml`: <!-- Supplying a configuration snippet, makes it easier for a maintainer to test your PR. Furthermore, for new integrations, it gives an impression of how the configuration would look like. Note: Remove this section if this PR does not have an example entry. --> ```yaml # Example configuration.yaml media_players: - platform: mpd host: 127.0.0.1 ``` and a media library that holds either media with embedded artwork or a cover.{jpg,png,bmp,tiff} in the same directory. ## Additional information <!-- Details are important, and help maintainers processing your PR. Please be sure to fill out additional details, if applicable. --> - This PR fixes or closes issue: fixes # - This PR is related to issue: - Link to documentation pull request: ## Checklist <!-- Put an `x` in the boxes that apply. You can also fill these out after creating the PR. If you're unsure about any of them, don't hesitate to ask. We're here to help! This is simply a reminder of what we are going to look for before merging your code. --> - [x] The code change is tested and works locally. - [x] Local tests pass. **Your PR cannot be merged unless tests pass** - [x] There is no commented out code in this PR. - [x] I have followed the [development checklist][dev-checklist] - [x] The code has been formatted using Black (`black --fast homeassistant tests`) - [ ] Tests have been added to verify that the new code works. If user exposed functionality or configuration variables are added/changed: - [ ] Documentation added/updated for [www.home-assistant.io][docs-repository] If the code communicates with devices, web services, or third-party tools: - [x] The [manifest file][manifest-docs] has all fields filled out correctly. Updated and included derived files by running: `python3 -m script.hassfest`. 
- [x] New or updated dependencies have been added to `requirements_all.txt`. Updated by running `python3 -m script.gen_requirements_all`. - [x] Untested files have been added to `.coveragerc`. The integration reached or maintains the following [Integration Quality Scale][quality-scale]: <!-- The Integration Quality Scale scores an integration on the code quality and user experience. Each level of the quality scale consists of a list of requirements. We highly recommend getting your integration scored! --> - [ ] No score or internal - [ ] 🥈 Silver - [ ] 🥇 Gold - [ ] 🏆 Platinum <!-- This project is very active and we have a high turnover of pull requests. Unfortunately, the number of incoming pull requests is higher than what our reviewers can review and merge so there is a long backlog of pull requests waiting for review. You can help here! By reviewing another pull request, you will help raise the code quality of that pull request and the final review will be faster. This way the general pace of pull request reviews will go up and your wait time will go down. When picking a pull request to review, try to choose one that hasn't yet been reviewed. Thanks for helping out! --> To help with the load of incoming pull requests: - [ ] I have reviewed two other [open pull requests][prs] in this repository. [prs]: https://github.com/home-assistant/core/pulls?q=is%3Aopen+is%3Apr+-author%3A%40me+-draft%3Atrue+-label%3Awaiting-for-upstream+sort%3Acreated-desc+review%3Anone <!-- Thank you for contributing <3 Below, some useful links you could explore: --> [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html [manifest-docs]: https://developers.home-assistant.io/docs/en/creating_integration_manifest.html [quality-scale]: https://developers.home-assistant.io/docs/en/next/integration_quality_scale_index.html [docs-repository]: https://github.com/home-assistant/home-assistant.io
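Stripped of the Home Assistant plumbing, the fallback order described above is roughly the following; `client` is assumed to be an async MPD client exposing `readpicture` and `albumart`, as in the diff. ```python # Sketch of the artwork-fetch fallback adapted from the diff; error handling # omitted for brevity (the PR wraps each call in try/except mpd.CommandError). async def fetch_artwork(client, file, commands): response = None # Prefer artwork embedded in the media file (`readpicture`) ... if "readpicture" in commands: response = await client.readpicture(file) # ... then fall back to cover art in the media directory (`albumart`). if not response and "albumart" in commands: response = await client.albumart(file) if not response: return None, None image = bytes(response.get("binary")) # `readpicture` reports a MIME type; `albumart` does not. mime = response.get("type", "image/png") return image, mime ```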
https://api.github.com/repos/home-assistant/core/pulls/44527
2020-12-25T22:00:17Z
2020-12-26T21:18:28Z
2020-12-26T21:18:28Z
2020-12-28T00:22:34Z
715
home-assistant/core
39,232
Add pre/post execution hooks
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index a1316405b4e91..3feeb5b1e2851 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -81,6 +81,8 @@ ScheduleInterval = Union[str, timedelta, relativedelta] TaskStateChangeCallback = Callable[[Context], None] +TaskPreExecuteHook = Callable[[Context], None] +TaskPostExecuteHook = Callable[[Context, Any], None] T = TypeVar('T', bound=Callable) @@ -347,6 +349,14 @@ class derived from this one results in the creation of a task object, :param on_success_callback: much like the ``on_failure_callback`` except that it is executed when the task succeeds. :type on_success_callback: TaskStateChangeCallback + :param pre_execute: a function to be called immediately before task + execution, receiving a context dictionary; raising an exception will + prevent the task from being executed. + :type pre_execute: TaskPreExecuteHook + :param post_execute: a function to be called immediately after task + execution, receiving a context dictionary and task result; raising an + exception will prevent the task from succeeding. + :type post_execute: TaskPostExecuteHook :param trigger_rule: defines the rule by which dependencies are applied for the task to get triggered. Options are: ``{ all_success | all_failed | all_done | one_success | @@ -488,6 +498,8 @@ def __init__( on_failure_callback: Optional[TaskStateChangeCallback] = None, on_success_callback: Optional[TaskStateChangeCallback] = None, on_retry_callback: Optional[TaskStateChangeCallback] = None, + pre_execute: Optional[TaskPreExecuteHook] = None, + post_execute: Optional[TaskPostExecuteHook] = None, trigger_rule: str = TriggerRule.ALL_SUCCESS, resources: Optional[Dict] = None, run_as_user: Optional[str] = None, @@ -599,6 +611,8 @@ def __init__( self.on_failure_callback = on_failure_callback self.on_success_callback = on_success_callback self.on_retry_callback = on_retry_callback + self._pre_execute_hook = pre_execute + self._post_execute_hook = post_execute if isinstance(retry_delay, timedelta): self.retry_delay = retry_delay @@ -960,6 +974,8 @@ def global_operator_extra_link_dict(self) -> Dict[str, Any]: @prepare_lineage def pre_execute(self, context: Any): """This hook is triggered right before self.execute() is called.""" + if self._pre_execute_hook is not None: + self._pre_execute_hook(context) def execute(self, context: Any): """ @@ -977,6 +993,8 @@ def post_execute(self, context: Any, result: Any = None): It is passed the execution context and any results returned by the operator. """ + if self._post_execute_hook is not None: + self._post_execute_hook(context, result) def on_kill(self) -> None: """ diff --git a/airflow/operators/subdag.py b/airflow/operators/subdag.py index 62b9b776d6b32..da83bee03ad1f 100644 --- a/airflow/operators/subdag.py +++ b/airflow/operators/subdag.py @@ -156,6 +156,7 @@ def _reset_dag_run_and_task_instances(self, dag_run, execution_date): session.commit() def pre_execute(self, context): + super().pre_execute(context) execution_date = context['execution_date'] dag_run = self._get_dagrun(execution_date) @@ -184,6 +185,7 @@ def poke(self, context): return dag_run.state != State.RUNNING def post_execute(self, context, result=None): + super().post_execute(context) execution_date = context['execution_date'] dag_run = self._get_dagrun(execution_date=execution_date) self.log.info("Execution finished. 
State is %s", dag_run.state) diff --git a/tests/models/test_baseoperator.py b/tests/models/test_baseoperator.py index ce848db0ed4c7..bf3ea4a7f4481 100644 --- a/tests/models/test_baseoperator.py +++ b/tests/models/test_baseoperator.py @@ -541,6 +541,30 @@ def test_warnings_are_properly_propagated(self): # where the deprecated class was used assert warning.filename == __file__ + def test_pre_execute_hook(self): + called = False + + def hook(context): + nonlocal called + called = True + + op = DummyOperator(task_id="test_task", pre_execute=hook) + op_copy = op.prepare_for_execution() + op_copy.pre_execute({}) + assert called + + def test_post_execute_hook(self): + called = False + + def hook(context, result): + nonlocal called + called = True + + op = DummyOperator(task_id="test_task", post_execute=hook) + op_copy = op.prepare_for_execution() + op_copy.post_execute({}) + assert called + class CustomOp(DummyOperator): template_fields = ("field", "field2") diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index 826b10f6b3207..c3872165e09b1 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -872,6 +872,8 @@ def test_no_new_fields_added_to_base_operator(self): '_log': base_operator.log, '_outlets': [], '_upstream_task_ids': set(), + '_pre_execute_hook': None, + '_post_execute_hook': None, 'depends_on_past': False, 'do_xcom_push': True, 'doc': None,
This adds optional operator parameters `pre_execute` and `post_execute`, both of which run synchronously immediately before and after task execution. For example, this can be used to skip execution for a particular day of the week: ```python from croniter import match def execute_unless(cron_expr): def hook(context): if match(cron_expr, context["execution_date"]): raise AirflowSkipException() return hook DummyOperator(..., pre_execute=execute_unless("* * * * sat")) ``` Related: #17545
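The complementary `post_execute` hook receives the context plus the task's return value (`TaskPostExecuteHook` in the diff); an illustrative sketch in the same style as the example above: ```python def announce_result(context, result): # Runs immediately after task execution; raising an exception here # prevents the task from succeeding. print(f"Task finished with result: {result!r}") DummyOperator(..., post_execute=announce_result) ```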
https://api.github.com/repos/apache/airflow/pulls/17576
2021-08-12T12:44:39Z
2021-08-24T08:56:24Z
2021-08-24T08:56:24Z
2021-12-06T01:37:59Z
1,361
apache/airflow
14,758
Add .pre-commit-config.yaml
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3796a397..0f9cfabe 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,8 +6,38 @@ on: pull_request: branches: - main + jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Fetch base branch + run: git fetch origin ${{ github.base_ref }} + - uses: actions/setup-python@v4 + with: + python-version: "3.8" + architecture: x64 + - name: Get pip cache dir + id: pip-cache + run: | + echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT + - name: pip/pre-commit cache + uses: actions/cache@v3 + with: + path: | + ${{ steps.pip-cache.outputs.dir }} + ~/.cache/pre-commit + key: ${{ runner.os }}-pip-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }} + restore-keys: | + ${{ runner.os }}-pip-pre-commit + - name: pre-commit + run: | + pip install -U pre-commit + pre-commit install --install-hooks + pre-commit run --from-ref=origin/${{ github.base_ref }} --to-ref=HEAD whisper-test: + needs: pre-commit runs-on: ubuntu-latest strategy: matrix: @@ -23,7 +53,4 @@ jobs: - uses: actions/checkout@v3 - run: echo "$CONDA/envs/test/bin" >> $GITHUB_PATH - run: pip install .["dev"] - - run: black --check --diff -t py38 --include '(\.pyi?)$' . - - run: isort --check --diff . - - run: flake8 --ignore E203,W503,W504,E501,E731,E741 . - run: pytest --durations=0 -vv -k 'not test_transcribe or test_transcribe[tiny] or test_transcribe[tiny.en]' -m 'not requires_cuda' diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..3f5a74b6 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: check-json + - id: end-of-file-fixer + types: [file, python] + - id: trailing-whitespace + types: [file, python] + - id: mixed-line-ending + - id: check-added-large-files + args: [--maxkb=4096] + - repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + - repo: https://github.com/pycqa/isort + rev: 5.12.0 + hooks: + - id: isort + name: isort (python) + args: ["--profile", "black", "-l", "88", "--trailing-comma", "--multi-line", "3"] + - repo: https://github.com/pycqa/flake8.git + rev: 6.0.0 + hooks: + - id: flake8 + types: [python] + args: ["--max-line-length", "88", "--ignore", "E203,E501,W503,W504"] diff --git a/whisper/timing.py b/whisper/timing.py index 56e84d43..befcf464 100644 --- a/whisper/timing.py +++ b/whisper/timing.py @@ -202,7 +202,7 @@ def find_alignment( hook.remove() # heads * tokens * frames - weights = torch.stack([QKs[l][h] for l, h in model.alignment_heads.indices().T]) + weights = torch.stack([QKs[_l][_h] for _l, _h in model.alignment_heads.indices().T]) weights = weights[:, :, : num_frames // 2] weights = (weights * qk_scale).softmax(dim=-1) std, mean = torch.std_mean(weights, dim=-2, keepdim=True, unbiased=False) diff --git a/whisper/tokenizer.py b/whisper/tokenizer.py index 4030e15a..3b239918 100644 --- a/whisper/tokenizer.py +++ b/whisper/tokenizer.py @@ -226,7 +226,7 @@ def all_language_tokens(self) -> Tuple[int]: @cached_property def all_language_codes(self) -> Tuple[str]: - return tuple(self.decode([l]).strip("<|>") for l in self.all_language_tokens) + return tuple(self.decode([_l]).strip("<|>") for _l in self.all_language_tokens) @cached_property def sot_sequence_including_notimestamps(self) -> Tuple[int]:
I've created a pull request to run a pre-commit GitHub action explicitly for repetitive linter tasks. - Replace black, isort, flake8 with pre-commit ( .pre-commit-config.yaml ) - After checking coverage for all files using the command below, I've reduced the number of flake8's ignore from E203,W503,W504,E501,E731,E741 to four: E203,E501,W503,W504. ```bash pre-commit run -a ``` - I've tested in https://github.com/kimdwkimdw/whisper/pull/1
https://api.github.com/repos/openai/whisper/pulls/1528
2023-07-17T06:38:33Z
2023-09-18T23:15:34Z
2023-09-18T23:15:34Z
2024-04-13T15:26:21Z
1,217
openai/whisper
45,871
Allow user to select all domains by typing empty string at checklist
diff --git a/certbot/display/util.py b/certbot/display/util.py index 30d604313bf..0ad8fa200c9 100644 --- a/certbot/display/util.py +++ b/certbot/display/util.py @@ -179,9 +179,12 @@ def checklist(self, message, tags, default_status=True, **unused_kwargs): self._print_menu(message, tags) code, ans = self.input("Select the appropriate numbers separated " - "by commas and/or spaces") + "by commas and/or spaces, or leave input " + "blank to select all options shown") if code == OK: + if len(ans.strip()) == 0: + ans = " ".join(str(x) for x in range(1, len(tags)+1)) indices = separate_list_input(ans) selected_tags = self._scrub_checklist_input(indices, tags) if selected_tags: diff --git a/certbot/tests/display/util_test.py b/certbot/tests/display/util_test.py index b04235bd984..fa1cb89ba12 100644 --- a/certbot/tests/display/util_test.py +++ b/certbot/tests/display/util_test.py @@ -79,6 +79,13 @@ def test_checklist_valid(self, mock_input): self.assertEqual( (code, set(tag_list)), (display_util.OK, set(["tag1", "tag2"]))) + @mock.patch("certbot.display.util.FileDisplay.input") + def test_checklist_empty(self, mock_input): + mock_input.return_value = (display_util.OK, "") + code, tag_list = self.displayer.checklist("msg", TAGS) + self.assertEqual( + (code, set(tag_list)), (display_util.OK, set(["tag1", "tag2", "tag3"]))) + @mock.patch("certbot.display.util.FileDisplay.input") def test_checklist_miss_valid(self, mock_input): mock_input.side_effect = [
Addresses #3629, as a follow-up to #3665.
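The behavioural change is small: a blank answer now expands to every menu index before the normal parsing runs. A stripped-down sketch of that expansion, using the same expression as the diff: ```python # Minimal model of the select-all expansion added to checklist(). def expand_selection(ans, tags): """Blank input selects every option; otherwise pass the input through.""" if len(ans.strip()) == 0: return " ".join(str(x) for x in range(1, len(tags) + 1)) return ans assert expand_selection("", ["tag1", "tag2", "tag3"]) == "1 2 3" assert expand_selection(" ", ["tag1", "tag2", "tag3"]) == "1 2 3" assert expand_selection("1 3", ["tag1", "tag2", "tag3"]) == "1 3" ```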
https://api.github.com/repos/certbot/certbot/pulls/3693
2016-10-26T01:26:51Z
2016-10-26T22:43:40Z
2016-10-26T22:43:40Z
2016-12-08T01:14:37Z
438
certbot/certbot
2,681
3 useful scripts added to the repo
diff --git a/cartesian_product.py b/cartesian_product.py new file mode 100644 index 0000000000..7ed49aae29 --- /dev/null +++ b/cartesian_product.py @@ -0,0 +1,25 @@ +"""Cartesian Product of Two Lists.""" + +# Import +from itertools import product + + +# Cartesian Product of Two Lists +def cartesian_product(list1, list2): + """Cartesian Product of Two Lists.""" + for _i in list1: + for _j in list2: + print((_i, _j), end=' ') + + +# Main +if __name__ == '__main__': + list1 = input().split() + list2 = input().split() + + # Convert to ints + list1 = [int(i) for i in list1] + list2 = [int(i) for i in list2] + + cartesian_product(list1, list2) + diff --git a/rangoli.py b/rangoli.py new file mode 100644 index 0000000000..75191e0854 --- /dev/null +++ b/rangoli.py @@ -0,0 +1,45 @@ +"""Rangoli Model""" + + +# Prints a rangoli of size n +def print_rangoli(n): + """Prints a rangoli of size n""" + # Width of the rangoli + width = 4 * n - 3 + + # String to be printed + string = "" + + # Loop to print the rangoli + for i in range(1, n + 1): + for j in range(0, i): + string += chr(96 + n - j) + if len(string) < width: + string += "-" + + for k in range(i - 1, 0, -1): + string += chr(97 + n - k) + if len(string) < width: + string += "-" + + print(string.center(width, "-")) + string = "" + + for i in range(n - 1, 0, -1): + for j in range(0, i): + string += chr(96 + n - j) + if len(string) < width: + string += "-" + + for k in range(i - 1, 0, -1): + string += chr(97 + n - k) + if len(string) < width: + string += "-" + + print(string.center(width, "-")) + string = "" + + +if __name__ == '__main__': + n = int(input()) + print_rangoli(n) diff --git a/time_delta.py b/time_delta.py new file mode 100644 index 0000000000..9b153fd970 --- /dev/null +++ b/time_delta.py @@ -0,0 +1,67 @@ +"""Time Delta Solution """ + + +# ----------------------------------------------------------------------------- +# You are givent two timestams in the format: Day dd Mon yyyy hh:mm:ss +xxxx +# where +xxxx represents the timezone. + +# Input Format: +# The first line contains T, the number of test cases. +# Each test case contains two lines, representing the t1 and t2 timestamps. + +# Constraints: +# input contains only valid timestamps. +# year is < 3000. + +# Output Format: +# Print the absoulte diffrence (t2 - t1) in seconds. + +# Sample Input: +# 2 +# Sun 10 May 2015 13:54:36 -0700 +# Sun 10 May 2015 13:54:36 -0000 +# Sat 02 May 2015 19:54:36 +0530 +# Fri 01 May 2015 13:54:36 -0000 + +# Sample Output: +# 25200 +# 88200 +#------------------------------------------------------------------------------ + +# Imports +import math +import os +import random +import re +import sys +import datetime + +# Complete the time_delta function below. +def time_delta(t1, t2): + """ + Calculate the time delta between two timestamps in seconds. + """ + # Convert the timestamps to datetime objects + t1 = datetime.datetime.strptime(t1, '%a %d %b %Y %H:%M:%S %z') + t2 = datetime.datetime.strptime(t2, '%a %d %b %Y %H:%M:%S %z') + + return (t1 - t2) + + + +if __name__ == '__main__': + + t = int(input()) + + for itr_t in range(t): + t1 = input() + + t2 = input() + + delta = time_delta(t1, t2) + # print Delta with 1 Decimal Place + print(round(delta.total_seconds(), 1)) + + + +
3 useful scripts added to the repo: 1) create rangoli art, 2) compute the time delta between 2 events based on UTC, 3) calculate the Cartesian product of two lists.
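One small note on cartesian_product.py: it imports `itertools.product` but then uses nested loops. An idiomatic standard-library version producing the same output would be (a suggested alternative, not what the PR ships): ```python # Suggested alternative: itertools.product yields the same pairs the PR's # nested loops print, without the manual iteration. from itertools import product list1 = [1, 2] list2 = [3, 4] print(*product(list1, list2)) # (1, 3) (1, 4) (2, 3) (2, 4) ```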
https://api.github.com/repos/geekcomputers/Python/pulls/1603
2022-07-11T11:20:24Z
2022-07-16T09:04:25Z
2022-07-16T09:04:25Z
2022-07-19T09:41:23Z
1,106
geekcomputers/Python
31,340
[doc] fix requirements typo for issue #3125
diff --git a/examples/language/gpt/experiments/auto_parallel/requirements.txt b/examples/language/gpt/experiments/auto_parallel/requirements.txt index ff046ad1cae9..1b2561f098d5 100644 --- a/examples/language/gpt/experiments/auto_parallel/requirements.txt +++ b/examples/language/gpt/experiments/auto_parallel/requirements.txt @@ -1,4 +1,4 @@ colossalai >= 0.1.12 torch >= 1.8.1 -transformers >= 4.231 +transformers >= 4.23.1 PuLP >= 2.7.0
## 📌 Checklist before creating the PR - [x] I have created an issue for this PR for traceability - [x] The title follows the standard format: `[doc/gemini/tensor/...]: A concise description` - [x] I have added relevant tags if possible for us to better distinguish different PRs ## 🚨 Issue number > Link this PR to your issue with words like fixed to automatically close the linked issue upon merge > > fixed #3125 ## 📝 What does this PR do? fix the typo in requirements.txt: `transformers >= 4.231` to `transformers >= 4.23.1` ## 💥 Checklist before requesting a review - [x] I have linked my PR to an issue ([instruction](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue)) - [x] My issue clearly describes the problem/feature/proposal, with diagrams/charts/table/code if possible - [x] I have performed a self-review of my code - [ ] I have added thorough tests. - [ ] I have added docstrings for all the functions/methods I implemented ## ⭐️ Do you enjoy contributing to Colossal-AI? - [x] 🌝 Yes, I do. - [ ] 🌚 No, I don't. Tell us more if you don't enjoy contributing to Colossal-AI.
https://api.github.com/repos/hpcaitech/ColossalAI/pulls/3209
2023-03-22T15:51:27Z
2023-03-23T02:22:08Z
2023-03-23T02:22:08Z
2023-03-23T02:22:09Z
135
hpcaitech/ColossalAI
11,582
map ui: Fixed ETA text alignment issue in CJK
diff --git a/selfdrive/ui/qt/maps/map_eta.cc b/selfdrive/ui/qt/maps/map_eta.cc index 4262258cfbd38b..13e11e5597aed0 100644 --- a/selfdrive/ui/qt/maps/map_eta.cc +++ b/selfdrive/ui/qt/maps/map_eta.cc @@ -47,7 +47,7 @@ void MapETA::updateETA(float s, float s_typical, float d) { auto distance = std::array{QString::number(num, 'f', num < 100 ? 1 : 0), uiState()->scene.is_metric ? tr("km") : tr("mi")}; - eta_doc.setHtml(QString(R"(<body><table><tr><td><b>%1</b></td><td>%2</td> + eta_doc.setHtml(QString(R"(<body><table><tr style="vertical-align:bottom;"><td><b>%1</b></td><td>%2</td> <td style="padding-left:40px;color:%3;"><b>%4</b></td><td style="padding-right:40px;color:%3;">%5</td> <td><b>%6</b></td><td>%7</td></tr></body>)") .arg(eta[0], eta[1], color, remaining[0], remaining[1], distance[0], distance[1]));
ETA section text seems out of alignment when text is translated into CJK. English: ![old_eng](https://github.com/commaai/openpilot/assets/16603033/b7ae9df4-5d70-473a-82ac-b05ef5a13a20) CHT: ![old_cht](https://github.com/commaai/openpilot/assets/16603033/94cdb8ea-babd-4ae8-8703-cf65876600d0) Japanese: ![old_jp](https://github.com/commaai/openpilot/assets/16603033/726d96ba-fe59-45c7-8433-0a8983bc8f21) Korean: ![old_kr](https://github.com/commaai/openpilot/assets/16603033/693f4e82-8c15-4aaf-93f0-087dfe4479e7) -------- Adding vertical-align: bottom fixed the issue a little (not perfect, but good enough for OCD): English: ![new_eng](https://github.com/commaai/openpilot/assets/16603033/ea7cdde6-7f8c-4e40-b849-bef6a65d9b14) CHT: ![new_cht](https://github.com/commaai/openpilot/assets/16603033/6f30d8c7-d5ba-45a2-aa86-f69dc8390ccb) Japanese: ![new_jp](https://github.com/commaai/openpilot/assets/16603033/60b0dd47-012e-4da6-a5bd-131ac2ce81f8) Korean: ![new_kr](https://github.com/commaai/openpilot/assets/16603033/37385265-cfae-409c-9206-5becc28db811)
https://api.github.com/repos/commaai/openpilot/pulls/29311
2023-08-10T04:11:48Z
2023-08-10T09:15:44Z
2023-08-10T09:15:44Z
2023-12-11T08:27:11Z
303
commaai/openpilot
8,872
⬆️ Upgrade Starlette version, support new `lifespan` with state
diff --git a/docs/en/docs/advanced/events.md b/docs/en/docs/advanced/events.md index 556bbde71d60a..6b7de41309bbe 100644 --- a/docs/en/docs/advanced/events.md +++ b/docs/en/docs/advanced/events.md @@ -138,9 +138,6 @@ Here, the `shutdown` event handler function will write a text line `"Application So, we declare the event handler function with standard `def` instead of `async def`. -!!! info - You can read more about these event handlers in <a href="https://www.starlette.io/events/" class="external-link" target="_blank">Starlette's Events' docs</a>. - ### `startup` and `shutdown` together There's a high chance that the logic for your *startup* and *shutdown* is connected, you might want to start something and then finish it, acquire a resource and then release it, etc. @@ -155,6 +152,11 @@ Just a technical detail for the curious nerds. 🤓 Underneath, in the ASGI technical specification, this is part of the <a href="https://asgi.readthedocs.io/en/latest/specs/lifespan.html" class="external-link" target="_blank">Lifespan Protocol</a>, and it defines events called `startup` and `shutdown`. +!!! info + You can read more about the Starlette `lifespan` handlers in <a href="https://www.starlette.io/lifespan/" class="external-link" target="_blank">Starlette's Lifespan' docs</a>. + + Including how to handle lifespan state that can be used in other areas of your code. + ## Sub Applications 🚨 Have in mind that these lifespan events (startup and shutdown) will only be executed for the main application, not for [Sub Applications - Mounts](./sub-applications.md){.internal-link target=_blank}. diff --git a/fastapi/applications.py b/fastapi/applications.py index e864c4907c3ad..3305259365295 100644 --- a/fastapi/applications.py +++ b/fastapi/applications.py @@ -1,7 +1,6 @@ from enum import Enum from typing import ( Any, - AsyncContextManager, Awaitable, Callable, Coroutine, @@ -42,7 +41,7 @@ from starlette.requests import Request from starlette.responses import HTMLResponse, JSONResponse, Response from starlette.routing import BaseRoute -from starlette.types import ASGIApp, Receive, Scope, Send +from starlette.types import ASGIApp, Lifespan, Receive, Scope, Send class FastAPI(Starlette): @@ -72,7 +71,7 @@ def __init__( ] = None, on_startup: Optional[Sequence[Callable[[], Any]]] = None, on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, - lifespan: Optional[Callable[["FastAPI"], AsyncContextManager[Any]]] = None, + lifespan: Optional[Lifespan] = None, terms_of_service: Optional[str] = None, contact: Optional[Dict[str, Union[str, Any]]] = None, license_info: Optional[Dict[str, Union[str, Any]]] = None, diff --git a/fastapi/routing.py b/fastapi/routing.py index 5a618e4dedd21..7e48c8c3ecde3 100644 --- a/fastapi/routing.py +++ b/fastapi/routing.py @@ -7,7 +7,6 @@ from enum import Enum, IntEnum from typing import ( Any, - AsyncContextManager, Callable, Coroutine, Dict, @@ -58,7 +57,7 @@ websocket_session, ) from starlette.status import WS_1008_POLICY_VIOLATION -from starlette.types import ASGIApp, Scope +from starlette.types import ASGIApp, Lifespan, Scope from starlette.websockets import WebSocket @@ -493,7 +492,7 @@ def __init__( route_class: Type[APIRoute] = APIRoute, on_startup: Optional[Sequence[Callable[[], Any]]] = None, on_shutdown: Optional[Sequence[Callable[[], Any]]] = None, - lifespan: Optional[Callable[[Any], AsyncContextManager[Any]]] = None, + lifespan: Optional[Lifespan] = None, deprecated: Optional[bool] = None, include_in_schema: bool = True, generate_unique_id_function: 
Callable[[APIRoute], str] = Default( diff --git a/pyproject.toml b/pyproject.toml index 3e651ae36497f..5b9d002767215 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ classifiers = [ "Topic :: Internet :: WWW/HTTP", ] dependencies = [ - "starlette>=0.25.0,<0.26.0", + "starlette>=0.26.0,<0.27.0", "pydantic >=1.6.2,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<2.0.0", ] dynamic = ["version"]
⬆️ Upgrade Starlette version, support new `lifespan` with state. I'm not updating the docs much with information about the new state because I first want to improve how lifespan/global dependencies would work; the two would cover similar use cases, and dependencies would be the most natural fit in FastAPI.
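For reference, the new-style lifespan with state looks roughly like this; the yield-a-dict part follows Starlette 0.26's lifespan docs rather than anything in this diff, so treat it as a sketch. ```python # Sketch of Starlette 0.26's lifespan-with-state, used through FastAPI. from contextlib import asynccontextmanager from fastapi import FastAPI, Request @asynccontextmanager async def lifespan(app: FastAPI): # Startup work goes before the yield; yielding a dict exposes it # as lifespan state (new in Starlette 0.26). yield {"answer": 42} # Shutdown work goes after the yield. app = FastAPI(lifespan=lifespan) @app.get("/") async def read_root(request: Request): # Lifespan state is surfaced on the request in Starlette 0.26+. return {"answer": request.state.answer} ```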
https://api.github.com/repos/tiangolo/fastapi/pulls/9239
2023-03-10T18:16:08Z
2023-03-10T18:24:04Z
2023-03-10T18:24:04Z
2023-03-10T18:24:06Z
1,191
tiangolo/fastapi
22,902
User agents are "normalized" to lower case, so make it "openbsd ftp" …
diff --git a/bin/srv.py b/bin/srv.py index fe5cb258..b44f9344 100644 --- a/bin/srv.py +++ b/bin/srv.py @@ -49,7 +49,7 @@ def is_html_needed(user_agent): """ Basing on `user_agent`, return whether it needs HTML or ANSI """ - plaintext_clients = ['curl', 'wget', 'fetch', 'httpie', 'lwp-request', 'OpenBSD ftp', 'python-requests'] + plaintext_clients = ['curl', 'wget', 'fetch', 'httpie', 'lwp-request', 'openbsd ftp', 'python-requests'] return all([x not in user_agent for x in plaintext_clients]) def is_result_a_script(query):
…instead.
https://api.github.com/repos/chubin/cheat.sh/pulls/103
2018-08-17T16:16:20Z
2018-08-17T20:12:18Z
2018-08-17T20:12:18Z
2018-08-17T20:12:18Z
170
chubin/cheat.sh
15,110
Explicit `git clone` master
diff --git a/utils/aws/userdata.sh b/utils/aws/userdata.sh index 890606b76a0..5846fedb16f 100644 --- a/utils/aws/userdata.sh +++ b/utils/aws/userdata.sh @@ -7,7 +7,7 @@ cd home/ubuntu if [ ! -d yolov5 ]; then echo "Running first-time script." # install dependencies, download COCO, pull Docker - git clone https://github.com/ultralytics/yolov5 && sudo chmod -R 777 yolov5 + git clone https://github.com/ultralytics/yolov5 -b master && sudo chmod -R 777 yolov5 cd yolov5 bash data/scripts/get_coco.sh && echo "Data done." & sudo docker pull ultralytics/yolov5:latest && echo "Docker done." &
## 🛠️ PR Summary <sub>Made with ❤️ by [Ultralytics Actions](https://github.com/ultralytics/actions)<sub> ### 🌟 Summary Optimization of initial setup process for YOLOv5 on AWS. ### 📊 Key Changes - Modified the cloning command for the YOLOv5 repository in the AWS user data script. ### 🎯 Purpose & Impact - Ensures that the latest master branch is specifically cloned during initial AWS environment setup. - Prevents potential issues from default branch changes or from cloning other branches unintentionally. - Provides users with a more reliable and predictable setup experience when deploying YOLOv5 on AWS instances. 🚀
https://api.github.com/repos/ultralytics/yolov5/pulls/3311
2021-05-24T10:42:07Z
2021-05-24T10:42:36Z
2021-05-24T10:42:36Z
2024-01-19T18:12:16Z
194
ultralytics/yolov5
24,867
Fix OpenWeatherMap dewpoint conversion
diff --git a/homeassistant/components/openweathermap/weather_update_coordinator.py b/homeassistant/components/openweathermap/weather_update_coordinator.py index 73edc9fae7567b..385e7a9dca42cf 100644 --- a/homeassistant/components/openweathermap/weather_update_coordinator.py +++ b/homeassistant/components/openweathermap/weather_update_coordinator.py @@ -18,10 +18,10 @@ ATTR_FORECAST_WIND_BEARING, ATTR_FORECAST_WIND_SPEED, ) -from homeassistant.const import TEMP_CELSIUS from homeassistant.helpers import sun from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import dt +from homeassistant.util.temperature import kelvin_to_celsius from .const import ( ATTR_API_CLOUDS, @@ -180,10 +180,10 @@ def _convert_forecast(self, entry): return forecast - def _fmt_dewpoint(self, dewpoint): + @staticmethod + def _fmt_dewpoint(dewpoint): if dewpoint is not None: - dewpoint = dewpoint - 273.15 - return round(self.hass.config.units.temperature(dewpoint, TEMP_CELSIUS), 1) + return round(kelvin_to_celsius(dewpoint), 1) return None @staticmethod
<!-- You are amazing! Thanks for contributing to our project! Please, DO NOT DELETE ANY TEXT from this template! (unless instructed). --> ## Breaking change <!-- If your PR contains a breaking change for existing users, it is important to tell them what breaks, how to make it work again and why we did this. This piece of text is published with the release notes, so it helps if you write it towards our users, not us. Note: Remove this section if this PR is NOT a breaking change. --> ## Proposed change <!-- Describe the big picture of your changes here to communicate to the maintainers why we should accept this pull request. If it fixes a bug or resolves a feature request, be sure to link to that issue in the additional information section. --> The OpenWeatherMap integration was double converting the dew point when Home Assistant is configured in imperial units. This is my first attempt at a PR to Home Assistant, apologies if I made any egregious mistakes. ## Type of change <!-- What type of change does your PR introduce to Home Assistant? NOTE: Please, check only 1! box! If your PR requires multiple boxes to be checked, you'll most likely need to split it into multiple PRs. This makes things easier and faster to code review. --> - [ ] Dependency upgrade - [x] Bugfix (non-breaking change which fixes an issue) - [ ] New integration (thank you!) - [ ] New feature (which adds functionality to an existing integration) - [ ] Breaking change (fix/feature causing existing functionality to break) - [ ] Code quality improvements to existing code or addition of tests ## Additional information <!-- Details are important, and help maintainers processing your PR. Please be sure to fill out additional details, if applicable. --> - This PR fixes or closes issue: fixes #56033 - This PR is related to issue: - Link to documentation pull request: ## Checklist <!-- Put an `x` in the boxes that apply. You can also fill these out after creating the PR. If you're unsure about any of them, don't hesitate to ask. We're here to help! This is simply a reminder of what we are going to look for before merging your code. --> - [x] The code change is tested and works locally. - [x] Local tests pass. **Your PR cannot be merged unless tests pass** - [x] There is no commented out code in this PR. - [x] I have followed the [development checklist][dev-checklist] - [x] The code has been formatted using Black (`black --fast homeassistant tests`) - [ ] Tests have been added to verify that the new code works. If user exposed functionality or configuration variables are added/changed: - [ ] Documentation added/updated for [www.home-assistant.io][docs-repository] If the code communicates with devices, web services, or third-party tools: - [ ] The [manifest file][manifest-docs] has all fields filled out correctly. Updated and included derived files by running: `python3 -m script.hassfest`. - [ ] New or updated dependencies have been added to `requirements_all.txt`. Updated by running `python3 -m script.gen_requirements_all`. - [ ] For the updated dependencies - a link to the changelog, or at minimum a diff between library versions is added to the PR description. - [ ] Untested files have been added to `.coveragerc`. The integration reached or maintains the following [Integration Quality Scale][quality-scale]: <!-- The Integration Quality Scale scores an integration on the code quality and user experience. Each level of the quality scale consists of a list of requirements. We highly recommend getting your integration scored! 
--> - [ ] No score or internal - [ ] 🥈 Silver - [ ] 🥇 Gold - [ ] 🏆 Platinum <!-- This project is very active and we have a high turnover of pull requests. Unfortunately, the number of incoming pull requests is higher than what our reviewers can review and merge so there is a long backlog of pull requests waiting for review. You can help here! By reviewing another pull request, you will help raise the code quality of that pull request and the final review will be faster. This way the general pace of pull request reviews will go up and your wait time will go down. When picking a pull request to review, try to choose one that hasn't yet been reviewed. Thanks for helping out! --> To help with the load of incoming pull requests: - [ ] I have reviewed two other [open pull requests][prs] in this repository. [prs]: https://github.com/home-assistant/core/pulls?q=is%3Aopen+is%3Apr+-author%3A%40me+-draft%3Atrue+-label%3Awaiting-for-upstream+sort%3Acreated-desc+review%3Anone <!-- Thank you for contributing <3 Below, some useful links you could explore: --> [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html [manifest-docs]: https://developers.home-assistant.io/docs/en/creating_integration_manifest.html [quality-scale]: https://developers.home-assistant.io/docs/en/next/integration_quality_scale_index.html [docs-repository]: https://github.com/home-assistant/home-assistant.io
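Numerically, the bug was one conversion too many: OWM reports the dew point in Kelvin, Celsius is just K − 273.15, and the old code then ran the result through the unit-system conversion a second time on imperial installs. A tiny sketch of the corrected path: ```python # Dew point arrives from OpenWeatherMap in Kelvin; one conversion suffices. def kelvin_to_celsius(kelvin: float) -> float: return kelvin - 273.15 def fmt_dewpoint(dewpoint): # Mirrors the fixed _fmt_dewpoint: convert once, round to one decimal. if dewpoint is not None: return round(kelvin_to_celsius(dewpoint), 1) return None assert fmt_dewpoint(283.15) == 10.0 # 283.15 K == 10.0 degC assert fmt_dewpoint(None) is None ```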
https://api.github.com/repos/home-assistant/core/pulls/56303
2021-09-16T19:35:13Z
2021-09-23T17:14:16Z
2021-09-23T17:14:15Z
2021-09-24T18:01:52Z
295
home-assistant/core
39,150
[AIRFLOW-2227] Add delete method to Variable class
diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py index bca37bfa6f89b..9c264386cda97 100644 --- a/airflow/bin/cli.py +++ b/airflow/bin/cli.py @@ -342,8 +342,7 @@ def variables(args): except ValueError as e: print(e) if args.delete: - with db.create_session() as session: - session.query(Variable).filter_by(key=args.delete).delete() + Variable.delete(args.delete) if args.set: Variable.set(args.set[0], args.set[1]) # Work around 'import' as a reserved keyword diff --git a/airflow/models/__init__.py b/airflow/models/__init__.py index f4524ebb44620..3e749c5057762 100755 --- a/airflow/models/__init__.py +++ b/airflow/models/__init__.py @@ -4311,10 +4311,15 @@ def set(cls, key, value, serialize_json=False, session=None): else: stored_value = str(value) - session.query(cls).filter(cls.key == key).delete() + Variable.delete(key) session.add(Variable(key=key, val=stored_value)) session.flush() + @classmethod + @provide_session + def delete(cls, key, session=None): + session.query(cls).filter(cls.key == key).delete() + def rotate_fernet_key(self): fernet = get_fernet() if self._val and self.is_encrypted: diff --git a/tests/core.py b/tests/core.py index c42fe4b906eff..6860ecd93be5c 100644 --- a/tests/core.py +++ b/tests/core.py @@ -745,6 +745,24 @@ def test_variable_setdefault_existing_json(self): self.assertEqual(value, val) self.assertEqual(value, Variable.get(key, deserialize_json=True)) + def test_variable_delete(self): + key = "tested_var_delete" + value = "to be deleted" + + # No-op if the variable doesn't exist + Variable.delete(key) + with self.assertRaises(KeyError): + Variable.get(key) + + # Set the variable + Variable.set(key, value) + self.assertEqual(value, Variable.get(key)) + + # Delete the variable + Variable.delete(key) + with self.assertRaises(KeyError): + Variable.get(key) + def test_parameterized_config_gen(self): cfg = configuration.parameterized_config(configuration.DEFAULT_CONFIG)
Make sure you have checked _all_ steps below. ### Jira - [x] My PR addresses the following [Airflow Jira](https://issues.apache.org/jira/browse/AIRFLOW/) issues and references them in the PR title. - https://issues.apache.org/jira/browse/AIRFLOW-2227 ### Description - [x] Here are some details about my PR, including screenshots of any UI changes: Add a delete method to the Variable class that is a no-op if the Variable does not already exist. Additionally, refactor usages of `session.query(Variable).filter(key).delete()` to use the new `Variable.delete(key)` instead. ### Tests - [x] My PR adds the following unit tests __OR__ does not need testing for this extremely good reason: - `tests.core:CoreTest.test_variable_delete` ### Commits - [x] My commits all reference Jira issues in their subject lines, and I have squashed multiple commits if they address the same issue. In addition, my commits follow the guidelines from "[How to write a good git commit message](http://chris.beams.io/posts/git-commit/)": 1. Subject is separated from body by a blank line 1. Subject is limited to 50 characters (not including Jira issue reference) 1. Subject does not end with a period 1. Subject uses the imperative mood ("add", not "adding") 1. Body wraps at 72 characters 1. Body explains "what" and "why", not "how" ### Documentation - [x] In case of new functionality, my PR adds documentation that describes how to use it. - When adding new operators/hooks/sensors, the autoclass documentation generation needs to be added. - All the public functions and the classes in the PR contain docstrings that explain what they do - If you implement backwards incompatible changes, please leave a note in the [Updating.md](https://github.com/apache/airflow/blob/master/UPDATING.md) so we can assign it to an appropriate release ### Code Quality - [x] Passes `flake8`
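Usage then mirrors `Variable.set`/`Variable.get`, with deletion of a missing key being a no-op, as the new test asserts; a sketch assuming an initialized Airflow metadata database: ```python # Sketch of the new API; assumes an initialized Airflow metadata database. from airflow.models import Variable Variable.delete("tested_var_delete") # no-op if the key is absent Variable.set("tested_var_delete", "value") assert Variable.get("tested_var_delete") == "value" Variable.delete("tested_var_delete") # Variable.get would now raise KeyError ```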
https://api.github.com/repos/apache/airflow/pulls/4963
2019-03-23T20:06:07Z
2019-03-26T13:21:42Z
2019-03-26T13:21:42Z
2019-03-26T13:21:42Z
572
apache/airflow
14,895
Fix a couple small shellcheck errors
diff --git a/thefuck/shells/bash.py b/thefuck/shells/bash.py index d8a53180a..df972380c 100644 --- a/thefuck/shells/bash.py +++ b/thefuck/shells/bash.py @@ -20,8 +20,8 @@ def app_alias(self, alias_name): export TF_HISTORY=$(fc -ln -10); export PYTHONIOENCODING=utf-8; TF_CMD=$( - thefuck {argument_placeholder} $@ - ) && eval $TF_CMD; + thefuck {argument_placeholder} "$@" + ) && eval "$TF_CMD"; unset TF_HISTORY; export PYTHONIOENCODING=$TF_PYTHONIOENCODING; {alter_history} @@ -79,7 +79,7 @@ def how_to_configure(self): config = 'bash config' return self._create_shell_configuration( - content=u'eval $(thefuck --alias)', + content=u'eval "$(thefuck --alias)"', path=config, reload=u'source {}'.format(config)) diff --git a/thefuck/shells/generic.py b/thefuck/shells/generic.py index ddd868aed..d7a936cb5 100644 --- a/thefuck/shells/generic.py +++ b/thefuck/shells/generic.py @@ -34,8 +34,8 @@ def to_shell(self, command_script): return command_script def app_alias(self, alias_name): - return "alias {0}='eval $(TF_ALIAS={0} PYTHONIOENCODING=utf-8 " \ - "thefuck $(fc -ln -1))'".format(alias_name) + return """alias {0}='eval "$(TF_ALIAS={0} PYTHONIOENCODING=utf-8 """ \ + """thefuck "$(fc -ln -1)")"'""".format(alias_name) def instant_mode_alias(self, alias_name): warn("Instant mode not supported by your shell")
These are unlikely to cause practical issues, but after bootstrapping with `fuck; fuck`, shellcheck gave me an error in my bashrc.
https://api.github.com/repos/nvbn/thefuck/pulls/915
2019-05-19T02:37:41Z
2019-05-22T18:22:10Z
2019-05-22T18:22:10Z
2019-05-23T16:37:53Z
443
nvbn/thefuck
30,637
fix language detection in `TrackedTextarea`
diff --git a/website/src/components/Survey/TrackedTextarea.tsx b/website/src/components/Survey/TrackedTextarea.tsx index 2096f66451..9a35725b88 100644 --- a/website/src/components/Survey/TrackedTextarea.tsx +++ b/website/src/components/Survey/TrackedTextarea.tsx @@ -3,8 +3,8 @@ import { Progress, Stack, Textarea, TextareaProps, useColorModeValue } from "@ch import lande from "lande"; import { useTranslation } from "next-i18next"; import React from "react"; -import { useCookies } from "react-cookie"; import TextareaAutosize, { TextareaAutosizeProps } from "react-textarea-autosize"; +import { useCurrentLocale } from "src/hooks/locale/useCurrentLocale"; import { LanguageAbbreviations } from "src/lib/iso6393"; import { getLocaleDisplayName } from "src/lib/languages"; import { colors } from "src/styles/Theme/colors"; @@ -24,8 +24,7 @@ export const TrackedTextarea = (props: TrackedTextboxProps) => { const { t } = useTranslation("tasks"); const wordLimitForLangDetection = 4; const backgroundColor = useColorModeValue("gray.100", "gray.900"); - const [cookies] = useCookies(["NEXT_LOCALE"]); - const currentLanguage = cookies["NEXT_LOCALE"]; + const currentLanguage = useCurrentLocale(); const wordCount = (props.text.match(/\w+/g) || []).length; const detectLang = (text: string) => {
https://api.github.com/repos/LAION-AI/Open-Assistant/pulls/1796
2023-02-22T10:55:14Z
2023-02-22T11:16:34Z
2023-02-22T11:16:34Z
2023-02-22T11:16:35Z
361
LAION-AI/Open-Assistant
37,152
remove rest of pylint
diff --git a/.github/workflows/selfdrive_tests.yaml b/.github/workflows/selfdrive_tests.yaml index 361329e93f4360..5d4f4c3e8f33bf 100644 --- a/.github/workflows/selfdrive_tests.yaml +++ b/.github/workflows/selfdrive_tests.yaml @@ -62,7 +62,6 @@ jobs: run: | cd $GITHUB_WORKSPACE cp .pre-commit-config.yaml $STRIPPED_DIR - cp .pylintrc $STRIPPED_DIR cp mypy.ini $STRIPPED_DIR cp pyproject.toml $STRIPPED_DIR cp poetry.lock $STRIPPED_DIR diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index db61b8703d533a..00000000000000 --- a/.pylintrc +++ /dev/null @@ -1,469 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-whitelist=scipy,cereal.messaging.messaging_pyx,PyQt5,av,pycurl - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=4 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=C,R,W0613,W0511,W0212,W0201,W0106,W0603,W0621,W0703,W1201,W1203,E1136,W1514 - - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member,C0301 - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). 
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=optparse.Values,sys.exit - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members=capnp.* cereal.* pygame.* zmq.* setproctitle.* smbus2.* usb1.* serial.* cv2.* ft4222.* carla.* - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. 
-ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules=flask setproctitle usb1 flask.ext.socketio smbus2 usb1.* - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )?<?https?://\S+>?$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=160 - -# Maximum number of lines in a module -max-module-lines=1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[BASIC] - -# Naming style matching correct argument names -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style -#argument-rgx= - -# Naming style matching correct attribute names -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. 
Overrides class- -# attribute-naming-style -#class-attribute-rgx= - -# Naming style matching correct class names -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming-style -#class-rgx= - -# Naming style matching correct constant names -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Naming style matching correct inline iteration names -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style -#inlinevar-rgx= - -# Naming style matching correct method names -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style -#method-rgx= - -# Naming style matching correct module names -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style -#variable-rgx= - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of statements in function / method body -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. 
-valid-metaclass-classmethod-first-arg=mcs - - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub, - TERMIOS, - Bastion, - rexec - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -[STRING] - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=yes - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=
https://api.github.com/repos/commaai/openpilot/pulls/29302
2023-08-09T23:35:59Z
2023-08-10T02:03:36Z
2023-08-10T02:03:36Z
2023-08-10T02:03:37Z
3,879
commaai/openpilot
9,851
[AIRFLOW-3742] Respect the `fallback` arg in airflow.configuration.get
diff --git a/airflow/configuration.py b/airflow/configuration.py index 70203975446b2..1de5c3aeb1354 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -36,7 +36,7 @@ import sys import warnings -from backports.configparser import ConfigParser +from backports.configparser import ConfigParser, _UNSET, NoOptionError from zope.deprecation import deprecated from airflow.exceptions import AirflowConfigException @@ -247,7 +247,7 @@ def get(self, section, key, **kwargs): return option # ...then the default config - if self.airflow_defaults.has_option(section, key): + if self.airflow_defaults.has_option(section, key) or 'fallback' in kwargs: return expand_env_var( self.airflow_defaults.get(section, key, **kwargs)) @@ -291,9 +291,10 @@ def has_option(self, section, option): try: # Using self.get() to avoid reimplementing the priority order # of config variables (env, config, cmd, defaults) - self.get(section, option) + # UNSET to avoid logging a warning about missing values + self.get(section, option, fallback=_UNSET) return True - except AirflowConfigException: + except NoOptionError: return False def remove_option(self, section, option, remove_default=True): diff --git a/airflow/settings.py b/airflow/settings.py index 4a5d99d17b279..035b0ff0e80fb 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -176,13 +176,10 @@ def configure_orm(disable_connection_pool=False): engine_args['pool_size'] = pool_size engine_args['pool_recycle'] = pool_recycle - try: - # Allow the user to specify an encoding for their DB otherwise default - # to utf-8 so jobs & users with non-latin1 characters can still use - # us. - engine_args['encoding'] = conf.get('core', 'SQL_ENGINE_ENCODING') - except conf.AirflowConfigException: - engine_args['encoding'] = 'utf-8' + # Allow the user to specify an encoding for their DB otherwise default + # to utf-8 so jobs & users with non-latin1 characters can still use + # us. 
+ engine_args['encoding'] = conf.get('core', 'SQL_ENGINE_ENCODING', fallback='utf-8') # For Python2 we get back a newstr and need a str engine_args['encoding'] = engine_args['encoding'].__str__() @@ -226,10 +223,7 @@ def configure_adapters(): def validate_session(): - try: - worker_precheck = conf.getboolean('core', 'worker_precheck') - except conf.AirflowConfigException: - worker_precheck = False + worker_precheck = conf.getboolean('core', 'worker_precheck', fallback=False) if not worker_precheck: return True else: diff --git a/tests/cli/test_worker_initialisation.py b/tests/cli/test_worker_initialisation.py index 477221693abe1..03fb9487fec58 100644 --- a/tests/cli/test_worker_initialisation.py +++ b/tests/cli/test_worker_initialisation.py @@ -59,8 +59,10 @@ def test_worker_precheck_exception(self, mock_getboolean): Test to check the behaviour of validate_session method when worker_precheck is absent in airflow configuration """ - mock_getboolean.side_effect = airflow.configuration.AirflowConfigException - self.assertEqual(airflow.settings.validate_session(), True) + mock_getboolean.return_value = False + + self.assertTrue(airflow.settings.validate_session()) + mock_getboolean.assert_called_once_with('core', 'worker_precheck', fallback=False) @mock.patch('sqlalchemy.orm.session.Session.execute') @mock.patch('airflow.configuration.getboolean') diff --git a/tests/test_configuration.py b/tests/test_configuration.py index 9f903f58b3599..df1bcae58d6cb 100644 --- a/tests/test_configuration.py +++ b/tests/test_configuration.py @@ -98,6 +98,8 @@ def test_env_var_config(self): opt = conf.get('testsection', 'testpercent') self.assertEqual(opt, 'with%percent') + self.assertTrue(conf.has_option('testsection', 'testkey')) + def test_conf_as_dict(self): cfg_dict = conf.as_dict() @@ -165,6 +167,10 @@ def test_command_config(self): self.assertEqual('key4_result', test_conf.get('test', 'key4')) self.assertEqual('value6', test_conf.get('another', 'key6')) + self.assertEqual('hello', test_conf.get('test', 'key1', fallback='fb')) + self.assertEqual('value6', test_conf.get('another', 'key6', fallback='fb')) + self.assertEqual('fb', test_conf.get('another', 'key7', fallback='fb')) + self.assertTrue(test_conf.has_option('test', 'key1')) self.assertTrue(test_conf.has_option('test', 'key2')) self.assertTrue(test_conf.has_option('test', 'key3'))
Make sure you have checked _all_ steps below. ### Jira - [x] https://issues.apache.org/jira/browse/AIRFLOW-3742 ### Description - [x] This `fallback` argument is part of the API from our parent class, but we didn't support it because of the various steps we perform in `get()` - this makes it behave more like the parent class, and can simplify a few instances in our code (I've only included one that I found here) ### Tests - [x] My PR adds the following unit tests __OR__ does not need testing for this extremely good reason: few small tests added ### Commits - [x] My commits all reference Jira issues in their subject lines, and I have squashed multiple commits if they address the same issue. In addition, my commits follow the guidelines from "[How to write a good git commit message](http://chris.beams.io/posts/git-commit/)": 1. Subject is separated from body by a blank line 1. Subject is limited to 50 characters (not including Jira issue reference) 1. Subject does not end with a period 1. Subject uses the imperative mood ("add", not "adding") 1. Body wraps at 72 characters 1. Body explains "what" and "why", not "how" ### Documentation - [x] In case of new functionality, my PR adds documentation that describes how to use it. - When adding new operators/hooks/sensors, the autoclass documentation generation needs to be added. - All the public functions and the classes in the PR contain docstrings that explain what it does ### Code Quality - [x] Passes `flake8`
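To make the new behavior concrete, a minimal usage sketch (the section/key mirror the settings.py hunk above; treat the surrounding context as illustrative):

```python
from airflow.configuration import conf

# Before this PR, a key missing from both airflow.cfg and the defaults raised
# AirflowConfigException even when a fallback was supplied; now the fallback
# is honoured, matching the parent ConfigParser behaviour.
encoding = conf.get('core', 'SQL_ENGINE_ENCODING', fallback='utf-8')
```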
https://api.github.com/repos/apache/airflow/pulls/4567
2019-01-21T16:10:46Z
2019-01-29T19:56:08Z
2019-01-29T19:56:08Z
2019-02-11T09:56:17Z
1,185
apache/airflow
14,358
Update Comet integration
diff --git a/segment/tutorial.ipynb b/segment/tutorial.ipynb index f2aee9e26b3..0ece0f60e4d 100644 --- a/segment/tutorial.ipynb +++ b/segment/tutorial.ipynb @@ -63,7 +63,7 @@ "source": [ "!git clone https://github.com/ultralytics/yolov5 # clone\n", "%cd yolov5\n", - "%pip install -qr requirements.txt # install\n", + "%pip install -qr requirements.txt comet_ml # install\n", "\n", "import torch\n", "import utils\n", diff --git a/train.py b/train.py index 48eeb09468f..b5044deb9b5 100644 --- a/train.py +++ b/train.py @@ -26,6 +26,11 @@ from datetime import datetime from pathlib import Path +try: + import comet_ml # must be imported before torch (if installed) +except ImportError: + comet_ml = None + import numpy as np import torch import torch.distributed as dist diff --git a/tutorial.ipynb b/tutorial.ipynb index be87068822a..42c6bc632ae 100644 --- a/tutorial.ipynb +++ b/tutorial.ipynb @@ -59,7 +59,7 @@ "source": [ "!git clone https://github.com/ultralytics/yolov5 # clone\n", "%cd yolov5\n", - "%pip install -qr requirements.txt # install\n", + "%pip install -qr requirements.txt comet_ml # install\n", "\n", "import torch\n", "import utils\n", diff --git a/utils/__init__.py b/utils/__init__.py index 6c10857df07..bccac42711e 100644 --- a/utils/__init__.py +++ b/utils/__init__.py @@ -54,7 +54,7 @@ def notebook_init(verbose=True): import os import shutil - from utils.general import check_font, check_requirements, is_colab + from utils.general import check_font, is_colab from utils.torch_utils import select_device # imports check_font() diff --git a/utils/loggers/__init__.py b/utils/loggers/__init__.py index c7c283b728a..ba7d2790e61 100644 --- a/utils/loggers/__init__.py +++ b/utils/loggers/__init__.py @@ -46,15 +46,15 @@ clearml = None try: - if RANK not in [0, -1]: - comet_ml = None - else: + if RANK in {0, -1}: import comet_ml assert hasattr(comet_ml, '__version__') # verify package import not local dir from utils.loggers.comet import CometLogger -except (ModuleNotFoundError, ImportError, AssertionError): + else: + comet_ml = None +except (ImportError, AssertionError): comet_ml = None @@ -88,10 +88,6 @@ def __init__(self, save_dir=None, weights=None, opt=None, hyp=None, logger=None, self.csv = True # always log to csv # Messages - if not clearml: - prefix = colorstr('ClearML: ') - s = f"{prefix}run 'pip install clearml' to automatically track, visualize and remotely train YOLOv5 🚀 in ClearML" - self.logger.info(s) if not comet_ml: prefix = colorstr('Comet: ') s = f"{prefix}run 'pip install comet_ml' to automatically track and visualize YOLOv5 🚀 runs in Comet" diff --git a/utils/loggers/comet/__init__.py b/utils/loggers/comet/__init__.py index d4599841c9f..aac06b11c8c 100644 --- a/utils/loggers/comet/__init__.py +++ b/utils/loggers/comet/__init__.py @@ -18,7 +18,7 @@ # Project Configuration config = comet_ml.config.get_config() COMET_PROJECT_NAME = config.get_string(os.getenv('COMET_PROJECT_NAME'), 'comet.project_name', default='yolov5') -except (ModuleNotFoundError, ImportError): +except ImportError: comet_ml = None COMET_PROJECT_NAME = None @@ -82,7 +82,7 @@ def __init__(self, opt, hyp, run_id=None, job_type='Training', **experiment_kwar self.comet_log_batch_interval = COMET_BATCH_LOGGING_INTERVAL # Dataset Artifact Settings - self.upload_dataset = self.opt.upload_dataset if self.opt.upload_dataset else COMET_UPLOAD_DATASET + self.upload_dataset = self.opt.upload_dataset or COMET_UPLOAD_DATASET self.resume = self.opt.resume # Default parameters to pass to Experiment objects @@ -93,6 +93,7 @@ def 
__init__(self, opt, hyp, run_id=None, job_type='Training', **experiment_kwar 'project_name': COMET_PROJECT_NAME,} self.default_experiment_kwargs.update(experiment_kwargs) self.experiment = self._get_experiment(self.comet_mode, run_id) + self.experiment.set_name(self.opt.name) self.data_dict = self.check_dataset(self.opt.data) self.class_names = self.data_dict['names']
<!-- Thank you for submitting a YOLOv5 🚀 Pull Request! We want to make contributing to YOLOv5 as easy and transparent as possible. A few tips to get you started: - Search existing YOLOv5 [PRs](https://github.com/ultralytics/yolov5/pull) to see if a similar PR already exists. - Link this PR to a YOLOv5 [issue](https://github.com/ultralytics/yolov5/issues) to help us understand what bug fix or feature is being implemented. - Provide before and after profiling/inference/training results to help us quantify the improvement your PR provides (if applicable). Please see our ✅ [Contributing Guide](https://docs.ultralytics.com/help/contributing) for more details. Note that Copilot will summarize this PR below, do not modify the 'copilot:all' line. --> <!-- copilot:all --> ### <samp>🤖 Generated by Copilot at 17ea9a1</samp> ### Summary 🚀🧹🌟 <!-- 1. 🚀 for adding a feature that allows optional use of comet_ml 2. 🧹 for cleaning up unused or unnecessary code 3. 🌟 for improving the code quality and readability of the CometLogger class --> This pull request improves the code quality and functionality of the logging modules in `./utils/loggers/`. It also makes comet_ml an optional dependency for `train.py` and removes an unused import in `utils/__init__.py`. > _We don't need no comet_ml to track our destiny_ > _We simplify and clean up the code of our `CometLogger`_ > _We remove the useless imports and the clearml noise_ > _We set the experiment name to match our run of choice_ ### Walkthrough * Add optional support for comet_ml logging and tracking ([link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-ed183d67207df065a11e1289f19d34cc2abbc5448dea952683cfe9728c342b95R29-R33), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-33c598c680edd3d5bb6a80177861c1a89a6c4f17bea580adb0212e772b268c9dL49-R49), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-33c598c680edd3d5bb6a80177861c1a89a6c4f17bea580adb0212e772b268c9dL57-R57), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L21-R21), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L85-R85), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L93-R96), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L155-R156), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L172-R173), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L216-R217), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L272-R273), [link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-34e9c736220c721191e55be28d097da70bdf1bb5b74de365c1fe4f35fad51e46L281-R282)) * Remove unused import of check_requirements in `utils/__init__.py` ([link](https://github.com/ultralytics/yolov5/pull/11648/files?diff=unified&w=0#diff-6e982446d45d5ffa23213eb0cb66991fb02283df19b49da3bc8057ce1ab5094aL57-R57)) ## 🛠️ PR Summary 
<sub>Made with ❤️ by [Ultralytics Actions](https://github.com/ultralytics/actions)<sub> ### 🌟 Summary Integration of Comet ML for enhanced training experiment tracking in the YOLOv5 repository. ### 📊 Key Changes - Added `comet_ml` as a dependency to the YOLOv5 tutorials, ensuring it's installed alongside other requirements. - Modified import logic in `train.py` to conditionally import `comet_ml` if available. - Adjusted logger initialization in the YOLOv5 utilities to handle Comet ML integration and removed unnecessary warning messages when `clearml` is not available. - In the Comet logger utility, streamlined dataset artifact upload logic and ensured naming for experiments is set through the Comet experiment object. ### 🎯 Purpose & Impact - 🚀 **Enhanced Experiment Tracking**: Incorporating `comet_ml` allows users to track, compare, and monitor their YOLOv5 training experiments more efficiently. - ✨ **Streamlined Setup**: With `comet_ml` now part of the tutorial requirements, new users will have an easier time setting up their environment for optimal experiment management. - 🧹 **Cleaner Codebase**: Cleaning up logger initialization code contributes to a more maintainable and less error-prone codebase. - 📈 **User Impact**: Users who leverage experiment tracking will benefit from improved insights and greater control over their model training processes. The integration also potentially attracts a wider user base comfortable with Comet ML.
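The one ordering constraint the train.py hunk encodes is worth stating plainly; this is the same pattern as the diff, not a new API:

```python
try:
    import comet_ml  # must be imported before torch (if installed)
except ImportError:
    comet_ml = None  # Comet logging simply stays disabled

import torch  # safe to import afterwards either way
```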
https://api.github.com/repos/ultralytics/yolov5/pulls/11648
2023-06-02T16:03:32Z
2023-06-15T11:49:19Z
2023-06-15T11:49:19Z
2024-01-19T01:47:48Z
1,222
ultralytics/yolov5
25,511
Update plot_ols.py (trivial)
diff --git a/examples/linear_model/plot_ols.py b/examples/linear_model/plot_ols.py index 39a03d1fa0ad4..541ff629b2b79 100644 --- a/examples/linear_model/plot_ols.py +++ b/examples/linear_model/plot_ols.py @@ -32,12 +32,11 @@ # Use only one feature -diabetes_X = diabetes.data[:, np.newaxis] -diabetes_X_temp = diabetes_X[:, :, 2] +diabetes_X = diabetes.data[:, np.newaxis, 2] # Split the data into training/testing sets -diabetes_X_train = diabetes_X_temp[:-20] -diabetes_X_test = diabetes_X_temp[-20:] +diabetes_X_train = diabetes_X[:-20] +diabetes_X_test = diabetes_X[-20:] # Split the targets into training/testing sets diabetes_y_train = diabetes.target[:-20]
Minor/trivial NumPy slicing refactor
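A quick sanity check of the slicing equivalence behind the refactor (array contents are arbitrary):

```python
import numpy as np

X = np.arange(12).reshape(4, 3)
old = X[:, np.newaxis][:, :, 2]  # two-step: insert an axis, then index it
new = X[:, np.newaxis, 2]        # one-step form from this PR
assert np.array_equal(old, new)  # both are shape (4, 1)
```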
https://api.github.com/repos/scikit-learn/scikit-learn/pulls/4861
2015-06-15T06:18:10Z
2015-06-15T06:40:40Z
2015-06-15T06:40:40Z
2015-06-15T06:41:23Z
198
scikit-learn/scikit-learn
46,860
fix(poloniex): Pro timeframes
diff --git a/ts/src/pro/poloniex.ts b/ts/src/pro/poloniex.ts index f550eb7bacd1..49f630c8ab28 100644 --- a/ts/src/pro/poloniex.ts +++ b/ts/src/pro/poloniex.ts @@ -554,7 +554,8 @@ export default class poloniex extends poloniexRest { const marketId = this.safeString (data, 'symbol'); const symbol = this.safeSymbol (marketId); const market = this.safeMarket (symbol); - const timeframe = this.findTimeframe (channel); + const timeframes = this.safeValue (this.options, 'timeframes', {}); + const timeframe = this.findTimeframe (channel, timeframes); const messageHash = channel + '::' + symbol; const parsed = this.parseWsOHLCV (data, market); this.ohlcvs[symbol] = this.safeValue (this.ohlcvs, symbol, {});
- `findTimeframe` was using the REST timeframes rather than the Pro (WebSocket) ones
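A rough Python rendering of the lookup change, since the real code is TypeScript (ts/src/pro/poloniex.ts); the channel ids below are illustrative, not actual Poloniex identifiers:

```python
def find_timeframe(channel_id, timeframes):
    # invert the mapping: exchange channel id -> unified timeframe key
    for key, value in timeframes.items():
        if value == channel_id:
            return key
    return None

# Pro (WebSocket) channel ids differ from the REST ones, so using the
# right map matters:
ws_timeframes = {'1m': 'candles_minute_1', '1h': 'candles_hour_1'}
print(find_timeframe('candles_hour_1', ws_timeframes))  # -> '1h'
```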
https://api.github.com/repos/ccxt/ccxt/pulls/20689
2024-01-05T18:05:48Z
2024-01-05T18:20:58Z
2024-01-05T18:20:58Z
2024-01-05T18:20:58Z
213
ccxt/ccxt
13,677
fix colotensor.type() raise NotImplementedError
diff --git a/colossalai/nn/_ops/element_wise.py b/colossalai/nn/_ops/element_wise.py index c3c1421e7fdc..462670e7278a 100644 --- a/colossalai/nn/_ops/element_wise.py +++ b/colossalai/nn/_ops/element_wise.py @@ -18,6 +18,8 @@ def elementwise_op(input_tensor: GeneralTensor, *args, **kwargs): output = op(input_tensor, *args, **kwargs) if isinstance(input_tensor, ColoTensor): + if isinstance(output, str): + return output if not isinstance(output, torch.Tensor): raise NotImplementedError return ColoTensor.from_torch_tensor(output,
Hi, I'm a dev from BioMap. We encountered this error when calling `colotensor.type()`.
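The trigger is plain PyTorch behaviour: called with no arguments, `Tensor.type()` returns a type string rather than a tensor, which the element-wise wrapper then rejected as a non-Tensor output. A minimal repro of that behaviour:

```python
import torch

t = torch.zeros(2)
print(t.type())                            # 'torch.FloatTensor' -- a str
print(isinstance(t.type(), torch.Tensor))  # False
```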
https://api.github.com/repos/hpcaitech/ColossalAI/pulls/1682
2022-10-08T10:55:21Z
2022-10-10T02:13:31Z
2022-10-10T02:13:31Z
2022-10-10T02:13:31Z
170
hpcaitech/ColossalAI
11,776
Hook validation: skip leading spaces/newlines
diff --git a/certbot/hooks.py b/certbot/hooks.py index 138e2addce1..890021a5f70 100644 --- a/certbot/hooks.py +++ b/certbot/hooks.py @@ -27,7 +27,7 @@ def _validate_hook(shell_cmd, hook_name): :raises .errors.HookCommandNotFound: if the command is not found """ if shell_cmd: - cmd = shell_cmd.partition(" ")[0] + cmd = shell_cmd.split(None, 1)[0] if not _prog(cmd): path = os.environ["PATH"] msg = "Unable to find {2}-hook command {0} in the PATH.\n(PATH is {1})".format(
Improves the situation with #3020 a bit. Does nothing about other valid shell commands that the current validation would reject: - shell builtins like `--post-hook 'if [ -x /my/script ]; then /my/script; fi'` - variable assignments like `--post-hook 'ENV_VAR=value command'` - comments - redirections like `--post-hook '<infile command'` - I'm sure I'm missing many other exciting possibilities Still, I believe this is a step in the right direction and therefore ask you to please merge it (assuming I haven't broken any tests).
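The leading-whitespace case is easy to demonstrate (the script path is illustrative):

```python
shell_cmd = "  /my/script --flag"         # note the leading spaces

print(repr(shell_cmd.partition(" ")[0]))  # ''            -- old code looked up nothing
print(repr(shell_cmd.split(None, 1)[0]))  # '/my/script'  -- new code finds the program
```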
https://api.github.com/repos/certbot/certbot/pulls/3021
2016-05-18T09:05:13Z
2016-05-18T19:07:25Z
2016-05-18T19:07:25Z
2016-05-18T19:07:33Z
164
certbot/certbot
2,485
improve/simplify gitignore
diff --git a/.gitignore b/.gitignore index 3cfbbb2257..368529166b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,26 +1,21 @@ -cache/* -characters/* -extensions/silero_tts/outputs/* -extensions/elevenlabs_tts/outputs/* -extensions/sd_api_pictures/outputs/* -logs/* -loras/* -models/* -softprompts/* -torch-dumps/* +cache +characters +training/datasets +extensions/silero_tts/outputs +extensions/elevenlabs_tts/outputs +extensions/sd_api_pictures/outputs +logs +loras +models +softprompts +torch-dumps *pycache* */*pycache* */*/pycache* venv/ .venv/ +repositories settings.json img_bot* img_me* - -!characters/Example.json -!characters/Example.png -!loras/place-your-loras-here.txt -!models/place-your-models-here.txt -!softprompts/place-your-softprompts-here.txt -!torch-dumps/place-your-pt-models-here.txt
This is a minor change but a bit of QOL for forking/contributing to this repo - add repositories - remove the redundant `/*` on folders - remove the exclusions for files that already exist If it's unclear what difference this makes: pretty much it fixes the majority of issues with git thinking you've made changes to the repo just by virtue of ... using the WebUI. Not all, but most. Notably, adding `repositories` fixes the issue where GPTQ support adds an unneeded local change. The `!` exclusions at the bottom don't have any function, since gitignore doesn't apply to files that are already in the repository. The `/*` is somewhere between redundant and actively annoying. - This also adds `datasets` to gitignore for #570 The only remaining issues for me personally are - it's annoyed about my `models` dir being a symlink but <https://github.com/oobabooga/text-generation-webui/pull/549> will resolve that. - there isn't a good directory for personal custom presets (i.e. ones I don't intend to git commit) PS while I have oobabooga's attention here: have you thought about making an official Discord for the WebUI? Either for the community or just for devs/extension authors/etc. at least to communicate (For comparison Automatic1111 WebUI has an official dev-only discord and a separate unofficial public discord)
https://api.github.com/repos/oobabooga/text-generation-webui/pulls/565
2023-03-25T17:09:43Z
2023-03-26T16:31:45Z
2023-03-26T16:31:45Z
2023-03-26T16:31:45Z
268
oobabooga/text-generation-webui
26,680
Fix model_worker error
diff --git a/fastchat/serve/inference.py b/fastchat/serve/inference.py index c97fd1c017..4e51916101 100644 --- a/fastchat/serve/inference.py +++ b/fastchat/serve/inference.py @@ -108,6 +108,7 @@ def generate_stream( past_key_values = out = None sent_interrupt = False + finish_reason = None for i in range(max_new_tokens): if i == 0: # prefill if model.config.is_encoder_decoder: @@ -240,12 +241,11 @@ def generate_stream( break # Finish stream event, which contains finish reason - if i == max_new_tokens - 1: + else: finish_reason = "length" - elif stopped: + + if stopped: finish_reason = "stop" - else: - finish_reason = None yield { "text": output,
<!-- Thank you for your contribution! --> <!-- Please add a reviewer to the assignee section when you create a PR. If you don't have the access to it, we will shortly find a reviewer and assign them to your PR. --> ## Why are these changes needed? Fix the bug where `model_worker` raises the error `local variable 'i' referenced before assignment` in some cases. ## Related issue number (if applicable) <!-- For example: "Closes #1234" --> Closes #2382 ## Checks - [ ] I've run `format.sh` to lint the changes in this PR. - [x] I've included any doc changes needed. - [ ] I've made sure the relevant tests are passing (if applicable).
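The underlying Python pitfall, reduced to a sketch (the function name is illustrative):

```python
def last_index(n):
    for i in range(n):
        pass
    # if n == 0 the loop body never runs, so `i` was never bound:
    return i

last_index(0)  # UnboundLocalError: local variable 'i' referenced before assignment
```

Initialising `finish_reason` before the loop, as this PR does in `generate_stream`, is the same cure applied to the real code.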
https://api.github.com/repos/lm-sys/FastChat/pulls/2404
2023-09-12T02:13:33Z
2023-09-12T04:03:00Z
2023-09-12T04:03:00Z
2023-09-12T04:03:00Z
213
lm-sys/FastChat
41,529
add: one rep max calculator, weightlifting.
diff --git a/nitkarshchourasia/one_rep_max_calculator/README.md b/nitkarshchourasia/one_rep_max_calculator/README.md new file mode 100644 index 0000000000..78aa7469f7 --- /dev/null +++ b/nitkarshchourasia/one_rep_max_calculator/README.md @@ -0,0 +1,25 @@ +# One-Rep Max Calculator + +This repository contains two Python programs that can calculate the estimated one-repetition maximum (1RM) for a weightlifting exercise. The 1RM is the maximum amount of weight that you can lift for one rep. It is useful for tracking your strength progress and planning your training. + +## Command-line version + +The file `one_rep_max_calculator.py` is a command-line version of the 1RM calculator. It prompts the user to enter the weight lifted and the number of reps performed, and then calculates and displays the estimated 1RM based on the *Epley formula*. + +To run this program, you need Python 3 installed on your system. You can execute the program by typing `python one_rep_max_calculator.py` in your terminal. + +## Graphical user interface version + +The file `one_rep_max_calculator_gui.py` is a graphical user interface version of the 1RM calculator. It uses Tkinter to create a window with entry fields, labels, and a button. The user can input the weight lifted and the number of reps performed, and then click the calculate button to see the estimated 1RM based on the Epley formula. + +To run this program, you need Python 3 and Tkinter installed on your system. You can execute the program by typing `python one_rep_max_calculator_gui.py` in your terminal. + +## References + +- Epley, B. Poundage chart. In: Boyd Epley Workout. Lincoln, NE: Body Enterprises, 1985. p. 23. +- https://en.wikipedia.org/wiki/One-repetition_maximum +- https://www.topendsports.com/testing/calculators/1repmax.htm + +<!-- author: Nitkarsh Chourasia --> +<!-- github_Username: NitkarshChourasia --> +<!-- github_profil_url: https://github.com/NitkarshChourasia --> \ No newline at end of file diff --git a/nitkarshchourasia/one_rep_max_calculator/one_rep_max_calculator.py b/nitkarshchourasia/one_rep_max_calculator/one_rep_max_calculator.py new file mode 100644 index 0000000000..fdf8460fe7 --- /dev/null +++ b/nitkarshchourasia/one_rep_max_calculator/one_rep_max_calculator.py @@ -0,0 +1,45 @@ +class OneRepMaxCalculator: + """ + A class to calculate the one-repetition maximum (1RM) for a weightlifting exercise. + """ + + def __init__(self): + """ + Initializes the OneRepMaxCalculator with default values. + """ + self.weight_lifted = 0 + self.reps_performed = 0 + + def get_user_input(self): + """ + Prompts the user to enter the weight lifted and the number of reps performed. + """ + self.weight_lifted = int(input("Enter the weight you lifted (in kg): ")) + self.reps_performed = int(input("Enter the number of reps you performed: ")) + + def calculate_one_rep_max(self): + """ + Calculates the one-rep max based on the Epley formula. + """ + return (self.weight_lifted * self.reps_performed * 0.0333) + self.weight_lifted + + def display_one_rep_max(self): + """ + Displays the calculated one-rep max. + """ + one_rep_max = self.calculate_one_rep_max() + print(f"Your estimated one-rep max (1RM) is: {one_rep_max} kg") + + +def main(): + """ + The main function that creates an instance of OneRepMaxCalculator and uses it to get user input, + calculate the one-rep max, and display the result. 
+ """ + calculator = OneRepMaxCalculator() + calculator.get_user_input() + calculator.display_one_rep_max() + + +if __name__ == "__main__": + main() diff --git a/nitkarshchourasia/one_rep_max_calculator/one_rep_max_calculator_gui.py b/nitkarshchourasia/one_rep_max_calculator/one_rep_max_calculator_gui.py new file mode 100644 index 0000000000..7189401b2e --- /dev/null +++ b/nitkarshchourasia/one_rep_max_calculator/one_rep_max_calculator_gui.py @@ -0,0 +1,75 @@ +import tkinter as tk + + +class OneRepMaxCalculator: + """ + A class used to calculate the estimated one-repetition maximum (1RM) for a weightlifting exercise. + + Attributes + ---------- + window : tk.Tk + The main window of the application. + weight_entry : tk.Entry + Entry field to input the weight lifted. + rep_entry : tk.Entry + Entry field to input the number of reps performed. + result_value_label : tk.Label + Label to display the calculated 1RM. + + Methods + ------- + calculate_1rm(): + Calculates the estimated 1RM based on the Epley formula. + display_result(): + Displays the calculated 1RM in the application window. + run(): + Runs the application. + """ + + def __init__(self): + """Initializes the OneRepMaxCalculator with a window and widgets.""" + self.window = tk.Tk() + self.window.title("One-Rep Max Calculator") + self.window.geometry("300x150") + + # Create and pack widgets + tk.Label(self.window, text="Enter the weight you lifted (in kg):").pack() + self.weight_entry = tk.Entry(self.window) + self.weight_entry.pack() + + tk.Label(self.window, text="Enter the number of reps you performed:").pack() + self.rep_entry = tk.Entry(self.window) + self.rep_entry.pack() + + tk.Button(self.window, text="Calculate", command=self.display_result).pack() + + tk.Label(self.window, text="Your estimated one-rep max (1RM):").pack() + self.result_value_label = tk.Label(self.window) + self.result_value_label.pack() + + def calculate_1rm(self): + """Calculates and returns the estimated 1RM.""" + weight = int(self.weight_entry.get()) + reps = int(self.rep_entry.get()) + return (weight * reps * 0.0333) + weight + + def display_result(self): + """Calculates the 1RM and updates result_value_label with it.""" + one_rep_max = self.calculate_1rm() + self.result_value_label.config(text=f"{one_rep_max} kg") + + def run(self): + """Runs the Tkinter event loop.""" + self.window.mainloop() + + +# Usage +if __name__ == "__main__": + calculator = OneRepMaxCalculator() + calculator.run() + +# Improve the program. +# Make the fonts, bigger. +# - Use text formatting... +# Use dark mode. +# Have an option to use dark mode and light mode.
# One Rep Max Calculator Uses the Epley formula to estimate the one-rep max in weightlifting. # Includes - Command-line calculator. - GUI calculator. - README.md documenting and explaining the programs. # Description Working out the maximum weight you can lift for a single rep at your current strength level is awkward to do by hand. This calculator makes it easy. ### Dependencies - Python 3 and up. - Tkinter
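For reference, the Epley estimate both programs use, with a worked example (0.0333 approximates 1/30):

```python
def one_rep_max(weight_kg, reps):
    # Epley: 1RM = w * (1 + r/30) = w + w*r/30
    return weight_kg * reps * 0.0333 + weight_kg

print(one_rep_max(100, 5))  # 116.65 -> roughly 116.5 kg for a single rep
```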
https://api.github.com/repos/geekcomputers/Python/pulls/1993
2023-10-05T12:40:52Z
2023-10-08T07:17:28Z
2023-10-08T07:17:28Z
2023-10-08T07:17:29Z
1,682
geekcomputers/Python
31,304
Update dice_rolling_simulator.py
diff --git a/dice_rolling_simulator.py b/dice_rolling_simulator.py index 693a1b1ad8..afb91c3740 100644 --- a/dice_rolling_simulator.py +++ b/dice_rolling_simulator.py @@ -1,98 +1,77 @@ #Made on May 27th, 2017 #Made by SlimxShadyx +#Editted by CaptMcTavish, June 17th, 2017 #Dice Rolling Simulator import random -#These variables are used for user input and while loop checking. -correct_word = False -dice_checker = False -dicer = False -roller_loop = False +global user_exit_checker +user_exit_checker="exit" -#Checking the user input to start the program. -while correct_word == False: +def start(): + print "Welcome to dice rolling simulator: \nPress Enter to proceed" + raw_input(">") - user_input_raw = raw_input("\r\nWelcome to the Dice Rolling Simulator! We currently support 6, 8, and 12 sided die! \ - Type [start] to begin!\r\n?>") + result() - #Converting the user input to lower case. - user_input = (user_input_raw.lower()) +def bye(): + print "Thanks for using the Dice Rolling Simulator! Have a great day! =)" - if user_input == 'start': - correct_word = True - - else: - print "Please type [start] to begin!\r\n" - -#Main program loop. Exiting this, exits the program. -while roller_loop == False: - - #Second While loop to ask the user for the certain die they want. - while dice_checker == False: - user_dice_chooser = raw_input("\r\nGreat! Begin by choosing a die! [6] [8] [10]\r\n?>") - - user_dice_chooser = int(user_dice_chooser) - - if user_dice_chooser == 6: - dice_checker = True +def result(): - elif user_dice_chooser == 8: - dice_checker = True + #user_dice_chooser No idea how this got in here, thanks EroMonsterSanji. - elif user_dice_chooser == 12: - dice_checker = True - else: - print "\r\nPlease choose one of the applicable options!\r\n" - - #Another inner while loop. This one does the actual rolling, as well as - #allowing the user to re-roll without restarting the program. - while dicer == False: + print "\r\nGreat! Begin by choosing a die! [6] [8] [12]?\r\n" + user_dice_chooser = raw_input(">") - if user_dice_chooser == 6: - dice_6 = random.randint(1,6) - print "\r\nYou rolled a " + str(dice_6) + "!\r\n" - dicer = True + user_dice_chooser = int(user_dice_chooser) - user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>") - user_exit_checker = (user_exit_checker_raw.lower()) + if user_dice_chooser == 6: + dice6() - if user_exit_checker == 'roll': - dicer = False + elif user_dice_chooser == 8: + dice8() - elif user_exit_checker == 'exit': - roller_loop = True + elif user_dice_chooser == 12: + dice12() + else: + print "\r\nPlease choose one of the applicable options!\r\n" + result() - elif user_dice_chooser == 8: - dice_8 = random.randint(1,8) - print "\r\nYou rolled a " + str(dice_8) + "!" - dicer = True - - user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>") - user_exit_checker = (user_exit_checker_raw.lower()) - if user_exit_checker == 'roll': - dicer = False +def dice6(): + dice_6 = random.randint(1,6) + print "\r\nYou rolled a " + str(dice_6) + "!\r\n" - elif user_exit_checker == 'exit': - roller_loop = True + user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>") + user_exit_checker = (user_exit_checker_raw.lower()) + if user_exit_checker=="roll": + start() + else: + bye() - elif user_dice_chooser == 12: - dice_12 = random.randint(1,12) - print "\r\nYou rolled a " + str(dice_12) + "!" 
- dicer = True - - user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>") - user_exit_checker = (user_exit_checker_raw.lower()) +def dice8(): + dice_8 = random.randint(1,8) + print "\r\nYou rolled a " + str(dice_8) + "!" - if user_exit_checker == 'roll': - dicer = False + user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>") + user_exit_checker = (user_exit_checker_raw.lower()) + if user_exit_checker=="roll": + start() + else: + bye() - elif user_exit_checker == 'exit': - roller_loop = True +def dice12(): + dice_12 = random.randint(1,12) + print "\r\nYou rolled a " + str(dice_12) + "!" -print "Thanks for using the Dice Rolling Simulator! Have a great day! =)" - + user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>") + user_exit_checker = (user_exit_checker_raw.lower()) + if user_exit_checker=="roll": + start() + else: + bye() +start()
Someone suggested that I not use `while` so much, so I tried to create an alternative. I am a newbie and this is my first contribution! A line in the previous code (the equivalent of which is line #26 in the current code) said [10] when it should have said [12]; it's a minor slip, but please fix it. Thanks.
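One caveat with replacing `while` by mutually recursive functions: every re-roll adds a stack frame, so a long enough session can hit Python's recursion limit. A loop keeps the same flow without that risk; a sketch in the file's own Python 2 style (prompts are illustrative):

```python
import random

while True:
    sides = int(raw_input("Choose a die [6] [8] [12]: "))
    print "You rolled a %d!" % random.randint(1, sides)
    if raw_input("[roll] to roll again, anything else to exit: ").lower() != "roll":
        print "Thanks for using the Dice Rolling Simulator!"
        break
```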
https://api.github.com/repos/geekcomputers/Python/pulls/185
2017-06-17T15:24:55Z
2017-06-17T17:15:55Z
2017-06-17T17:15:55Z
2017-06-17T17:17:41Z
1,488
geekcomputers/Python
31,236
Fixing a minor grammatical mistake
diff --git a/docs/use_cases/q_and_a/rag_cli.md b/docs/use_cases/q_and_a/rag_cli.md index 3d17e7676a035..6e69103ded497 100644 --- a/docs/use_cases/q_and_a/rag_cli.md +++ b/docs/use_cases/q_and_a/rag_cli.md @@ -2,7 +2,7 @@ One common use case is chatting with an LLM about files you have saved locally on your computer. -We have written a CLI tool do help you do just that! You can point the rag CLI tool to a set of files you've saved locally, and it will ingest those files into a local vector database that is then used for a Chat Q&A repl within your terminal. +We have written a CLI tool to help you do just that! You can point the rag CLI tool to a set of files you've saved locally, and it will ingest those files into a local vector database that is then used for a Chat Q&A repl within your terminal. By default, this tool uses OpenAI for the embeddings & LLM as well as a local Chroma Vector DB instance. **Warning**: this means that, by default, the local data you ingest with this tool _will_ be sent to OpenAI's API.
# Description Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. Fixes # (issue) ## Type of Change Please delete options that are not relevant. - [ ] Bug fix (non-breaking change which fixes an issue) - [ ] New feature (non-breaking change which adds functionality) - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) - [ ] This change requires a documentation update # How Has This Been Tested? Please describe the tests that you ran to verify your changes. Provide instructions so we can reproduce. Please also list any relevant details for your test configuration - [ ] Added new unit/integration tests - [ ] Added new notebook (that tests end-to-end) - [ ] I stared at the code and made sure it makes sense # Suggested Checklist: - [ ] I have performed a self-review of my own code - [ ] I have commented my code, particularly in hard-to-understand areas - [ ] I have made corresponding changes to the documentation - [ ] I have added Google Colab support for the newly added notebooks. - [ ] My changes generate no new warnings - [ ] I have added tests that prove my fix is effective or that my feature works - [ ] New and existing unit tests pass locally with my changes - [ ] I ran `make format; make lint` to appease the lint gods
https://api.github.com/repos/run-llama/llama_index/pulls/10408
2024-02-02T08:37:07Z
2024-02-02T20:34:27Z
2024-02-02T20:34:27Z
2024-02-02T20:34:27Z
274
run-llama/llama_index
6,127
Make mypy pass on our tests
diff --git a/certbot-apache/certbot_apache/_internal/tests/apache-conf-files/apache-conf-test-pebble.py b/certbot-apache/certbot_apache/_internal/tests/apache-conf-files/apache-conf-test-pebble.py index 68bd6287d75..383c652f8cf 100755 --- a/certbot-apache/certbot_apache/_internal/tests/apache-conf-files/apache-conf-test-pebble.py +++ b/certbot-apache/certbot_apache/_internal/tests/apache-conf-files/apache-conf-test-pebble.py @@ -12,9 +12,8 @@ SCRIPT_DIRNAME = os.path.dirname(__file__) -def main(args=None): - if not args: - args = sys.argv[1:] +def main() -> int: + args = sys.argv[1:] with acme_server.ACMEServer('pebble', [], False) as acme_xdist: environ = os.environ.copy() environ['SERVER'] = acme_xdist['directory_url'] diff --git a/certbot-nginx/certbot_nginx/_internal/parser_obj.py b/certbot-nginx/certbot_nginx/_internal/parser_obj.py index 0af38a9360a..7a094650015 100644 --- a/certbot-nginx/certbot_nginx/_internal/parser_obj.py +++ b/certbot-nginx/certbot_nginx/_internal/parser_obj.py @@ -1,5 +1,6 @@ # type: ignore -# This module is not used for now, so we just skip type check for the sake of simplicity. +# This module is not used for now, so we just skip type checking for the sake +# of simplicity. """ This file contains parsing routines and object classes to help derive meaning from raw lists of tokens from pyparsing. """ diff --git a/certbot-nginx/certbot_nginx/_internal/tests/http_01_test.py b/certbot-nginx/certbot_nginx/_internal/tests/http_01_test.py index 6726b85ad2e..c81357607d0 100644 --- a/certbot-nginx/certbot_nginx/_internal/tests/http_01_test.py +++ b/certbot-nginx/certbot_nginx/_internal/tests/http_01_test.py @@ -7,6 +7,7 @@ import pytest from acme import challenges +from acme import messages from certbot import achallenges from certbot.tests import acme_util from certbot.tests import util as test_util @@ -23,29 +24,29 @@ class HttpPerformTest(util.NginxTest): achalls = [ achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( - challenges.HTTP01(token=b"kNdwjwOeX0I_A8DXt9Msmg"), "pending"), + challenges.HTTP01(token=b"kNdwjwOeX0I_A8DXt9Msmg"), messages.STATUS_PENDING), domain="www.example.com", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01( token=b"\xba\xa9\xda?<m\xaewmx\xea\xad\xadv\xf4\x02\xc9y" b"\x80\xe2_X\t\xe7\xc7\xa4\t\xca\xf7&\x945" - ), "pending"), + ), messages.STATUS_PENDING), domain="ipv6.com", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( challenges.HTTP01( token=b"\x8c\x8a\xbf_-f\\cw\xee\xd6\xf8/\xa5\xe3\xfd" b"\xeb9\xf1\xf5\xb9\xefVM\xc9w\xa4u\x9c\xe1\x87\xb4" - ), "pending"), + ), messages.STATUS_PENDING), domain="www.example.org", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( - challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"), + challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), messages.STATUS_PENDING), domain="migration.com", account_key=account_key), achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( - challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"), + challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), messages.STATUS_PENDING), domain="ipv6ssl.com", account_key=account_key), ] @@ -137,7 +138,7 @@ def test_mod_config_deduplicate(self, mock_add_server_directives): """A vhost that appears in both HTTP and HTTPS vhosts only gets modded once""" achall = 
achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb( - challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), "pending"), + challenges.HTTP01(token=b"kNdwjxOeX0I_A8DXt9Msmg"), messages.STATUS_PENDING), domain="ssl.both.com", account_key=AUTH_KEY) self.http01.add_chall(achall) self.http01._mod_config() # pylint: disable=protected-access diff --git a/certbot-nginx/certbot_nginx/_internal/tests/parser_obj_test.py b/certbot-nginx/certbot_nginx/_internal/tests/parser_obj_test.py index 7503897cc67..710b63fc4be 100644 --- a/certbot-nginx/certbot_nginx/_internal/tests/parser_obj_test.py +++ b/certbot-nginx/certbot_nginx/_internal/tests/parser_obj_test.py @@ -1,3 +1,6 @@ +# type: ignore +# As done in parser_obj.py, this module is not used for now, so we just skip +# type checking for the sake of simplicity. """ Tests for functions and classes in parser_obj.py """ import sys diff --git a/certbot/certbot/_internal/tests/plugins/common_test.py b/certbot/certbot/_internal/tests/plugins/common_test.py index 4a0af7c5e29..8c5b260319b 100644 --- a/certbot/certbot/_internal/tests/plugins/common_test.py +++ b/certbot/certbot/_internal/tests/plugins/common_test.py @@ -9,6 +9,7 @@ import pytest from acme import challenges +from acme import messages from certbot import achallenges from certbot import crypto_util from certbot import errors @@ -20,7 +21,7 @@ AUTH_KEY = jose.JWKRSA.load(test_util.load_vector("rsa512_key.pem")) ACHALL = achallenges.KeyAuthorizationAnnotatedChallenge( challb=acme_util.chall_to_challb(challenges.HTTP01(token=b'token1'), - "pending"), + messages.STATUS_PENDING), domain="encryption-example.demo", account_key=AUTH_KEY) @@ -51,7 +52,7 @@ def prepare(self) -> None: pass def more_info(self) -> str: - pass + return "info" @classmethod def add_parser_arguments(cls, add): diff --git a/certbot/certbot/_internal/tests/util_test.py b/certbot/certbot/_internal/tests/util_test.py index ac3b5ab888f..b4256176e14 100644 --- a/certbot/certbot/_internal/tests/util_test.py +++ b/certbot/certbot/_internal/tests/util_test.py @@ -247,13 +247,6 @@ def test_default_exists(self): fd3.close() -try: - file_type = file -except NameError: - import io - file_type = io.TextIOWrapper # type: ignore - - class UniqueLineageNameTest(test_util.TempDirTestCase): """Tests for certbot.util.unique_lineage_name.""" @@ -263,7 +256,7 @@ def _call(self, filename, mode=0o777): def test_basic(self): f, path = self._call("wow") - assert isinstance(f, file_type) + assert isinstance(f, io.TextIOWrapper) assert os.path.join(self.tempdir, "wow.conf") == path f.close() @@ -272,7 +265,7 @@ def test_multiple(self): for _ in range(10): items.append(self._call("wow")) f, name = items[-1] - assert isinstance(f, file_type) + assert isinstance(f, io.TextIOWrapper) assert isinstance(name, str) assert "wow-0009.conf" in name for f, _ in items: diff --git a/mypy.ini b/mypy.ini index 6c01929d4ec..2c9ba518764 100644 --- a/mypy.ini +++ b/mypy.ini @@ -1,8 +1,14 @@ [mypy] -# Removing this exclude setting is being tracked by -# https://github.com/certbot/certbot/issues/7909. -exclude = .*/_internal/tests/ ignore_missing_imports = True warn_unused_ignores = True show_error_codes = True disallow_untyped_defs = True + +# Using stricter settings here is being tracked by +# https://github.com/certbot/certbot/issues/9647. +[mypy-*._internal.tests.*] +# By default, mypy prints notes without erroring about any type annotations it +# finds in untyped function bodies when check_untyped_defs is false. 
Disabling +# this "error" code removes this visual noise. +disable_error_code = annotation-unchecked +disallow_untyped_defs = False
Fixes https://github.com/certbot/certbot/issues/7909 after opening https://github.com/certbot/certbot/issues/9647. This approach to getting a very small subset of mypy's tests passing on existing code is suggested in the mypy docs at https://mypy.readthedocs.io/en/stable/existing_code.html#start-small.
https://api.github.com/repos/certbot/certbot/pulls/9648
2023-03-30T17:45:29Z
2023-03-30T22:20:44Z
2023-03-30T22:20:44Z
2023-03-30T22:20:45Z
2,251
certbot/certbot
826
[3.7] bpo-33769: start_tls: Fix error message; cancel callbacks on error (GH-7403)
diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py index 34cc6252e77cb6..68a1ebe623b871 100644 --- a/Lib/asyncio/base_events.py +++ b/Lib/asyncio/base_events.py @@ -1097,7 +1097,7 @@ async def start_tls(self, transport, protocol, sslcontext, *, if not getattr(transport, '_start_tls_compatible', False): raise TypeError( - f'transport {self!r} is not supported by start_tls()') + f'transport {transport!r} is not supported by start_tls()') waiter = self.create_future() ssl_protocol = sslproto.SSLProtocol( @@ -1111,13 +1111,15 @@ async def start_tls(self, transport, protocol, sslcontext, *, transport.pause_reading() transport.set_protocol(ssl_protocol) - self.call_soon(ssl_protocol.connection_made, transport) - self.call_soon(transport.resume_reading) + conmade_cb = self.call_soon(ssl_protocol.connection_made, transport) + resume_cb = self.call_soon(transport.resume_reading) try: await waiter except Exception: transport.close() + conmade_cb.cancel() + resume_cb.cancel() raise return ssl_protocol._app_transport diff --git a/Lib/asyncio/sslproto.py b/Lib/asyncio/sslproto.py index 8515ec5eebd32e..fac2ae74e808b8 100644 --- a/Lib/asyncio/sslproto.py +++ b/Lib/asyncio/sslproto.py @@ -399,6 +399,7 @@ def abort(self): called with None as its argument. """ self._ssl_protocol._abort() + self._closed = True class SSLProtocol(protocols.Protocol): diff --git a/Misc/NEWS.d/next/Library/2018-06-04-13-46-39.bpo-33769.D_pxYz.rst b/Misc/NEWS.d/next/Library/2018-06-04-13-46-39.bpo-33769.D_pxYz.rst new file mode 100644 index 00000000000000..9a124fafc6d24f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-06-04-13-46-39.bpo-33769.D_pxYz.rst @@ -0,0 +1,2 @@ +asyncio/start_tls: Fix error message; cancel callbacks in case of an +unhandled error; mark SSLTransport as closed if it is aborted.
In addition to that, mark SSLTransport as "closed" in its "abort()" method to prevent bogus warnings. (cherry picked from commit 415bc46a78e785f357c8960ae70f18a6b6cccbb6) Co-authored-by: Yury Selivanov <yury@magic.io> <!-- issue-number: bpo-33769 --> https://bugs.python.org/issue33769 <!-- /issue-number -->
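For context, a minimal runnable sketch (not the CPython test suite) of the `Handle.cancel()` pattern the fix relies on: `loop.call_soon()` returns a handle that can be cancelled if an error occurs before the callback runs, so no callback fires against a transport that is being torn down.

```python
import asyncio

async def demo():
    loop = asyncio.get_running_loop()
    # Mirrors conmade_cb / resume_cb in the patch: keep the handles around.
    handle = loop.call_soon(print, "this callback never runs")
    try:
        raise RuntimeError("stand-in for the awaited waiter failing")
    except RuntimeError:
        handle.cancel()  # prevent the scheduled callback from firing

asyncio.run(demo())
```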
https://api.github.com/repos/python/cpython/pulls/7428
2018-06-05T13:03:51Z
2018-06-05T14:18:21Z
2018-06-05T14:18:21Z
2018-06-05T14:18:26Z
604
python/cpython
4,195
typo fix "func" to "function" in testing.py
diff --git a/sklearn/utils/testing.py b/sklearn/utils/testing.py index 47b37576348c0..63f2d8094b84d 100644 --- a/sklearn/utils/testing.py +++ b/sklearn/utils/testing.py @@ -406,12 +406,12 @@ def assert_raise_message(exceptions, message, function, *args, **kwargs): exceptions : exception or tuple of exception Name of the estimator - func : callable + function : callable Calable object to raise error - *args : the positional arguments to `func`. + *args : the positional arguments to `function`. - **kw : the keyword arguments to `func` + **kw : the keyword arguments to `function` """ try: function(*args, **kwargs)
Hi, I adjusted the docstring of the `assert_raise_message` function from "func" to "function" to match the function parameter (`def assert_raise_message(exceptions, message, function, *args, **kwargs)`). However, I am wondering if we shouldn't change it to `func` instead: `def assert_raise_message(exceptions, message, func, *args, **kwargs)` to be consistent with the other functions in testing.py. I could take care of the current tests and adjust them where appropriate. What do you think?
https://api.github.com/repos/scikit-learn/scikit-learn/pulls/6700
2016-04-22T21:02:24Z
2016-07-01T19:50:40Z
2016-07-01T19:50:40Z
2016-07-01T19:50:44Z
184
scikit-learn/scikit-learn
46,048
Manual: market structure: lot
diff --git a/wiki/Manual.md b/wiki/Manual.md index bec86bc93cee..ad93fd72447a 100644 --- a/wiki/Manual.md +++ b/wiki/Manual.md @@ -430,6 +430,7 @@ In terms of the ccxt library, every exchange offers multiple markets within itse 'amount': 8, // integer 'cost': 8, // integer }, + 'lot': 0.00000001, // order amount should be a multiple of lot 'limits': { // value limits when placing orders on this market 'amount': { 'min': 0.01, // order amount should be > min @@ -451,6 +452,7 @@ Each market is an associative array (aka dictionary) with the following keys: - `active`. A boolean indicating whether or not trading this market is currently possible. - `info`. An associative array of non-common market properties, including fees, rates, limits and other general market information. The internal info array is different for each particular market, its contents depend on the exchange. - `precision`. The amounts of decimal digits accepted in order values by exchanges upon order placement for price, amount and cost. +- `lot`. Order amount should be a multiple of lot. In case of fixed digit precision it equals to `10 ** -precision['amount']`. - `limits`. The minimums and maximums for prices, amounts (volumes) and costs (where cost = price * amount). *The `precision` and `limits` params are currently under heavy development, some of these fields may be missing here and there until the unification process is complete. This does not influence most of the orders but can be significant in extreme cases of very large or very small orders. The `active` flag is not yet supported and/or implemented by all markets.*
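For illustration, a rough sketch (plain Python, not the ccxt API) of using `lot` to snap an order amount to a valid multiple before placing an order; `decimal.Decimal` is used to avoid float rounding error.

```python
from decimal import Decimal

def amount_to_lot(amount, lot):
    """Round an order amount down to the nearest multiple of lot."""
    amount, lot = Decimal(str(amount)), Decimal(str(lot))
    return float((amount // lot) * lot)

print(amount_to_lot(0.123456789, 0.00000001))  # -> 0.12345678
```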
https://api.github.com/repos/ccxt/ccxt/pulls/1676
2018-01-31T20:12:21Z
2018-02-01T08:06:35Z
2018-02-01T08:06:35Z
2018-02-01T08:08:46Z
402
ccxt/ccxt
13,516
No pylint in py2.6 tox
diff --git a/docs/project.rst b/docs/project.rst index fa59c1af3eb..5da350cfb71 100644 --- a/docs/project.rst +++ b/docs/project.rst @@ -69,7 +69,6 @@ In order to generate the Sphinx documentation, run the following commands. :: - ./venv/bin/python setup.py docs cd docs make clean html SPHINXBUILD=../venv/bin/sphinx-build diff --git a/setup.cfg b/setup.cfg index 6772479552d..3369f299366 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,8 +2,7 @@ zip_ok = false [aliases] -dev = develop easy_install letsencrypt[testing] -docs = develop easy_install letsencrypt[docs] +dev = develop easy_install letsencrypt[testing,dev] [nosetests] nocapture=1 diff --git a/setup.py b/setup.py index 5501c7dd668..27d1e3fb74b 100755 --- a/setup.py +++ b/setup.py @@ -36,7 +36,8 @@ def read_file(filename, encoding='utf8'): 'M2Crypto', ] -docs_extras = [ +dev_extras = [ + 'pylint>=1.4.0', # upstream #248 'repoze.sphinx.autointerface', 'Sphinx', ] @@ -45,7 +46,6 @@ def read_file(filename, encoding='utf8'): 'coverage', 'nose', 'nosexcover', - 'pylint>=1.4.0', # upstream #248 'tox', ] @@ -69,7 +69,7 @@ def read_file(filename, encoding='utf8'): tests_require=install_requires, test_suite='letsencrypt', extras_require={ - 'docs': docs_extras, + 'dev': dev_extras, 'testing': testing_extras, }, entry_points={ diff --git a/tox.ini b/tox.ini index 4049c78a028..cd81c012526 100644 --- a/tox.ini +++ b/tox.ini @@ -7,13 +7,13 @@ envlist = py26,py27,cover,lint [testenv] commands = - python setup.py dev + python setup.py develop easy_install letsencrypt[testing] python setup.py test -q # -q does not suppress errors [testenv:cover] basepython = python2.7 commands = - python setup.py dev + python setup.py develop easy_install letsencrypt[testing] python setup.py nosetests --with-coverage --cover-min-percentage=66 [testenv:lint]
This might help with some Travis builds failing...
https://api.github.com/repos/certbot/certbot/pulls/234
2015-02-06T23:42:59Z
2015-02-07T22:09:38Z
2015-02-07T22:09:38Z
2016-05-06T19:21:27Z
620
certbot/certbot
2,057
Make the onnx submodule init lazy
diff --git a/src/transformers/onnx/__init__.py b/src/transformers/onnx/__init__.py index a80567e202b0e..7419e8c21b516 100644 --- a/src/transformers/onnx/__init__.py +++ b/src/transformers/onnx/__init__.py @@ -13,6 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. -from .config import EXTERNAL_DATA_FORMAT_SIZE_LIMIT, OnnxConfig, OnnxConfigWithPast, PatchingSpec -from .convert import export, validate_model_outputs -from .utils import ParameterFormat, compute_serialized_parameters_size +from typing import TYPE_CHECKING + +from ..file_utils import _LazyModule + + +_import_structure = { + "config": ["EXTERNAL_DATA_FORMAT_SIZE_LIMIT", "OnnxConfig", "OnnxConfigWithPast", "PatchingSpec"], + "convert": ["export", "validate_model_outputs"], + "utils": ["ParameterFormat", "compute_serialized_parameters_size"], +} + + +if TYPE_CHECKING: + from .config import EXTERNAL_DATA_FORMAT_SIZE_LIMIT, OnnxConfig, OnnxConfigWithPast, PatchingSpec + from .convert import export, validate_model_outputs + from .utils import ParameterFormat, compute_serialized_parameters_size + +else: + import sys + + sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)
# What does this PR do? The `onnx` submodule does not use a lazy init like the other ones. As a result, importing a given model/tokenizer/config imports OnnxConfig, which initializes the onnx submodule completely and in turn imports `PreTrainedModel` and `TFPreTrainedModel` (so PyTorch and TensorFlow). This PR solves the issue by making the init lazy like all the others.
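For background, a stripped-down sketch of the lazy-module pattern (an approximation, not the exact `_LazyModule` from `transformers.file_utils`): attribute access triggers the real import on first use.

```python
import importlib
import types

class LazyModule(types.ModuleType):
    """Approximation: submodules load on first attribute access."""

    def __init__(self, name, import_structure):
        super().__init__(name)
        # Map each exported attribute back to the submodule that defines it.
        self._attr_to_submodule = {
            attr: mod for mod, attrs in import_structure.items() for attr in attrs
        }

    def __getattr__(self, attr):
        if attr not in self._attr_to_submodule:
            raise AttributeError(f"module {self.__name__!r} has no attribute {attr!r}")
        submodule = importlib.import_module(
            "." + self._attr_to_submodule[attr], self.__name__
        )
        value = getattr(submodule, attr)
        setattr(self, attr, value)  # cache, so __getattr__ is skipped next time
        return value
```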
https://api.github.com/repos/huggingface/transformers/pulls/14855
2021-12-20T20:48:48Z
2021-12-21T08:11:26Z
2021-12-21T08:11:26Z
2021-12-21T08:11:26Z
335
huggingface/transformers
12,197
llama.cpp: fix ban_eos_token
diff --git a/modules/llamacpp_model.py b/modules/llamacpp_model.py index 44bb61f72a..951267ef51 100644 --- a/modules/llamacpp_model.py +++ b/modules/llamacpp_model.py @@ -120,7 +120,7 @@ def generate(self, prompt, state, callback=None): logit_processors = LogitsProcessorList() if state['ban_eos_token']: - logit_processors.append(partial(ban_eos_logits_processor, self.model.tokenizer.eos_token_id)) + logit_processors.append(partial(ban_eos_logits_processor, self.model.token_eos())) if state['custom_token_bans']: to_ban = [int(x) for x in state['custom_token_bans'].split(',')]
- [x] I have read the [Contributing guidelines](https://github.com/oobabooga/text-generation-webui/wiki/Contributing-guidelines). *** Fixes this exception when attempting to ban the EOS token with llama.cpp: ``` Traceback (most recent call last): File "/home/cebtenzzre/src/forks/text-generation-webui/modules/text_generation.py", line 381, in generate_reply_custom reply = shared.model.generate(question, state) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/home/cebtenzzre/src/forks/text-generation-webui/modules/llamacpp_model.py", line 135, in generate logit_processors.append(partial(ban_eos_logits_processor, self.model.tokenizer.eos_token_id)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ AttributeError: 'function' object has no attribute 'eos_token_id' ```
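For reference, a minimal sketch of what the banning processor does (token id 2 is a made-up EOS id; the real code partially applies `self.model.token_eos()` exactly as in the patch): it forces the EOS logit to negative infinity so sampling can never select it.

```python
from functools import partial

import numpy as np

def ban_eos_logits_processor(eos_token_id, input_ids, logits):
    # Push the EOS token's logit to -inf so it can never be sampled.
    logits[eos_token_id] = -float("inf")
    return logits

processor = partial(ban_eos_logits_processor, 2)
print(processor(None, np.array([1.0, 0.5, 3.0])))  # -> [  1.   0.5 -inf]
```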
https://api.github.com/repos/oobabooga/text-generation-webui/pulls/3987
2023-09-18T05:01:14Z
2023-09-18T15:15:02Z
2023-09-18T15:15:02Z
2023-09-18T15:15:02Z
176
oobabooga/text-generation-webui
26,464
Remove urllib3-specific kwargs from non-urllib3 branch.
diff --git a/requests/models.py b/requests/models.py index 6600a91a0f..3672b3774d 100644 --- a/requests/models.py +++ b/requests/models.py @@ -544,7 +544,7 @@ def generate(): except AttributeError: # Standard file-like object. while 1: - chunk = self.raw.read(chunk_size, decode_content=True) + chunk = self.raw.read(chunk_size) if not chunk: break yield chunk
As pointed out by @lukesneeringer in #1395, the kwargs in the non-urllib3 branch of the `iter_content` code are urllib3 specific. This PR will finish the split I originally made in #1425. /cc @sigmavirus24 and @lukesneeringer who have both done work that has been rendered unnecessary (though not useless) by this change.
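A simplified sketch of the two branches after this change (not the exact requests source): the urllib3 path can ask for decoded content, while a plain file-like object only accepts a size argument.

```python
import io

def generate(raw, chunk_size=1024):
    if hasattr(raw, "stream"):  # urllib3 HTTPResponse
        yield from raw.stream(chunk_size, decode_content=True)
    else:  # standard file-like object
        while True:
            chunk = raw.read(chunk_size)
            if not chunk:
                break
            yield chunk

print(list(generate(io.BytesIO(b"hello world"), chunk_size=4)))
# -> [b'hell', b'o wo', b'rld']
```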
https://api.github.com/repos/psf/requests/pulls/1441
2013-07-04T09:41:43Z
2013-07-15T13:22:41Z
2013-07-15T13:22:41Z
2021-09-08T23:05:14Z
114
psf/requests
32,602
Fixed #31046 -- Allowed RelatedManager.add()/create()/set() to accept callable values in through_defaults.
diff --git a/django/db/models/fields/related_descriptors.py b/django/db/models/fields/related_descriptors.py index ec603bc8afde0..a9445d5d10768 100644 --- a/django/db/models/fields/related_descriptors.py +++ b/django/db/models/fields/related_descriptors.py @@ -68,6 +68,7 @@ class Child(Model): from django.db.models import Q, signals from django.db.models.query import QuerySet from django.db.models.query_utils import DeferredAttribute +from django.db.models.utils import resolve_callables from django.utils.functional import cached_property @@ -1113,49 +1114,48 @@ def _add_items(self, source_field_name, target_field_name, *objs, through_defaul # source_field_name: the PK fieldname in join table for the source object # target_field_name: the PK fieldname in join table for the target object # *objs - objects to add. Either object instances, or primary keys of object instances. - through_defaults = through_defaults or {} - - # If there aren't any objects, there is nothing to do. - if objs: - target_ids = self._get_target_ids(target_field_name, objs) - db = router.db_for_write(self.through, instance=self.instance) - can_ignore_conflicts, must_send_signals, can_fast_add = self._get_add_plan(db, source_field_name) - if can_fast_add: - self.through._default_manager.using(db).bulk_create([ - self.through(**{ - '%s_id' % source_field_name: self.related_val[0], - '%s_id' % target_field_name: target_id, - }) - for target_id in target_ids - ], ignore_conflicts=True) - return + if not objs: + return - missing_target_ids = self._get_missing_target_ids( - source_field_name, target_field_name, db, target_ids - ) - with transaction.atomic(using=db, savepoint=False): - if must_send_signals: - signals.m2m_changed.send( - sender=self.through, action='pre_add', - instance=self.instance, reverse=self.reverse, - model=self.model, pk_set=missing_target_ids, using=db, - ) + through_defaults = dict(resolve_callables(through_defaults or {})) + target_ids = self._get_target_ids(target_field_name, objs) + db = router.db_for_write(self.through, instance=self.instance) + can_ignore_conflicts, must_send_signals, can_fast_add = self._get_add_plan(db, source_field_name) + if can_fast_add: + self.through._default_manager.using(db).bulk_create([ + self.through(**{ + '%s_id' % source_field_name: self.related_val[0], + '%s_id' % target_field_name: target_id, + }) + for target_id in target_ids + ], ignore_conflicts=True) + return - # Add the ones that aren't there already. - self.through._default_manager.using(db).bulk_create([ - self.through(**through_defaults, **{ - '%s_id' % source_field_name: self.related_val[0], - '%s_id' % target_field_name: target_id, - }) - for target_id in missing_target_ids - ], ignore_conflicts=can_ignore_conflicts) - - if must_send_signals: - signals.m2m_changed.send( - sender=self.through, action='post_add', - instance=self.instance, reverse=self.reverse, - model=self.model, pk_set=missing_target_ids, using=db, - ) + missing_target_ids = self._get_missing_target_ids( + source_field_name, target_field_name, db, target_ids + ) + with transaction.atomic(using=db, savepoint=False): + if must_send_signals: + signals.m2m_changed.send( + sender=self.through, action='pre_add', + instance=self.instance, reverse=self.reverse, + model=self.model, pk_set=missing_target_ids, using=db, + ) + # Add the ones that aren't there already. 
+ self.through._default_manager.using(db).bulk_create([ + self.through(**through_defaults, **{ + '%s_id' % source_field_name: self.related_val[0], + '%s_id' % target_field_name: target_id, + }) + for target_id in missing_target_ids + ], ignore_conflicts=can_ignore_conflicts) + + if must_send_signals: + signals.m2m_changed.send( + sender=self.through, action='post_add', + instance=self.instance, reverse=self.reverse, + model=self.model, pk_set=missing_target_ids, using=db, + ) def _remove_items(self, source_field_name, target_field_name, *objs): # source_field_name: the PK colname in join table for the source object diff --git a/django/db/models/query.py b/django/db/models/query.py index 08a6f421d1c0b..73991df7bbb92 100644 --- a/django/db/models/query.py +++ b/django/db/models/query.py @@ -23,6 +23,7 @@ from django.db.models.functions import Cast, Trunc from django.db.models.query_utils import FilteredRelation, Q from django.db.models.sql.constants import CURSOR, GET_ITERATOR_CHUNK_SIZE +from django.db.models.utils import resolve_callables from django.db.utils import NotSupportedError from django.utils import timezone from django.utils.functional import cached_property, partition @@ -591,8 +592,8 @@ def update_or_create(self, defaults=None, **kwargs): obj, created = self._create_object_from_params(kwargs, params, lock=True) if created: return obj, created - for k, v in defaults.items(): - setattr(obj, k, v() if callable(v) else v) + for k, v in resolve_callables(defaults): + setattr(obj, k, v) obj.save(using=self.db) return obj, False @@ -603,7 +604,7 @@ def _create_object_from_params(self, lookup, params, lock=False): """ try: with transaction.atomic(using=self.db): - params = {k: v() if callable(v) else v for k, v in params.items()} + params = dict(resolve_callables(params)) obj = self.create(**params) return obj, True except IntegrityError as e: diff --git a/django/db/models/utils.py b/django/db/models/utils.py index b966029d638db..989667dc8c8cd 100644 --- a/django/db/models/utils.py +++ b/django/db/models/utils.py @@ -19,3 +19,12 @@ def make_model_tuple(model): "Invalid model reference '%s'. String model references " "must be of the form 'app_label.ModelName'." % model ) + + +def resolve_callables(mapping): + """ + Generate key/value pairs for the given mapping where the values are + evaluated if they're callable. + """ + for k, v in mapping.items(): + yield k, v() if callable(v) else v diff --git a/docs/ref/models/relations.txt b/docs/ref/models/relations.txt index d50e3891dc9ca..2560a8e81cb38 100644 --- a/docs/ref/models/relations.txt +++ b/docs/ref/models/relations.txt @@ -71,7 +71,13 @@ Related objects reference Use the ``through_defaults`` argument to specify values for the new :ref:`intermediate model <intermediary-manytomany>` instance(s), if - needed. + needed. You can use callables as values in the ``through_defaults`` + dictionary and they will be evaluated once before creating any + intermediate instance(s). + + .. versionchanged:: 3.1 + + ``through_defaults`` values can now be callables. .. method:: create(through_defaults=None, **kwargs) @@ -105,7 +111,12 @@ Related objects reference Use the ``through_defaults`` argument to specify values for the new :ref:`intermediate model <intermediary-manytomany>` instance, if - needed. + needed. You can use callables as values in the ``through_defaults`` + dictionary. + + .. versionchanged:: 3.1 + + ``through_defaults`` values can now be callables. .. 
method:: remove(*objs, bulk=True) @@ -193,7 +204,13 @@ Related objects reference Use the ``through_defaults`` argument to specify values for the new :ref:`intermediate model <intermediary-manytomany>` instance(s), if - needed. + needed. You can use callables as values in the ``through_defaults`` + dictionary and they will be evaluated once before creating any + intermediate instance(s). + + .. versionchanged:: 3.1 + + ``through_defaults`` values can now be callables. .. note:: diff --git a/docs/releases/3.1.txt b/docs/releases/3.1.txt index dc16b95f79f25..b4179883ab163 100644 --- a/docs/releases/3.1.txt +++ b/docs/releases/3.1.txt @@ -209,6 +209,10 @@ Models * :attr:`.CheckConstraint.check` now supports boolean expressions. +* The :meth:`.RelatedManager.add`, :meth:`~.RelatedManager.create`, and + :meth:`~.RelatedManager.set` methods now accept callables as values in the + ``through_defaults`` argument. + Pagination ~~~~~~~~~~ diff --git a/tests/m2m_through/tests.py b/tests/m2m_through/tests.py index deb9015ba630a..dd40e9760c57f 100644 --- a/tests/m2m_through/tests.py +++ b/tests/m2m_through/tests.py @@ -62,6 +62,40 @@ def test_add_on_m2m_with_intermediate_model(self): self.assertSequenceEqual(self.rock.members.all(), [self.bob]) self.assertEqual(self.rock.membership_set.get().invite_reason, 'He is good.') + def test_add_on_m2m_with_intermediate_model_callable_through_default(self): + def invite_reason_callable(): + return 'They were good at %s' % datetime.now() + + self.rock.members.add( + self.bob, self.jane, + through_defaults={'invite_reason': invite_reason_callable}, + ) + self.assertSequenceEqual(self.rock.members.all(), [self.bob, self.jane]) + self.assertEqual( + self.rock.membership_set.filter( + invite_reason__startswith='They were good at ', + ).count(), + 2, + ) + # invite_reason_callable() is called once. + self.assertEqual( + self.bob.membership_set.get().invite_reason, + self.jane.membership_set.get().invite_reason, + ) + + def test_set_on_m2m_with_intermediate_model_callable_through_default(self): + self.rock.members.set( + [self.bob, self.jane], + through_defaults={'invite_reason': lambda: 'Why not?'}, + ) + self.assertSequenceEqual(self.rock.members.all(), [self.bob, self.jane]) + self.assertEqual( + self.rock.membership_set.filter( + invite_reason__startswith='Why not?', + ).count(), + 2, + ) + def test_add_on_m2m_with_intermediate_model_value_required(self): self.rock.nodefaultsnonulls.add(self.jim, through_defaults={'nodefaultnonull': 1}) self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1) @@ -75,6 +109,17 @@ def test_create_on_m2m_with_intermediate_model(self): self.assertSequenceEqual(self.rock.members.all(), [annie]) self.assertEqual(self.rock.membership_set.get().invite_reason, 'She was just awesome.') + def test_create_on_m2m_with_intermediate_model_callable_through_default(self): + annie = self.rock.members.create( + name='Annie', + through_defaults={'invite_reason': lambda: 'She was just awesome.'}, + ) + self.assertSequenceEqual(self.rock.members.all(), [annie]) + self.assertEqual( + self.rock.membership_set.get().invite_reason, + 'She was just awesome.', + ) + def test_create_on_m2m_with_intermediate_model_value_required(self): self.rock.nodefaultsnonulls.create(name='Test', through_defaults={'nodefaultnonull': 1}) self.assertEqual(self.rock.testnodefaultsornulls_set.get().nodefaultnonull, 1)
ticket-31046
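For reviewers skimming the diff: the heart of the change is the new `resolve_callables()` helper in `django/db/models/utils.py`, which is runnable on its own.

```python
def resolve_callables(mapping):
    """Yield key/value pairs, calling any value that is callable."""
    for k, v in mapping.items():
        yield k, v() if callable(v) else v

# through_defaults values may now be callables; each is evaluated once:
defaults = {"invite_reason": lambda: "Why not?", "nodefaultnonull": 1}
print(dict(resolve_callables(defaults)))
# -> {'invite_reason': 'Why not?', 'nodefaultnonull': 1}
```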
https://api.github.com/repos/django/django/pulls/12161
2019-11-29T17:06:08Z
2019-12-03T19:07:46Z
2019-12-03T19:07:46Z
2019-12-03T20:24:06Z
2,888
django/django
51,195
ImportError: No module named 'requests.packages.urllib3.util'
diff --git a/setup.py b/setup.py index 879d94a636..c25b36805d 100755 --- a/setup.py +++ b/setup.py @@ -21,7 +21,8 @@ 'requests.packages.urllib3', 'requests.packages.urllib3.packages', 'requests.packages.urllib3.contrib', - 'requests.packages.urllib3.packages.ssl_match_hostname' + 'requests.packages.urllib3.util', + 'requests.packages.urllib3.packages.ssl_match_hostname', ] requires = []
I'm using Python 3.4. I get this error from `import requests`: ``` Traceback (most recent call last): File "devwiki.py", line 4, in <module> import requests File "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests-2.3.0-py3.4.egg/requests/__init__.py", line 58, in <module> from . import utils File "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests-2.3.0-py3.4.egg/requests/utils.py", line 25, in <module> from .compat import parse_http_list as _parse_list_header File "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests-2.3.0-py3.4.egg/requests/compat.py", line 7, in <module> from .packages import chardet File "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests-2.3.0-py3.4.egg/requests/packages/__init__.py", line 3, in <module> from . import urllib3 File "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests-2.3.0-py3.4.egg/requests/packages/urllib3/__init__.py", line 16, in <module> from .connectionpool import ( File "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests-2.3.0-py3.4.egg/requests/packages/urllib3/connectionpool.py", line 36, in <module> from .connection import ( File "/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/site-packages/requests-2.3.0-py3.4.egg/requests/packages/urllib3/connection.py", line 43, in <module> from .util import ( ImportError: No module named 'requests.packages.urllib3.util' ```
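One way to avoid this whole class of missed-subpackage bugs (an alternative approach, not what this patch does) is to let setuptools discover packages automatically instead of listing them by hand:

```python
from setuptools import find_packages, setup

setup(
    name="example-project",  # hypothetical project name
    packages=find_packages(exclude=["tests", "tests.*"]),
)
```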
https://api.github.com/repos/psf/requests/pulls/2026
2014-04-29T17:52:26Z
2014-05-02T19:09:39Z
2014-05-02T19:09:39Z
2021-09-09T00:01:26Z
125
psf/requests
32,712
bump CI steps and dependencies
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5436e363e8..701632e74d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -13,7 +13,7 @@ jobs: - uses: actions/checkout@v2 with: persist-credentials: false - - uses: TrueBrain/actions-flake8@9a43ff1b2c7b96f3edffc48a49973ce3de116ba1 + - uses: TrueBrain/actions-flake8@c2deca24d388aa5aedd6478332aa9df4600b5eac # v2.1 # mirrored at https://github.com/mitmproxy/mitmproxy/settings/actions lint-local: if: github.event_name == 'push' @@ -171,8 +171,8 @@ jobs: with: python-version: '3.9' - run: | - wget -q https://github.com/gohugoio/hugo/releases/download/v0.83.1/hugo_extended_0.83.1_Linux-64bit.deb - echo "9487ea3b80f8ddd0ba600d42850b96b6a8b0bb9b41bc08cb285635ebbd41328d hugo_extended_0.83.1_Linux-64bit.deb" | sha256sum -c + wget -q https://github.com/gohugoio/hugo/releases/download/v0.88.1/hugo_extended_0.88.1_Linux-64bit.deb + echo "865ab9a930e0a9e4957e7dbfdf91c32847324f022e33271b5661d5717600bc2b hugo_extended_0.88.1_Linux-64bit.deb" | sha256sum -c sudo dpkg -i hugo*.deb - run: pip install -e .[dev] - run: ./docs/build.py @@ -210,8 +210,8 @@ jobs: with: name: binaries.linux path: release/dist - - uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 - - uses: docker/setup-buildx-action@b1f1f719c7cd5364be7c82e366366da322d01f7c + - uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # v1.2.0 + - uses: docker/setup-buildx-action@b1f1f719c7cd5364be7c82e366366da322d01f7c # v1.6.0 - run: pip install -e .[dev] - run: python release/cibuild.py build - run: python release/cibuild.py upload
The codecov bash uploader is deprecated and will experience deliberate brown-outs: https://about.codecov.io/blog/introducing-codecovs-new-uploader/ This PR bumps all external/3rd-party CI actions to their most recent releases. I've already allowed the new commit hashes (in addition to the old/existing ones). After merging, I will clean up the old (then unused) ones.
https://api.github.com/repos/mitmproxy/mitmproxy/pulls/4875
2021-10-20T17:54:30Z
2021-10-21T20:10:23Z
2021-10-21T20:10:23Z
2021-10-21T20:10:26Z
683
mitmproxy/mitmproxy
28,241
Test on Python 3.10
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index daae6668b0..75231110ae 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -14,8 +14,9 @@ jobs: runs-on: ubuntu-latest strategy: + fail-fast: false matrix: - python-version: [3.5, 3.6, 3.7, 3.8, 3.9, pypy3] + python-version: [3.5, 3.6, 3.7, 3.8, 3.9, "3.10", pypy3] steps: - uses: actions/checkout@v2 diff --git a/you-get.json b/you-get.json index e98e2e8a4a..a9ef1dd594 100644 --- a/you-get.json +++ b/you-get.json @@ -25,6 +25,8 @@ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", "Topic :: Internet", "Topic :: Internet :: WWW/HTTP", "Topic :: Multimedia",
“3.10” must be quoted or yaml will treat it as 3.1.
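The gotcha is easy to reproduce with PyYAML, which exhibits the same YAML 1.1 scalar resolution the unquoted matrix entry hits:

```python
import yaml  # PyYAML, used here only to demonstrate the parsing gotcha

print(yaml.safe_load("python-version: [3.9, 3.10, '3.10']"))
# -> {'python-version': [3.9, 3.1, '3.10']}
```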
https://api.github.com/repos/soimort/you-get/pulls/2932
2021-10-24T05:58:40Z
2021-11-02T20:41:54Z
2021-11-02T20:41:54Z
2022-09-17T16:51:57Z
322
soimort/you-get
21,405
[TikTok] Properly pass cookies to formats
diff --git a/yt_dlp/extractor/tiktok.py b/yt_dlp/extractor/tiktok.py index ac537643abd..c44fd49b0b9 100644 --- a/yt_dlp/extractor/tiktok.py +++ b/yt_dlp/extractor/tiktok.py @@ -161,6 +161,10 @@ def extract_addr(addr, add_meta={}): })) self._remove_duplicate_formats(formats) + auth_cookie = self._get_cookies(self._WEBPAGE_HOST).get('sid_tt') + if auth_cookie: + for f in formats: + self._set_cookie(f['url'], 'sid_tt', auth_cookie.value) self._sort_formats(formats, ('quality', 'codec', 'size', 'br')) thumbnails = []
## Please follow the guide below - You will be asked some questions, please read them **carefully** and answer honestly - Put an `x` into all the boxes [ ] relevant to your *pull request* (like that [x]) - Use *Preview* tab to see how your *pull request* will actually look like --- ### Before submitting a *pull request* make sure you have: - [x] At least skimmed through [contributing guidelines](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#developer-instructions) including [yt-dlp coding conventions](https://github.com/yt-dlp/yt-dlp/blob/master/CONTRIBUTING.md#yt-dlp-coding-conventions) - [x] [Searched](https://github.com/yt-dlp/yt-dlp/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests - [x] Checked the code with [flake8](https://pypi.python.org/pypi/flake8) ### In order to be accepted and merged into yt-dlp each piece of code must be in public domain or released under [Unlicense](http://unlicense.org/). Check one of the following options: - [x] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/) - [ ] I am not the original author of this code but it is in public domain or released under [Unlicense](http://unlicense.org/) (provide reliable evidence) ### What is the purpose of your *pull request*? - [x] Bug fix - [ ] Improvement - [ ] New extractor - [ ] New feature --- ### Description of your *pull request* and other information Explanation of your *pull request* in arbitrary form goes here. Please make sure the description explains the purpose and effect of your *pull request* and is worded well enough to be understood. Provide as much context and examples as possible. Closes https://github.com/yt-dlp/yt-dlp/issues/2166
https://api.github.com/repos/yt-dlp/yt-dlp/pulls/2171
2021-12-30T06:17:45Z
2021-12-31T20:10:46Z
2021-12-31T20:10:46Z
2021-12-31T20:10:46Z
182
yt-dlp/yt-dlp
8,112
Ensure --fullchain-path gets put under paths in --help all
diff --git a/certbot/cli.py b/certbot/cli.py index 5b8711da6c4..0faf4a7c6ba 100644 --- a/certbot/cli.py +++ b/certbot/cli.py @@ -1155,7 +1155,7 @@ def _paths_parser(helpful): default_cp = None if verb == "certonly": default_cp = flag_default("auth_chain_path") - add(["install", "paths"], "--fullchain-path", default=default_cp, type=os.path.abspath, + add(["paths", "install"], "--fullchain-path", default=default_cp, type=os.path.abspath, help="Accompanying path to a full certificate chain (cert plus chain).") add("paths", "--chain-path", default=default_cp, type=os.path.abspath, help="Accompanying path to a certificate chain.")
Addresses #4331
https://api.github.com/repos/certbot/certbot/pulls/4420
2017-03-27T19:21:24Z
2017-03-28T21:08:57Z
2017-03-28T21:08:57Z
2022-07-19T18:06:21Z
196
certbot/certbot
3,676
Add WordPress.org
diff --git a/data.json b/data.json index 816996fa5..d4b5852fa 100644 --- a/data.json +++ b/data.json @@ -1270,6 +1270,14 @@ "username_claimed": "blue", "username_unclaimed": "noonewouldeverusethis7" }, + "WordPressOrg": { + "errorType": "response_url", + "errorUrl": "https://wordpress.org", + "url": "https://profiles.wordpress.org/{}/", + "urlMain": "https://wordpress.org/", + "username_claimed": "blue", + "username_unclaimed": "noonewouldeverusethis7" + }, "YouNow": { "errorMsg": "No users found", "errorType": "message", @@ -1373,4 +1381,4 @@ "username_claimed": "blue", "username_unclaimed": "noonewouldeverusethis7" } -} \ No newline at end of file +}
This is different from the dot-com version, which is for blogs.
https://api.github.com/repos/sherlock-project/sherlock/pulls/275
2019-07-22T22:01:24Z
2019-07-23T07:00:29Z
2019-07-23T07:00:29Z
2019-07-23T08:43:00Z
239
sherlock-project/sherlock
36,530
Revert "This is just for my practice :))))🕺"
diff --git a/VoiceRepeater/readme.md b/VoiceRepeater/readme.md index 201884f8b0..725aa607cc 100644 --- a/VoiceRepeater/readme.md +++ b/VoiceRepeater/readme.md @@ -9,7 +9,3 @@ ### And plays it! ### Requirements: Python, SpeechRecognition and playsound - - - -I am just trying to contribute please let me do this.....thank you :)
https://api.github.com/repos/geekcomputers/Python/pulls/1958
2023-08-08T11:18:35Z
2023-08-08T11:19:57Z
2023-08-08T11:19:57Z
2023-08-08T12:26:13Z
103
geekcomputers/Python
31,747
DOC make documentation consistent regarding types in _encoders.py
diff --git a/sklearn/preprocessing/_encoders.py b/sklearn/preprocessing/_encoders.py index d3f557d2993cb..65e86e512e381 100644 --- a/sklearn/preprocessing/_encoders.py +++ b/sklearn/preprocessing/_encoders.py @@ -406,7 +406,7 @@ def fit(self, X, y=None): Parameters ---------- - X : array-like, shape [n_samples, n_features] + X : array-like of shape (n_samples, n_features) The data to determine the categories of each feature. y : None @@ -431,7 +431,7 @@ def fit_transform(self, X, y=None): Parameters ---------- - X : array-like, shape [n_samples, n_features] + X : array-like of shape (n_samples, n_features) The data to encode. y : None @@ -440,8 +440,10 @@ def fit_transform(self, X, y=None): Returns ------- - X_out : sparse matrix if sparse=True else a 2-d array - Transformed input. + X_out : {ndarray, sparse matrix} of shape \ + (n_samples, n_encoded_features) + Transformed input. If `sparse=True`, a sparse matrix will be + returned. """ self._validate_keywords() return super().fit_transform(X, y) @@ -452,13 +454,15 @@ def transform(self, X): Parameters ---------- - X : array-like, shape [n_samples, n_features] + X : array-like of shape (n_samples, n_features) The data to encode. Returns ------- - X_out : sparse matrix if sparse=True else a 2-d array - Transformed input. + X_out : {ndarray, sparse matrix} of shape \ + (n_samples, n_encoded_features) + Transformed input. If `sparse=True`, a sparse matrix will be + returned. """ check_is_fitted(self) # validation of X happens in _check_X called by _transform @@ -522,12 +526,13 @@ def inverse_transform(self, X): Parameters ---------- - X : array-like or sparse matrix, shape [n_samples, n_encoded_features] + X : {array-like, sparse matrix} of shape \ + (n_samples, n_encoded_features) The transformed data. Returns ------- - X_tr : array-like, shape [n_samples, n_features] + X_tr : ndarray of shape (n_samples, n_features) Inverse transformed array. """ check_is_fitted(self) @@ -745,7 +750,7 @@ def fit(self, X, y=None): Parameters ---------- - X : array-like, shape [n_samples, n_features] + X : array-like of shape (n_samples, n_features) The data to determine the categories of each feature. y : None @@ -814,12 +819,12 @@ def transform(self, X): Parameters ---------- - X : array-like, shape [n_samples, n_features] + X : array-like of shape (n_samples, n_features) The data to encode. Returns ------- - X_out : sparse matrix or a 2-d array + X_out : ndarray of shape (n_samples, n_features) Transformed input. """ X_int, X_mask = self._transform(X, handle_unknown=self.handle_unknown, @@ -841,12 +846,12 @@ def inverse_transform(self, X): Parameters ---------- - X : array-like or sparse matrix, shape [n_samples, n_encoded_features] + X : {array-like, sparse matrix} of shape (n_samples, n_features) The transformed data. Returns ------- - X_tr : array-like, shape [n_samples, n_features] + X_tr : ndarray of shape (n_samples, n_features) Inverse transformed array. """ check_is_fitted(self)
Some nitpicks regarding the documentation style
https://api.github.com/repos/scikit-learn/scikit-learn/pulls/19876
2021-04-13T10:01:15Z
2021-04-13T10:27:14Z
2021-04-13T10:27:14Z
2021-04-13T10:27:14Z
912
scikit-learn/scikit-learn
46,522
[chore] fix rawsec url
diff --git a/Insecure Deserialization/PHP.md b/Insecure Deserialization/PHP.md index 57c513b187..6e6aabe7b5 100644 --- a/Insecure Deserialization/PHP.md +++ b/Insecure Deserialization/PHP.md @@ -215,7 +215,6 @@ A valid PHAR includes four elements: $phar->stopBuffering(); ``` - ## Real world examples * [Vanilla Forums ImportController index file_exists Unserialize Remote Code Execution Vulnerability - Steven Seeley](https://hackerone.com/reports/410237) @@ -231,9 +230,9 @@ A valid PHAR includes four elements: * [PHP Generic Gadget - ambionics security](https://www.ambionics.io/blog/php-generic-gadget-chains) * [POC2009 Shocking News in PHP Exploitation](https://www.owasp.org/images/f/f6/POC2009-ShockingNewsInPHPExploitation.pdf) * [PHP Internals Book - Serialization](http://www.phpinternalsbook.com/classes_objects/serialization.html) -* [TSULOTT Web challenge write-up from MeePwn CTF 1st 2017 by Rawsec](https://rawsec.ml/en/meepwn-2017-write-ups/#TSULOTT-Web) +* [TSULOTT Web challenge write-up from MeePwn CTF 1st 2017 by Rawsec](https://blog.raw.pm/en/meepwn-2017-write-ups/#TSULOTT-Web) * [CTF writeup: PHP object injection in kaspersky CTF](https://medium.com/@jaimin_gohel/ctf-writeup-php-object-injection-in-kaspersky-ctf-28a68805610d) -* [Jack The Ripper Web challeneg Write-up from ECSC 2019 Quals Team France by Rawsec](https://rawsec.ml/en/ecsc-2019-quals-write-ups/#164-Jack-The-Ripper-Web) +* [Jack The Ripper Web challeneg Write-up from ECSC 2019 Quals Team France by Rawsec](https://blog.raw.pm/en/ecsc-2019-quals-write-ups/#164-Jack-The-Ripper-Web) * [Rusty Joomla RCE Unserialize overflow - Alessandro Groppo - October 3, 2019](https://blog.hacktivesecurity.com/index.php/2019/10/03/rusty-joomla-rce/) * [PHP Pop Chains - Achieving RCE with POP chain exploits. - Vickie Li - September 3, 2020](https://vkili.github.io/blog/insecure%20deserialization/pop-chains/) * [How to exploit the PHAR Deserialization Vulnerability - Alexandru Postolache - May 29, 2020](https://pentest-tools.com/blog/exploit-phar-deserialization-vulnerability/)
Fixing the domain since the old one got compromised.
https://api.github.com/repos/swisskyrepo/PayloadsAllTheThings/pulls/615
2023-01-11T22:20:08Z
2023-01-12T08:57:27Z
2023-01-12T08:57:27Z
2023-01-12T13:26:03Z
633
swisskyrepo/PayloadsAllTheThings
8,535
Fix EyeEm/CreativeMarket/EVE Online False Positives
diff --git a/data.json b/data.json index 84412975e..94e7707f6 100644 --- a/data.json +++ b/data.json @@ -153,7 +153,8 @@ "urlMain": "https://coroflot.com/" }, "CreativeMarket": { - "errorType": "status_code", + "errorType": "response_url", + "errorUrl": "https://www.creativemarket.com/", "url": "https://creativemarket.com/{}", "urlMain": "https://creativemarket.com/" }, @@ -197,9 +198,9 @@ "urlMain": "https://dribbble.com/" }, "EVE Online": { - "errorMsg": "No results found with your search...", - "errorType": "message", - "url": "https://evewho.com/search/{}", + "errorType": "response_url", + "errorUrl": "https://eveonline.com", + "url": "https://evewho.com/pilot/{}/", "urlMain": "https://eveonline.com" }, "Ebay": { @@ -220,7 +221,8 @@ "urlMain": "https://www.etsy.com/" }, "EyeEm": { - "errorType": "status_code", + "errorType": "response_url", + "errorUrl": "https://www.eyeem.com/", "url": "https://www.eyeem.com/u/{}", "urlMain": "https://www.eyeem.com/" }, diff --git a/tests/all.py b/tests/all.py index 3d06281de..c486c6085 100644 --- a/tests/all.py +++ b/tests/all.py @@ -111,7 +111,8 @@ def test_coverage_false_via_response_url(self): self.username_check(['noonewouldeverusethis7'], ["Pinterest", "iMGSRC.RU", "Pastebin", - "WordPress", "devRant", "ImageShack", "MeetMe" + "WordPress", "devRant", "ImageShack", "MeetMe", + "EyeEm", "CreativeMarket", "EVE Online" ], exist_check=False ) @@ -134,7 +135,8 @@ def test_coverage_true_via_response_url(self): self.username_check(['blue'], ["Pinterest", "iMGSRC.RU", "Pastebin", - "WordPress", "devRant", "ImageShack", "MeetMe" + "WordPress", "devRant", "ImageShack", "MeetMe", + "EyeEm", "CreativeMarket", "EVE Online" ], exist_check=True )
This is the fix for #140. Change the EyeEm/CreativeMarket/EVE Online detection methods to use the newly refurbished "response_url" detection method.
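For context, a rough sketch of the "response_url" idea (not Sherlock's actual implementation): sites that redirect unknown usernames to a generic landing page reveal non-existence through the final URL after redirects.

```python
import requests

def username_exists(url_template, error_url, username):
    response = requests.get(url_template.format(username), allow_redirects=True)
    # Landing on the error page means the profile does not exist.
    return response.url != error_url

# e.g. username_exists("https://www.eyeem.com/u/{}", "https://www.eyeem.com/", "blue")
```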
https://api.github.com/repos/sherlock-project/sherlock/pulls/144
2019-01-24T01:19:09Z
2019-01-24T01:22:54Z
2019-01-24T01:22:54Z
2019-02-10T16:40:55Z
620
sherlock-project/sherlock
36,310
Python general purpose ML library added
diff --git a/README.md b/README.md index 7566d68f..f3f41c47 100644 --- a/README.md +++ b/README.md @@ -767,6 +767,7 @@ on MNIST digits[DEEP LEARNING] <a name="python-general-purpose"></a> #### General-Purpose Machine Learning +* [CNTK](https://github.com/Microsoft/CNTK) - Microsoft Cognitive Toolkit (CNTK), an open source deep-learning toolkit. Documentation can be found [here](https://docs.microsoft.com/cognitive-toolkit/). * [auto_ml](https://github.com/ClimbsRocks/auto_ml) - Automated machine learning for production and analytics. Lets you focus on the fun parts of ML, while outputting production-ready code, and detailed analytics of your dataset and results. Includes support for NLP, XGBoost, LightGBM, and soon, deep learning. * [machine learning](https://github.com/jeff1evesque/machine-learning) - automated build consisting of a [web-interface](https://github.com/jeff1evesque/machine-learning#web-interface), and set of [programmatic-interface](https://github.com/jeff1evesque/machine-learning#programmatic-interface) API, for support vector machines. Corresponding dataset(s) are stored into a SQL database, then generated model(s) used for prediction(s), are stored into a NoSQL datastore. * [XGBoost](https://github.com/dmlc/xgboost) - Python bindings for eXtreme Gradient Boosting (Tree) Library
Added **Microsoft's CNTK** library (a Deep Learning toolkit) under Python.
https://api.github.com/repos/josephmisiti/awesome-machine-learning/pulls/438
2017-10-16T03:28:16Z
2017-10-22T10:09:39Z
2017-10-22T10:09:39Z
2017-10-22T10:09:39Z
349
josephmisiti/awesome-machine-learning
51,983
Add tab and carriage return to invalid filename chars
diff --git a/modules/images.py b/modules/images.py index 38aa933d6e5..ba3c43a4509 100644 --- a/modules/images.py +++ b/modules/images.py @@ -318,7 +318,7 @@ def resize(im, w, h): return res -invalid_filename_chars = '<>:"/\\|?*\n' +invalid_filename_chars = '<>:"/\\|?*\n\r\t' invalid_filename_prefix = ' ' invalid_filename_postfix = ' .' re_nonletters = re.compile(r'[\s' + string.punctuation + ']+')
Closes #4685 ## Checklist: - [x] I have read [contributing wiki page](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Contributing) - [x] I have performed a self-review of my own code - [x] My code follows the [style guidelines](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Contributing#code-style) - [x] My code passes [tests](https://github.com/AUTOMATIC1111/stable-diffusion-webui/wiki/Tests)
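A small sketch of the effect (plain Python, not the webui helper itself): stripping every character in the updated blacklist, now including `\r` and `\t`.

```python
invalid_filename_chars = '<>:"/\\|?*\n\r\t'

def strip_invalid(name):
    # Delete each blacklisted character via a str.translate mapping.
    return name.translate({ord(c): None for c in invalid_filename_chars})

print(strip_invalid("a\tb\rc?d"))  # -> abcd
```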
https://api.github.com/repos/AUTOMATIC1111/stable-diffusion-webui/pulls/12327
2023-08-05T03:00:30Z
2023-08-05T04:47:07Z
2023-08-05T04:47:07Z
2024-03-04T23:28:10Z
133
AUTOMATIC1111/stable-diffusion-webui
40,054
Fix broken anchors
diff --git a/CppCoreGuidelines.md b/CppCoreGuidelines.md index 2eeb06c42..fd4c4cbad 100644 --- a/CppCoreGuidelines.md +++ b/CppCoreGuidelines.md @@ -42,7 +42,7 @@ You can [read an explanation of the scope and structure of this Guide](#S-abstra * [T: Templates and generic programming](#S-templates) * [CPL: C-style programming](#S-cpl) * [SF: Source files](#S-source) -* [SL: The Standard Library](#S-stdlib) +* [SL: The Standard Library](#sl-the-standard-library) Supporting sections: @@ -50,7 +50,7 @@ Supporting sections: * [NR: Non-Rules and myths](#S-not) * [RF: References](#S-references) * [Pro: Profiles](#S-profile) -* [GSL: Guidelines support library](#S-gsl) +* [GSL: Guidelines support library](#gsl-guidelines-support-library) * [NL: Naming and layout suggestions](#S-naming) * [FAQ: Answers to frequently asked questions](#S-faq) * [Appendix A: Libraries](#S-libraries) @@ -254,7 +254,7 @@ Take the time to understand the implications of a guideline rule on your program These guidelines are designed according to the "subset of superset" principle ([Stroustrup05](#Stroustrup05)). They do not simply define a subset of C++ to be used (for reliability, safety, performance, or whatever). -Instead, they strongly recommend the use of a few simple "extensions" ([library components](#S-gsl)) +Instead, they strongly recommend the use of a few simple "extensions" ([library components](#gsl-guidelines-support-library)) that make the use of the most error-prone features of C++ redundant, so that they can be banned (in our set of rules). The rules emphasize static type safety and resource safety. @@ -435,7 +435,7 @@ Recommended information sources can be found in [the references](#S-references). * [T: Templates and generic programming](#S-templates) * [CPL: C-style programming](#S-cpl) * [SF: Source files](#S-source) -* [SL: The Standard Library](#S-stdlib) +* [SL: The Standard Library](#sl-the-standard-library) Supporting sections: @@ -443,7 +443,7 @@ Supporting sections: * [NR: Non-Rules and myths](#S-not) * [RF: References](#S-references) * [Pro: Profiles](#S-profile) -* [GSL: Guidelines support library](#S-gsl) +* [GSL: Guidelines support library](#gsl-guidelines-support-library) * [NL: Naming and layout suggestions](#S-naming) * [FAQ: Answers to frequently asked questions](#S-faq) * [Appendix A: Libraries](#S-libraries) @@ -537,7 +537,7 @@ A well-designed library expresses intent (what is to be done, rather than just h A C++ programmer should know the basics of the standard library, and use it where appropriate. Any programmer should know the basics of the foundation libraries of the project being worked on, and use them appropriately. -Any programmer using these guidelines should know the [guidelines support library](#S-gsl), and use it appropriately. +Any programmer using these guidelines should know the [guidelines support library](#gsl-guidelines-support-library), and use it appropriately. 
##### Example @@ -628,8 +628,8 @@ The last variant makes it clear that we are not interested in the order in which A programmer should be familiar with -* [The guidelines support library](#S-gsl) -* [The ISO C++ Standard Library](#S-stdlib) +* [The guidelines support library](#gsl-guidelines-support-library) +* [The ISO C++ Standard Library](#sl-the-standard-library) * Whatever foundation libraries are used for the current project(s) ##### Note @@ -1006,7 +1006,7 @@ Combine this with enforcement of [the type and bounds profiles](#SS-force) and y * Look at pointers: Classify them into non-owners (the default) and owners. Where feasible, replace owners with standard-library resource handles (as in the example above). - Alternatively, mark an owner as such using `owner` from [the GSL](#S-gsl). + Alternatively, mark an owner as such using `owner` from [the GSL](#gsl-guidelines-support-library). * Look for naked `new` and `delete` * Look for known resource allocating functions returning raw pointers (such as `fopen`, `malloc`, and `strdup`) @@ -1200,8 +1200,8 @@ You need a reason not to use the standard library (or whatever foundational libr By default use -* The [ISO C++ Standard Library](#S-stdlib) -* The [Guidelines Support Library](#S-gsl) +* The [ISO C++ Standard Library](#sl-the-standard-library) +* The [Guidelines Support Library](#gsl-guidelines-support-library) ##### Note @@ -1560,7 +1560,7 @@ Some preconditions can be expressed as assertions. For example: Ideally, that `Expects(x >= 0)` should be part of the interface of `sqrt()` but that's not easily done. For now, we place it in the definition (function body). -**References**: `Expects()` is described in [GSL](#S-gsl). +**References**: `Expects()` is described in [GSL](#gsl-guidelines-support-library). ##### Note @@ -1868,7 +1868,7 @@ However, that is less elegant and often less efficient than returning the object so use smart pointers only if reference semantics are needed. **Alternative**: Sometimes older code can't be modified because of ABI compatibility requirements or lack of resources. -In that case, mark owning pointers using `owner` from the [guidelines support library](#S-gsl): +In that case, mark owning pointers using `owner` from the [guidelines support library](#gsl-guidelines-support-library): owner<X*> compute(args) // It is now clear that ownership is transferred { @@ -1918,7 +1918,7 @@ By stating the intent in source, implementers and tools can provide better diagn ##### Note -`not_null` is defined in the [guidelines support library](#S-gsl). +`not_null` is defined in the [guidelines support library](#gsl-guidelines-support-library). ##### Note @@ -1984,7 +1984,7 @@ This `draw2()` passes the same amount of information to `draw()`, but makes the ##### Exception Use `zstring` and `czstring` to represent C-style, zero-terminated strings. -But when doing so, use `std::string_view` or `span<char>` from the [GSL](#S-gsl) to prevent range errors. +But when doing so, use `std::string_view` or `span<char>` from the [GSL](#gsl-guidelines-support-library) to prevent range errors. ##### Enforcement @@ -3438,7 +3438,7 @@ better **Also**: Assume that a `T*` obtained from a smart pointer to `T` (e.g., `unique_ptr<T>`) points to a single element. 
-**See also**: [Support library](#S-gsl) +**See also**: [Support library](#gsl-guidelines-support-library) **See also**: [Do not pass an array as a single pointer](#Ri-array) @@ -3530,7 +3530,7 @@ A `span<T>` object does not own its elements and is so small that it can be pass Passing a `span` object as an argument is exactly as efficient as passing a pair of pointer arguments or passing a pointer and an integer count. -**See also**: [Support library](#S-gsl) +**See also**: [Support library](#gsl-guidelines-support-library) ##### Enforcement @@ -3563,7 +3563,7 @@ When I call `length(s)` should I check if `s` is `nullptr` first? Should the imp `zstring` does not represent ownership. -**See also**: [Support library](#S-gsl) +**See also**: [Support library](#gsl-guidelines-support-library) ### <a name="Rf-unique_ptr"></a>F.26: Use a `unique_ptr<T>` to transfer ownership where a pointer is needed @@ -13948,7 +13948,7 @@ The ideal is zero-overhead generalization. * Libraries: Use libraries with good interfaces. If no library is available build one yourself and imitate the interface style from a good library. -The [standard library](#S-stdlib) is a good first place to look for inspiration. +The [standard library](#sl-the-standard-library) is a good first place to look for inspiration. * Isolation: Isolate your code from messy and/or old-style code by providing an interface of your choosing to it. This is sometimes called "providing a wrapper" for the useful/necessary but messy code. @@ -16419,7 +16419,7 @@ Better: ##### Reason -`finally` from the [GSL](#S-gsl) is less verbose and harder to get wrong than `try`/`catch`. +`finally` from the [GSL](#gsl-guidelines-support-library) is less verbose and harder to get wrong than `try`/`catch`. ##### Example @@ -17412,7 +17412,7 @@ Flag template type arguments without concepts ##### Reason - "Standard" concepts (as provided by the [GSL](#S-gsl) and the ISO standard itself) + "Standard" concepts (as provided by the [GSL](#gsl-guidelines-support-library) and the ISO standard itself) save us the work of thinking up our own concepts, are better thought out than we can manage to do in a hurry, and improve interoperability. ##### Note
* Standard library and GSL anchors do not work anymore.
https://api.github.com/repos/isocpp/CppCoreGuidelines/pulls/2038
2023-02-15T18:40:34Z
2023-02-17T16:31:07Z
2023-02-17T16:31:07Z
2023-02-17T16:31:22Z
2,247
isocpp/CppCoreGuidelines
15,613
lazy import PyMuPDF
diff --git a/ppocr/utils/utility.py b/ppocr/utils/utility.py index 47461d7d5e..91b74f022d 100755 --- a/ppocr/utils/utility.py +++ b/ppocr/utils/utility.py @@ -107,7 +107,8 @@ def check_and_read(img_path): imgvalue = frame[:, :, ::-1] return imgvalue, True, False elif os.path.basename(img_path)[-3:].lower() == 'pdf': - import fitz + from paddle.utils import try_import + try_import('fitz') from PIL import Image imgs = [] with fitz.open(img_path) as pdf: diff --git a/ppstructure/pdf2word/pdf2word.py b/ppstructure/pdf2word/pdf2word.py index 5c8f8f2bd3..d5d715ed66 100644 --- a/ppstructure/pdf2word/pdf2word.py +++ b/ppstructure/pdf2word/pdf2word.py @@ -22,6 +22,8 @@ import platform import numpy as np import fitz +from paddle.utils import try_import +try_import('fitz') from PIL import Image from pdf2docx.converter import Converter from qtpy.QtWidgets import QApplication, QWidget, QPushButton, QProgressBar, \ diff --git a/requirements.txt b/requirements.txt index af5abe4539..7dc53ca039 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,6 +14,5 @@ lxml premailer openpyxl attrdict -PyMuPDF<1.21.0 Pillow>=10.0.0 pyyaml
To address the compatibility issues caused by the PyMuPDF dependency (license compatibility: #11634, #8873; environment compatibility: #11119, #7934, #11662; more discussion can be found by searching for PyMuPDF in the issues), this pull request proposes a lazy import for PyMuPDF. This means: - No PyMuPDF dependency: PyMuPDF will be removed from the requirements.txt file. - Error handling: an informative error message will guide the user to install PyMuPDF if it's not already present in their environment. This error will only occur when PyMuPDF functionality is actually required. Additionally, if this PR is accepted, the following actions will be necessary: - Release a new version of PaddleOCR. - Update the relevant documentation within this repository. I've used `paddle.utils.try_import` in the implementation; we can also discuss this implementation choice. ### PR types Others ### PR changes Others ### Description See above. ### Check-list - [ ] This PR is pushed to the dygraph branch or cherry-picked from the dygraph branch. Otherwise, please push your changes to the dygraph branch. - [ ] This PR fully describes what it does so that reviewers can work faster. - [ ] This PR can be covered by existing tests or has been locally verified.
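For illustration, a generic sketch of the lazy-import pattern being applied (the error message wording here is made up; the patch itself delegates to `paddle.utils.try_import`): the heavy, license-sensitive dependency is imported only on the code path that needs it.

```python
def read_pdf(pdf_path):
    try:
        import fitz  # PyMuPDF, imported only when a PDF is actually processed
    except ImportError as exc:
        raise ImportError(
            "PyMuPDF is required for PDF input; install it with `pip install PyMuPDF`"
        ) from exc
    with fitz.open(pdf_path) as pdf:
        return [pdf.load_page(page) for page in range(pdf.page_count)]
```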
https://api.github.com/repos/PaddlePaddle/PaddleOCR/pulls/11685
2024-03-06T07:59:09Z
2024-03-07T03:23:05Z
2024-03-07T03:23:05Z
2024-03-27T12:26:58Z
372
PaddlePaddle/PaddleOCR
41,849
Fix typos found by codespell
diff --git a/bootstrap/_suse_common.sh b/bootstrap/_suse_common.sh index 4b41bac36a2..46f9d693bf4 100755 --- a/bootstrap/_suse_common.sh +++ b/bootstrap/_suse_common.sh @@ -1,6 +1,6 @@ #!/bin/sh -# SLE12 dont have python-virtualenv +# SLE12 don't have python-virtualenv zypper -nq in -l git-core \ python \ diff --git a/docs/ciphers.rst b/docs/ciphers.rst index 12c403d09a8..49c0824a3d9 100644 --- a/docs/ciphers.rst +++ b/docs/ciphers.rst @@ -105,7 +105,7 @@ https://wiki.mozilla.org/Security/Server_Side_TLS and the version implemented by the Let's Encrypt client will be the version that was most current as of the release date of each client -version. Mozilla offers three seperate sets of cryptographic options, +version. Mozilla offers three separate sets of cryptographic options, which trade off security and compatibility differently. These are referred to as as the "Modern", "Intermediate", and "Old" configurations (in order from most secure to least secure, and least-backwards compatible diff --git a/letsencrypt/client.py b/letsencrypt/client.py index 8e053e9268b..d7113ca2504 100644 --- a/letsencrypt/client.py +++ b/letsencrypt/client.py @@ -430,7 +430,7 @@ def _rollback_and_restart(self, success_msg): except: # TODO: suggest letshelp-letsencypt here reporter.add_message( - "An error occured and we failed to restore your config and " + "An error occurred and we failed to restore your config and " "restart your server. Please submit a bug report to " "https://github.com/letsencrypt/letsencrypt", reporter.HIGH_PRIORITY)
Signed-off-by: Stefan Weil sw@weilnetz.de
https://api.github.com/repos/certbot/certbot/pulls/1562
2015-11-18T21:43:47Z
2015-11-19T00:20:24Z
2015-11-19T00:20:24Z
2016-05-06T19:22:35Z
440
certbot/certbot
552
Extend "when necessary" explanation
diff --git a/CppCoreGuidelines.md b/CppCoreGuidelines.md index 1ae151b02..427b09911 100644 --- a/CppCoreGuidelines.md +++ b/CppCoreGuidelines.md @@ -9214,7 +9214,7 @@ The default is the easiest to read and write. ##### Note -Specifying the underlying type is necessary in forward declarations of enumerations: +Specifying the underlying type is necessary to forward-declare an enum or enum class: enum Flags : char; @@ -9224,6 +9224,9 @@ Specifying the underlying type is necessary in forward declarations of enumerati enum Flags : char { /* ... */ }; +or to ensure that values of that type have a specified bit-precision: + + enum Bitboard : uint64_t { /* ... */ }; ##### Enforcement
Extend Enum.7 `Note` section for #1977
https://api.github.com/repos/isocpp/CppCoreGuidelines/pulls/2000
2022-11-28T17:13:35Z
2022-12-01T22:25:22Z
2022-12-01T22:25:22Z
2022-12-01T22:25:25Z
192
isocpp/CppCoreGuidelines
15,981
Bump pypa/cibuildwheel from 2.16.1 to 2.16.2
diff --git a/.github/workflows/pypi_upload.yml b/.github/workflows/pypi_upload.yml index 41ab646079..a57013d67c 100644 --- a/.github/workflows/pypi_upload.yml +++ b/.github/workflows/pypi_upload.yml @@ -88,7 +88,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: pypa/cibuildwheel@v2.16.1 + - uses: pypa/cibuildwheel@v2.16.2 with: only: ${{ matrix.only }}
Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 2.16.1 to 2.16.2. <details> <summary>Release notes</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/releases">pypa/cibuildwheel's releases</a>.</em></p> <blockquote> <h2>v2.16.2</h2> <ul> <li>🛠 Updates CPython 3.12 version to 3.12.0, final release (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1635">#1635</a>)</li> <li>✨ Adds a debug option <a href="https://cibuildwheel.readthedocs.io/en/stable/options/#cibw_debug_keep_container"><code>CIBW_DEBUG_KEEP_CONTAINER</code></a> to stop cibuildwheel deleting build containers after the build finishes. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1620">#1620</a>)</li> <li>📚 Adds support for <code>[tool.cibuildwheel]</code> checking by adding a schema compatible with the <a href="https://github.com/abravalheri/validate-pyproject/">validate-pyproject</a> tool (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1622">#1622</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1628">#1628</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1629">#1629</a>)</li> <li>🐛 Fix parsing of <code>CIBW_CONTAINER_ENGINE</code> and <code>CIBW_BUILD_FRONTEND</code> options to not break arguments on <code>:</code> characters (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1621">#1621</a>)</li> <li>🐛 Fix the evaluation order of <code>CIBW_ENVIRONMENT</code> and <code>CIBW_ENVIRONMENT_PASS</code> so that <code>CIBW_ENVIRONMENT</code> assignments can reference environment variables passed through from the host machine. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1617">#1617</a>)</li> <li>🛠 Supports manylinux images' deferred installation of interpreters through the <code>manylinux-interpreters</code> tool (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1630">#1630</a>)</li> </ul> </blockquote> </details> <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/pypa/cibuildwheel/blob/main/docs/changelog.md">pypa/cibuildwheel's changelog</a>.</em></p> <blockquote> <h3>v2.16.2</h3> <p><em>3 October 2023</em></p> <ul> <li>🛠 Updates CPython 3.12 version to 3.12.0, final release (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1635">#1635</a>)</li> <li>✨ Adds a debug option <a href="https://cibuildwheel.readthedocs.io/en/stable/options/#cibw_debug_keep_container"><code>CIBW_DEBUG_KEEP_CONTAINER</code></a> to stop cibuildwheel deleting build containers after the build finishes. 
(<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1620">#1620</a>)</li> <li>📚 Adds support for <code>[tool.cibuildwheel]</code> checking by adding a schema compatible with the <a href="https://github.com/abravalheri/validate-pyproject/">validate-pyproject</a> tool (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1622">#1622</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1628">#1628</a>, <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1629">#1629</a>)</li> <li>🐛 Fix parsing of <code>CIBW_CONTAINER_ENGINE</code> and <code>CIBW_BUILD_FRONTEND</code> options to not break arguments on <code>:</code> characters (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1621">#1621</a>)</li> <li>🐛 Fix the evaluation order of <code>CIBW_ENVIRONMENT</code> and <code>CIBW_ENVIRONMENT_PASS</code> so that <code>CIBW_ENVIRONMENT</code> assignments can reference environment variables passed through from the host machine. (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1617">#1617</a>)</li> <li>🛠 Supports manylinux images' deferred installation of interpreters through the <code>manylinux-interpreters</code> tool (<a href="https://redirect.github.com/pypa/cibuildwheel/issues/1630">#1630</a>)</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="https://github.com/pypa/cibuildwheel/commit/fff9ec32ed25a9c576750c91e06b410ed0c15db7"><code>fff9ec3</code></a> Bump version: v2.16.2</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/f4291e7492ca7d2d51314a5734f103541e300e3d"><code>f4291e7</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1617">#1617</a> from pypa/env-order</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/9da4ede6166a2e1fc5774216d8f4d15d2c902395"><code>9da4ede</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1620">#1620</a> from Helveg/cibw-debug-keep-container</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/77d3a5f98ef09fa92567006bc4209fadb59ed16b"><code>77d3a5f</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1621">#1621</a> from pypa/parse-kvs-colons</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/9535d168faa75eac2085e92defb27ce9194b5707"><code>9535d16</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1629">#1629</a> from henryiii/henryiii/refactor/schemaref</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/70fae8d0b7341087e736ace9aa753ce18a8afd06"><code>70fae8d</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1630">#1630</a> from mayeut/manylinux-interpreters</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/7699db3017051aad483f46772f8d8aaafe734427"><code>7699db3</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1632">#1632</a> from gdementen/patch-1</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/6dceb290087321f43a4252b093471f1ebe4f2d5f"><code>6dceb29</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1634">#1634</a> from pypa/pre-commit-ci-update-config</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/035f5a2297c3a3236d1521aed8d0c2965b91fc43"><code>035f5a2</code></a> Merge pull request <a href="https://redirect.github.com/pypa/cibuildwheel/issues/1635">#1635</a> from 
pypa/update-dependencies-pr</li> <li><a href="https://github.com/pypa/cibuildwheel/commit/951d453a2ea22631a315c19aa6c5104aef1d8458"><code>951d453</code></a> [pre-commit.ci] auto fixes from pre-commit.com hooks</li> <li>Additional commits viewable in <a href="https://github.com/pypa/cibuildwheel/compare/v2.16.1...v2.16.2">compare view</a></li> </ul> </details> <br /> [![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=2.16.1&new-version=2.16.2)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show <dependency name> ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details>
https://api.github.com/repos/psf/black/pulls/3934
2023-10-09T06:29:42Z
2023-10-09T14:00:49Z
2023-10-09T14:00:49Z
2023-10-09T14:00:51Z
141
psf/black
24,131
Add object segmentation papers
diff --git a/README.md b/README.md index b8995b8..a6080b9 100644 --- a/README.md +++ b/README.md @@ -323,14 +323,16 @@ I would continue adding papers to this roadmap. ## 3.14 Other Frontiers +## 3.15 Object Segmentation +**[1]** J. Long, E. Shelhamer, and T. Darrell, “**Fully convolutional networks for semantic segmentation**.” in CVPR, 2015. [[pdf]](https://arxiv.org/pdf/1411.4038v2.pdf) :star::star::star::star::star: +**[2]** L.-C. Chen, G. Papandreou, I. Kokkinos, K. Murphy, and A. L. Yuille. "**Semantic image segmentation with deep convolutional nets and fully connected crfs**." In ICLR, 2015. [[pdf]](https://arxiv.org/pdf/1606.00915v1.pdf) :star::star::star::star::star: +**[3]** Pinheiro, P.O., Collobert, R., Dollar, P. "**Learning to segment object candidates.**" In: NIPS. 2015. [[pdf]](https://arxiv.org/pdf/1506.06204v2.pdf) :star::star::star::star: +**[4]** Dai, J., He, K., Sun, J. "**Instance-aware semantic segmentation via multi-task network cascades**." in CVPR. 2016 [[pdf]](https://arxiv.org/pdf/1512.04412v1.pdf) :star::star::star: - - - - +**[5]** Dai, J., He, K., Sun, J. "**Instance-sensitive Fully Convolutional Networks**." arXiv preprint arXiv:1603.08678 (2016). [[pdf]](https://arxiv.org/pdf/1603.08678v1.pdf) :star::star::star:
https://api.github.com/repos/floodsung/Deep-Learning-Papers-Reading-Roadmap/pulls/37
2016-12-29T02:28:45Z
2017-01-03T03:15:25Z
2017-01-03T03:15:25Z
2017-01-03T03:15:25Z
454
floodsung/Deep-Learning-Papers-Reading-Roadmap
51,713
Bagatur/runnable with fallbacks
diff --git a/libs/langchain/langchain/callbacks/manager.py b/libs/langchain/langchain/callbacks/manager.py index 1a0f6ac0a09c62..7016a13ed952bc 100644 --- a/libs/langchain/langchain/callbacks/manager.py +++ b/libs/langchain/langchain/callbacks/manager.py @@ -730,7 +730,7 @@ def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None: def on_chain_error( self, - error: Union[Exception, KeyboardInterrupt], + error: BaseException, **kwargs: Any, ) -> None: """Run when chain errors. @@ -812,7 +812,7 @@ async def on_chain_end(self, outputs: Dict[str, Any], **kwargs: Any) -> None: async def on_chain_error( self, - error: Union[Exception, KeyboardInterrupt], + error: BaseException, **kwargs: Any, ) -> None: """Run when chain errors. diff --git a/libs/langchain/langchain/schema/runnable.py b/libs/langchain/langchain/schema/runnable.py index 2669409a3a299f..4cfd3f913f59d6 100644 --- a/libs/langchain/langchain/schema/runnable.py +++ b/libs/langchain/langchain/schema/runnable.py @@ -14,6 +14,9 @@ List, Mapping, Optional, + Sequence, + Tuple, + Type, TypedDict, TypeVar, Union, @@ -189,6 +192,247 @@ def _call_with_config( ) return output + def with_fallbacks( + self, + fallbacks: Sequence[Runnable[Input, Output]], + *, + exceptions_to_handle: Tuple[Type[BaseException]] = (Exception,), + ) -> RunnableWithFallbacks[Input, Output]: + return RunnableWithFallbacks( + runnable=self, + fallbacks=fallbacks, + exceptions_to_handle=exceptions_to_handle, + ) + + +class RunnableWithFallbacks(Serializable, Runnable[Input, Output]): + runnable: Runnable[Input, Output] + fallbacks: Sequence[Runnable[Input, Output]] + exceptions_to_handle: Tuple[Type[BaseException]] = (Exception,) + + class Config: + arbitrary_types_allowed = True + + @property + def runnables(self) -> Iterator[Runnable[Input, Output]]: + yield self.runnable + yield from self.fallbacks + + def invoke(self, input: Input, config: Optional[RunnableConfig] = None) -> Output: + from langchain.callbacks.manager import CallbackManager + + # setup callbacks + config = config or {} + callback_manager = CallbackManager.configure( + inheritable_callbacks=config.get("callbacks"), + local_callbacks=None, + verbose=False, + inheritable_tags=config.get("tags"), + local_tags=None, + inheritable_metadata=config.get("metadata"), + local_metadata=None, + ) + # start the root run + run_manager = callback_manager.on_chain_start( + dumpd(self), input if isinstance(input, dict) else {"input": input} + ) + first_error = None + for runnable in self.runnables: + try: + output = runnable.invoke( + input, + _patch_config(config, run_manager.get_child()), + ) + except self.exceptions_to_handle as e: + if first_error is None: + first_error = e + except BaseException as e: + run_manager.on_chain_error(e) + raise e + else: + run_manager.on_chain_end( + output if isinstance(output, dict) else {"output": output} + ) + return output + if first_error is None: + raise ValueError("No error stored at end of fallbacks.") + run_manager.on_chain_error(first_error) + raise first_error + + async def ainvoke( + self, input: Input, config: Optional[RunnableConfig] = None + ) -> Output: + from langchain.callbacks.manager import AsyncCallbackManager + + # setup callbacks + config = config or {} + callback_manager = AsyncCallbackManager.configure( + inheritable_callbacks=config.get("callbacks"), + local_callbacks=None, + verbose=False, + inheritable_tags=config.get("tags"), + local_tags=None, + inheritable_metadata=config.get("metadata"), + local_metadata=None, + ) + # start the root run 
+ run_manager = await callback_manager.on_chain_start( + dumpd(self), input if isinstance(input, dict) else {"input": input} + ) + + first_error = None + for runnable in self.runnables: + try: + output = await runnable.ainvoke( + input, + _patch_config(config, run_manager.get_child()), + ) + except self.exceptions_to_handle as e: + if first_error is None: + first_error = e + except BaseException as e: + await run_manager.on_chain_error(e) + raise e + else: + await run_manager.on_chain_end( + output if isinstance(output, dict) else {"output": output} + ) + return output + if first_error is None: + raise ValueError("No error stored at end of fallbacks.") + await run_manager.on_chain_error(first_error) + raise first_error + + def batch( + self, + inputs: List[Input], + config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None, + *, + max_concurrency: Optional[int] = None, + ) -> List[Output]: + from langchain.callbacks.manager import CallbackManager + + # setup callbacks + configs = self._get_config_list(config, len(inputs)) + callback_managers = [ + CallbackManager.configure( + inheritable_callbacks=config.get("callbacks"), + local_callbacks=None, + verbose=False, + inheritable_tags=config.get("tags"), + local_tags=None, + inheritable_metadata=config.get("metadata"), + local_metadata=None, + ) + for config in configs + ] + # start the root runs, one per input + run_managers = [ + cm.on_chain_start( + dumpd(self), input if isinstance(input, dict) else {"input": input} + ) + for cm, input in zip(callback_managers, inputs) + ] + + first_error = None + for runnable in self.runnables: + try: + outputs = runnable.batch( + inputs, + [ + # each step a child run of the corresponding root run + _patch_config(config, rm.get_child()) + for rm, config in zip(run_managers, configs) + ], + max_concurrency=max_concurrency, + ) + except self.exceptions_to_handle as e: + if first_error is None: + first_error = e + except BaseException as e: + for rm in run_managers: + rm.on_chain_error(e) + raise e + else: + for rm, output in zip(run_managers, outputs): + rm.on_chain_end( + output if isinstance(output, dict) else {"output": output} + ) + return outputs + if first_error is None: + raise ValueError("No error stored at end of fallbacks.") + for rm in run_managers: + rm.on_chain_error(first_error) + raise first_error + + async def abatch( + self, + inputs: List[Input], + config: Optional[Union[RunnableConfig, List[RunnableConfig]]] = None, + *, + max_concurrency: Optional[int] = None, + ) -> List[Output]: + from langchain.callbacks.manager import ( + AsyncCallbackManager, + AsyncCallbackManagerForChainRun, + ) + + # setup callbacks + configs = self._get_config_list(config, len(inputs)) + callback_managers = [ + AsyncCallbackManager.configure( + inheritable_callbacks=config.get("callbacks"), + local_callbacks=None, + verbose=False, + inheritable_tags=config.get("tags"), + local_tags=None, + inheritable_metadata=config.get("metadata"), + local_metadata=None, + ) + for config in configs + ] + # start the root runs, one per input + run_managers: List[AsyncCallbackManagerForChainRun] = await asyncio.gather( + *( + cm.on_chain_start( + dumpd(self), input if isinstance(input, dict) else {"input": input} + ) + for cm, input in zip(callback_managers, inputs) + ) + ) + + first_error = None + for runnable in self.runnables: + try: + outputs = await runnable.abatch( + inputs, + [ + # each step a child run of the corresponding root run + _patch_config(config, rm.get_child()) + for rm, config in zip(run_managers, 
configs) + ], + max_concurrency=max_concurrency, + ) + except self.exceptions_to_handle as e: + if first_error is None: + first_error = e + except BaseException as e: + await asyncio.gather(*(rm.on_chain_error(e) for rm in run_managers)) + else: + await asyncio.gather( + *( + rm.on_chain_end( + output if isinstance(output, dict) else {"output": output} + ) + for rm, output in zip(run_managers, outputs) + ) + ) + return outputs + if first_error is None: + raise ValueError("No error stored at end of fallbacks.") + await asyncio.gather(*(rm.on_chain_error(first_error) for rm in run_managers)) + raise first_error + class RunnableSequence(Serializable, Runnable[Input, Output]): first: Runnable[Input, Any] diff --git a/libs/langchain/tests/unit_tests/schema/test_runnable.py b/libs/langchain/tests/unit_tests/schema/test_runnable.py index 3b736588865918..181cf50c3a0971 100644 --- a/libs/langchain/tests/unit_tests/schema/test_runnable.py +++ b/libs/langchain/tests/unit_tests/schema/test_runnable.py @@ -6,6 +6,7 @@ from pytest_mock import MockerFixture from syrupy import SnapshotAssertion +from langchain import PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.callbacks.tracers.base import BaseTracer from langchain.callbacks.tracers.schemas import Run @@ -30,6 +31,7 @@ RunnableMap, RunnablePassthrough, RunnableSequence, + RunnableWithFallbacks, ) @@ -754,3 +756,48 @@ def test_bind_bind() -> None: stop=["Observation:"], hello="world" ) ) == dumpd(llm.bind(stop=["Observation:"], one="two", hello="world")) + + +@pytest.fixture() +def llm_with_fallbacks() -> RunnableWithFallbacks: + error_llm = FakeListLLM(responses=["foo"], i=1) + pass_llm = FakeListLLM(responses=["bar"]) + + return error_llm.with_fallbacks([pass_llm]) + + +@pytest.fixture() +def llm_with_multi_fallbacks() -> RunnableWithFallbacks: + error_llm = FakeListLLM(responses=["foo"], i=1) + error_llm_2 = FakeListLLM(responses=["baz"], i=1) + pass_llm = FakeListLLM(responses=["bar"]) + + return error_llm.with_fallbacks([error_llm_2, pass_llm]) + + +@pytest.fixture() +def llm_chain_with_fallbacks() -> RunnableSequence: + error_llm = FakeListLLM(responses=["foo"], i=1) + pass_llm = FakeListLLM(responses=["bar"]) + + prompt = PromptTemplate.from_template("what did baz say to {buz}") + return RunnableMap({"buz": lambda x: x}) | (prompt | error_llm).with_fallbacks( + [prompt | pass_llm] + ) + + +@pytest.mark.parametrize( + "runnable", + ["llm_with_fallbacks", "llm_with_multi_fallbacks", "llm_chain_with_fallbacks"], +) +@pytest.mark.asyncio +async def test_llm_with_fallbacks( + runnable: RunnableWithFallbacks, request: Any +) -> None: + runnable = request.getfixturevalue(runnable) + assert runnable.invoke("hello") == "bar" + assert runnable.batch(["hi", "hey", "bye"]) == ["bar"] * 3 + assert list(runnable.stream("hello")) == ["bar"] + assert await runnable.ainvoke("hello") == "bar" + assert await runnable.abatch(["hi", "hey", "bye"]) == ["bar"] * 3 + assert list(await runnable.ainvoke("hello")) == list("bar")
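A usage sketch of the `with_fallbacks` API introduced above, mirroring the unit-test fixtures in the diff (the `FakeListLLM` import path is an assumption, and per those fixtures `i=1` makes the single-response fake raise when invoked):

```python
# Sketch, not the PR's own example: error_llm raises, so the fallback answers.
from langchain.llms.fake import FakeListLLM  # assumed import path

error_llm = FakeListLLM(responses=["foo"], i=1)  # index 1 of a 1-item list -> raises
pass_llm = FakeListLLM(responses=["bar"])

llm = error_llm.with_fallbacks([pass_llm])
assert llm.invoke("hello") == "bar"              # fallback output is returned
assert llm.batch(["hi", "hey"]) == ["bar"] * 2   # same behavior for batch
```

By default any `Exception` triggers the next fallback; pass `exceptions_to_handle` to narrow that to specific exception types.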
https://api.github.com/repos/langchain-ai/langchain/pulls/8543
2023-07-31T21:02:44Z
2023-08-04T21:06:05Z
2023-08-04T21:06:05Z
2023-08-04T21:06:06Z
2,969
langchain-ai/langchain
43,235
🌐 Add Japanese translation for Tutorial - Static files
diff --git a/docs/ja/docs/tutorial/static-files.md b/docs/ja/docs/tutorial/static-files.md new file mode 100644 index 0000000000000..fcc3ba924c643 --- /dev/null +++ b/docs/ja/docs/tutorial/static-files.md @@ -0,0 +1,53 @@ +# 静的ファイル + +`StaticFiles` を使用して、ディレクトリから静的ファイルを自動的に提供できます。 + +## `aiofiles` をインストール + +まず、`aiofiles` をインストールする必要があります: + +<div class="termy"> + +```console +$ pip install aiofiles + +---> 100% +``` + +</div> + +## `StaticFiles` の使用 + +* `StaticFiles` をインポート。 +* `StaticFiles()` インスタンスを生成し、特定のパスに「マウント」。 + +```Python hl_lines="2 6" +{!../../../docs_src/static_files/tutorial001.py!} +``` + +!!! note "技術詳細" + `from starlette.staticfiles import StaticFiles` も使用できます。 + + **FastAPI**は、開発者の利便性のために、`starlette.staticfiles` と同じ `fastapi.staticfiles` を提供します。しかし、実際にはStarletteから直接渡されています。 + +### 「マウント」とは + +「マウント」とは、特定のパスに完全な「独立した」アプリケーションを追加することを意味します。これにより、すべてのサブパスの処理がなされます。 + +これは、マウントされたアプリケーションが完全に独立しているため、`APIRouter` とは異なります。メインアプリケーションのOpenAPIとドキュメントには、マウントされたアプリケーションの内容などは含まれません。 + +これについて詳しくは、**高度なユーザーガイド** をご覧ください。 + +## 詳細 + +最初の `"/static"` は、この「サブアプリケーション」が「マウント」されるサブパスを指します。したがって、`"/static"` から始まるパスはすべてサブアプリケーションによって処理されます。 + +`directory="static"` は、静的ファイルを含むディレクトリの名前を指します。 + +`name="static"` は、**FastAPI** が内部で使用できる名前を付けます。 + +これらのパラメータはすべて「`静的`」とは異なる場合があり、独自のアプリケーションのニーズと詳細に合わせて調整します。 + +## より詳しい情報 + +詳細とオプションについては、<a href="https://www.starlette.io/staticfiles/" class="external-link" target="_blank">Starletteの静的ファイルに関するドキュメント</a>を確認してください。 diff --git a/docs/ja/mkdocs.yml b/docs/ja/mkdocs.yml index fabf9d46c6c9d..b3714bcdebac7 100644 --- a/docs/ja/mkdocs.yml +++ b/docs/ja/mkdocs.yml @@ -67,6 +67,7 @@ nav: - tutorial/security/first-steps.md - tutorial/middleware.md - tutorial/cors.md + - tutorial/static-files.md - tutorial/testing.md - tutorial/debugging.md - 高度なユーザーガイド:
This PR translates tutorial/static-files.md into Japanese.
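For reference, the mount the translated page documents comes down to the following (reconstructed from the parameters the page discusses; the tutorial source it embeds is `docs_src/static_files/tutorial001.py`):

```python
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

app = FastAPI()
# Everything under /static is served from the local "static" directory.
app.mount("/static", StaticFiles(directory="static"), name="static")
```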
https://api.github.com/repos/tiangolo/fastapi/pulls/2260
2020-10-28T14:13:47Z
2021-02-07T17:38:11Z
2021-02-07T17:38:11Z
2021-05-05T08:11:31Z
822
tiangolo/fastapi
23,183
Add libjpeg-dev required for Pillow install
diff --git a/Dockerfile.dev b/Dockerfile.dev index 582fecdde6a20..fb2cc63d48186 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -32,6 +32,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gcc \ git \ libffi-dev \ + libjpeg-dev \ libpq-dev \ libxml2-dev \ libxslt-dev \
Fixes Pillow installation in the docker container
https://api.github.com/repos/getsentry/sentry/pulls/3438
2016-06-07T16:27:54Z
2016-06-07T16:28:38Z
2016-06-07T16:28:38Z
2020-12-23T15:03:14Z
111
getsentry/sentry
44,083
cex ohlcv timeframes
diff --git a/js/cex.js b/js/cex.js index 70e3ae1e8a47..8bfbb0a249f0 100644 --- a/js/cex.js +++ b/js/cex.js @@ -36,6 +36,8 @@ module.exports = class cex extends Exchange { }, 'timeframes': { '1m': '1m', + '1h': '1h', + '1d': '1d', }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/27766442-8ddc33b0-5ed8-11e7-8b98-f786aef0f3c9.jpg',
https://cex.io/rest-api#historical-1m-ohlcv-chart
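With the two new entries, hourly and daily candles become requestable. A hedged sketch via ccxt's Python API (the diff itself patches `js/cex.js`, but the unified API is the same across languages; the symbol is illustrative and this performs a live request):

```python
import ccxt

exchange = ccxt.cex()
candles = exchange.fetch_ohlcv("BTC/USD", timeframe="1h")
print(candles[0])  # [timestamp, open, high, low, close, volume]
```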
https://api.github.com/repos/ccxt/ccxt/pulls/10762
2021-12-02T13:38:48Z
2021-12-02T14:49:51Z
2021-12-02T14:49:51Z
2021-12-02T14:49:51Z
165
ccxt/ccxt
13,618
update vendored urllib3
diff --git a/requests/packages/urllib3/connectionpool.py b/requests/packages/urllib3/connectionpool.py index 51c87f58ad..f93e2dfba1 100644 --- a/requests/packages/urllib3/connectionpool.py +++ b/requests/packages/urllib3/connectionpool.py @@ -9,7 +9,7 @@ import errno from socket import error as SocketError, timeout as SocketTimeout -from .util import resolve_cert_reqs, resolve_ssl_version +from .util import resolve_cert_reqs, resolve_ssl_version, assert_fingerprint try: # Python 3 from http.client import HTTPConnection, HTTPException @@ -81,12 +81,15 @@ class VerifiedHTTPSConnection(HTTPSConnection): ssl_version = None def set_cert(self, key_file=None, cert_file=None, - cert_reqs=None, ca_certs=None): + cert_reqs=None, ca_certs=None, + assert_hostname=None, assert_fingerprint=None): self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.ca_certs = ca_certs + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint def connect(self): # Add certificate verification @@ -104,8 +107,12 @@ def connect(self): ssl_version=resolved_ssl_version) if resolved_cert_reqs != ssl.CERT_NONE: - match_hostname(self.sock.getpeercert(), self.host) - + if self.assert_fingerprint: + assert_fingerprint(self.sock.getpeercert(binary_form=True), + self.assert_fingerprint) + else: + match_hostname(self.sock.getpeercert(), + self.assert_hostname or self.host) ## Pool objects @@ -502,9 +509,13 @@ class HTTPSConnectionPool(HTTPConnectionPool): :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates, instead of :class:`httplib.HTTPSConnection`. - The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, and ``ssl_version`` - are only used if :mod:`ssl` is available and are fed into - :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket into an SSL socket. + :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs`` and + ``ssl_version`` are only used if :mod:`ssl` is available and are fed into + :meth:`urllib3.util.ssl_wrap_socket` to upgrade the connection socket + into an SSL socket. 
""" scheme = 'https' @@ -512,8 +523,9 @@ class HTTPSConnectionPool(HTTPConnectionPool): def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1, block=False, headers=None, - key_file=None, cert_file=None, - cert_reqs=None, ca_certs=None, ssl_version=None): + key_file=None, cert_file=None, cert_reqs=None, + ca_certs=None, ssl_version=None, + assert_hostname=None, assert_fingerprint=None): HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize, @@ -523,6 +535,8 @@ def __init__(self, host, port=None, self.cert_reqs = cert_reqs self.ca_certs = ca_certs self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint def _new_conn(self): """ @@ -532,7 +546,7 @@ def _new_conn(self): log.info("Starting new HTTPS connection (%d): %s" % (self.num_connections, self.host)) - if not ssl: # Platform-specific: Python compiled without +ssl + if not ssl: # Platform-specific: Python compiled without +ssl if not HTTPSConnection or HTTPSConnection is object: raise SSLError("Can't connect to HTTPS URL because the SSL " "module is not available.") @@ -545,7 +559,9 @@ def _new_conn(self): port=self.port, strict=self.strict) connection.set_cert(key_file=self.key_file, cert_file=self.cert_file, - cert_reqs=self.cert_reqs, ca_certs=self.ca_certs) + cert_reqs=self.cert_reqs, ca_certs=self.ca_certs, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint) connection.ssl_version = self.ssl_version diff --git a/requests/packages/urllib3/contrib/pyopenssl.py b/requests/packages/urllib3/contrib/pyopenssl.py new file mode 100644 index 0000000000..5c4c6d8d31 --- /dev/null +++ b/requests/packages/urllib3/contrib/pyopenssl.py @@ -0,0 +1,167 @@ +'''SSL with SNI-support for Python 2. + +This needs the following packages installed: + +* pyOpenSSL (tested with 0.13) +* ndg-httpsclient (tested with 0.3.2) +* pyasn1 (tested with 0.1.6) + +To activate it call :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3`. +This can be done in a ``sitecustomize`` module, or at any other time before +your application begins using ``urllib3``, like this:: + + try: + import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() + except ImportError: + pass + +Now you can use :mod:`urllib3` as you normally would, and it will support SNI +when the required modules are installed. +''' + +from ndg.httpsclient.ssl_peer_verification import (ServerSSLCertVerification, + SUBJ_ALT_NAME_SUPPORT) +from ndg.httpsclient.subj_alt_name import SubjectAltName +import OpenSSL.SSL +from pyasn1.codec.der import decoder as der_decoder +from socket import _fileobject +import ssl + +from .. import connectionpool +from .. import util + +__all__ = ['inject_into_urllib3', 'extract_from_urllib3'] + +# SNI only *really* works if we can read the subjectAltName of certificates. +HAS_SNI = SUBJ_ALT_NAME_SUPPORT + +# Map from urllib3 to PyOpenSSL compatible parameter-values. +_openssl_versions = { + ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, +} +_openssl_verify = { + ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, + ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} + + +orig_util_HAS_SNI = util.HAS_SNI +orig_connectionpool_ssl_wrap_socket = connectionpool.ssl_wrap_socket + + +def inject_into_urllib3(): + 'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.' 
+ + connectionpool.ssl_wrap_socket = ssl_wrap_socket + util.HAS_SNI = HAS_SNI + + +def extract_from_urllib3(): + 'Undo monkey-patching by :func:`inject_into_urllib3`.' + + connectionpool.ssl_wrap_socket = orig_connectionpool_ssl_wrap_socket + util.HAS_SNI = orig_util_HAS_SNI + + +### Note: This is a slightly bug-fixed version of same from ndg-httpsclient. +def get_subj_alt_name(peer_cert): + # Search through extensions + dns_name = [] + if not SUBJ_ALT_NAME_SUPPORT: + return dns_name + + general_names = SubjectAltName() + for i in range(peer_cert.get_extension_count()): + ext = peer_cert.get_extension(i) + ext_name = ext.get_short_name() + if ext_name != 'subjectAltName': + continue + + # PyOpenSSL returns extension data in ASN.1 encoded form + ext_dat = ext.get_data() + decoded_dat = der_decoder.decode(ext_dat, + asn1Spec=general_names) + + for name in decoded_dat: + if not isinstance(name, SubjectAltName): + continue + for entry in range(len(name)): + component = name.getComponentByPosition(entry) + if component.getName() != 'dNSName': + continue + dns_name.append(str(component.getComponent())) + + return dns_name + + +class WrappedSocket(object): + '''API-compatibility wrapper for Python OpenSSL's Connection-class.''' + + def __init__(self, connection, socket): + self.connection = connection + self.socket = socket + + def makefile(self, mode, bufsize=-1): + return _fileobject(self.connection, mode, bufsize) + + def settimeout(self, timeout): + return self.socket.settimeout(timeout) + + def sendall(self, data): + return self.connection.sendall(data) + + def getpeercert(self, binary_form=False): + x509 = self.connection.get_peer_certificate() + if not x509: + raise ssl.SSLError('') + + if binary_form: + return OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_ASN1, + x509) + + return { + 'subject': ( + (('commonName', x509.get_subject().CN),), + ), + 'subjectAltName': [ + ('DNS', value) + for value in get_subj_alt_name(x509) + ] + } + + +def _verify_callback(cnx, x509, err_no, err_depth, return_code): + return err_no == 0 + + +def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, + ca_certs=None, server_hostname=None, + ssl_version=None): + ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version]) + if certfile: + ctx.use_certificate_file(certfile) + if keyfile: + ctx.use_privatekey_file(keyfile) + if cert_reqs != ssl.CERT_NONE: + ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback) + if ca_certs: + try: + ctx.load_verify_locations(ca_certs, None) + except OpenSSL.SSL.Error as e: + raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) + + cnx = OpenSSL.SSL.Connection(ctx, sock) + cnx.set_tlsext_host_name(server_hostname) + cnx.set_connect_state() + try: + cnx.do_handshake() + except OpenSSL.SSL.Error as e: + raise ssl.SSLError('bad handshake', e) + + return WrappedSocket(cnx, sock) diff --git a/requests/packages/urllib3/poolmanager.py b/requests/packages/urllib3/poolmanager.py index 6e7377cb62..64a7b5d755 100644 --- a/requests/packages/urllib3/poolmanager.py +++ b/requests/packages/urllib3/poolmanager.py @@ -23,6 +23,9 @@ log = logging.getLogger(__name__) +SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs', + 'ssl_version') + class PoolManager(RequestMethods): """ @@ -67,7 +70,13 @@ def _new_pool(self, scheme, host, port): to be overridden for customization. 
""" pool_cls = pool_classes_by_scheme[scheme] - return pool_cls(host, port, **self.connection_pool_kw) + kwargs = self.connection_pool_kw + if scheme == 'http': + kwargs = self.connection_pool_kw.copy() + for kw in SSL_KEYWORDS: + kwargs.pop(kw, None) + + return pool_cls(host, port, **kwargs) def clear(self): """ diff --git a/requests/packages/urllib3/util.py b/requests/packages/urllib3/util.py index b827bc4f5e..681cb6c991 100644 --- a/requests/packages/urllib3/util.py +++ b/requests/packages/urllib3/util.py @@ -8,6 +8,8 @@ from base64 import b64encode from collections import namedtuple from socket import error as SocketError +from hashlib import md5, sha1 +from binascii import hexlify, unhexlify try: from select import poll, POLLIN @@ -23,7 +25,7 @@ HAS_SNI = False import ssl - from ssl import wrap_socket, CERT_NONE, SSLError, PROTOCOL_SSLv23 + from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 from ssl import SSLContext # Modern SSL? from ssl import HAS_SNI # Has SNI? except ImportError: @@ -31,7 +33,7 @@ from .packages import six -from .exceptions import LocationParseError +from .exceptions import LocationParseError, SSLError class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])): @@ -232,7 +234,7 @@ def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, return headers -def is_connection_dropped(conn): +def is_connection_dropped(conn): # Platform-specific """ Returns True if the connection is dropped and should be closed. @@ -246,7 +248,7 @@ def is_connection_dropped(conn): if not sock: # Platform-specific: AppEngine return False - if not poll: # Platform-specific + if not poll: if not select: # Platform-specific: AppEngine return False @@ -302,6 +304,44 @@ def resolve_ssl_version(candidate): return candidate + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + # Maps the length of a digest to a possible hash function producing + # this digest. + hashfunc_map = { + 16: md5, + 20: sha1 + } + + fingerprint = fingerprint.replace(':', '').lower() + + digest_length, rest = divmod(len(fingerprint), 2) + + if rest or digest_length not in hashfunc_map: + raise SSLError('Fingerprint is of invalid length.') + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + hashfunc = hashfunc_map[digest_length] + + cert_digest = hashfunc(cert).digest() + + if not cert_digest == fingerprint_bytes: + raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' + .format(hexlify(fingerprint_bytes), + hexlify(cert_digest))) + + if SSLContext is not None: # Python 3.2+ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None,
Contains a workaround (fingerprint, random hostname verification) and a solution (pyOpenSSL) for SNI on python 2. Intended to be included in 1.2 (#1267)
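A self-contained sketch of the `assert_fingerprint` helper this update vendors (the certificate bytes are dummy data; in real use you pass the DER form from `getpeercert(binary_form=True)`):

```python
import hashlib
from requests.packages.urllib3.util import assert_fingerprint

cert_der = b"dummy certificate bytes"    # stand-in for the peer cert's DER bytes
fp = hashlib.sha1(cert_der).hexdigest()  # 40 hex chars -> mapped to sha1 internally
assert_fingerprint(cert_der, fp)         # passes silently on a match
# A wrong digest raises urllib3's SSLError, e.g.:
# assert_fingerprint(cert_der, "aa:" * 19 + "aa")
```

Pools take the same value through the new keyword, e.g. `HTTPSConnectionPool('example.com', 443, assert_fingerprint=fp)` (host and digest here are placeholders).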
https://api.github.com/repos/psf/requests/pulls/1268
2013-03-28T12:53:24Z
2013-03-31T05:27:59Z
2013-03-31T05:27:59Z
2021-09-08T23:08:31Z
3,533
psf/requests
32,510
fix #2738
diff --git a/mitmproxy/tools/console/window.py b/mitmproxy/tools/console/window.py index 87680f6eef..6d49e8b1ef 100644 --- a/mitmproxy/tools/console/window.py +++ b/mitmproxy/tools/console/window.py @@ -234,28 +234,34 @@ def pop(self, *args, **kwargs): self.view_changed() self.focus_changed() - def current(self, keyctx): + def stacks_sorted_by_focus(self): """ - Returns the active widget, but only the current focus or overlay has - a matching key context. + Returns: + self.stacks, with the focused stack first. """ - t = self.focus_stack().top_widget() - if t.keyctx == keyctx: - return t + stacks = self.stacks.copy() + stacks.insert(0, stacks.pop(self.pane)) + return stacks - def current_window(self, keyctx): + def current(self, keyctx): """ - Returns the active window, ignoring overlays. + Returns the active widget with a matching key context, including overlays. + If multiple stacks have an active widget with a matching key context, + the currently focused stack is preferred. """ - t = self.focus_stack().top_window() - if t.keyctx == keyctx: - return t + for s in self.stacks_sorted_by_focus(): + t = s.top_widget() + if t.keyctx == keyctx: + return t - def any(self, keyctx): + def current_window(self, keyctx): """ - Returns the top window of either stack if they match the context. + Returns the active window with a matching key context, ignoring overlays. + If multiple stacks have an active widget with a matching key context, + the currently focused stack is preferred. """ - for t in [x.top_window() for x in self.stacks]: + for s in self.stacks_sorted_by_focus(): + t = s.top_window() if t.keyctx == keyctx: return t
This fixes #2738 by changing the semantics of `Window.current` and `Window.current_window`: If the focused pane shows a different widget, we now check if maybe the unfocused pane contains a matching widget and if so, return that. This is particularly nice because we can now use the command reference on the right pane to execute widget-specific actions on the left pane.
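Stripped of the mitmproxy classes, the pane-first reordering in `stacks_sorted_by_focus` is just this (standalone illustration):

```python
stacks = ["left", "right"]  # stand-ins for the window's stacks
pane = 1                    # index of the currently focused pane

ordered = stacks.copy()
ordered.insert(0, ordered.pop(pane))  # move the focused stack to the front
assert ordered == ["right", "left"]
```

`current` and `current_window` then walk this ordering and return the first widget whose key context matches, which is what lets the unfocused pane answer when the focused one has no match.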
https://api.github.com/repos/mitmproxy/mitmproxy/pulls/2755
2018-01-04T14:59:46Z
2018-01-04T15:36:24Z
2018-01-04T15:36:24Z
2018-01-04T15:36:27Z
467
mitmproxy/mitmproxy
28,317
Update docs to reflect changes for Sentry 8.4
diff --git a/docs/cli/init/index.rst b/docs/cli/init/index.rst index ac54108f01087..ac0a0852d3a28 100644 --- a/docs/cli/init/index.rst +++ b/docs/cli/init/index.rst @@ -6,4 +6,5 @@ Initialize new configuration directory. Options ``````` +- ``--dev``: Use settings more conducive to local development. - ``--help``: print this help page. diff --git a/docs/inbound-mail.rst b/docs/inbound-mail.rst index f441f012b83c1..4d8627fbfa696 100644 --- a/docs/inbound-mail.rst +++ b/docs/inbound-mail.rst @@ -63,7 +63,7 @@ Add another supervisor config to run the Sentry ``smtp`` service:: [program:sentry-inbound-mail] directory=/www/sentry/ - command=/www/sentry/bin/sentry start smtp + command=/www/sentry/bin/sentry run smtp autostart=true autorestart=true stdout_logfile syslog diff --git a/docs/installation.rst b/docs/installation.rst index a7f35ffd7688b..b85090b197b94 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -300,11 +300,11 @@ get you off the ground quickly, also you can setup Sentry as WSGI application, in that case skip to section `Running Sentry as WSGI application`. -To start the built-in webserver run ``sentry start``: +To start the built-in webserver run ``sentry run web``: :: - SENTRY_CONF=/etc/sentry sentry start + SENTRY_CONF=/etc/sentry sentry run web You should now be able to test the web service by visiting `http://localhost:9000/`. diff --git a/docs/performance.rst b/docs/performance.rst index e505c3fd692c3..0c53ca8d56851 100644 --- a/docs/performance.rst +++ b/docs/performance.rst @@ -64,7 +64,7 @@ many cores you have on the machine. You can do this either by editing or can be passed through the command line as:: - $ sentry start -w 16 + $ sentry run web -w 16 See `uWSGI's official documentation <https://uwsgi-docs.readthedocs.org/en/latest/Options.html>`_ for more options that can be configured in ``SENTRY_WEB_OPTIONS``. @@ -103,14 +103,14 @@ e.g. if you had something like: ``` numprocs=1 -command=celery worker -c 64 +command=sentry celery worker -c 64 ``` change it to: ``` numprocs=16 -command=celery worker -c 4 +command=sentry celery worker -c 4 ``` diff --git a/docs/upgrading.rst b/docs/upgrading.rst index 7f81f30585529..1f115bebd0434 100644 --- a/docs/upgrading.rst +++ b/docs/upgrading.rst @@ -60,7 +60,7 @@ code changes will not be reflected until a restart. These services include: -- webserver -- ``sentry start`` +- webserver -- ``sentry run web`` - celery workers -- ``sentry celery worker`` - celery beat (cron) -- ``sentry celery beat`` diff --git a/docs/warnings.rst b/docs/warnings.rst index 512e5c15f878c..a7d6bace851b5 100644 --- a/docs/warnings.rst +++ b/docs/warnings.rst @@ -23,7 +23,7 @@ Now, ``SENTRY_CONF`` should be pointed to the parent directory that contains bot the python file and the yaml file. ``sentry init`` will generate the right structure needed for the future.:: - $ SENTRY_CONF=/etc/sentry sentry start + $ SENTRY_CONF=/etc/sentry sentry run web The following will be a simple mapping of old (``sentry.conf.py``) keys to new (``config.yml``). Old settings should be completely removed.
@getsentry/infrastructure
https://api.github.com/repos/getsentry/sentry/pulls/3149
2016-04-29T23:16:55Z
2016-05-02T17:22:26Z
2016-05-02T17:22:26Z
2020-12-23T16:53:24Z
960
getsentry/sentry
44,320
Add Subaru gearShifter to carstate
diff --git a/selfdrive/car/subaru/carstate.py b/selfdrive/car/subaru/carstate.py index a6cd6e9f319966..669624519e303f 100644 --- a/selfdrive/car/subaru/carstate.py +++ b/selfdrive/car/subaru/carstate.py @@ -1,5 +1,6 @@ import copy from cereal import car +from opendbc.can.can_define import CANDefine from selfdrive.config import Conversions as CV from selfdrive.car.interfaces import CarStateBase from opendbc.can.parser import CANParser @@ -11,6 +12,8 @@ def __init__(self, CP): super().__init__(CP) self.left_blinker_cnt = 0 self.right_blinker_cnt = 0 + can_define = CANDefine(DBC[CP.carFingerprint]['pt']) + self.shifter_values = can_define.dv["Transmission"]['Gear'] def update(self, cp, cp_cam): ret = car.CarState.new_message() @@ -35,6 +38,9 @@ def update(self, cp, cp_cam): self.right_blinker_cnt = 50 if cp.vl["Dashlights"]['RIGHT_BLINKER'] else max(self.right_blinker_cnt - 1, 0) ret.rightBlinker = self.right_blinker_cnt > 0 + can_gear = int(cp.vl["Transmission"]['Gear']) + ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(can_gear, None)) + ret.steeringAngle = cp.vl["Steering_Torque"]['Steering_Angle'] ret.steeringTorque = cp.vl["Steering_Torque"]['Steer_Torque_Sensor'] ret.steeringPressed = abs(ret.steeringTorque) > STEER_THRESHOLD[self.car_fingerprint] @@ -80,6 +86,7 @@ def get_can_parser(CP): ("DOOR_OPEN_RR", "BodyInfo", 1), ("DOOR_OPEN_RL", "BodyInfo", 1), ("Units", "Dash_State", 1), + ("Gear", "Transmission", 0), ] checks = [ diff --git a/selfdrive/car/subaru/interface.py b/selfdrive/car/subaru/interface.py index 353846634c978e..5cc3440997a654 100644 --- a/selfdrive/car/subaru/interface.py +++ b/selfdrive/car/subaru/interface.py @@ -66,7 +66,7 @@ def update(self, c, can_strings): be.type = car.CarState.ButtonEvent.Type.accelCruise buttonEvents.append(be) - events = self.create_common_events(ret, extra_gears=[car.CarState.GearShifter.unknown]) + events = self.create_common_events(ret) if ret.cruiseState.enabled and not self.cruise_enabled_prev: events.append(create_event('pcmEnable', [ET.ENABLE])) diff --git a/selfdrive/test/process_replay/ref_commit b/selfdrive/test/process_replay/ref_commit index f21a4476eab258..022d6d368f1d71 100644 --- a/selfdrive/test/process_replay/ref_commit +++ b/selfdrive/test/process_replay/ref_commit @@ -1 +1 @@ -63ee3eaab23b45821a3a04888ae7506ec27dedb3 +852b5b42981cf17a18c7eebc9b501db2f5b0c33b
This PR adds gearShifter to Subaru carstate. Prerequisite to merging is opendbc PR https://github.com/commaai/opendbc/pull/221, which adds the Transmission message and Gear values to subaru_global_2017.dbc. Tested and working on a 2018 Crosstrek.
https://api.github.com/repos/commaai/openpilot/pulls/1318
2020-04-05T18:42:32Z
2020-04-07T18:55:18Z
2020-04-07T18:55:18Z
2020-05-11T23:11:07Z
779
commaai/openpilot
9,774
kodi fanart fix (basic auth)
diff --git a/homeassistant/components/media_player/kodi.py b/homeassistant/components/media_player/kodi.py index 68161deea2f95c..9676fe451c75d0 100644 --- a/homeassistant/components/media_player/kodi.py +++ b/homeassistant/components/media_player/kodi.py @@ -76,11 +76,17 @@ def __init__(self, name, url, auth=None, turn_off_action=None): import jsonrpc_requests self._name = name self._url = url + self._basic_auth_url = None kwargs = {'timeout': 5} if auth is not None: kwargs['auth'] = auth + scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url) + self._basic_auth_url = \ + urllib.parse.urlunsplit((scheme, '{}:{}@{}'.format + (auth[0], auth[1], netloc), + path, query, fragment)) self._server = jsonrpc_requests.Server( '{}/jsonrpc'.format(self._url), **kwargs) @@ -195,6 +201,11 @@ def _get_image_url(self): url_components = urllib.parse.urlparse(self._item['thumbnail']) if url_components.scheme == 'image': + if self._basic_auth_url is not None: + return '{}/image/{}'.format( + self._basic_auth_url, + urllib.parse.quote_plus(self._item['thumbnail'])) + return '{}/image/{}'.format( self._url, urllib.parse.quote_plus(self._item['thumbnail']))
**Description:** Was unable to retrieve the fanart when a username and password were used. Another URL had to be used which included the username/password.

**Related issue (if applicable):** fixes #3383

**Pull request in [home-assistant.github.io](https://github.com/home-assistant/home-assistant.github.io) with documentation (if applicable):** home-assistant/home-assistant.github.io#<home-assistant.github.io PR number goes here>

**Example entry for `configuration.yaml` (if applicable):**
```yaml
```

**Checklist:**

If user exposed functionality or configuration variables are added/changed:
- [n/a] Documentation added/updated in [home-assistant.github.io](https://github.com/home-assistant/home-assistant.github.io)

If the code communicates with devices, web services, or third-party tools:
- [x] Local tests with `tox` run successfully. **Your PR cannot be merged unless tests pass**
- [n/a] New dependencies have been added to the `REQUIREMENTS` variable ([example][ex-requir]).
- [n/a] New dependencies are only imported inside functions that use them ([example][ex-import]).
- [n/a] New dependencies have been added to `requirements_all.txt` by running `script/gen_requirements_all.py`.
- [n/a] New files were added to `.coveragerc`.

If the code does not interact with devices:
- [x] Local tests with `tox` run successfully. **Your PR cannot be merged unless tests pass**
- [n/a] Tests have been added to verify that the new code works.

[ex-requir]: https://github.com/home-assistant/home-assistant/blob/dev/homeassistant/components/keyboard.py#L16
[ex-import]: https://github.com/home-assistant/home-assistant/blob/dev/homeassistant/components/keyboard.py#L51
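The URL rewrite the patch performs can be reproduced standalone (host and credentials below are illustrative, not from the PR):

```python
import urllib.parse

url = "http://192.168.1.10:8080"  # illustrative Kodi endpoint
auth = ("kodi", "secret")         # illustrative credentials

scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
basic_auth_url = urllib.parse.urlunsplit(
    (scheme, "{}:{}@{}".format(auth[0], auth[1], netloc), path, query, fragment)
)
assert basic_auth_url == "http://kodi:secret@192.168.1.10:8080"
```

Thumbnail URLs are then built against `basic_auth_url`, so the fanart request carries the credentials that the plain `self._url` lacked.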
https://api.github.com/repos/home-assistant/core/pulls/4930
2016-12-15T20:24:36Z
2016-12-16T05:35:01Z
2016-12-16T05:35:01Z
2017-03-17T16:18:13Z
351
home-assistant/core
39,381
Update run_code.py
diff --git a/metagpt/actions/run_code.py b/metagpt/actions/run_code.py index 1bc5cc13a..f14a6a8e7 100644 --- a/metagpt/actions/run_code.py +++ b/metagpt/actions/run_code.py @@ -27,7 +27,7 @@ Determine the ONE file to rewrite in order to fix the error, for example, xyz.py, or test_xyz.py ## Status: Determine if all of the code works fine, if so write PASS, else FAIL, -WRITE ONLY ONE WORD, PASS OR FAIL, IN THI SECTION +WRITE ONLY ONE WORD, PASS OR FAIL, IN THIS SECTION ## Send To: Please write Engineer if the errors are due to problematic development codes, and QaEngineer to problematic test codes, and NoOne if there are no errors, WRITE ONLY ONE WORD, Engineer OR QaEngineer OR NoOne, IN THIS SECTION.
Typo
https://api.github.com/repos/geekan/MetaGPT/pulls/144
2023-08-07T15:46:36Z
2023-08-10T12:18:51Z
2023-08-10T12:18:51Z
2023-08-10T12:18:52Z
200
geekan/MetaGPT
16,942
I: Enforcement fixes/completion
diff --git a/CppCoreGuidelines.md b/CppCoreGuidelines.md index 8df991e15..53afef356 100644 --- a/CppCoreGuidelines.md +++ b/CppCoreGuidelines.md @@ -1664,7 +1664,7 @@ so the default is "no ownership transfer." * (Simple) Warn on `delete` of a raw pointer that is not an `owner`. * (Simple) Warn on failure to either `reset` or explicitly `delete` an `owner` pointer on every code path. -* (Simple) Warn if the return value of `new` or a function call with return value of pointer type is assigned to a raw pointer. +* (Simple) Warn if the return value of `new` or a function call with an `owner` return value is assigned to a raw pointer or non-`owner` reference. ### <a name="Ri-nullptr"></a>I.12: Declare a pointer that must not be null as `not_null` @@ -1927,7 +1927,7 @@ This will force every derived class to compute a center -- even if that's non-tr ##### Enforcement -(Simple) Warn if a pointer to a class `C` is assigned to a pointer to a base of `C` and the base class contains data members. +(Simple) Warn if a pointer/reference to a class `C` is assigned to a pointer/reference to a base of `C` and the base class contains data members. ### <a name="Ri-abi"></a>I.26: If you want a cross-compiler ABI, use a C-style subset
https://api.github.com/repos/isocpp/CppCoreGuidelines/pulls/825
2017-01-01T01:02:32Z
2017-01-30T19:19:56Z
2017-01-30T19:19:56Z
2017-01-31T01:39:57Z
353
isocpp/CppCoreGuidelines
15,738
Watch all page scripts when installing local_source_watchers
diff --git a/lib/streamlit/watcher/local_sources_watcher.py b/lib/streamlit/watcher/local_sources_watcher.py index 97373230d4e4..0cf7a304cc03 100644 --- a/lib/streamlit/watcher/local_sources_watcher.py +++ b/lib/streamlit/watcher/local_sources_watcher.py @@ -24,6 +24,7 @@ from streamlit.logger import get_logger from streamlit.session_data import SessionData +from streamlit.source_util import get_pages from streamlit.watcher.file_watcher import ( get_default_file_watcher_class, NoOpFileWatcher, @@ -51,10 +52,11 @@ def __init__(self, session_data: SessionData): self._watched_modules: Dict[str, WatchedModule] = {} - self._register_watcher( - self._session_data.main_script_path, - module_name=None, # Only the root script has None here. - ) + for page_info in get_pages(self._session_data.main_script_path): + self._register_watcher( + page_info["script_path"], + module_name=None, # Only root scripts have their modules set to None + ) def register_file_change_callback(self, cb: Callable[[], None]) -> None: self._on_file_changed.append(cb) diff --git a/lib/tests/streamlit/watcher/local_sources_watcher_test.py b/lib/tests/streamlit/watcher/local_sources_watcher_test.py index a3d6bc8679be..d739fd2482b5 100644 --- a/lib/tests/streamlit/watcher/local_sources_watcher_test.py +++ b/lib/tests/streamlit/watcher/local_sources_watcher_test.py @@ -290,6 +290,26 @@ def test_namespace_package_unloaded(self, fob, _): del sys.modules["tests.streamlit.watcher.test_data.namespace_package"] + @patch( + "streamlit.watcher.local_sources_watcher.get_pages", + MagicMock( + return_value=[ + {"page_name": "streamlit_app", "script_path": "streamlit_app.py"}, + {"page_name": "streamlit_app2", "script_path": "streamlit_app2.py"}, + ] + ), + ) + @patch("streamlit.watcher.local_sources_watcher.FileWatcher") + def test_watches_all_page_scripts(self, fob, _): + lsw = local_sources_watcher.LocalSourcesWatcher(REPORT) + lsw.register_file_change_callback(NOOP_CALLBACK) + + args1, _ = fob.call_args_list[0] + args2, _ = fob.call_args_list[1] + + assert args1[0] == "streamlit_app.py" + assert args2[0] == "streamlit_app2.py" + def test_get_module_paths_outputs_abs_paths(): mock_module = MagicMock()
## 📚 Context

In the multipage apps world, we will often have >1 "root" script to keep track of. This PR simply changes `local_sources_watcher` to watch the script file for each page returned by `get_pages`.

- What kind of change does this PR introduce?
  - [x] Feature

## 🧠 Description of Changes

- [x] This is a visible (user-facing) change

## 🧪 Testing Done

- [x] Added/Updated unit tests

## 🌐 References

- **Issue**: Closes (https://github.com/streamlit/streamlit-issues/issues/350)
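For reference, a sketch of the data shape involved, taken from the mocked `get_pages` in the new unit test (`print` stands in for the private `_register_watcher`):

```python
# Each page dict carries the script path the watcher must register.
pages = [
    {"page_name": "streamlit_app", "script_path": "streamlit_app.py"},
    {"page_name": "streamlit_app2", "script_path": "streamlit_app2.py"},
]
for page_info in pages:
    print("watching", page_info["script_path"])
```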
https://api.github.com/repos/streamlit/streamlit/pulls/4481
2022-03-07T22:52:20Z
2022-03-09T00:05:13Z
2022-03-09T00:05:13Z
2022-03-09T00:05:16Z
635
streamlit/streamlit
21,915
Spaces have a compatible method which determines if 2 Spaces are of t…
diff --git a/gym/core.py b/gym/core.py index f0204e9dedb..5bbb1250f48 100644 --- a/gym/core.py +++ b/gym/core.py @@ -184,6 +184,24 @@ def from_jsonable(self, sample_n): # By default, assume identity is JSONable return sample_n + def compatible(self, space): + """ + Return boolean specifying if space is compatible with this Space + (equal shape structure, ignoring bounds). None matches any Space. + """ + # allow None to match with any space + if space is None: + return True + + # compare classes + if type(self) != type(space): + return False + + # TODO - compare dtypes? + + # compare shapes + return self.shape == space.shape + warn_once = True diff --git a/gym/spaces/dict_space.py b/gym/spaces/dict_space.py index 1c0d90af183..2ee1f929489 100644 --- a/gym/spaces/dict_space.py +++ b/gym/spaces/dict_space.py @@ -70,3 +70,19 @@ def from_jsonable(self, sample_n): entry[key] = value[i] ret.append(entry) return ret + + def compatible(self, space): + if not super(Dict, self).compatible(space): + return False + + # compare each subspace + for k, subspace_x in self.spaces.items(): + subspace_y = space.spaces[k] + + # allow None to match any Space + if subspace_x is None or subspace_y is None: + continue + + if not subspace_x.compatible(subspace_y): + return False + return True diff --git a/gym/spaces/tests/test_compatible_spaces.py b/gym/spaces/tests/test_compatible_spaces.py new file mode 100644 index 00000000000..2aa32f0f4c8 --- /dev/null +++ b/gym/spaces/tests/test_compatible_spaces.py @@ -0,0 +1,56 @@ +import pytest +import numpy as np +import copy +import random +from gym.spaces import Box, Dict, Tuple + + +def test_compatibility(): + # create all testable spaces + spaces = [ + Box(-1.0, 1.0, (20, ), np.float32), + Box(-1.0, 1.0, (40, ), np.float32), + Box(-1.0, 1.0, (20, 20), np.float32), + Box(-1.0, 1.0, (20, 24), np.float32), + Dict({'A': Box(-1.0, 1.0, (20, ), np.float32), 'B': Box(-1.0, 1.0, (20, ), np.float32)}), + Dict({'A': Box(-1.0, 1.0, (20, ), np.float32), 'B': Box(-1.0, 1.0, (40, ), np.float32)}), + Dict({'A': Box(-1.0, 1.0, (40, ), np.float32), 'B': Box(-1.0, 1.0, (20, ), np.float32)}), + Tuple([Box(-1.0, 1.0, (20, ), np.float32), Box(-1.0, 1.0, (20, ), np.float32)]), + Tuple([Box(-1.0, 1.0, (40, ), np.float32), Box(-1.0, 1.0, (20, ), np.float32)]), + Tuple([Box(-1.0, 1.0, (20, ), np.float32), Box(-1.0, 1.0, (40, ), np.float32)]), + ] + + # iterate and compare all combinations + spaces_range = range(len(spaces)) + for x in spaces_range: + space1 = spaces[x] + for y in spaces_range: + # create copy of Space with random bounds + space2 = randomize_space_bounds(spaces[y]) + + expected_compatible = x == y + actual_compatible = space1.compatible(space2) + assert expected_compatible == actual_compatible + +def randomize_space_bounds(space): + # copy space + space = copy.copy(space) + + # check if space contain subspaces + if hasattr(space, "spaces"): + # compare each sub-Space + subspaces = space.spaces + if hasattr(subspaces, "keys"): + iterable = subspaces.keys() + else: + iterable = range(len(subspaces)) + for k in iterable: + space.spaces[k] = randomize_space_bounds(space.spaces[k]) + + # randomize bounds + if hasattr(space, "low"): + space.low = random.uniform(-5.0, 5.0) + if hasattr(space, "high"): + space.high = random.uniform(-5.0, 5.0) + + return space diff --git a/gym/spaces/tuple_space.py b/gym/spaces/tuple_space.py index 453663781f4..74793a58571 100644 --- a/gym/spaces/tuple_space.py +++ b/gym/spaces/tuple_space.py @@ -30,3 +30,20 @@ def to_jsonable(self, sample_n): def 
from_jsonable(self, sample_n): return [sample for sample in zip(*[space.from_jsonable(sample_n[i]) for i, space in enumerate(self.spaces)])] + + def compatible(self, space): + if not super(Tuple, self).compatible(space): + return False + + # compare each subspace + for i in range(len(self.spaces)): + subspace_x = self.spaces[i] + subspace_y = space.spaces[i] + + # allow None to match any Space + if subspace_x is None or subspace_y is None: + continue + + if not subspace_x.compatible(subspace_y): + return False + return True
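A minimal usage sketch of the `compatible` method added in this diff (the space shapes are illustrative):

```python
import numpy as np
from gym.spaces import Box, Dict

a = Box(-1.0, 1.0, (20,), np.float32)
b = Box(-5.0, 5.0, (20,), np.float32)   # same shape, different bounds
c = Box(-1.0, 1.0, (40,), np.float32)   # different shape

assert a.compatible(b)        # bounds are ignored
assert not a.compatible(c)    # shapes differ
assert a.compatible(None)     # None matches any Space

d1 = Dict({'obs': Box(-1.0, 1.0, (20,), np.float32)})
d2 = Dict({'obs': Box(0.0, 9.0, (20,), np.float32)})
assert d1.compatible(d2)      # Dict.compatible recurses into subspaces
```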
https://api.github.com/repos/openai/gym/pulls/871
2018-02-08T18:49:47Z
2018-02-08T20:53:48Z
2018-02-08T20:53:48Z
2018-02-08T20:56:56Z
1,441
openai/gym
5,529
fix in "Layer.compute_output_shape" description
diff --git a/keras/engine/base_layer.py b/keras/engine/base_layer.py index 0482fe7bb52..3dac15be7e4 100644 --- a/keras/engine/base_layer.py +++ b/keras/engine/base_layer.py @@ -580,7 +580,7 @@ def compute_output_shape(self, input_shape): instead of an integer. # Returns - An input shape tuple. + An output shape tuple. """ return input_shape
### Summary Fixed a logical error in the description of the `Layer.compute_output_shape` method: the docstring said it returns an input shape tuple, but it returns an output shape tuple. ### PR Overview - [n] This PR requires new unit tests - [n] This PR requires updating the documentation - [y] This PR is backwards compatible - [n] This PR changes the current API (all API changes need to be approved by fchollet)
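To see why the wording matters, here is a minimal sketch (the layer name is illustrative) of a custom Keras layer whose output shape genuinely differs from its input shape:

```python
import keras.backend as K
from keras.layers import Layer

class Doubler(Layer):
    """Concatenates the input with itself, doubling the last dimension."""

    def call(self, inputs):
        return K.concatenate([inputs, inputs], axis=-1)

    def compute_output_shape(self, input_shape):
        # The return value is the *output* shape tuple; here it differs
        # from input_shape, which is why the docstring fix matters.
        return input_shape[:-1] + (input_shape[-1] * 2,)
```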
https://api.github.com/repos/keras-team/keras/pulls/13210
2019-08-11T20:07:00Z
2019-09-11T20:39:40Z
2019-09-11T20:39:40Z
2019-09-11T20:39:40Z
112
keras-team/keras
47,778
[3.8] bpo-37819: Add Fraction.as_integer_ratio() (GH-15212)
diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst index b5a818e1cafa61..58e7126b0bf212 100644 --- a/Doc/library/fractions.rst +++ b/Doc/library/fractions.rst @@ -94,6 +94,13 @@ another rational number, or from a string. Denominator of the Fraction in lowest term. + .. method:: as_integer_ratio() + + Return a tuple of two integers, whose ratio is equal + to the Fraction and with a positive denominator. + + .. versionadded:: 3.8 + .. method:: from_float(flt) This class method constructs a :class:`Fraction` representing the exact diff --git a/Lib/fractions.py b/Lib/fractions.py index 7443bd3e0c6af9..e774d58e403539 100644 --- a/Lib/fractions.py +++ b/Lib/fractions.py @@ -216,6 +216,14 @@ def from_decimal(cls, dec): (cls.__name__, dec, type(dec).__name__)) return cls(*dec.as_integer_ratio()) + def as_integer_ratio(self): + """Return the integer ratio as a tuple. + + Return a tuple of two integers, whose ratio is equal to the + Fraction and with a positive denominator. + """ + return (self._numerator, self._denominator) + def limit_denominator(self, max_denominator=1000000): """Closest Fraction to self with denominator at most max_denominator. diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py index 27791622005149..18ab28cfebe0c8 100644 --- a/Lib/test/test_fractions.py +++ b/Lib/test/test_fractions.py @@ -302,6 +302,12 @@ def testFromDecimal(self): ValueError, "cannot convert NaN to integer ratio", F.from_decimal, Decimal("snan")) + def test_as_integer_ratio(self): + self.assertEqual(F(4, 6).as_integer_ratio(), (2, 3)) + self.assertEqual(F(-4, 6).as_integer_ratio(), (-2, 3)) + self.assertEqual(F(4, -6).as_integer_ratio(), (-2, 3)) + self.assertEqual(F(0, 6).as_integer_ratio(), (0, 1)) + def testLimitDenominator(self): rpi = F('3.1415926535897932') self.assertEqual(rpi.limit_denominator(10000), F(355, 113)) diff --git a/Misc/NEWS.d/next/Library/2019-08-11-10-34-19.bpo-37819.LVJls-.rst b/Misc/NEWS.d/next/Library/2019-08-11-10-34-19.bpo-37819.LVJls-.rst new file mode 100644 index 00000000000000..cfc1f1afb4f765 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-08-11-10-34-19.bpo-37819.LVJls-.rst @@ -0,0 +1,2 @@ +Add Fraction.as_integer_ratio() to match the corresponding methods in bool, +int, float, and decimal.
(cherry picked from commit f03b4c8a48f62134799d368b78da35301af466a3) Co-authored-by: Raymond Hettinger <rhettinger@users.noreply.github.com> https://bugs.python.org/issue37819
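A quick usage sketch of the new method (Python 3.8+), following the tests in this diff:

```python
from fractions import Fraction

# Reduced to lowest terms with a positive denominator.
assert Fraction(4, 6).as_integer_ratio() == (2, 3)
assert Fraction(4, -6).as_integer_ratio() == (-2, 3)
assert Fraction(0, 6).as_integer_ratio() == (0, 1)

# Matches the corresponding method already present on float.
assert (0.25).as_integer_ratio() == (1, 4)
```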
https://api.github.com/repos/python/cpython/pulls/15215
2019-08-11T21:41:09Z
2019-08-11T22:02:24Z
2019-08-11T22:02:24Z
2019-08-11T22:20:33Z
772
python/cpython
4,031
fixed download badge link
diff --git a/README.md b/README.md index 9d0b29af21..b257c333f0 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ <a href="https://coveralls.io/github/psf/black?branch=main"><img alt="Coverage Status" src="https://coveralls.io/repos/github/psf/black/badge.svg?branch=main"></a> <a href="https://github.com/psf/black/blob/main/LICENSE"><img alt="License: MIT" src="https://black.readthedocs.io/en/stable/_static/license.svg"></a> <a href="https://pypi.org/project/black/"><img alt="PyPI" src="https://img.shields.io/pypi/v/black"></a> -<a href="https://pepy.tech/project/black"><img alt="Downloads" src="https://pepy.tech/badge/black"></a> +<a href="https://pepy.tech/project/black"><img alt="Downloads" src="https://static.pepy.tech/badge/black"></a> <a href="https://anaconda.org/conda-forge/black/"><img alt="conda-forge" src="https://img.shields.io/conda/dn/conda-forge/black.svg?label=conda-forge"></a> <a href="https://github.com/psf/black"><img alt="Code style: black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a> </p>
### Description Fixed the broken Downloads badge link in the main README; a screenshot of the broken badge is in the linked [issue](https://github.com/psf/black/issues/3852). ### Checklist Changed the broken badge URL to the working one; not sure if it earns a place in CHANGES.md. - [ ] Add an entry in `CHANGES.md` if necessary? - [ ] Add / update tests if necessary? - [x] Add new / update outdated documentation?
https://api.github.com/repos/psf/black/pulls/3853
2023-08-22T19:24:55Z
2023-08-22T19:40:10Z
2023-08-22T19:40:10Z
2023-08-22T19:54:32Z
338
psf/black
23,940
Add ops.is_tensor to the public API
diff --git a/keras/ops/core.py b/keras/ops/core.py index 84bf88dcefe..217fd2b5c2c 100644 --- a/keras/ops/core.py +++ b/keras/ops/core.py @@ -10,6 +10,7 @@ convert_to_tensor convert_to_numpy cond +is_tensor """ import numpy as np @@ -402,7 +403,7 @@ def unstack(x, num=None, axis=0): def shape(x): """Gets the shape of the tensor input. - Note: On the tensorflow backend, when `x` is a `tf.Tensor` with dynamic + Note: On the TensorFlow backend, when `x` is a `tf.Tensor` with dynamic shape, dimensions which are dynamic in the context of a compiled function will have a `tf.Tensor` value instead of a static integer value. @@ -629,3 +630,19 @@ def vectorized_map(function, elements) a single list of tensor arguments. """ return backend.core.vectorized_map(function, elements) + + +@keras_export("keras.ops.is_tensor") +def is_tensor(x): + """Check whether the given object is a tensor. + + Note: This checks for backend specific tensors so passing a TensorFlow + tensor would return `False` if your backend is PyTorch or JAX. + + Args: + x: A variable. + + Returns: + `True` if `x` is a tensor, otherwise `False`. + """ + return backend.core.is_tensor(x) diff --git a/keras/ops/core_test.py b/keras/ops/core_test.py index 5b2789cadf1..aefe8e52da0 100644 --- a/keras/ops/core_test.py +++ b/keras/ops/core_test.py @@ -411,3 +411,11 @@ def fn(elems): self.assertAllClose( backend.convert_to_numpy(output), 2 * np.ones((2, 3)) ) + + def test_is_tensor(self): + np_x = np.array([[1, 2, 3], [3, 2, 1]]) + x = backend.convert_to_tensor(np_x) + if backend.backend() != "numpy": + self.assertFalse(ops.is_tensor(np_x)) + self.assertTrue(ops.is_tensor(x)) + self.assertFalse(ops.is_tensor([1, 2, 3]))
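A short usage sketch mirroring the new test, assuming a working Keras 3 backend is installed:

```python
import numpy as np
from keras import backend, ops

x_np = np.ones((2, 3))
x = backend.convert_to_tensor(x_np)

assert ops.is_tensor(x)             # backend-native tensor
assert not ops.is_tensor([1, 2])    # plain Python list
# On non-numpy backends, NumPy arrays are *not* backend tensors:
if backend.backend() != "numpy":
    assert not ops.is_tensor(x_np)
```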
https://api.github.com/repos/keras-team/keras/pulls/18756
2023-11-09T22:59:27Z
2023-11-11T00:48:29Z
2023-11-11T00:48:29Z
2023-11-11T00:48:33Z
557
keras-team/keras
47,179
Remove "defaults to None" from docstrings of optional arguments
diff --git a/examples/research_projects/wav2vec2/run_asr.py b/examples/research_projects/wav2vec2/run_asr.py index 5e62cb504eb12..410d5c2d3a622 100755 --- a/examples/research_projects/wav2vec2/run_asr.py +++ b/examples/research_projects/wav2vec2/run_asr.py @@ -144,7 +144,7 @@ class Orthography: Args: do_lower_case (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether or not to accept lowercase input and lowercase the output when decoding. - vocab_file (:obj:`str`, `optional`, defaults to :obj:`None`): + vocab_file (:obj:`str`, `optional`): File containing the vocabulary. word_delimiter_token (:obj:`str`, `optional`, defaults to :obj:`"|"`): The token used for delimiting words; it needs to be in the vocabulary. @@ -152,7 +152,7 @@ class Orthography: Table to use with `str.translate()` when preprocessing text (e.g., "-" -> " "). words_to_remove (:obj:`Set[str]`, `optional`, defaults to :obj:`set()`): Words to remove when preprocessing text (e.g., "sil"). - untransliterator (:obj:`Callable[[str], str]`, `optional`, defaults to :obj:`None`): + untransliterator (:obj:`Callable[[str], str]`, `optional`): Function that untransliterates text back into native writing system. """ diff --git a/src/transformers/debug_utils.py b/src/transformers/debug_utils.py index 45384a80134ba..537f897b49f84 100644 --- a/src/transformers/debug_utils.py +++ b/src/transformers/debug_utils.py @@ -118,7 +118,7 @@ class DebugUnderflowOverflow: How many frames back to record trace_batch_nums(:obj:`List[int]`, `optional`, defaults to ``[]``): Which batch numbers to trace (turns detection off) - abort_after_batch_num (:obj:`int`, `optional`, defaults to :obj:`None`): + abort_after_batch_num (:obj:`int`, `optional`): Whether to abort after a certain batch number has finished """ diff --git a/src/transformers/modeling_tf_utils.py b/src/transformers/modeling_tf_utils.py index 4bf12af5573cf..16af519e2345e 100644 --- a/src/transformers/modeling_tf_utils.py +++ b/src/transformers/modeling_tf_utils.py @@ -1128,7 +1128,7 @@ def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so ``revision`` can be any identifier allowed by git. - mirror(:obj:`str`, `optional`, defaults to :obj:`None`): + mirror(:obj:`str`, `optional`): Mirror source to accelerate downloads in China. If you are from China and have an accessibility problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety. Please refer to the mirror site for more information. diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 4247f4c2a6dbd..ca8ae2267109d 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -975,7 +975,7 @@ def from_pretrained(cls, pretrained_model_name_or_path: Optional[Union[str, os.P The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a git-based system for storing models and other artifacts on huggingface.co, so ``revision`` can be any identifier allowed by git. - mirror(:obj:`str`, `optional`, defaults to :obj:`None`): + mirror(:obj:`str`, `optional`): Mirror source to accelerate downloads in China. If you are from China and have an accessibility problem, you can set this option to resolve it. Note that we do not guarantee the timeliness or safety. 
Please refer to the mirror site for more information. diff --git a/src/transformers/models/albert/tokenization_albert_fast.py b/src/transformers/models/albert/tokenization_albert_fast.py index cb817ddcc01fd..9aa18317042da 100644 --- a/src/transformers/models/albert/tokenization_albert_fast.py +++ b/src/transformers/models/albert/tokenization_albert_fast.py @@ -172,7 +172,7 @@ def build_inputs_with_special_tokens( Args: token_ids_0 (:obj:`List[int]`): List of IDs to which the special tokens will be added - token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): + token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. Returns: @@ -201,7 +201,7 @@ def create_token_type_ids_from_sequences( Args: token_ids_0 (:obj:`List[int]`): List of ids. - token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): + token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. Returns: diff --git a/src/transformers/models/big_bird/tokenization_big_bird_fast.py b/src/transformers/models/big_bird/tokenization_big_bird_fast.py index cbe2b74133165..e5b1e5bab0e28 100644 --- a/src/transformers/models/big_bird/tokenization_big_bird_fast.py +++ b/src/transformers/models/big_bird/tokenization_big_bird_fast.py @@ -152,7 +152,7 @@ def build_inputs_with_special_tokens( Args: token_ids_0 (:obj:`List[int]`): List of IDs to which the special tokens will be added - token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): + token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. Returns: @@ -174,7 +174,7 @@ def get_special_tokens_mask( Args: token_ids_0 (:obj:`List[int]`): List of ids. - token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): + token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. already_has_special_tokens (:obj:`bool`, `optional`, defaults to :obj:`False`): Set to True if the token list is already formatted with special tokens for the model @@ -212,7 +212,7 @@ def create_token_type_ids_from_sequences( Args: token_ids_0 (:obj:`List[int]`): List of ids. - token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): + token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. Returns: diff --git a/src/transformers/models/ibert/quant_modules.py b/src/transformers/models/ibert/quant_modules.py index 065a3fef6144d..d1da18686abd3 100644 --- a/src/transformers/models/ibert/quant_modules.py +++ b/src/transformers/models/ibert/quant_modules.py @@ -124,7 +124,7 @@ class QuantAct(nn.Module): Momentum for updating the activation quantization range. per_channel (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether to or not use channel-wise quantization. - channel_len (:obj:`int`, `optional`, defaults to :obj:`None`): + channel_len (:obj:`int`, `optional`): Specify the channel length when set the `per_channel` True. quant_mode (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether or not the layer is quantized. @@ -755,9 +755,9 @@ class FixedPointMul(Function): Quantization bitwidth. z_scaling_factor (:obj:`torch.Tensor`): Scaling factor of the output tensor. - identity (:obj:`torch.Tensor`, `optional`, defaults to :obj:`None`): + identity (:obj:`torch.Tensor`, `optional`): Identity tensor, if exists. 
- identity_scaling_factor (:obj:`torch.Tensor`, `optional`, defaults to :obj:`None`): + identity_scaling_factor (:obj:`torch.Tensor`, `optional`): Scaling factor of the identity tensor `identity`, if exists. Returns: diff --git a/src/transformers/models/mpnet/modeling_mpnet.py b/src/transformers/models/mpnet/modeling_mpnet.py index f1327a8719762..90ba92242bc62 100644 --- a/src/transformers/models/mpnet/modeling_mpnet.py +++ b/src/transformers/models/mpnet/modeling_mpnet.py @@ -444,7 +444,7 @@ def forward(self, hidden_states): details. `What are input IDs? <../glossary.html#input-ids>`__ - attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`, defaults to :obj:`None`): + attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`): Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``: - 1 for tokens that are **not masked**, diff --git a/src/transformers/models/mpnet/tokenization_mpnet.py b/src/transformers/models/mpnet/tokenization_mpnet.py index 98af763ade64a..7bbefb4946430 100644 --- a/src/transformers/models/mpnet/tokenization_mpnet.py +++ b/src/transformers/models/mpnet/tokenization_mpnet.py @@ -235,7 +235,7 @@ def build_inputs_with_special_tokens( Args: token_ids_0 (:obj:`List[int]`): List of IDs to which the special tokens will be added - token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): + token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. Returns: diff --git a/src/transformers/models/xlm_prophetnet/tokenization_xlm_prophetnet.py b/src/transformers/models/xlm_prophetnet/tokenization_xlm_prophetnet.py index 9c2d90914a6d8..b2707f8dcb2a7 100644 --- a/src/transformers/models/xlm_prophetnet/tokenization_xlm_prophetnet.py +++ b/src/transformers/models/xlm_prophetnet/tokenization_xlm_prophetnet.py @@ -290,7 +290,7 @@ def build_inputs_with_special_tokens( Args: token_ids_0 (:obj:`List[int]`): List of IDs to which the special tokens will be added - token_ids_1 (:obj:`List[int]`, `optional`, defaults to :obj:`None`): + token_ids_1 (:obj:`List[int]`, `optional`): Optional second list of IDs for sequence pairs. Returns: diff --git a/src/transformers/pipelines/text2text_generation.py b/src/transformers/pipelines/text2text_generation.py index 96aaf3d19fb84..346f178bbc920 100644 --- a/src/transformers/pipelines/text2text_generation.py +++ b/src/transformers/pipelines/text2text_generation.py @@ -295,10 +295,10 @@ def __call__( Whether or not to include the decoded texts in the outputs. clean_up_tokenization_spaces (:obj:`bool`, `optional`, defaults to :obj:`False`): Whether or not to clean up the potential extra spaces in the text output. - src_lang (:obj:`str`, `optional`, defaults to :obj:`None`): + src_lang (:obj:`str`, `optional`): The language of the input. Might be required for multilingual models. Will not have any effect for single pair translation models - tgt_lang (:obj:`str`, `optional`, defaults to :obj:`None`): + tgt_lang (:obj:`str`, `optional`): The language of the desired output. Might be required for multilingual models. Will not have any effect for single pair translation models generate_kwargs:
PR to fix #11687
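For reference, the resulting docstring convention, as a schematic sketch (the method body is illustrative, not the real implementation):

```python
from typing import List, Optional

def build_inputs_with_special_tokens(
    token_ids_0: List[int], token_ids_1: Optional[List[int]] = None
) -> List[int]:
    """
    Args:
        token_ids_0 (:obj:`List[int]`):
            List of IDs to which the special tokens will be added.
        token_ids_1 (:obj:`List[int]`, `optional`):
            Optional second list of IDs for sequence pairs.
    """
    # "`optional`" already implies the None default, so the docstring
    # no longer repeats "defaults to :obj:`None`".
    return token_ids_0 if token_ids_1 is None else token_ids_0 + token_ids_1
```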
https://api.github.com/repos/huggingface/transformers/pulls/11703
2021-05-12T12:30:51Z
2021-05-12T13:11:11Z
2021-05-12T13:11:11Z
2021-05-12T13:11:11Z
2,929
huggingface/transformers
12,417
Fixed regexp to add Unicode support for strings
diff --git a/metagpt/actions/rebuild_sequence_view.py b/metagpt/actions/rebuild_sequence_view.py index 0e67de908..2aac9bf20 100644 --- a/metagpt/actions/rebuild_sequence_view.py +++ b/metagpt/actions/rebuild_sequence_view.py @@ -486,7 +486,7 @@ def parse_participant(mermaid_sequence_diagram: str) -> List[str]: Returns: List[str]: A list of participants extracted from the sequence diagram. """ - pattern = r"participant ([a-zA-Z\.0-9_]+)" + pattern = r"participant ([\w\.]+)" matches = re.findall(pattern, mermaid_sequence_diagram) matches = [re.sub(r"[\\/'\"]+", "", i) for i in matches] return matches
**Features** - Fixed the regexp to support Unicode in strings. **Other** Switching the character class to `[\w\.]` also improves readability.
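A small sketch of the behavior change (the diagram text is made up):

```python
import re

diagram = (
    "sequenceDiagram\n"
    "    participant Пользователь\n"
    "    participant métagpt.Engine\n"
)

# The old ASCII-only character class stops at the first non-ASCII character.
old = re.findall(r"participant ([a-zA-Z\.0-9_]+)", diagram)
# \w matches Unicode word characters in Python 3, so full names are captured.
new = re.findall(r"participant ([\w\.]+)", diagram)

print(old)  # ['m']
print(new)  # ['Пользователь', 'métagpt.Engine']
```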
https://api.github.com/repos/geekan/MetaGPT/pulls/1079
2024-03-22T13:02:23Z
2024-04-05T14:08:40Z
2024-04-05T14:08:40Z
2024-04-05T14:08:40Z
180
geekan/MetaGPT
16,974
Add NFS path check for default deepspeed triton cache directory
diff --git a/deepspeed/ops/transformer/inference/triton/matmul_ext.py b/deepspeed/ops/transformer/inference/triton/matmul_ext.py index d6f72b4efb0b..ad6b297ff20d 100644 --- a/deepspeed/ops/transformer/inference/triton/matmul_ext.py +++ b/deepspeed/ops/transformer/inference/triton/matmul_ext.py @@ -13,12 +13,36 @@ import deepspeed from pathlib import Path import atexit +import subprocess # ----------------------------------------------------------------------------- # util class/functions for triton +def is_nfs_path(path): + # Normalize the path to get the absolute path + path = os.path.abspath(path) + + # Use the 'df' command to find the file system type for the given path + try: + output = subprocess.check_output(['df', '-T', path], encoding='utf-8') + except subprocess.CalledProcessError: + return False # Command failed + + # Process the output of 'df -T' to check for 'nfs' in the filesystem type column + lines = output.strip().split('\n') + if len(lines) > 1: # The first line is headers + fs_type = lines[1].split()[1].lower() # File system type is the second column + return 'nfs' in fs_type + return False + + def _default_cache_dir(): - return os.path.join(Path.home(), ".triton", "autotune") + tmp_path = os.path.join(Path.home(), ".triton", "autotune") + if is_nfs_path(tmp_path): + print( + f"Warning: The default cache directory for DeepSpeed Triton autotune, {tmp_path}, appears to be on an NFS system. While this is generally acceptable, if you experience slowdowns or hanging when DeepSpeed exits, it is recommended to set the TRITON_CACHE_DIR environment variable to a non-NFS path." + ) + return tmp_path def bias_add_activation(C, bias=None, activation=""):
This PR adds an explanation and guidance for users whose default caching directory is on an NFS mount. Ref: https://github.com/microsoft/DeepSpeed/issues/5205
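A minimal sketch of the workaround the new warning recommends; the cache path is illustrative:

```python
import os

# Point the Triton autotune cache at local storage before DeepSpeed
# initializes, so process exit doesn't block on NFS file locks.
os.environ.setdefault("TRITON_CACHE_DIR", "/local/scratch/triton_autotune")

# Optionally verify with the helper this PR adds (module path per the diff):
from deepspeed.ops.transformer.inference.triton.matmul_ext import is_nfs_path
assert not is_nfs_path(os.environ["TRITON_CACHE_DIR"])
```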
https://api.github.com/repos/microsoft/DeepSpeed/pulls/5323
2024-03-28T00:27:22Z
2024-03-28T20:54:44Z
2024-03-28T20:54:44Z
2024-04-01T02:55:18Z
478
microsoft/DeepSpeed
10,475
#697: Encode expanded script on Python 2
diff --git a/tests/output_readers/test_rerun.py b/tests/output_readers/test_rerun.py index 632606c6f..1d3a2c82c 100644 --- a/tests/output_readers/test_rerun.py +++ b/tests/output_readers/test_rerun.py @@ -30,6 +30,11 @@ def test_get_output_invalid_continuation_byte(self, popen_mock): actual = rerun.get_output('', '') assert actual == expected + @patch('thefuck.output_readers.rerun._wait_output') + def test_get_output_unicode_misspell(self, wait_output_mock): + rerun.get_output(u'pácman', u'pácman') + wait_output_mock.assert_called_once() + def test_wait_output_is_slow(self, settings): assert rerun._wait_output(Mock(), True) self.proc_mock.wait.assert_called_once_with(settings.wait_slow_command) diff --git a/thefuck/output_readers/read_log.py b/thefuck/output_readers/read_log.py index 4da63e04a..0224a0dd3 100644 --- a/thefuck/output_readers/read_log.py +++ b/thefuck/output_readers/read_log.py @@ -40,6 +40,9 @@ def _group_by_calls(log): def _get_script_group_lines(grouped, script): + if six.PY2: + script = script.encode('utf-8') + parts = shlex.split(script) for script_line, lines in reversed(grouped): diff --git a/thefuck/output_readers/rerun.py b/thefuck/output_readers/rerun.py index b50c0d3c5..b7ffe249d 100644 --- a/thefuck/output_readers/rerun.py +++ b/thefuck/output_readers/rerun.py @@ -1,5 +1,6 @@ import os import shlex +import six from subprocess import Popen, PIPE, STDOUT from psutil import AccessDenied, Process, TimeoutExpired from .. import logs @@ -53,6 +54,9 @@ def get_output(script, expanded): env = dict(os.environ) env.update(settings.env) + if six.PY2: + expanded = expanded.encode('utf-8') + split_expand = shlex.split(expanded) is_slow = split_expand[0] in settings.slow_commands if split_expand else False with logs.debug_time(u'Call: {}; with env: {}; is slow: {}'.format(
Fix #697
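A sketch of the underlying issue the guard works around (the command string is made up):

```python
import shlex
import six

def safe_split(script):
    # Python 2's shlex mangles (or raises on) non-ASCII unicode input,
    # so encode to UTF-8 first, the same guard this PR adds.
    if six.PY2:
        script = script.encode('utf-8')
    return shlex.split(script)

print(safe_split(u'pácman -S vim'))  # ['pácman', '-S', 'vim'] (bytes on PY2)
```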
https://api.github.com/repos/nvbn/thefuck/pulls/1224
2021-08-02T18:24:04Z
2021-08-17T13:40:57Z
2021-08-17T13:40:56Z
2021-08-17T13:41:00Z
561
nvbn/thefuck
30,907
fix for cpp
diff --git a/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt index a19c8ee335..6e5cecf632 100644 --- a/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt +++ b/test_tipc/configs/ch_ppocr_mobile_v2.0_det/model_linux_gpu_normal_normal_serving_python_linux_gpu_cpu.txt @@ -15,4 +15,4 @@ op.det.local_service_conf.thread_num:1|6 op.det.local_service_conf.use_trt:False|True op.det.local_service_conf.precision:fp32|fp16|int8 pipline:pipeline_rpc_client.py|pipeline_http_client.py ---image_dir:../../doc/imgs \ No newline at end of file +--image_dir:../../doc/imgs diff --git a/test_tipc/prepare.sh b/test_tipc/prepare.sh index d152ef29d0..8876157ef8 100644 --- a/test_tipc/prepare.sh +++ b/test_tipc/prepare.sh @@ -201,8 +201,11 @@ fi if [ ${MODE} = "serving_infer" ];then # prepare serving env - python_name=$(func_parser_value "${lines[2]}") - wget https://paddle-serving.bj.bcebos.com/chain/paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl + python_name_list=$(func_parser_value "${lines[2]}") + IFS='|' + array=(${python_name_list}) + python_name=${array[0]} + wget -nc https://paddle-serving.bj.bcebos.com/chain/paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl ${python_name} -m pip install install paddle_serving_server_gpu-0.0.0.post101-py3-none-any.whl ${python_name} -m pip install paddle_serving_client==0.6.1 ${python_name} -m pip install paddle-serving-app==0.6.3 diff --git a/test_tipc/test_serving.sh b/test_tipc/test_serving.sh index c36935a60f..1318d012d4 100644 --- a/test_tipc/test_serving.sh +++ b/test_tipc/test_serving.sh @@ -10,7 +10,7 @@ lines=(${dataline}) # parser serving model_name=$(func_parser_value "${lines[1]}") -python=$(func_parser_value "${lines[2]}") +python_list=$(func_parser_value "${lines[2]}") trans_model_py=$(func_parser_value "${lines[3]}") infer_model_dir_key=$(func_parser_key "${lines[4]}") infer_model_dir_value=$(func_parser_value "${lines[4]}") @@ -54,14 +54,15 @@ function func_serving(){ set_serving_server=$(func_set_params "${serving_server_key}" "${serving_server_value}") set_serving_client=$(func_set_params "${serving_client_key}" "${serving_client_value}") set_image_dir=$(func_set_params "${image_dir_key}" "${image_dir_value}") - trans_model_cmd="${python} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}" + python_list=(${python_list}) + trans_model_cmd="${python_list[0]} ${trans_model_py} ${set_dirname} ${set_model_filename} ${set_params_filename} ${set_serving_server} ${set_serving_client}" eval $trans_model_cmd cd ${serving_dir_value} echo $PWD unset https_proxy unset http_proxy - for python in ${python[*]}; do - if [ ${python} = "cpp"]; then + for python in ${python_list[*]}; do + if [ ${python} = "cpp" ]; then for use_gpu in ${web_use_gpu_list[*]}; do if [ ${use_gpu} = "null" ]; then web_service_cpp_cmd="${python} -m paddle_serving_server.serve --model ppocr_det_mobile_2.0_serving/ ppocr_rec_mobile_2.0_serving/ --port 9293" @@ -91,9 +92,6 @@ function func_serving(){ echo ${ues_gpu} if [ ${use_gpu} = "null" ]; then for use_mkldnn in ${web_use_mkldnn_list[*]}; do - if [ ${use_mkldnn} = "False" ]; then - continue - fi for threads in ${web_cpu_threads_list[*]}; do set_cpu_threads=$(func_set_params "${web_cpu_threads_key}" "${threads}") web_service_cmd="${python} 
${web_service_py} ${web_use_gpu_key}=${use_gpu} ${web_use_mkldnn_key}=${use_mkldnn} ${set_cpu_threads} &" @@ -124,6 +122,9 @@ function func_serving(){ continue fi set_tensorrt=$(func_set_params "${web_use_trt_key}" "${use_trt}") + if [ ${use_trt} = True ]; then + device_type=2 + fi set_precision=$(func_set_params "${web_precision_key}" "${precision}") web_service_cmd="${python} ${web_service_py} ${web_use_gpu_key}=${use_gpu} ${set_tensorrt} ${set_precision} & " eval $web_service_cmd
att
https://api.github.com/repos/PaddlePaddle/PaddleOCR/pulls/4733
2021-11-23T11:03:12Z
2021-12-02T06:29:32Z
2021-12-02T06:29:32Z
2021-12-02T06:29:36Z
1,267
PaddlePaddle/PaddleOCR
41,822
Change links to SQLAlchemy 1.4
diff --git a/airflow/settings.py b/airflow/settings.py index 43dd84eee8bef..a278316d5aadf 100644 --- a/airflow/settings.py +++ b/airflow/settings.py @@ -311,7 +311,7 @@ def prepare_engine_args(disable_connection_pool=False, pool_class=None): # Typically, this is a simple statement like "SELECT 1", but may also make use # of some DBAPI-specific method to test the connection for liveness. # More information here: - # https://docs.sqlalchemy.org/en/13/core/pooling.html#disconnect-handling-pessimistic + # https://docs.sqlalchemy.org/en/14/core/pooling.html#disconnect-handling-pessimistic pool_pre_ping = conf.getboolean("database", "SQL_ALCHEMY_POOL_PRE_PING", fallback=True) log.debug( diff --git a/docs/apache-airflow/howto/set-up-database.rst b/docs/apache-airflow/howto/set-up-database.rst index 870c9edfb21d7..d9cf82b756980 100644 --- a/docs/apache-airflow/howto/set-up-database.rst +++ b/docs/apache-airflow/howto/set-up-database.rst @@ -217,7 +217,7 @@ If you use a current Postgres user with custom search_path, search_path can be c ALTER USER airflow_user SET search_path = public; -For more information regarding setup of the PostgreSQL connection, see `PostgreSQL dialect <https://docs.sqlalchemy.org/en/13/dialects/postgresql.html>`__ in SQLAlchemy documentation. +For more information regarding setup of the PostgreSQL connection, see `PostgreSQL dialect <https://docs.sqlalchemy.org/en/14/dialects/postgresql.html>`__ in SQLAlchemy documentation. .. note:: @@ -311,7 +311,7 @@ The connection string in this case should look like: mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname> -If you want to use other drivers visit the `MySQL Dialect <https://docs.sqlalchemy.org/en/13/dialects/mysql.html>`__ in SQLAlchemy documentation for more information regarding download +If you want to use other drivers visit the `MySQL Dialect <https://docs.sqlalchemy.org/en/14/dialects/mysql.html>`__ in SQLAlchemy documentation for more information regarding download and setup of the SqlAlchemy connection. In addition, you also should pay particular attention to MySQL's encoding. Although the ``utf8mb4`` character set is more and more popular for MySQL (actually, ``utf8mb4`` becomes default character set in MySQL8.0), using the ``utf8mb4`` encoding requires additional setting in Airflow 2+ (See more details in `#7570 <https://github.com/apache/airflow/pull/7570>`__.). If you use ``utf8mb4`` as character set, you should also set ``sql_engine_collation_for_ids=utf8mb3_bin``.
https://api.github.com/repos/apache/airflow/pulls/34288
2023-09-11T22:49:35Z
2023-09-12T05:59:45Z
2023-09-12T05:59:45Z
2023-09-12T20:49:21Z
663
apache/airflow
14,472
[1.1.x] Update release notes for 1.1.4
diff --git a/docs/news.rst b/docs/news.rst index 0e9efe6f6a8..723a33a823c 100644 --- a/docs/news.rst +++ b/docs/news.rst @@ -3,6 +3,12 @@ Release notes ============= +1.1.4 (2017-03-03) +------------------ + +- Packaging fix: disallow unsupported Twisted versions in setup.py + + 1.1.3 (2016-09-22) ------------------ @@ -275,6 +281,12 @@ Bugfixes to same remote host (:issue:`1912`). +1.0.7 (2017-03-03) +------------------ + +- Packaging fix: disallow unsupported Twisted versions in setup.py + + 1.0.6 (2016-05-04) ------------------
https://api.github.com/repos/scrapy/scrapy/pulls/2619
2017-03-03T18:24:31Z
2017-03-03T18:33:50Z
2017-03-03T18:33:50Z
2017-03-03T18:34:09Z
196
scrapy/scrapy
34,209
[openload] rewrite extractor
diff --git a/youtube_dl/YoutubeDL.py b/youtube_dl/YoutubeDL.py index eb465c425a3..033b50702c7 100755 --- a/youtube_dl/YoutubeDL.py +++ b/youtube_dl/YoutubeDL.py @@ -86,6 +86,7 @@ write_string, YoutubeDLCookieProcessor, YoutubeDLHandler, + PhantomJSwrapper, ) from .cache import Cache from .extractor import get_info_extractor, gen_extractor_classes, _LAZY_LOADER @@ -2146,6 +2147,7 @@ def print_debug_header(self): exe_versions = FFmpegPostProcessor.get_versions(self) exe_versions['rtmpdump'] = rtmpdump_version() + exe_versions['phantomjs'] = PhantomJSwrapper._version() exe_str = ', '.join( '%s %s' % (exe, v) for exe, v in sorted(exe_versions.items()) diff --git a/youtube_dl/extractor/common.py b/youtube_dl/extractor/common.py index 9541e5b424b..76b5378e976 100644 --- a/youtube_dl/extractor/common.py +++ b/youtube_dl/extractor/common.py @@ -2406,10 +2406,12 @@ def _float(self, v, name, fatal=False, **kwargs): self._downloader.report_warning(msg) return res - def _set_cookie(self, domain, name, value, expire_time=None): + def _set_cookie(self, domain, name, value, expire_time=None, port=None, + path='/', secure=False, discard=False, rest={}, **kwargs): cookie = compat_cookiejar.Cookie( - 0, name, value, None, None, domain, None, - None, '/', True, False, expire_time, '', None, None, None) + 0, name, value, port, not port is None, domain, True, + domain.startswith('.'), path, True, secure, expire_time, + discard, None, None, rest) self._downloader.cookiejar.set_cookie(cookie) def _get_cookies(self, url): diff --git a/youtube_dl/extractor/openload.py b/youtube_dl/extractor/openload.py index d8036b54acd..292476ef86c 100644 --- a/youtube_dl/extractor/openload.py +++ b/youtube_dl/extractor/openload.py @@ -4,10 +4,11 @@ import re from .common import InfoExtractor -from ..compat import compat_chr from ..utils import ( determine_ext, ExtractorError, + get_element_by_id, + PhantomJSwrapper, ) @@ -58,6 +59,8 @@ class OpenloadIE(InfoExtractor): 'only_matching': True, }] + _USER_AGENT = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36' + @staticmethod def _extract_urls(webpage): return re.findall( @@ -66,47 +69,22 @@ def _extract_urls(webpage): def _real_extract(self, url): video_id = self._match_id(url) - webpage = self._download_webpage('https://openload.co/embed/%s/' % video_id, video_id) + url = 'https://openload.co/embed/%s/' % video_id + headers = { + 'User-Agent': self._USER_AGENT, + } + + webpage = self._download_webpage(url, video_id, headers=headers) if 'File not found' in webpage or 'deleted by the owner' in webpage: - raise ExtractorError('File not found', expected=True) - - ol_id = self._search_regex( - '<span[^>]+id="[^"]+"[^>]*>([0-9A-Za-z]+)</span>', - webpage, 'openload ID') - - decoded = '' - a = ol_id[0:24] - b = [] - for i in range(0, len(a), 8): - b.append(int(a[i:i + 8] or '0', 16)) - ol_id = ol_id[24:] - j = 0 - k = 0 - while j < len(ol_id): - c = 128 - d = 0 - e = 0 - f = 0 - _more = True - while _more: - if j + 1 >= len(ol_id): - c = 143 - f = int(ol_id[j:j + 2] or '0', 16) - j += 2 - d += (f & 127) << e - e += 7 - _more = f >= c - g = d ^ b[k % 3] - for i in range(4): - char_dec = (g >> 8 * i) & (c + 127) - char = compat_chr(char_dec) - if char != '#': - decoded += char - k += 1 - - video_url = 'https://openload.co/stream/%s?mime=true' - video_url = video_url % decoded + raise ExtractorError('File not found', expected=True, video_id=video_id) + + phantom = PhantomJSwrapper(self, required_version='2.0') + webpage, _ 
= phantom.get(url, html=webpage, video_id=video_id, headers=headers) + + decoded_id = get_element_by_id('streamurl', webpage) + + video_url = 'https://openload.co/stream/%s?mime=true' % decoded_id title = self._og_search_title(webpage, default=None) or self._search_regex( r'<span[^>]+class=["\']title["\'][^>]*>([^<]+)', webpage, @@ -114,15 +92,17 @@ def _real_extract(self, url): 'description', webpage, 'title', fatal=True) entries = self._parse_html5_media_entries(url, webpage, video_id) - subtitles = entries[0]['subtitles'] if entries else None + entry = entries[0] if entries else {} + subtitles = entry.get('subtitles') info_dict = { 'id': video_id, 'title': title, - 'thumbnail': self._og_search_thumbnail(webpage, default=None), + 'thumbnail': entry.get('thumbnail') or self._og_search_thumbnail(webpage, default=None), 'url': video_url, # Seems all videos have extensions in their titles 'ext': determine_ext(title, 'mp4'), 'subtitles': subtitles, + 'http_headers': headers, } return info_dict diff --git a/youtube_dl/utils.py b/youtube_dl/utils.py index 25bd228ab15..4d0685d83a3 100644 --- a/youtube_dl/utils.py +++ b/youtube_dl/utils.py @@ -3815,6 +3815,219 @@ def write_xattr(path, key, value): "or the 'xattr' binary.") +def cookie_to_dict(cookie): + cookie_dict = { + 'name': cookie.name, + 'value': cookie.value, + }; + if cookie.port_specified: + cookie_dict['port'] = cookie.port + if cookie.domain_specified: + cookie_dict['domain'] = cookie.domain + if cookie.path_specified: + cookie_dict['path'] = cookie.path + if not cookie.expires is None: + cookie_dict['expires'] = cookie.expires + if not cookie.secure is None: + cookie_dict['secure'] = cookie.secure + if not cookie.discard is None: + cookie_dict['discard'] = cookie.discard + try: + if (cookie.has_nonstandard_attr('httpOnly') or + cookie.has_nonstandard_attr('httponly') or + cookie.has_nonstandard_attr('HttpOnly')): + cookie_dict['httponly'] = True + except TypeError: + pass + return cookie_dict + + +def cookie_jar_to_list(cookie_jar): + return [cookie_to_dict(cookie) for cookie in cookie_jar] + + +class PhantomJSwrapper(object): + """PhantomJS wrapper class""" + + _TEMPLATE = r''' + phantom.onError = function(msg, trace) {{ + var msgStack = ['PHANTOM ERROR: ' + msg]; + if(trace && trace.length) {{ + msgStack.push('TRACE:'); + trace.forEach(function(t) {{ + msgStack.push(' -> ' + (t.file || t.sourceURL) + ': ' + t.line + + (t.function ? 
' (in function ' + t.function +')' : '')); + }}); + }} + console.error(msgStack.join('\n')); + phantom.exit(1); + }}; + var page = require('webpage').create(); + var fs = require('fs'); + var read = {{ mode: 'r', charset: 'utf-8' }}; + var write = {{ mode: 'w', charset: 'utf-8' }}; + JSON.parse(fs.read("{cookies}", read)).forEach(function(x) {{ + phantom.addCookie(x); + }}); + page.settings.resourceTimeout = {timeout}; + page.settings.userAgent = "{ua}"; + page.onLoadStarted = function() {{ + page.evaluate(function() {{ + delete window._phantom; + delete window.callPhantom; + }}); + }}; + var saveAndExit = function() {{ + fs.write("{html}", page.content, write); + fs.write("{cookies}", JSON.stringify(phantom.cookies), write); + phantom.exit(); + }}; + page.onLoadFinished = function(status) {{ + if(page.url === "") {{ + page.setContent(fs.read("{html}", read), "{url}"); + }} + else {{ + {jscode} + }} + }}; + page.open(""); + ''' + + _TMP_FILE_NAMES = ['script', 'html', 'cookies'] + + @staticmethod + def _version(): + return get_exe_version('phantomjs', version_re=r'([0-9.]+)') + + def __init__(self, extractor, required_version=None, timeout=10000): + self.exe = check_executable('phantomjs', ['-v']) + if not self.exe: + raise ExtractorError('PhantomJS executable not found in PATH, ' + 'download it from http://phantomjs.org', + expected=True) + + self.extractor = extractor + + if required_version: + version = self._version() + if is_outdated_version(version, required_version): + self.extractor._downloader.report_warning( + 'Your copy of PhantomJS is outdated, update it to version ' + '%s or newer if you encounter any errors.' % required_version) + + self.options = { + 'timeout': timeout, + } + self._TMP_FILES = {} + for name in self._TMP_FILE_NAMES: + tmp = tempfile.NamedTemporaryFile(delete=False) + tmp.close() + self._TMP_FILES[name] = tmp + + def __del__(self): + for name in self._TMP_FILE_NAMES: + try: + os.remove(self._TMP_FILES[name].name) + except: + pass + + def _save_cookies(self, url): + cookies = cookie_jar_to_list(self.extractor._downloader.cookiejar) + for cookie in cookies: + if 'path' not in cookie: + cookie['path'] = '/' + if 'domain' not in cookie: + cookie['domain'] = compat_urlparse.urlparse(url).netloc + with open(self._TMP_FILES['cookies'].name, 'wb') as f: + f.write(json.dumps(cookies).encode('utf-8')) + + def _load_cookies(self): + with open(self._TMP_FILES['cookies'].name, 'rb') as f: + cookies = json.loads(f.read().decode('utf-8')) + for cookie in cookies: + if cookie['httponly'] is True: + cookie['rest'] = { 'httpOnly': None } + if 'expiry' in cookie: + cookie['expire_time'] = cookie['expiry'] + self.extractor._set_cookie(**cookie) + + def get(self, url, html=None, video_id=None, note=None, note2='Executing JS on webpage', headers={}, jscode='saveAndExit();'): + """ + Downloads webpage (if needed) and executes JS + + Params: + url: website url + html: optional, html code of website + video_id: video id + note: optional, displayed when downloading webpage + note2: optional, displayed when executing JS + headers: custom http headers + jscode: code to be executed when page is loaded + + Returns tuple with: + * downloaded website (after JS execution) + * anything you print with `console.log` (but not inside `page.execute`!) + + In most cases you don't need to add any `jscode`. + It is executed in `page.onLoadFinished`. 
+ `saveAndExit();` is mandatory, use it instead of `phantom.exit()` + It is possible to wait for some element on the webpage, for example: + var check = function() { + var elementFound = page.evaluate(function() { + return document.querySelector('#b.done') !== null; + }); + if(elementFound) + saveAndExit(); + else + window.setTimeout(check, 500); + } + + page.evaluate(function(){ + document.querySelector('#a').click(); + }); + check(); + """ + if 'saveAndExit();' not in jscode: + raise ExtractorError('`saveAndExit();` not found in `jscode`') + if not html: + html = self.extractor._download_webpage(url, video_id, note=note, headers=headers) + with open(self._TMP_FILES['html'].name, 'wb') as f: + f.write(html.encode('utf-8')) + + self._save_cookies(url) + + replaces = self.options + replaces['url'] = url + user_agent = headers.get('User-Agent') or std_headers['User-Agent'] + replaces['ua'] = user_agent.replace('"', '\\"') + replaces['jscode'] = jscode + + for x in self._TMP_FILE_NAMES: + replaces[x] = self._TMP_FILES[x].name.replace('\\', '\\\\').replace('"', '\\"') + + with open(self._TMP_FILES['script'].name, 'wb') as f: + f.write(self._TEMPLATE.format(**replaces).encode('utf-8')) + + if video_id is None: + self.extractor.to_screen('%s' % (note2,)) + else: + self.extractor.to_screen('%s: %s' % (video_id, note2)) + + p = subprocess.Popen([self.exe, '--ssl-protocol=any', + self._TMP_FILES['script'].name], stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + out, err = p.communicate() + if p.returncode != 0: + raise ExtractorError('Executing JS failed\n:' + + encodeArgument(err)) + with open(self._TMP_FILES['html'].name, 'rb') as f: + html = f.read().decode('utf-8') + + self._load_cookies() + + return (html, encodeArgument(out)) + + def random_birthday(year_field, month_field, day_field): return { year_field: str(random.randint(1950, 1995)),
### Before submitting a *pull request* make sure you have: - [x] At least skimmed through [adding new extractor tutorial](https://github.com/rg3/youtube-dl#adding-support-for-a-new-site) and [youtube-dl coding conventions](https://github.com/rg3/youtube-dl#youtube-dl-coding-conventions) sections - [x] [Searched](https://github.com/rg3/youtube-dl/search?q=is%3Apr&type=Issues) the bugtracker for similar pull requests - [x] I am the original author of this code and I am willing to release it under [Unlicense](http://unlicense.org/) ### What is the purpose of your *pull request*? - [x] Bug fix ### Description The rewritten extractor uses [PhantomJS](http://phantomjs.org/) to decode the video URL. Tests fail on Travis by default because it ships PhantomJS 1.9.8; with `dist: trusty` in `.travis.yml` it has PhantomJS 2.0.0, which works, but the MD5 hash mismatches. I compared two files, one downloaded with the PhantomJS method (fails tests) and one with the pairing method (#12603, passes tests), and they are identical (same MD5 of the full file), so I don't know why the test fails.
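A condensed usage sketch of the new wrapper, following its own docstring; the surrounding variables (`self`, `url`, `webpage`, `video_id`, `headers`) are assumed to exist inside an extractor's `_real_extract`:

```python
from youtube_dl.utils import PhantomJSwrapper

# `self` is the InfoExtractor instance; `webpage` was downloaded earlier.
phantom = PhantomJSwrapper(self, required_version='2.0')
webpage, logs = phantom.get(
    url, html=webpage, video_id=video_id, headers=headers,
    jscode='saveAndExit();')  # 'saveAndExit();' is mandatory in jscode
```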
https://api.github.com/repos/ytdl-org/youtube-dl/pulls/12754
2017-04-15T22:38:28Z
2017-08-26T08:17:31Z
2017-08-26T08:17:31Z
2018-08-15T11:35:24Z
3,596
ytdl-org/youtube-dl
50,591
Corrected typo. ceparated -> separated
diff --git a/doc/README-ja.md b/doc/README-ja.md index 6ce0cd9e..e1a0cd72 100644 --- a/doc/README-ja.md +++ b/doc/README-ja.md @@ -225,7 +225,7 @@ cheat.shサービスには独自のコマンドラインクライアント( ch update - self update (only if the scriptfile is writeable) version - show current cht.sh version /:help - service help - QUERY - space ceparated query staring (examples are below) + QUERY - space separated query staring (examples are below) cht.sh> python zip list cht.sh/python> zip list cht.sh/go> /python zip list diff --git a/share/cht.sh.txt b/share/cht.sh.txt index 828f4aa1..853372f5 100755 --- a/share/cht.sh.txt +++ b/share/cht.sh.txt @@ -575,7 +575,7 @@ stealth - stealth mode (automatic queries for selected text) update - self update (only if the scriptfile is writeable) version - show current cht.sh version /:help - service help -QUERY - space ceparated query staring (examples are below) +QUERY - space separated query staring (examples are below) cht.sh> python zip list cht.sh/python> zip list cht.sh/go> /python zip list diff --git a/tests/results/8 b/tests/results/8 index 828f4aa1..853372f5 100644 --- a/tests/results/8 +++ b/tests/results/8 @@ -575,7 +575,7 @@ stealth - stealth mode (automatic queries for selected text) update - self update (only if the scriptfile is writeable) version - show current cht.sh version /:help - service help -QUERY - space ceparated query staring (examples are below) +QUERY - space separated query staring (examples are below) cht.sh> python zip list cht.sh/python> zip list cht.sh/go> /python zip list
My first pull request :)
https://api.github.com/repos/chubin/cheat.sh/pulls/152
2019-08-18T21:26:11Z
2019-08-19T05:16:47Z
2019-08-19T05:16:47Z
2019-08-19T05:17:00Z
491
chubin/cheat.sh
15,129
Fix a bug of kie as null
diff --git a/PPOCRLabel/PPOCRLabel.py b/PPOCRLabel/PPOCRLabel.py index 1b902484e2..5c89e6f894 100644 --- a/PPOCRLabel/PPOCRLabel.py +++ b/PPOCRLabel/PPOCRLabel.py @@ -1471,7 +1471,7 @@ def showBoundingBoxFromPPlabel(self, filePath): # box['ratio'] of the shapes saved in lockedShapes contains the ratio of the # four corner coordinates of the shapes to the height and width of the image for box in self.canvas.lockedShapes: - key_cls = None if not self.kie_mode else box['key_cls'] + key_cls = 'None' if not self.kie_mode else box['key_cls'] if self.canvas.isInTheSameImage: shapes.append((box['transcription'], [[s[0] * width, s[1] * height] for s in box['ratio']], DEFAULT_LOCK_COLOR, key_cls, box['difficult'])) @@ -1480,7 +1480,7 @@ def showBoundingBoxFromPPlabel(self, filePath): DEFAULT_LOCK_COLOR, key_cls, box['difficult'])) if imgidx in self.PPlabel.keys(): for box in self.PPlabel[imgidx]: - key_cls = None if not self.kie_mode else box.get('key_cls', 'None') + key_cls = 'None' if not self.kie_mode else box.get('key_cls', 'None') shapes.append((box['transcription'], box['points'], None, key_cls, box.get('difficult', False))) self.loadLabels(shapes) @@ -2266,7 +2266,7 @@ def cellreRecognition(self): rec_res = self.ocr.ocr(patch, det=False, rec=True, cls=False) text = rec_res[0][0] if text != '': - texts += text + (' ' if text[0].isalpha() else '') # add space between english word + texts += text + ('' if text[0].isalpha() else ' ') # add space between english word probs += rec_res[0][1] probs = probs / len(bboxes) result = [(texts.strip(), probs)]
Fix a bug where `key_cls` was Python `None` instead of the string `'None'` when KIE mode is off, and adjust spacing when concatenating recognized cell text.
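A minimal sketch of the behavior change, assuming downstream label writers expect a string (the box dict is illustrative):

```python
kie_mode = False
box = {'transcription': 'hello', 'key_cls': 'None'}

# Before this fix the non-KIE branch produced Python None; now both
# branches yield the literal string 'None'.
key_cls = 'None' if not kie_mode else box.get('key_cls', 'None')
assert isinstance(key_cls, str)
```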
https://api.github.com/repos/PaddlePaddle/PaddleOCR/pulls/6186
2022-05-09T01:38:23Z
2022-05-09T01:38:31Z
2022-05-09T01:38:31Z
2022-05-09T01:38:31Z
498
PaddlePaddle/PaddleOCR
42,690
fix: adjust parameters for upscale fast 2x
diff --git a/modules/async_worker.py b/modules/async_worker.py index 2c029cfbe..908cc8c26 100644 --- a/modules/async_worker.py +++ b/modules/async_worker.py @@ -553,8 +553,8 @@ def handler(async_task): direct_return = False if direct_return: - d = [('Upscale (Fast)', '2x')] - uov_input_image_path = log(uov_input_image, d, output_format) + d = [('Upscale (Fast)', 'upscale_fast', '2x')] + uov_input_image_path = log(uov_input_image, d, output_format=output_format) yield_result(async_task, uov_input_image_path, do_not_show_finished_images=True) return
Add the missing key for Upscale (Fast) 2x in the private log.html, and pass `output_format` as a keyword argument.
https://api.github.com/repos/lllyasviel/Fooocus/pulls/2411
2024-03-02T18:04:11Z
2024-03-02T18:05:11Z
2024-03-02T18:05:11Z
2024-03-03T20:16:37Z
171
lllyasviel/Fooocus
7,261
Skip on pyOpenSSL
diff --git a/tests/conftest.py b/tests/conftest.py index 11e0d3486e..7ca172a867 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,4 +1,3 @@ -import os import socket import pytest @@ -8,6 +7,7 @@ HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN, HTTPBIN_WITH_CHUNKED_SUPPORT, REMOTE_HTTPBIN_DOMAIN, + IS_PYOPENSSL, mock_env ) from .utils.plugins_cli import ( # noqa @@ -81,7 +81,7 @@ def pyopenssl_inject(): Injects `pyOpenSSL` module to make sure `requests` will use it. <https://github.com/psf/requests/pull/5443#issuecomment-645740394> """ - if os.getenv('HTTPIE_TEST_WITH_PYOPENSSL', '0') == '1': + if IS_PYOPENSSL: try: import urllib3.contrib.pyopenssl urllib3.contrib.pyopenssl.inject_into_urllib3() diff --git a/tests/test_ssl.py b/tests/test_ssl.py index fc587064d7..ef72e2be40 100644 --- a/tests/test_ssl.py +++ b/tests/test_ssl.py @@ -1,4 +1,3 @@ -import os import ssl import pytest @@ -11,7 +10,7 @@ from httpie.ssl_ import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS from httpie.status import ExitStatus -from .utils import HTTP_OK, TESTS_ROOT, http +from .utils import HTTP_OK, TESTS_ROOT, IS_PYOPENSSL, http try: @@ -152,6 +151,7 @@ def test_ciphers(httpbin_secure): assert HTTP_OK in r +@pytest.mark.skipif(IS_PYOPENSSL, reason='pyOpenSSL uses a different message format.') def test_ciphers_none_can_be_selected(httpbin_secure): r = http( httpbin_secure.url + '/get', @@ -169,8 +169,7 @@ def test_ciphers_none_can_be_selected(httpbin_secure): def test_pyopenssl_presence(): - using_pyopenssl = os.getenv('HTTPIE_TEST_WITH_PYOPENSSL', '0') - if using_pyopenssl == '0': + if not IS_PYOPENSSL: assert not urllib3.util.ssl_.IS_PYOPENSSL assert not urllib3.util.IS_PYOPENSSL else: diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 8a575aef03..7d2557b612 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -1,6 +1,7 @@ """Utilities for HTTPie test suite.""" import re import shlex +import os import sys import time import json @@ -30,6 +31,7 @@ HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN = 'pie.dev' HTTPBIN_WITH_CHUNKED_SUPPORT = 'http://' + HTTPBIN_WITH_CHUNKED_SUPPORT_DOMAIN +IS_PYOPENSSL = os.getenv('HTTPIE_TEST_WITH_PYOPENSSL', '0') == '1' TESTS_ROOT = Path(__file__).parent.parent CRLF = '\r\n'
It might be one of the underlying dependencies, but when tested on both my environment and the latest CI, this test consistently fails (and has since it was introduced). So either this test was never properly run before, or something changed in the SSL dependencies. (The failure is an error-message mismatch, not a behavioral change.)
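The resulting skip pattern, as a minimal sketch:

```python
import os
import pytest

# Centralized in tests/utils by this PR:
IS_PYOPENSSL = os.getenv('HTTPIE_TEST_WITH_PYOPENSSL', '0') == '1'

@pytest.mark.skipif(IS_PYOPENSSL, reason='pyOpenSSL uses a different message format.')
def test_ciphers_none_can_be_selected():
    ...  # exercises the error path whose message differs under pyOpenSSL
```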
https://api.github.com/repos/httpie/cli/pulls/1376
2022-04-28T09:29:53Z
2022-04-28T12:18:21Z
2022-04-28T12:18:21Z
2022-04-28T12:18:21Z
726
httpie/cli
33,743