Columns (name: dtype, observed range):

hexsha: stringlengths (40 .. 40)
size: int64 (2 .. 1.05M)
ext: stringclasses (9 values)
lang: stringclasses (1 value)
max_stars_repo_path: stringlengths (4 .. 193)
max_stars_repo_name: stringlengths (6 .. 109)
max_stars_repo_head_hexsha: stringlengths (40 .. 78)
max_stars_repo_licenses: sequence
max_stars_count: int64 (1 .. 36.6k)
max_stars_repo_stars_event_min_datetime: stringlengths (24 .. 24)
max_stars_repo_stars_event_max_datetime: stringlengths (24 .. 24)
max_issues_repo_path: stringlengths (4 .. 193)
max_issues_repo_name: stringlengths (6 .. 109)
max_issues_repo_head_hexsha: stringlengths (40 .. 78)
max_issues_repo_licenses: sequence
max_issues_count: int64 (1 .. 29.8k)
max_issues_repo_issues_event_min_datetime: stringlengths (24 .. 24)
max_issues_repo_issues_event_max_datetime: stringlengths (24 .. 24)
max_forks_repo_path: stringlengths (4 .. 193)
max_forks_repo_name: stringlengths (6 .. 109)
max_forks_repo_head_hexsha: stringlengths (40 .. 78)
max_forks_repo_licenses: sequence
max_forks_count: int64 (1 .. 11.2k)
max_forks_repo_forks_event_min_datetime: stringlengths (24 .. 24)
max_forks_repo_forks_event_max_datetime: stringlengths (24 .. 24)
content: stringlengths (2 .. 1.05M)
avg_line_length: float64 (1 .. 404k)
max_line_length: int64 (1 .. 1.03M)
alphanum_fraction: float64 (0 .. 1)
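The three trailing columns are statistics over the raw file text in the content field. Below is a minimal sketch of how such values can be recomputed; the helper name content_stats and the exact definitions (mean line length in characters, longest line, and share of alphanumeric characters over the whole file) are my assumptions, not taken from the dataset documentation.

def content_stats(content: str) -> dict:
    # Assumed definitions for the derived columns (see note above).
    lines = content.split('\n')
    lengths = [len(line) for line in lines]
    return {
        'avg_line_length': sum(lengths) / len(lengths),
        'max_line_length': max(lengths),
        'alphanum_fraction': (
            sum(ch.isalnum() for ch in content) / len(content) if content else 0.0
        ),
    }

# Example on a tiny two-line file:
print(content_stats('import os\nprint(os.getcwd())'))
# -> {'avg_line_length': 13.5, 'max_line_length': 18, 'alphanum_fraction': 0.75}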
73c8e15fa2b3e7fcdd8af48bbb0099fe3d0332ff
2,891
py
Python
wemake_python_styleguide/options/validation.py
Roxe322/wemake-python-styleguide
b3b6f992729f259f31cfb673c55be7d2381c3e63
[ "MIT" ]
null
null
null
wemake_python_styleguide/options/validation.py
Roxe322/wemake-python-styleguide
b3b6f992729f259f31cfb673c55be7d2381c3e63
[ "MIT" ]
null
null
null
wemake_python_styleguide/options/validation.py
Roxe322/wemake-python-styleguide
b3b6f992729f259f31cfb673c55be7d2381c3e63
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-

from typing import List, Optional

import attr
from typing_extensions import final

from wemake_python_styleguide.types import ConfigurationOptions


def _min_max(
    min: Optional[int] = None,  # noqa: A002
    max: Optional[int] = None,  # noqa: A002
):
    """Validator to check that value is in bounds."""
    def factory(instance, attribute, field_value):
        min_contract = min is not None and field_value < min
        max_contract = max is not None and field_value > max
        if min_contract or max_contract:
            raise ValueError('Option {0} is out of bounds: {1}'.format(
                attribute.name,
                field_value,
            ))
    return factory


@final
@attr.dataclass(slots=True)
class _ValidatedOptions(object):
    """
    Here we write all the required structured validation for the options.

    It is an internal class and is not used anywhere else.
    """

    # General:
    min_name_length: int = attr.ib(validator=[_min_max(min=1)])
    i_control_code: bool
    max_name_length: int = attr.ib(validator=[_min_max(min=1)])

    # Complexity:
    max_arguments: int = attr.ib(validator=[_min_max(min=1)])
    max_local_variables: int = attr.ib(validator=[_min_max(min=1)])
    max_returns: int = attr.ib(validator=[_min_max(min=1)])
    max_expressions: int = attr.ib(validator=[_min_max(min=1)])
    max_module_members: int = attr.ib(validator=[_min_max(min=1)])
    max_methods: int = attr.ib(validator=[_min_max(min=1)])
    max_line_complexity: int = attr.ib(validator=[_min_max(min=1)])
    max_jones_score: int = attr.ib(validator=[_min_max(min=1)])
    max_imports: int = attr.ib(validator=[_min_max(min=1)])
    max_imported_names: int = attr.ib(validator=[_min_max(min=1)])
    max_base_classes: int = attr.ib(validator=[_min_max(min=1)])
    max_decorators: int = attr.ib(validator=[_min_max(min=1)])
    max_string_usages: int = attr.ib(validator=[_min_max(min=1)])
    max_awaits: int = attr.ib(validator=[_min_max(min=1)])
    max_try_body_length: int = attr.ib(validator=[_min_max(min=1)])
    max_module_expressions: int = attr.ib(validator=[_min_max(min=1)])
    max_function_expressions: int = attr.ib(validator=[_min_max(min=1)])
    max_asserts: int = attr.ib(validator=[_min_max(min=1)])
    max_access_level: int = attr.ib(validator=[_min_max(min=1)])
    max_attributes: int = attr.ib(validator=[_min_max(min=1)])
    nested_classes_whitelist: List[str]


def validate_options(options: ConfigurationOptions) -> _ValidatedOptions:
    """Validates all options from ``flake8``, uses a subset of them."""
    fields_to_validate = [
        field.name
        for field in attr.fields(_ValidatedOptions)
    ]
    options_subset = {
        field: getattr(options, field, None)
        for field in fields_to_validate
    }
    return _ValidatedOptions(**options_subset)  # raises TypeError
38.039474
73
0.686268
73c90da079dc91568291215c350f18369483b7b2
1,591
py
Python
sdk/cognitiveservices/azure-cognitiveservices-vision-computervision/azure/cognitiveservices/vision/computervision/models/object_hierarchy.py
pjquirk/azure-sdk-for-python
cbf02ec4f177b96eae1dbbba87c34c2c93880150
[ "MIT" ]
1
2021-09-07T18:36:04.000Z
2021-09-07T18:36:04.000Z
sdk/cognitiveservices/azure-cognitiveservices-vision-computervision/azure/cognitiveservices/vision/computervision/models/object_hierarchy.py
pjquirk/azure-sdk-for-python
cbf02ec4f177b96eae1dbbba87c34c2c93880150
[ "MIT" ]
2
2019-10-02T23:37:38.000Z
2020-10-02T01:17:31.000Z
azure-cognitiveservices-vision-computervision/azure/cognitiveservices/vision/computervision/models/object_hierarchy.py
xiafu-msft/azure-sdk-for-python
4d9560cfd519ee60667f3cc2f5295a58c18625db
[ "MIT" ]
null
null
null
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class ObjectHierarchy(Model):
    """An object detected inside an image.

    :param object_property: Label for the object.
    :type object_property: str
    :param confidence: Confidence score of having observed the object in the
     image, as a value ranging from 0 to 1.
    :type confidence: float
    :param parent: The parent object, from a taxonomy perspective. The parent
     object is a more generic form of this object. For example, a 'bulldog'
     would have a parent of 'dog'.
    :type parent:
     ~azure.cognitiveservices.vision.computervision.models.ObjectHierarchy
    """

    _attribute_map = {
        'object_property': {'key': 'object', 'type': 'str'},
        'confidence': {'key': 'confidence', 'type': 'float'},
        'parent': {'key': 'parent', 'type': 'ObjectHierarchy'},
    }

    def __init__(self, **kwargs):
        super(ObjectHierarchy, self).__init__(**kwargs)
        self.object_property = kwargs.get('object_property', None)
        self.confidence = kwargs.get('confidence', None)
        self.parent = kwargs.get('parent', None)
38.804878
77
0.62225
73c933a9efbaa1d927ecc4a1eb98d5abf600823c
1,478
py
Python
ros2_ws/src/utils/logger/launch/logger.launch.py
FastSense/rosbot-ros2
c2d274ce179534fec5b2786a6f96b6d638019ac4
[ "MIT" ]
null
null
null
ros2_ws/src/utils/logger/launch/logger.launch.py
FastSense/rosbot-ros2
c2d274ce179534fec5b2786a6f96b6d638019ac4
[ "MIT" ]
2
2021-07-05T14:50:09.000Z
2021-09-14T15:21:11.000Z
ros2_ws/src/utils/logger/launch/logger.launch.py
FastSense/metalbot
063c897a16129d9aa88c2c7c52bdf6547af894e4
[ "MIT" ]
null
null
null
import os

import launch
from launch import LaunchDescription
from launch_ros.actions import Node
# from ament_index_python.packages import get_package_share_directory


def generate_launch_description():
    """
    I can't use get_package_share_directory, cause it will return
    'ros2_ws/install/logger/share/logger', BUT when using 'ros2 launch'
    ROS launches from its workspace so I can easy get desired directory
    * I checked it works well when launch from any directory *
    """
    output_dir = os.path.join(os.getcwd(), 'src/logger/output_data/')
    output_path = launch.substitutions.LaunchConfiguration(
        'output_path',
        default=output_dir
    )

    return LaunchDescription([
        launch.actions.DeclareLaunchArgument(
            'output_path',
            default_value=output_dir,
            description='output_folder path'
        ),

        # launh Logger node
        Node(
            package='logger',
            executable='logger',
            name='logger',
            output='screen',
            emulate_tty=True,
            parameters=[
                {"output_path": output_path},
                {"control_topic": "/cmd_vel"},
                {"tf_topic": "/tf"},
                {"parent_frame": "odom"},
                {"robot_frame": "base_link"},
                {"kinetic_model_frame": "model_link"},
                {"nn_model_frame": "nn_model_link"}
            ]
        ),
    ])
30.791667
72
0.585927
73c95a1f58d051bfa70e65fc0b35e6c53542cc04
4,570
py
Python
recordwhat/records/vs.py
mrakitin/recordwhat
c68b8fca69836bdba0075726e829325f2c8918a8
[ "BSD-3-Clause" ]
1
2016-06-08T15:14:15.000Z
2016-06-08T15:14:15.000Z
recordwhat/records/vs.py
mrakitin/recordwhat
c68b8fca69836bdba0075726e829325f2c8918a8
[ "BSD-3-Clause" ]
12
2016-02-11T15:01:05.000Z
2019-09-23T17:28:32.000Z
recordwhat/records/vs.py
mrakitin/recordwhat
c68b8fca69836bdba0075726e829325f2c8918a8
[ "BSD-3-Clause" ]
4
2016-06-08T15:03:07.000Z
2019-09-23T17:05:38.000Z
from ophyd import (EpicsSignal, EpicsSignalRO) from .. import (RecordBase, _register_record_type, FieldComponent as Cpt) @_register_record_type('vs') class VsRecord(RecordBase): alarm_status = Cpt(EpicsSignalRO, '.STAT') changed_control = Cpt(EpicsSignalRO, '.CHGC') controller_err_cnt = Cpt(EpicsSignal, '.ERR') controller_type = Cpt(EpicsSignal, '.TYPE') conv_a_log10_pressure = Cpt(EpicsSignalRO, '.LCAP') conv_b_log10_pressure = Cpt(EpicsSignalRO, '.LCBP') convectron_a_pressure = Cpt(EpicsSignalRO, '.CGAP') convectron_b_pressure = Cpt(EpicsSignalRO, '.CGBP') degas_read = Cpt(EpicsSignalRO, '.DGSR') fault_read = Cpt(EpicsSignalRO, '.FLTR') gauge_pressure = Cpt(EpicsSignalRO, '.PRES') ig_last_value_alarmed = Cpt(EpicsSignalRO, '.LALM') ig_log10_pressure = Cpt(EpicsSignalRO, '.LPRS') ion_gauge_1_read = Cpt(EpicsSignalRO, '.IG1R') ion_gauge_2_read = Cpt(EpicsSignalRO, '.IG2R') sp_1_readback = Cpt(EpicsSignalRO, '.SP1R') sp_1_setpoint_set = Cpt(EpicsSignal, '.SP1S') sp_2_readback = Cpt(EpicsSignalRO, '.SP2R') sp_2_setpoint_set = Cpt(EpicsSignal, '.SP2S') sp_3_readback = Cpt(EpicsSignalRO, '.SP3R') sp_3_setpoint_set = Cpt(EpicsSignal, '.SP3S') sp_4_readback = Cpt(EpicsSignalRO, '.SP4R') sp_4_setpoint_set = Cpt(EpicsSignal, '.SP4S') set_point_1 = Cpt(EpicsSignalRO, '.SP1') set_point_2 = Cpt(EpicsSignalRO, '.SP2') set_point_3 = Cpt(EpicsSignalRO, '.SP3') set_point_4 = Cpt(EpicsSignalRO, '.SP4') set_point_5 = Cpt(EpicsSignalRO, '.SP5') set_point_6 = Cpt(EpicsSignalRO, '.SP6') prev_conv_a_log10_pres = Cpt(EpicsSignalRO, '.PLCA') prev_conv_a_pres = Cpt(EpicsSignalRO, '.PCGA') prev_conv_b_log10_pres = Cpt(EpicsSignalRO, '.PLCB') prev_conv_b_pres = Cpt(EpicsSignalRO, '.PCGB') prev_degas = Cpt(EpicsSignalRO, '.PDGS') prev_degas_pdss = Cpt(EpicsSignalRO, '.PDSS') prev_fault = Cpt(EpicsSignalRO, '.PFLT') prev_gauge_pres = Cpt(EpicsSignalRO, '.PPRE') prev_gauge_pres_pval = Cpt(EpicsSignalRO, '.PVAL') prev_ig_log10_pres = Cpt(EpicsSignalRO, '.PLPE') prev_ion_gauge_1 = Cpt(EpicsSignalRO, '.PI1S') prev_ion_gauge_1_pig1 = Cpt(EpicsSignalRO, '.PIG1') prev_ion_gauge_2 = Cpt(EpicsSignalRO, '.PI2S') prev_ion_gauge_2_pig2 = Cpt(EpicsSignalRO, '.PIG2') prev_sp1_readback = Cpt(EpicsSignalRO, '.PS1R') prev_sp1_set = Cpt(EpicsSignalRO, '.PS1S') prev_sp2_readback = Cpt(EpicsSignalRO, '.PS2R') prev_sp2_set = Cpt(EpicsSignalRO, '.PS2S') prev_sp3_readback = Cpt(EpicsSignalRO, '.PS3R') prev_sp3_set = Cpt(EpicsSignalRO, '.PS3S') prev_sp4_readback = Cpt(EpicsSignalRO, '.PS4R') prev_sp4_set = Cpt(EpicsSignalRO, '.PS4S') prev_set_point_1 = Cpt(EpicsSignalRO, '.PSP1') prev_set_point_2 = Cpt(EpicsSignalRO, '.PSP2') prev_set_point_3 = Cpt(EpicsSignalRO, '.PSP3') prev_set_point_4 = Cpt(EpicsSignalRO, '.PSP4') prev_set_point_5 = Cpt(EpicsSignalRO, '.PSP5') prev_set_point_6 = Cpt(EpicsSignalRO, '.PSP6') # - alarms ion_gauge_1_set = Cpt(EpicsSignal, '.IG1S') ion_gauge_2_set = Cpt(EpicsSignal, '.IG2S') # - bits1 degas_set = Cpt(EpicsSignal, '.DGSS') # - common device_specification = Cpt(EpicsSignalRO, '.INP$', string=True) # - display ig_alarm_deadband = Cpt(EpicsSignal, '.HYST') ig_high_alarm = Cpt(EpicsSignal, '.HIGH') ig_high_severity = Cpt(EpicsSignal, '.HSV') ig_hihi_alarm = Cpt(EpicsSignal, '.HIHI') ig_hihi_severity = Cpt(EpicsSignal, '.HHSV') ig_lolo_alarm = Cpt(EpicsSignal, '.LOLO') ig_lolo_severity = Cpt(EpicsSignal, '.LLSV') ig_low_alarm = Cpt(EpicsSignal, '.LOW') ig_low_severity = Cpt(EpicsSignal, '.LSV') # - hist ig_pres_high_display = Cpt(EpicsSignal, '.HOPR') # - inputs ig_pres_low_display = Cpt(EpicsSignal, '.LOPR') # - 
links ig_log10_high_display = Cpt(EpicsSignal, '.HLPR') # - mbb ig_log10_low_display = Cpt(EpicsSignal, '.LLPR') # - output cga_pres_high_display = Cpt(EpicsSignal, '.HAPR') # - pid cga_pres_low_display = Cpt(EpicsSignal, '.LAPR') # - pulse cga_log10_high_display = Cpt(EpicsSignal, '.HALR') # - select cga_log10_low_display = Cpt(EpicsSignal, '.LALR') # - seq2 cgb_pres_high_display = Cpt(EpicsSignal, '.HBPR') # - seq3 cgb_pres_low_display = Cpt(EpicsSignal, '.LBPR') # - sub cgb_log10_high_display = Cpt(EpicsSignal, '.HBLR') # - timer cgb_log10_low_display = Cpt(EpicsSignal, '.LBLR')
37.154472
67
0.683589
73c95d59ed68b828d540b11ceb1b89333fe5fd39
3,226
py
Python
server/lib/resolvers.py
eliasmarkc/girder_wholetale
7e6f117309c97c24756d5663e89d2c213a4a9978
[ "BSD-3-Clause" ]
null
null
null
server/lib/resolvers.py
eliasmarkc/girder_wholetale
7e6f117309c97c24756d5663e89d2c213a4a9978
[ "BSD-3-Clause" ]
null
null
null
server/lib/resolvers.py
eliasmarkc/girder_wholetale
7e6f117309c97c24756d5663e89d2c213a4a9978
[ "BSD-3-Clause" ]
null
null
null
import re from .entity import Entity from typing import Optional import contextlib from urllib.request import HTTPRedirectHandler, build_opener, Request """Regex that matches: http://dx.doi.org/doi:10.24431/rw1k118 http://dx.doi.org/10.24431/rw1k118 https://dx.doi.org/doi:10.24431/rw1k118 http://doi.org/doi:10.24431/rw1k118 https://hdl.handle.net/doi:10.24431/rw1k118 doi:10.24431/rw1k118 10.24431/rw1k118 http://dx.doi.org/10.24431/rw1k118 http://dx.doi.org/10.24431/rw1k118 https://dx.doi.org/10.24431/rw1k118 https://doi.org/10.24431/rw1k118 http://hdl.handle.net/10.24431/rw1k118 """ _DOI_RESOLVERS_RE = re.compile( r'^(|https?://(dx.doi.org|doi.org|hdl.handle.net)/)(doi:)?(10.\d{4,9}/[-._;()/:A-Z0-9]+)$', re.IGNORECASE ) class RedirectHandler(HTTPRedirectHandler): last_url = None def redirect_request(self, req, fp, code, msg, hdrs, newurl): self.last_url = newurl r = HTTPRedirectHandler.redirect_request( self, req, fp, code, msg, hdrs, newurl) r.get_method = lambda: 'HEAD' return r class Resolver: def __init__(self): pass def resolve(self, entity: Entity) -> Entity: raise NotImplementedError() class Resolvers: def __init__(self): self.resolvers = [] def add(self, resolver: Resolver): self.resolvers.append(resolver) def resolve(self, entity: Entity) -> Optional[Entity]: while True: for resolver in self.resolvers: result = resolver.resolve(entity) if result is None: return entity class ResolutionException(Exception): def __init__(self, message: str, prev: Exception = None): self.message = message self.prev = prev def __str__(self): return self.message class DOIResolver(Resolver): @staticmethod def follow_redirects(link): """Follow redirects recursively.""" redirect_handler = RedirectHandler() opener = build_opener(redirect_handler) req = Request(link) req.get_method = lambda: 'HEAD' try: with contextlib.closing(opener.open(req, timeout=5)) as site: return site.url except Exception: return redirect_handler.last_url if redirect_handler.last_url else link @staticmethod def extractDOI(url: str): doi_match = _DOI_RESOLVERS_RE.match(url) if doi_match: return doi_match.groups()[-1] def resolve(self, entity: Entity) -> Optional[Entity]: value = entity.getValue() doi = DOIResolver.extractDOI(value) if doi is None: return None else: self.resolveDOI(entity, doi) return entity def resolveDOI(self, entity: Entity, doi: str): # Expect a redirect. Basically, don't do anything fancy because I don't know # if I can correctly resolve a DOI using the structured record url = 'https://doi.org/%s' % doi resolved_url = self.follow_redirects(url) if url == resolved_url: raise ResolutionException('Could not resolve DOI %s' % (doi,)) entity.setValue(resolved_url) entity['DOI'] = doi
28.803571
95
0.640422
73c9738466caf50ff6cf6ddd7658deedfcba2f73
2,698
py
Python
airflow/operators/latest_only_operator.py
harishjami1382/test2
f778cc7290904a84bed06f65fa5dbb49a63639f0
[ "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause" ]
null
null
null
airflow/operators/latest_only_operator.py
harishjami1382/test2
f778cc7290904a84bed06f65fa5dbb49a63639f0
[ "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause" ]
null
null
null
airflow/operators/latest_only_operator.py
harishjami1382/test2
f778cc7290904a84bed06f65fa5dbb49a63639f0
[ "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause" ]
null
null
null
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """ This module contains an operator to run downstream tasks only for the latest scheduled DagRun """ from typing import Dict, Iterable, Union import pendulum from airflow.operators.branch_operator import BaseBranchOperator class LatestOnlyOperator(BaseBranchOperator): """ Allows a workflow to skip tasks that are not running during the most recent schedule interval. If the task is run outside of the latest schedule interval (i.e. external_trigger), all directly downstream tasks will be skipped. Note that downstream tasks are never skipped if the given DAG_Run is marked as externally triggered. """ ui_color = '#e9ffdb' # nyanza def choose_branch(self, context: Dict) -> Union[str, Iterable[str]]: # If the DAG Run is externally triggered, then return without # skipping downstream tasks if context['dag_run'] and context['dag_run'].external_trigger: self.log.info( "Externally triggered DAG_Run: allowing execution to proceed.") return context['task'].get_direct_relative_ids(upstream=False) now = pendulum.now('UTC') left_window = context['dag'].following_schedule( context['execution_date']) right_window = context['dag'].following_schedule(left_window) self.log.info( 'Checking latest only with left_window: %s right_window: %s now: %s', left_window, right_window, now ) if not left_window < now <= right_window: self.log.info('Not latest execution, skipping downstream.') # we return an empty list, thus the parent BaseBranchOperator # won't exclude any downstream tasks from skipping. return [] else: self.log.info('Latest, allowing execution to proceed.') return context['task'].get_direct_relative_ids(upstream=False)
39.676471
87
0.703484
73c9842828913b0f35093e030e4b84f02086516d
4,931
py
Python
alipay/aop/api/domain/AlipayInsMarketingGiftQueryModel.py
antopen/alipay-sdk-python-all
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
[ "Apache-2.0" ]
null
null
null
alipay/aop/api/domain/AlipayInsMarketingGiftQueryModel.py
antopen/alipay-sdk-python-all
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
[ "Apache-2.0" ]
null
null
null
alipay/aop/api/domain/AlipayInsMarketingGiftQueryModel.py
antopen/alipay-sdk-python-all
8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * class AlipayInsMarketingGiftQueryModel(object): def __init__(self): self._channel = None self._entrance = None self._gift_prod_code = None self._insured_user_id = None self._relation_to_apply = None self._right_no_list = None self._source = None self._user_id = None @property def channel(self): return self._channel @channel.setter def channel(self, value): self._channel = value @property def entrance(self): return self._entrance @entrance.setter def entrance(self, value): self._entrance = value @property def gift_prod_code(self): return self._gift_prod_code @gift_prod_code.setter def gift_prod_code(self, value): self._gift_prod_code = value @property def insured_user_id(self): return self._insured_user_id @insured_user_id.setter def insured_user_id(self, value): self._insured_user_id = value @property def relation_to_apply(self): return self._relation_to_apply @relation_to_apply.setter def relation_to_apply(self, value): self._relation_to_apply = value @property def right_no_list(self): return self._right_no_list @right_no_list.setter def right_no_list(self, value): if isinstance(value, list): self._right_no_list = list() for i in value: self._right_no_list.append(i) @property def source(self): return self._source @source.setter def source(self, value): self._source = value @property def user_id(self): return self._user_id @user_id.setter def user_id(self, value): self._user_id = value def to_alipay_dict(self): params = dict() if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.entrance: if hasattr(self.entrance, 'to_alipay_dict'): params['entrance'] = self.entrance.to_alipay_dict() else: params['entrance'] = self.entrance if self.gift_prod_code: if hasattr(self.gift_prod_code, 'to_alipay_dict'): params['gift_prod_code'] = self.gift_prod_code.to_alipay_dict() else: params['gift_prod_code'] = self.gift_prod_code if self.insured_user_id: if hasattr(self.insured_user_id, 'to_alipay_dict'): params['insured_user_id'] = self.insured_user_id.to_alipay_dict() else: params['insured_user_id'] = self.insured_user_id if self.relation_to_apply: if hasattr(self.relation_to_apply, 'to_alipay_dict'): params['relation_to_apply'] = self.relation_to_apply.to_alipay_dict() else: params['relation_to_apply'] = self.relation_to_apply if self.right_no_list: if isinstance(self.right_no_list, list): for i in range(0, len(self.right_no_list)): element = self.right_no_list[i] if hasattr(element, 'to_alipay_dict'): self.right_no_list[i] = element.to_alipay_dict() if hasattr(self.right_no_list, 'to_alipay_dict'): params['right_no_list'] = self.right_no_list.to_alipay_dict() else: params['right_no_list'] = self.right_no_list if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source'] = self.source if self.user_id: if hasattr(self.user_id, 'to_alipay_dict'): params['user_id'] = self.user_id.to_alipay_dict() else: params['user_id'] = self.user_id return params @staticmethod def from_alipay_dict(d): if not d: return None o = AlipayInsMarketingGiftQueryModel() if 'channel' in d: o.channel = d['channel'] if 'entrance' in d: o.entrance = d['entrance'] if 'gift_prod_code' in d: o.gift_prod_code = d['gift_prod_code'] if 'insured_user_id' in d: o.insured_user_id = d['insured_user_id'] if 'relation_to_apply' in d: o.relation_to_apply = 
d['relation_to_apply'] if 'right_no_list' in d: o.right_no_list = d['right_no_list'] if 'source' in d: o.source = d['source'] if 'user_id' in d: o.user_id = d['user_id'] return o
32.019481
85
0.591361
73c9857a162f742f6e5bc00714fa17c4ae136658
593
py
Python
modin/engines/dask/task_wrapper.py
manesioz/modin
637e148dba354825307f3f131fa2185ad5a6b54a
[ "Apache-2.0" ]
null
null
null
modin/engines/dask/task_wrapper.py
manesioz/modin
637e148dba354825307f3f131fa2185ad5a6b54a
[ "Apache-2.0" ]
null
null
null
modin/engines/dask/task_wrapper.py
manesioz/modin
637e148dba354825307f3f131fa2185ad5a6b54a
[ "Apache-2.0" ]
null
null
null
from modin import __execution_engine__

if __execution_engine__ == "Dask":
    from distributed.client import _get_global_client


class DaskTask:
    @classmethod
    def deploy(cls, func, num_return_vals, kwargs):
        client = _get_global_client()
        remote_task_future = client.submit(func, **kwargs)
        return [
            client.submit(lambda l: l[i], remote_task_future)
            for i in range(num_return_vals)
        ]

    @classmethod
    def materialize(cls, future):
        client = _get_global_client()
        return client.gather(future)
28.238095
62
0.647555
73c998fab621468c9d4df2cc8b7b15471035d348
11,918
py
Python
salt/states/supervisord.py
skrobul/salt
ef7fb71082cce7a9783e00b9c65062fefae09263
[ "Apache-2.0" ]
2
2017-09-17T21:10:35.000Z
2019-08-26T03:00:12.000Z
salt/states/supervisord.py
skrobul/salt
ef7fb71082cce7a9783e00b9c65062fefae09263
[ "Apache-2.0" ]
null
null
null
salt/states/supervisord.py
skrobul/salt
ef7fb71082cce7a9783e00b9c65062fefae09263
[ "Apache-2.0" ]
3
2021-02-23T08:12:48.000Z
2021-02-23T08:13:13.000Z
# -*- coding: utf-8 -*- ''' Interaction with the Supervisor daemon ====================================== .. code-block:: yaml wsgi_server: supervisord: - running - require: - pkg: supervisor - watch: - file: /etc/nginx/sites-enabled/wsgi_server.conf ''' # Import python libs import logging # Import salt libs import salt.utils log = logging.getLogger(__name__) def _check_error(result, success_message): ret = {} if 'ERROR' in result: ret['comment'] = result ret['result'] = False else: ret['comment'] = success_message return ret def _is_stopped_state(state): return state in ('STOPPED', 'STOPPING', 'EXITED', 'FATAL') def running(name, restart=False, update=False, user=None, runas=None, conf_file=None, bin_env=None): ''' Ensure the named service is running. name Service name as defined in the supervisor configuration file restart Whether to force a restart update Whether to update the supervisor configuration. runas Name of the user to run the supervisorctl command .. deprecated:: 0.17.0 user Name of the user to run the supervisorctl command .. versionadded:: 0.17.0 conf_file path to supervisorctl config file bin_env path to supervisorctl bin or path to virtualenv with supervisor installed ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} salt.utils.warn_until( 'Lithium', 'Please remove \'runas\' support at this stage. \'user\' support was ' 'added in 0.17.0', _dont_call_warnings=True ) if runas: # Warn users about the deprecation ret.setdefault('warnings', []).append( 'The \'runas\' argument is being deprecated in favor of \'user\', ' 'please update your state files.' ) if user is not None and runas is not None: # user wins over runas but let warn about the deprecation. ret.setdefault('warnings', []).append( 'Passed both the \'runas\' and \'user\' arguments. Please don\'t. ' '\'runas\' is being ignored in favor of \'user\'.' ) runas = None elif runas is not None: # Support old runas usage user = runas runas = None if 'supervisord.status' not in __salt__: ret['result'] = False ret['comment'] = 'Supervisord module not activated. Do you need to install supervisord?' 
return ret all_processes = __salt__['supervisord.status']( user=user, conf_file=conf_file, bin_env=bin_env ) # parse process groups process_groups = set() for proc in all_processes: if ':' in proc: process_groups.add(proc[:proc.index(':') + 1]) process_groups = sorted(process_groups) matches = {} if name in all_processes: matches[name] = (all_processes[name]['state'].lower() == 'running') elif name in process_groups: for process in (x for x in all_processes if x.startswith(name)): matches[process] = ( all_processes[process]['state'].lower() == 'running' ) to_add = not bool(matches) if __opts__['test']: if not to_add: # Process/group already present, check if any need to be started to_start = [x for x, y in matches.iteritems() if y is False] if to_start: ret['result'] = None if name.endswith(':'): # Process group if len(to_start) == len(matches): ret['comment'] = ( 'All services in group {0!r} will be started' .format(name) ) else: ret['comment'] = ( 'The following services will be started: {0}' .format(' '.join(to_start)) ) else: # Single program ret['comment'] = 'Service {0} will be started'.format(name) else: if name.endswith(':'): # Process group ret['comment'] = ( 'All services in group {0!r} are already running' .format(name) ) else: ret['comment'] = ('Service {0} is already running' .format(name)) else: ret['result'] = None # Process/group needs to be added if name.endswith(':'): _type = 'Group {0!r}'.format(name) else: _type = 'Service {0}'.format(name) ret['comment'] = '{0} will be added and started'.format(_type) return ret changes = [] just_updated = False if to_add: comment = 'Adding service: {0}'.format(name) __salt__['supervisord.reread']( user=user, conf_file=conf_file, bin_env=bin_env ) result = __salt__['supervisord.add']( name, user=user, conf_file=conf_file, bin_env=bin_env ) ret.update(_check_error(result, comment)) changes.append(comment) log.debug(comment) elif update: comment = 'Updating supervisor' result = __salt__['supervisord.update']( user=user, conf_file=conf_file, bin_env=bin_env ) ret.update(_check_error(result, comment)) log.debug(comment) if '{0}: updated'.format(name) in result: just_updated = True is_stopped = None process_type = None if name in process_groups: process_type = 'group' # check if any processes in this group are stopped is_stopped = False for proc in all_processes: if proc.startswith(name) \ and _is_stopped_state(all_processes[proc]['state']): is_stopped = True break elif name in all_processes: process_type = 'service' if _is_stopped_state(all_processes[name]['state']): is_stopped = True else: is_stopped = False if is_stopped is False: if restart and not just_updated: comment = 'Restarting{0}: {1}'.format( process_type is not None and ' {0}'.format(process_type) or '', name ) log.debug(comment) result = __salt__['supervisord.restart']( name, user=user, conf_file=conf_file, bin_env=bin_env ) ret.update(_check_error(result, comment)) changes.append(comment) elif just_updated: comment = 'Not starting updated{0}: {1}'.format( process_type is not None and ' {0}'.format(process_type) or '', name ) result = comment ret.update({'comment': comment}) else: comment = 'Not starting already running{0}: {1}'.format( process_type is not None and ' {0}'.format(process_type) or '', name ) result = comment ret.update({'comment': comment}) elif not just_updated: comment = 'Starting{0}: {1}'.format( process_type is not None and ' {0}'.format(process_type) or '', name ) changes.append(comment) log.debug(comment) result = __salt__['supervisord.start']( name, 
user=runas, conf_file=conf_file, bin_env=bin_env ) ret.update(_check_error(result, comment)) log.debug(unicode(result)) if ret['result'] and len(changes): ret['changes'][name] = ' '.join(changes) return ret def dead(name, user=None, runas=None, conf_file=None, bin_env=None): ''' Ensure the named service is dead (not running). name Service name as defined in the supervisor configuration file runas Name of the user to run the supervisorctl command .. deprecated:: 0.17.0 user Name of the user to run the supervisorctl command .. versionadded:: 0.17.0 conf_file path to supervisorctl config file bin_env path to supervisorctl bin or path to virtualenv with supervisor installed ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} salt.utils.warn_until( 'Lithium', 'Please remove \'runas\' support at this stage. \'user\' support was ' 'added in 0.17.0', _dont_call_warnings=True ) if runas: # Warn users about the deprecation ret.setdefault('warnings', []).append( 'The \'runas\' argument is being deprecated in favor of \'user\', ' 'please update your state files.' ) if user is not None and runas is not None: # user wins over runas but let warn about the deprecation. ret.setdefault('warnings', []).append( 'Passed both the \'runas\' and \'user\' arguments. Please don\'t. ' '\'runas\' is being ignored in favor of \'user\'.' ) runas = None elif runas is not None: # Support old runas usage user = runas runas = None if __opts__['test']: ret['result'] = None ret['comment'] = ( 'Service {0} is set to be stopped'.format(name)) else: comment = 'Stopping service: {0}'.format(name) log.debug(comment) all_processes = __salt__['supervisord.status']( user=runas, conf_file=conf_file, bin_env=bin_env ) # parse process groups process_groups = [] for proc in all_processes: if ':' in proc: process_groups.append(proc[:proc.index(':') + 1]) process_groups = list(set(process_groups)) is_stopped = None if name in process_groups: # check if any processes in this group are stopped is_stopped = False for proc in all_processes: if proc.startswith(name) \ and _is_stopped_state(all_processes[proc]['state']): is_stopped = True break elif name in all_processes: if _is_stopped_state(all_processes[name]['state']): is_stopped = True else: is_stopped = False else: # process name doesn't exist ret['comment'] = "Service {0} doesn't exist".format(name) if is_stopped is True: ret['comment'] = "Service {0} is not running".format(name) else: result = {name: __salt__['supervisord.stop']( name, user=user, conf_file=conf_file, bin_env=bin_env )} ret.update(_check_error(result, comment)) log.debug(unicode(result)) return ret def mod_watch(name, restart=True, update=False, user=None, runas=None, conf_file=None, bin_env=None): # Always restart on watch return running( name, restart=restart, update=update, user=user, runas=runas, conf_file=conf_file, bin_env=bin_env )
29.210784
96
0.525004
73c99bcd066cd6f6c56f040968aeceaf16be44f2
9,548
py
Python
synapse/tests/test_eventbus.py
larrycameron80/synapse
24bf21c40b4a467e5dc28c8204aecaf502d5cddf
[ "Apache-2.0" ]
null
null
null
synapse/tests/test_eventbus.py
larrycameron80/synapse
24bf21c40b4a467e5dc28c8204aecaf502d5cddf
[ "Apache-2.0" ]
4
2017-10-03T21:50:40.000Z
2017-11-20T15:49:38.000Z
synapse/tests/test_eventbus.py
larrycameron80/synapse
24bf21c40b4a467e5dc28c8204aecaf502d5cddf
[ "Apache-2.0" ]
null
null
null
import signal import multiprocessing import synapse.common as s_common import synapse.eventbus as s_eventbus import synapse.lib.threads as s_threads from synapse.tests.common import * @firethread def send_sig(pid, sig): ''' Sent a signal to a process. Args: pid (int): Process id to send the signal too. sig (int): Signal to send. Returns: None ''' os.kill(pid, sig) def block_processing(evt1, evt2): ''' Function to make an eventbus and call main(). Used as a Process target. Args: evt1 (multiprocessing.Event): event to twiddle evt2 (multiprocessing.Event): event to twiddle ''' bus = s_eventbus.EventBus() def onMain(mesg): evt1.set() def onFini(): evt2.set() bus.on('ebus:main', onMain) bus.onfini(onFini) bus.main() sys.exit(137) class EventBusTest(SynTest): def test_eventbus_basics(self): bus = s_eventbus.EventBus() def foo(event): x = event[1].get('x') y = event[1].get('y') event[1]['ret'] = x + y bus.on('woot', foo) event = bus.fire('woot', x=3, y=5, ret=[]) self.eq(event[1]['ret'], 8) def test_eventbus_link(self): bus1 = s_eventbus.EventBus() bus2 = s_eventbus.EventBus() bus1.link(bus2.dist) data = {} def woot(event): data['woot'] = True bus2.on('woot', woot) bus1.fire('woot') self.true(data.get('woot')) def test_evenbus_unlink(self): bus = s_eventbus.EventBus() mesgs = [] def woot(mesg): mesgs.append(mesg) bus.link(woot) bus.fire('haha') self.eq(len(mesgs), 1) bus.unlink(woot) bus.fire('haha') self.eq(len(mesgs), 1) bus.fini() def test_eventbus_withfini(self): data = {'count': 0} def onfini(): data['count'] += 1 with s_eventbus.EventBus() as bus: bus.onfini(onfini) self.eq(data['count'], 1) def test_eventbus_finionce(self): data = {'count': 0} def onfini(): data['count'] += 1 bus = s_eventbus.EventBus() bus.onfini(onfini) bus.fini() bus.fini() self.eq(data['count'], 1) def test_eventbus_consume(self): bus = s_eventbus.EventBus() wait = self.getTestWait(bus, 2, 'woot') bus.consume([('haha', {}), ('hehe', {}), ('woot', {}), ('woot', {})]) wait.wait() bus.fini() def test_eventbus_off(self): bus = s_eventbus.EventBus() data = {'count': 0} def woot(mesg): data['count'] += 1 bus.on('hehe', woot) bus.fire('hehe') bus.off('hehe', woot) bus.fire('hehe') bus.fini() self.eq(data['count'], 1) def test_eventbus_waiter(self): bus0 = s_eventbus.EventBus() wait0 = bus0.waiter(3, 'foo:bar') bus0.fire('foo:bar') bus0.fire('foo:bar') bus0.fire('foo:bar') evts = wait0.wait(timeout=3) self.eq(len(evts), 3) wait1 = bus0.waiter(3, 'foo:baz') evts = wait1.wait(timeout=0.1) self.none(evts) def test_eventbus_filt(self): bus = s_eventbus.EventBus() def wootfunc(mesg): mesg[1]['woot'] = True bus.on('lol', wootfunc) bus.on('rofl', wootfunc, foo=10) mesg = bus.fire('lol') self.true(mesg[1].get('woot')) mesg = bus.fire('rofl') self.false(mesg[1].get('woot')) mesg = bus.fire('rofl', foo=20) self.false(mesg[1].get('woot')) mesg = bus.fire('rofl', foo=10) self.true(mesg[1].get('woot')) def test_eventbus_log(self): logs = [] with s_eventbus.EventBus() as ebus: ebus.on('log', logs.append) ebus.log(100, 'omg woot', foo=10) mesg = logs[0] self.eq(mesg[0], 'log') self.eq(mesg[1].get('foo'), 10) self.eq(mesg[1].get('mesg'), 'omg woot') self.eq(mesg[1].get('level'), 100) def test_eventbus_exc(self): logs = [] with s_eventbus.EventBus() as ebus: ebus.on('log', logs.append) try: raise s_common.NoSuchObj(name='hehe') except Exception as e: ebus.exc(e) mesg = logs[0] self.eq(mesg[1].get('err'), 'NoSuchObj') def test_eventbus_busref(self): bref = s_eventbus.BusRef() bus0 = s_eventbus.EventBus() bus1 = s_eventbus.EventBus() bus2 = 
s_eventbus.EventBus() bref.put('foo', bus0) bref.put('bar', bus1) bref.put('baz', bus2) bus1.fini() self.nn(bref.get('foo')) self.none(bref.get('bar')) self.len(2, list(bref)) self.true(bref.pop('baz') is bus2) self.len(1, list(bref)) bref.fini() self.true(bus0.isfini) def test_eventbus_waitfini(self): ebus = s_eventbus.EventBus() self.false(ebus.waitfini(timeout=0.1)) def callfini(): time.sleep(0.1) ebus.fini() thr = s_threads.worker(callfini) # actually wait... self.true(ebus.waitfini(timeout=0.3)) # bounce off the isfini block self.true(ebus.waitfini(timeout=0.3)) def test_eventbus_refcount(self): ebus = s_eventbus.EventBus() self.eq(ebus.incref(), 2) self.eq(ebus.fini(), 1) self.false(ebus.isfini) self.eq(ebus.fini(), 0) self.true(ebus.isfini) def test_eventbus_busref_gen(self): with s_eventbus.BusRef() as refs: self.raises(NoSuchCtor, refs.gen, 'woot') def ctor(name): return s_eventbus.EventBus() with s_eventbus.BusRef(ctor=ctor) as refs: self.none(refs.get('woot')) woot = refs.gen('woot') self.eq(1, woot._syn_refs) self.nn(woot) self.true(refs.gen('woot') is woot) self.eq(2, woot._syn_refs) woot.fini() self.false(woot.isfini) self.true(refs.get('woot') is woot) self.eq(1, woot._syn_refs) woot.fini() self.eq(0, woot._syn_refs) self.true(woot.isfini) self.false(refs.get('woot') is woot) self.eq(0, woot._syn_refs) def test_eventbus_main_sigterm(self): self.thisHostMustNot(platform='windows') # We have no reliable way to test this on windows evt1 = multiprocessing.Event() evt1.clear() evt2 = multiprocessing.Event() evt2.clear() proc = multiprocessing.Process(target=block_processing, args=(evt1, evt2)) proc.start() self.true(evt1.wait(timeout=10)) foo = send_sig(proc.pid, signal.SIGTERM) self.true(evt2.wait(timeout=10)) proc.join(timeout=10) foo.join() self.eq(proc.exitcode, 137) def test_eventbus_main_sigint(self): self.thisHostMustNot(platform='windows') # We have no reliable way to test this on windows evt1 = multiprocessing.Event() evt1.clear() evt2 = multiprocessing.Event() evt2.clear() proc = multiprocessing.Process(target=block_processing, args=(evt1, evt2)) proc.start() self.true(evt1.wait(timeout=10)) foo = send_sig(proc.pid, signal.SIGINT) self.true(evt2.wait(timeout=10)) proc.join(timeout=10) foo.join() self.eq(proc.exitcode, 137) def test_eventbus_onwith(self): ebus = s_eventbus.EventBus() l0 = [] l1 = [] def onHehe0(mesg): l0.append(mesg) def onHehe1(mesg): l1.append(mesg) ebus.on('hehe', onHehe0) # Temporarily set the 'hehe' callback with ebus.onWith('hehe', onHehe1) as e: self.true(e is ebus) ebus.fire('hehe') self.len(1, l0) self.len(1, l1) # subsequent fires do not call onHehe1 ebus.fire('hehe') self.len(2, l0) self.len(1, l1) # onWith works across Telepath Proxy's and with filts l2 = [] def onNodeForm(mesg): l2.append(mesg) with self.getDmonCore() as core: with core.onWith('node:form', onNodeForm, form='strform'): t0 = core.formTufoByProp('strform', 'hehe') self.nn(t0) t1 = core.formTufoByProp('intform', 1234) self.nn(t1) self.len(1, l2) def test_eventbus_busref_items(self): bref = s_eventbus.BusRef() bus0 = s_eventbus.EventBus() bus1 = s_eventbus.EventBus() bus2 = s_eventbus.EventBus() bref.put('foo', bus0) bref.put('bar', bus1) bref.put('baz', bus2) items = bref.items() self.isin(('foo', bus0), items) self.isin(('bar', bus1), items) self.isin(('baz', bus2), items) bus1.fini() items = bref.items() self.isin(('foo', bus0), items) self.isin(('baz', bus2), items) bus2.fini() items = bref.items() self.isin(('foo', bus0), items) bus0.fini() items = bref.items() 
self.eq(items, []) bref.fini() items = bref.items() self.eq(items, [])
23.118644
82
0.533515
73c9a38189f14d8b88ae406f866086c86bdc9646
484
py
Python
CCBC_Library/ccbclib/migrations/0005_auto_20150315_1149.py
comsaint/ccbc
594838d6356a7aaeea1cda759781716c58c18824
[ "MIT" ]
null
null
null
CCBC_Library/ccbclib/migrations/0005_auto_20150315_1149.py
comsaint/ccbc
594838d6356a7aaeea1cda759781716c58c18824
[ "MIT" ]
null
null
null
CCBC_Library/ccbclib/migrations/0005_auto_20150315_1149.py
comsaint/ccbc
594838d6356a7aaeea1cda759781716c58c18824
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('ccbclib', '0004_auto_20150315_1142'),
    ]

    operations = [
        migrations.AlterField(
            model_name='transaction',
            name='renew_manager',
            field=models.CharField(default='', blank=True, max_length=32, null=True),
            preserve_default=True,
        ),
    ]
23.047619
85
0.617769
73c9da9f979ad5b032e6a069f100a409aa067801
3,572
py
Python
tests/test_researchtool/summarization.py
ChristophBeckmann/ResearchTool
178ae87a23ea7fcc908505a8469075be3c2205ee
[ "MIT" ]
null
null
null
tests/test_researchtool/summarization.py
ChristophBeckmann/ResearchTool
178ae87a23ea7fcc908505a8469075be3c2205ee
[ "MIT" ]
null
null
null
tests/test_researchtool/summarization.py
ChristophBeckmann/ResearchTool
178ae87a23ea7fcc908505a8469075be3c2205ee
[ "MIT" ]
null
null
null
import nltk # Natural Language Toolkit: https://www.nltk.org/ import ssl # Secure Sockets Layer - Internet Protocol from nltk.cluster.util import cosine_distance # Necessary for calculate Cosine similarity from nltk.corpus import stopwords # nltk stopwords files import numpy as np import networkx as nx # Network analysis from langdetect import detect # Language Detection in ISO639-1 import iso639 # Transform ISO639-1 to ISO language name for stopwords file # Nltk Downloader is broken. There is a workaround to download the required "stopwords" package. # This code disable the SSL Certificate Verification. # Found solution on: https://github.com/gunthercox/ChatterBot/issues/930#issuecomment-322111087 def install_stopwords(): try: _create_unverified_https_context = ssl._create_unverified_context except AttributeError: # Legacy Python that doesn't verify HTTPS certificates by default pass else: # Handle target environment that doesn't support HTTPS verification ssl._create_default_https_context = _create_unverified_https_context nltk.download('stopwords') # Adapted to download only the required package. # Automatic detection of the language and conversion from "de" to "Deutsch" using the packages Langdetect and ISO639. language = "English" def detectlanguage(text): global language language = iso639.to_name(detect(text)) return language def read(text): file = open(text, "r") data = file.readlines() sentences_split = data[0].split(". ") sentences = [] for sentence in sentences_split: sentences.append(sentence.replace("[^a-zA-Z]", " ").split(" ")) sentences.pop() return sentences def sentence_similarity(sentence1, sentence2, stopwords=None): if stopwords is None: stopwords = [] sentence1 = [w.lower() for w in sentence1] sentence2 = [w.lower() for w in sentence2] all_words = list(set(sentence1+sentence2)) vector1 = [0] * len(all_words) vector2 = [0] * len(all_words) for w in sentence1: if w in stopwords: continue vector1[all_words.index(w)] += 1 for w in sentence2: if w in stopwords: continue vector2[all_words.index(w)] += 1 return 1-cosine_distance(vector1, vector2) def gen_sim_matrix(sentences, stop_words=None): similarity_matrix = np.zeros((len(sentences), len(sentences))) for idx1 in range(len(sentences)): for idx2 in range(len(sentences)): if idx1 == idx2: continue similarity_matrix[idx1][idx2] = sentence_similarity(sentences[idx1], sentences[idx2], stop_words) return similarity_matrix def build_summary(file_location, top_n=5): install_stopwords() summarized_text = [] sentences = read(file_location) stop_words = stopwords.words(language) sentence_similarity_matrix = gen_sim_matrix(sentences, stop_words) sentence_similarity_graph = nx.from_numpy_array(sentence_similarity_matrix) scores = nx.pagerank(sentence_similarity_graph) ranked_sentence = sorted(((scores[i], s) for i, s in enumerate(sentences)), reverse=True) for i in range(top_n): summarized_text.append(" ".join(ranked_sentence[i][1])) print(". ".join(summarized_text)) print(build_summary("../tests/test_researchtool/text.txt", 1))
38.408602
117
0.671613
73c9ea2ff8f5afbcc30a92ca2f59a5f96b7233b0
883
py
Python
moma_example/__init__.py
gadio/moma-django
13265379be1dbab18697e5f42f38b3b37f928aa9
[ "Apache-2.0" ]
12
2015-03-29T05:31:25.000Z
2019-06-13T16:17:37.000Z
moma_example/__init__.py
antoniotaranto/moma-django
13265379be1dbab18697e5f42f38b3b37f928aa9
[ "Apache-2.0" ]
8
2015-09-04T21:00:50.000Z
2021-06-10T17:39:44.000Z
moma_example/__init__.py
antoniotaranto/moma-django
13265379be1dbab18697e5f42f38b3b37f928aa9
[ "Apache-2.0" ]
3
2015-03-25T21:52:14.000Z
2021-01-11T03:02:29.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#==========================================================================
# Copyright 2012 Lucidel, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==========================================================================

__author__ = 'Gadi Oren'
__email__ = 'gadi.oren.1@gmail.com'
__version__ = '0.1.1'
35.32
75
0.611552
73ca0431be382c3e3e91e92014caea0452bbceb6
9,220
py
Python
translation/main.py
ab3llini/News2Title
80b5117d5495890425ddeaddd77ef64624df5855
[ "MIT" ]
null
null
null
translation/main.py
ab3llini/News2Title
80b5117d5495890425ddeaddd77ef64624df5855
[ "MIT" ]
null
null
null
translation/main.py
ab3llini/News2Title
80b5117d5495890425ddeaddd77ef64624df5855
[ "MIT" ]
1
2019-11-04T01:17:40.000Z
2019-11-04T01:17:40.000Z
# https://blog.keras.io/a-ten-minute-introduction-to-sequence-to-sequence-learning-in-keras.html '''Sequence to sequence example in Keras (character-level). This script demonstrates how to implement a basic character-level sequence-to-sequence model. We apply it to translating short English sentences into short French sentences, character-by-character. Note that it is fairly unusual to do character-level machine translation, as word-level models are more common in this domain. # Summary of the algorithm - We start with input sequences from a domain (e.g. English sentences) and corresponding target sequences from another domain (e.g. French sentences). - An encoder LSTM turns input sequences to 2 state vectors (we keep the last LSTM state and discard the outputs). - A decoder LSTM is trained to turn the target sequences into the same sequence but offset by one timestep in the future, a training process called "teacher forcing" in this context. Is uses as initial state the state vectors from the encoder. Effectively, the decoder learns to generate `targets[t+1...]` given `targets[...t]`, conditioned on the input sequence. - In inference mode, when we want to decode unknown input sequences, we: - Encode the input sequence into state vectors - Start with a target sequence of size 1 (just the start-of-sequence character) - Feed the state vectors and 1-char target sequence to the decoder to produce predictions for the next character - Sample the next character using these predictions (we simply use argmax). - Append the sampled character to the target sequence - Repeat until we generate the end-of-sequence character or we hit the character limit. # Data download English to French sentence pairs. http://www.manythings.org/anki/fra-eng.zip Lots of neat sentence pairs datasets can be found at: http://www.manythings.org/anki/ # References - Sequence to Sequence Learning with Neural Networks https://arxiv.org/abs/1409.3215 - Learning Phrase Representations using RNN Encoder-Decoder for Statistical Machine Translation https://arxiv.org/abs/1406.1078 ''' from __future__ import print_function from keras.models import Model from keras.layers import Input, LSTM, Dense import numpy as np batch_size = 64 # Batch size for training. epochs = 100 # Number of epochs to train for. latent_dim = 256 # Latent dimensionality of the encoding space. num_samples = 10000 # Number of samples to train on. # Path to the data txt file on disk. data_path = 'ita.txt' # Vectorize the data. input_texts = [] target_texts = [] input_characters = set() target_characters = set() with open(data_path, 'r', encoding='utf-8') as f: lines = f.read().split('\n') for line in lines[: min(num_samples, len(lines) - 1)]: input_text, target_text = line.split('\t') # We use "tab" as the "start sequence" character # for the targets, and "\n" as "end sequence" character. 
target_text = '\t' + target_text + '\n' input_texts.append(input_text) target_texts.append(target_text) for char in input_text: if char not in input_characters: input_characters.add(char) for char in target_text: if char not in target_characters: target_characters.add(char) input_characters = sorted(list(input_characters)) target_characters = sorted(list(target_characters)) num_encoder_tokens = len(input_characters) num_decoder_tokens = len(target_characters) max_encoder_seq_length = max([len(txt) for txt in input_texts]) max_decoder_seq_length = max([len(txt) for txt in target_texts]) print('Number of samples:', len(input_texts)) print('Number of unique input tokens:', num_encoder_tokens) print('Number of unique output tokens:', num_decoder_tokens) print('Max sequence length for inputs:', max_encoder_seq_length) print('Max sequence length for outputs:', max_decoder_seq_length) input_token_index = dict( [(char, i) for i, char in enumerate(input_characters)]) target_token_index = dict( [(char, i) for i, char in enumerate(target_characters)]) encoder_input_data = np.zeros( (len(input_texts), max_encoder_seq_length, num_encoder_tokens), dtype='float32') decoder_input_data = np.zeros( (len(input_texts), max_decoder_seq_length, num_decoder_tokens), dtype='float32') decoder_target_data = np.zeros( (len(input_texts), max_decoder_seq_length, num_decoder_tokens), dtype='float32') for i, (input_text, target_text) in enumerate(zip(input_texts, target_texts)): for t, char in enumerate(input_text): encoder_input_data[i, t, input_token_index[char]] = 1. for t, char in enumerate(target_text): # decoder_target_data is ahead of decoder_input_data by one timestep decoder_input_data[i, t, target_token_index[char]] = 1. if t > 0: # decoder_target_data will be ahead by one timestep # and will not include the start character. decoder_target_data[i, t - 1, target_token_index[char]] = 1. # Define an input sequence and process it. encoder_inputs = Input(shape=(None, num_encoder_tokens)) encoder = LSTM(latent_dim, return_state=True) encoder_outputs, state_h, state_c = encoder(encoder_inputs) # We discard `encoder_outputs` and only keep the states. encoder_states = [state_h, state_c] # Set up the decoder, using `encoder_states` as initial state. decoder_inputs = Input(shape=(None, num_decoder_tokens)) # We set up our decoder to return full output sequences, # and to return internal states as well. We don't use the # return states in the training model, but we will use them in inference. decoder_lstm = LSTM(latent_dim, return_sequences=True, return_state=True) decoder_outputs, _, _ = decoder_lstm(decoder_inputs, initial_state=encoder_states) decoder_dense = Dense(num_decoder_tokens, activation='softmax') decoder_outputs = decoder_dense(decoder_outputs) # Define the model that will turn # `encoder_input_data` & `decoder_input_data` into `decoder_target_data` model = Model([encoder_inputs, decoder_inputs], decoder_outputs) # Run training model.compile(optimizer='rmsprop', loss='categorical_crossentropy') model.summary() model.fit([encoder_input_data, decoder_input_data], decoder_target_data, batch_size=batch_size, epochs=epochs, validation_split=0.2) # Save model model.save('s2s.h5') # Next: inference mode (sampling). # Here's the drill: # 1) encode input and retrieve initial decoder state # 2) run one step of decoder with this initial state # and a "start of sequence" token as target. 
# Output will be the next target token # 3) Repeat with the current target token and current states # Define sampling models encoder_model = Model(encoder_inputs, encoder_states) decoder_state_input_h = Input(shape=(latent_dim,)) decoder_state_input_c = Input(shape=(latent_dim,)) decoder_states_inputs = [decoder_state_input_h, decoder_state_input_c] decoder_outputs, state_h, state_c = decoder_lstm( decoder_inputs, initial_state=decoder_states_inputs) decoder_states = [state_h, state_c] decoder_outputs = decoder_dense(decoder_outputs) decoder_model = Model( [decoder_inputs] + decoder_states_inputs, [decoder_outputs] + decoder_states) # Reverse-lookup token index to decode sequences back to # something readable. reverse_input_char_index = dict( (i, char) for char, i in input_token_index.items()) reverse_target_char_index = dict( (i, char) for char, i in target_token_index.items()) def decode_sequence(input_seq): # Encode the input as state vectors. states_value = encoder_model.predict(input_seq) # Generate empty target sequence of length 1. target_seq = np.zeros((1, 1, num_decoder_tokens)) # Populate the first character of target sequence with the start character. target_seq[0, 0, target_token_index['\t']] = 1. # Sampling loop for a batch of sequences # (to simplify, here we assume a batch of size 1). stop_condition = False decoded_sentence = '' while not stop_condition: output_tokens, h, c = decoder_model.predict( [target_seq] + states_value) # Sample a token sampled_token_index = np.argmax(output_tokens[0, -1, :]) sampled_char = reverse_target_char_index[sampled_token_index] decoded_sentence += sampled_char # Exit condition: either hit max length # or find stop character. if (sampled_char == '\n' or len(decoded_sentence) > max_decoder_seq_length): stop_condition = True # Update the target sequence (of length 1). target_seq = np.zeros((1, 1, num_decoder_tokens)) target_seq[0, 0, sampled_token_index] = 1. # Update states states_value = [h, c] return decoded_sentence for seq_index in range(100): # Take one sequence (part of the training set) # for trying out decoding. input_seq = encoder_input_data[seq_index: seq_index + 1] decoded_sentence = decode_sequence(input_seq) print('-') print('Input sentence:', input_texts[seq_index]) print('Decoded sentence:', decoded_sentence)
39.570815
96
0.734382
73cab4041a51025a679609f26a5e0a1625f64903
3,412
py
Python
lreid/data/datasets/__init__.py
TPCD/LifelongReID
cb33f9c29fe398e7546db345fab1c338dda8252f
[ "MIT" ]
63
2021-03-20T15:33:11.000Z
2022-03-30T03:04:14.000Z
lreid/data/datasets/__init__.py
TPCD/LifelongReID
cb33f9c29fe398e7546db345fab1c338dda8252f
[ "MIT" ]
5
2021-03-23T08:04:21.000Z
2022-03-10T02:28:43.000Z
lreid/data/datasets/__init__.py
TPCD/LifelongReID
cb33f9c29fe398e7546db345fab1c338dda8252f
[ "MIT" ]
10
2021-04-30T11:14:10.000Z
2022-03-18T16:44:55.000Z
from __future__ import print_function, absolute_import from .image import ( GRID, PRID, CUHK01, CUHK02, CUHK03, MSMT17, VIPeR, SenseReID, Market1501, DukeMTMCreID, iLIDS ) from .video import PRID2011, Mars, DukeMTMCVidReID, iLIDSVID from .dataset import Dataset, ImageDataset, VideoDataset __image_datasets = { 'market1501': Market1501, 'cuhk03': CUHK03, 'dukemtmcreid': DukeMTMCreID, 'msmt17': MSMT17, 'viper': VIPeR, 'grid': GRID, 'cuhk01': CUHK01, 'ilids': iLIDS, 'sensereid': SenseReID, 'prid': PRID, 'cuhk02': CUHK02 } __video_datasets = { 'mars': Mars, 'ilidsvid': iLIDSVID, 'prid2011': PRID2011, 'dukemtmcvidreid': DukeMTMCVidReID } def init_image_dataset(name, **kwargs): """Initializes an image dataset.""" avai_datasets = list(__image_datasets.keys()) if name not in avai_datasets: raise ValueError( 'Invalid dataset name. Received "{}", ' 'but expected to be one of {}'.format(name, avai_datasets) ) return __image_datasets[name](**kwargs) def init_video_dataset(name, **kwargs): """Initializes a video dataset.""" avai_datasets = list(__video_datasets.keys()) if name not in avai_datasets: raise ValueError( 'Invalid dataset name. Received "{}", ' 'but expected to be one of {}'.format(name, avai_datasets) ) return __video_datasets[name](**kwargs) def register_image_dataset(name, dataset): """Registers a new image dataset. Args: name (str): key corresponding to the new dataset. dataset (Dataset): the new dataset class. Examples:: import lreid import NewDataset lreid.data.register_image_dataset('new_dataset', NewDataset) # single dataset case datamanager = lreid.data.ImageDataManager( root='reid-data', sources='new_dataset' ) # multiple dataset case datamanager = lreid.data.ImageDataManager( root='reid-data', sources=['new_dataset', 'dukemtmcreid'] ) """ global __image_datasets curr_datasets = list(__image_datasets.keys()) if name in curr_datasets: raise ValueError( 'The given name already exists, please choose ' 'another name excluding {}'.format(curr_datasets) ) __image_datasets[name] = dataset def register_video_dataset(name, dataset): """Registers a new video dataset. Args: name (str): key corresponding to the new dataset. dataset (Dataset): the new dataset class. Examples:: import lreid import NewDataset lreid.data.register_video_dataset('new_dataset', NewDataset) # single dataset case datamanager = lreid.data.VideoDataManager( root='reid-data', sources='new_dataset' ) # multiple dataset case datamanager = lreid.data.VideoDataManager( root='reid-data', sources=['new_dataset', 'ilidsvid'] ) """ global __video_datasets curr_datasets = list(__video_datasets.keys()) if name in curr_datasets: raise ValueError( 'The given name already exists, please choose ' 'another name excluding {}'.format(curr_datasets) ) __video_datasets[name] = dataset
28.915254
77
0.62837
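The module above exposes name-keyed factories for the registered datasets. A hedged usage sketch follows; the 'root' keyword is an assumption, since constructor arguments depend on the concrete dataset class:

from lreid.data.datasets import init_image_dataset, init_video_dataset

# Look up a registered dataset class by its key and instantiate it.
# 'root' is a placeholder kwarg; real arguments depend on the dataset class.
market = init_image_dataset('market1501', root='reid-data')
mars = init_video_dataset('mars', root='reid-data')

# Unknown keys raise ValueError listing the available dataset names.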
73cac0da392b5ba49d4464ab3318805d5d1357b7
10,387
py
Python
BOOTSTRAPS/TODO_extract_thermal_slopes_BOOTSTRAP_PROTEINS.py
sergpolly/Thermal_adapt_scripts
faca767209dc7fa66f35b4738dcdd1b7d3bbac93
[ "MIT" ]
2
2018-12-05T07:43:39.000Z
2018-12-05T07:43:42.000Z
BOOTSTRAPS/TODO_extract_thermal_slopes_BOOTSTRAP_PROTEINS.py
sergpolly/Thermal_adapt_scripts
faca767209dc7fa66f35b4738dcdd1b7d3bbac93
[ "MIT" ]
null
null
null
BOOTSTRAPS/TODO_extract_thermal_slopes_BOOTSTRAP_PROTEINS.py
sergpolly/Thermal_adapt_scripts
faca767209dc7fa66f35b4738dcdd1b7d3bbac93
[ "MIT" ]
null
null
null
import re import os import sys from Bio import Seq from Bio import SeqIO from Bio import SeqUtils import pandas as pd import numpy as np from functools import partial import time from scipy import stats as st import copy from multiprocessing import Pool # for plotting ... import matplotlib.pyplot as plt import matplotlib as mpl font = {'family' : 'sans-serif', #'weight' : 'bold', 'size' :9} mpl.rc('font', **font) # data lopading ... # # reset the seed here ... (?!) not thread safe (?!) np.random.seed() root_path = os.path.expanduser('~') bact_path = os.path.join(root_path,'GENOMES_BACTER_RELEASE69/genbank') arch_path = os.path.join(root_path,'GENOMES_ARCH_SEP2015') # SOME ARCHAEAL DATA ... arch = pd.read_csv(os.path.join(arch_path,'summary_organisms_interest.dat')) arch_nohalo = pd.read_csv(os.path.join(arch_path,'summary_organisms_interest_no_halop.dat')) # SOME BACTERIAL DATA ... # complete genomes only ... bact = pd.read_csv(os.path.join(bact_path,'env_catalog_compgenome.dat')) ############################################### # complete_CDS_CAI_DNA.dat same thing ... arch_cai_fname = os.path.join(arch_path,"complete_arch_CDS_CAI_DNA.dat") bact_cai_fname = os.path.join(bact_path,"complete_CDS_CAI_DNA.dat") ############################## arch_cai = pd.read_csv(arch_cai_fname) bact_cai = pd.read_csv(bact_cai_fname) ############################## bact_cai_by_org = bact_cai.groupby('GenomicID') arch_cai_by_org = arch_cai.groupby('assembly_accession') ############################## aacids = sorted('ACDEFGHIKLMNPQRSTVWY') ############################## print "Arch and Bact data is loaded ..." ############################## argv_org = sys.argv[1] argv_cds = sys.argv[2] argv_iters = int(sys.argv[3]) ############################## FRACTION = 0.4 ITERATIONS = argv_iters PERCENTILE = 0.1 PROT_SAMPLE_SIZE = 50 # foremost for the ribosomal proteins modeling ... # ######################################################### # ######################################################### # ######################################################### # PERCENTILE = 0.1 # FRACTION = 0.4 # num_iterations = 50 # dat_size = valid_dat_subset.index.size # slopes_generated = {} # for iteration in xrange(num_iterations): # sample_indicies = np.random.choice(valid_dat_subset.index,int(dat_size*FRACTION)) # # get the subsample here ... # subsample = valid_dat_subset.loc[sample_indicies] # # check: # print # print "check ..." # print subsample.index.size,subsample.index.get_values() # print subsample['topt'].min(),subsample['topt'].max(),subsample['topt'].mean() # ######################################################### # ######################################################### # ######################################################### # generate 'dataset': # select CDS translations (protein sequences) for a given criteria ... # criteria includes CAI top 10%, all proteins, TrOp, noTrOp, ribosomal ... # 2 types of criteria: organismal and CDS-level ... def get_random_slopeset(all_cds,dat,uid_key,cds_criteria='all', org_criteria='random', calculate_trop=False, random_trop_fraction=0.5, topt='OptimumTemperature', prot_random_regime='PERCENTILE'): # def get_one_trop(idx): org_cds = all_cds.get_group(idx) # check if TrOp ... # for a given organism(id) all TrOp values must be same trop_vals = org_cds['TrOp'].unique() assert trop_vals.size == 1 # then just figure out TrOp value after unpacking ... trop, = trop_vals if pd.isnull(trop): # special return - not enough ribosomal proteins ... 
return 'none' if not trop: # False, return False return 'false' elif trop == True: # if it's True just return ... return 'true' else: raise ValueError ####################### # # ACHTUNG !!! # the way we treated organisms with too little ribosomal proteins (~<24), makes it hard # to distinguish between non-TrOp and the ones the former ones. # This can cause downstream 'nan' evaluations as a result ... # To avoid troubles: treat the TrOp==np.nan differently ! def extract_aausage(uid,criteria,cds_trans_key='protein'): local_cds = all_cds.get_group(uid) # select protein according to the criteria ... if criteria == 'cai': # cai10 for cai related criteria ... cai10 = local_cds['CAI'].quantile(q=1.0 - PERCENTILE) selected_aa = local_cds[local_cds['CAI'] >= cai10][cds_trans_key] elif criteria == 'ribo': selected_aa = local_cds[local_cds['ribosomal']][cds_trans_key] elif criteria == 'cai_noribo': # cai10 for cai related criteria ... cai10 = local_cds['CAI'].quantile(q=1.0 - PERCENTILE) selected_aa = local_cds[(local_cds['CAI'] >= cai10)&(~local_cds['ribosomal'])][cds_trans_key] elif criteria == 'all': selected_aa = local_cds[cds_trans_key] # the 'random' criteria for BOOTSTRAPIGN and shuffling ... elif criteria == 'random': # grab PERCENTILE fraction of CDSes from the proteome and pretend those are top10 CAI # for bootstrapping purposes ... if prot_random_regime == 'PERCENTILE': cds_subsample_size = int(local_cds.shape[0]*PERCENTILE) elif PROT_SAMPLE_SIZE < local_cds.shape[0]: cds_subsample_size = PROT_SAMPLE_SIZE else: raise ValueError('Not enough CDS to draw random sample from: %s.'%uid) cds_subsample_idx = np.random.choice( local_cds.index, cds_subsample_size ) selected_aa = local_cds.loc[cds_subsample_idx][cds_trans_key] else: raise ValueError('CDS criteria must be either cai,ribo,cai_noribo,all or random!') # selected_aa = ''.join(selected_aa) total_aacount = float(len(selected_aa)) # return freuqencies of aa usage get_aacount_str = lambda seq: np.asarray([seq.count(aa) for aa in aacids]) return get_aacount_str(selected_aa)*100.0/total_aacount # # for each dataset get some slopes info ... def get_slopes(dat,topt='OptimumTemperature'): # exp_T = dat[dat[topt]>=50][aacids].mean() # exp_M = dat[(dat[topt]>=20)&(dat[topt]<=40)][aacids].mean() # exp_A = dat[aacids].mean() exp_D = dat[aacids].apply(lambda x: x.cov(dat[topt])/dat[topt].var()) # check exp_D, just in case ... exp_D_check = dat[aacids].apply(lambda x: st.linregress(dat[topt],x)[0]) if ( np.abs(exp_D - exp_D_check) > 1e-7 ).any(): raise ValueError('exp_D caluclation failed!') # returning ... return exp_D ####################### # Calculate or check if we have the TrOp info for each organism ... if calculate_trop: dat['TrOp'] = [get_one_trop(idx) for idx in dat[uid_key]] else: assert 'TrOp' in dat.columns # the only criteria that makes sense ... if org_criteria == 'all': the_aausage = [ extract_aausage(uid,criteria=cds_criteria) for uid in dat[dat['TrOp']!='none'][uid_key]] the_topt = dat[dat['TrOp']!='none'][topt] elif org_criteria == 'trop': the_aausage = [ extract_aausage(uid,criteria=cds_criteria) for uid in dat[dat['TrOp']=='true'][uid_key] ] the_topt = dat[dat['TrOp']=='true'][topt] elif org_criteria == 'random': #################################### #################################### ### # CAI-able data is here ... ### ### dat[dat['TrOp']!='none'] ### #################################### #################################### # instead of calculating TrOp we shoud do random subsampling of organisms ... 
subsample_size = int(dat[dat['TrOp']!='none'].shape[0]*random_trop_fraction) subsample_idx = np.random.choice( dat[dat['TrOp']!='none'].index, subsample_size ) # use subsample indexes to get the subsample ... the_aausage = [ extract_aausage(uid,criteria=cds_criteria) for uid in dat.loc[subsample_idx][uid_key] ] # it's a resources wasting, but the code is cleaner this way ... the_topt = dat.loc[subsample_idx][topt] else: raise ValueError("Organism criteria must be 'all','trop' or 'random'!") # transform the aausage the_aausage = np.asarray(the_aausage) the_aausage = pd.DataFrame(the_aausage,columns=aacids) the_aausage[topt] = the_topt.reset_index(drop=True) # return get_slopes(the_aausage,topt=topt) ################################################### # cds_crits = ['cai','ribo','cai_noribo','all'] # org_crits = ['random'] ################################################### # cds_criteria = 'cai' # cds_criteria = 'cai' # org_criteria = 'random' cds_criteria = argv_cds org_criteria = argv_org # for iteration in range(ITERATIONS): arch_iter_slopes = [] bact_iter_slopes = [] for iteration in range(ITERATIONS): one_arch_iter_slope = get_random_slopeset(arch_cai_by_org,arch_nohalo,'assembly_accession',cds_criteria,org_criteria,FRACTION,prot_random_regime='NOT_PERCENTILE') one_bact_iter_slope = get_random_slopeset(bact_cai_by_org,bact,'GenomicID',cds_criteria,org_criteria,FRACTION,prot_random_regime='NOT_PERCENTILE') one_arch_iter_slope.name = 'iter%d'%iteration one_bact_iter_slope.name = 'iter%d'%iteration arch_iter_slopes.append(one_arch_iter_slope) bact_iter_slopes.append(one_bact_iter_slope) arch_boostrat_slopes = pd.concat(arch_iter_slopes,axis=1) bact_boostrat_slopes = pd.concat(bact_iter_slopes,axis=1) arch_fname = "BOOTSTRAP_%s_org-%s_cds-%s.dat"%('arch',org_criteria,cds_criteria) bact_fname = "BOOTSTRAP_%s_org-%s_cds-%s.dat"%('bact',org_criteria,cds_criteria) # csv export's default header=True, index=True must work just fine ... arch_boostrat_slopes.to_csv(arch_fname) bact_boostrat_slopes.to_csv(bact_fname) print "Bootstrap for CSD:%s ORG:%s is over! "%(cds_criteria,org_criteria)
41.714859
166
0.600751
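The get_slopes helper in the script above checks that the per-amino-acid regression slope computed as cov(x, T)/var(T) matches scipy's linregress slope. A toy verification of that identity with synthetic numbers (not the real genomic data):

import pandas as pd
from scipy import stats as st

# Synthetic optimum temperatures and one amino-acid frequency series.
T = pd.Series([20.0, 35.0, 50.0, 65.0, 80.0])
x = pd.Series([5.1, 5.4, 5.9, 6.2, 6.8])

slope_via_cov = x.cov(T) / T.var()      # covariance / variance, both with ddof=1
slope_via_ols = st.linregress(T, x)[0]  # ordinary least-squares slope
assert abs(slope_via_cov - slope_via_ols) < 1e-7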
73caf58c1574502766fc229ebee0666cf35b6186
1,939
py
Python
tests/test_util.py
DavidKatz-il/pdpipe
5ddd066425d99886bfc51cf19ab78b2bf8c7791a
[ "MIT" ]
438
2017-03-02T16:55:34.000Z
2019-12-20T03:29:36.000Z
tests/test_util.py
DavidKatz-il/pdpipe
5ddd066425d99886bfc51cf19ab78b2bf8c7791a
[ "MIT" ]
68
2019-12-21T12:51:51.000Z
2022-03-13T13:06:14.000Z
tests/test_util.py
DavidKatz-il/pdpipe
5ddd066425d99886bfc51cf19ab78b2bf8c7791a
[ "MIT" ]
30
2019-12-21T12:18:18.000Z
2022-03-17T05:53:19.000Z
"""Testing pdpipe util module."""

import pandas as pd
import pytest

from pdpipe.util import out_of_place_col_insert


def _test_df():
    return pd.DataFrame(
        data=[[1, 'a'], [2, 'b']],
        index=[1, 2],
        columns=['num', 'char']
    )


def test_out_of_place_col_insert_all_params():
    """Testing the ColDrop pipeline stage."""
    df = _test_df()
    series = pd.Series(
        data=[10, 20],
        index=[1, 2],
        name='tens')
    result_df = out_of_place_col_insert(df, series, 1, 'Tigers')
    assert 'tens' not in result_df.columns
    assert 'Tigers' in result_df.columns
    assert result_df.columns.get_loc('Tigers') == 1
    assert result_df['Tigers'][1] == 10
    assert result_df['Tigers'][2] == 20


def test_out_of_place_col_insert_no_col_name():
    """Testing the ColDrop pipeline stage."""
    df = _test_df()
    series = pd.Series(
        data=[10, 20],
        index=[1, 2],
        name='tens')
    result_df = out_of_place_col_insert(df, series, 1)
    assert 'tens' in result_df.columns
    assert result_df.columns.get_loc('tens') == 1
    assert result_df['tens'][1] == 10
    assert result_df['tens'][2] == 20


def test_out_of_place_col_insert_nameless_error():
    """Testing the ColDrop pipeline stage."""
    df = _test_df()
    series = pd.Series(
        data=[10, 20],
        index=[1, 2])
    with pytest.raises(ValueError):
        out_of_place_col_insert(df, series, 1)


def test_out_of_place_col_last_position():
    """Testing the ColDrop pipeline stage."""
    df = _test_df()
    series = pd.Series(
        data=[10, 20],
        index=[1, 2],
        name='tens')
    result_df = out_of_place_col_insert(df, series, len(df.columns), 'Tigers')
    assert 'tens' not in result_df.columns
    assert 'Tigers' in result_df.columns
    assert result_df.columns.get_loc('Tigers') == 2
    assert result_df['Tigers'][1] == 10
    assert result_df['Tigers'][2] == 20
26.561644
78
0.62919
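The assertions above pin down how out_of_place_col_insert behaves; a short usage sketch distilled from them (assumes pdpipe is installed; the column names are illustrative):

import pandas as pd
from pdpipe.util import out_of_place_col_insert

df = pd.DataFrame([[1, 'a'], [2, 'b']], index=[1, 2], columns=['num', 'char'])
tens = pd.Series([10, 20], index=[1, 2], name='tens')

# Insert the series at position 1 under a new column name.
result = out_of_place_col_insert(df, tens, 1, 'Tigers')
print(list(result.columns))  # expected, per the tests above: ['num', 'Tigers', 'char']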
73cb15c538fe6c0823a57ad6d3deb570ff3c4f52
40,938
py
Python
round01/30_03_CNN_revived_baseline_complexer.py
phunc20/rlcomp2020
c37f8f05cc86d55fca2648bf5491d6a2218c2cad
[ "MIT" ]
null
null
null
round01/30_03_CNN_revived_baseline_complexer.py
phunc20/rlcomp2020
c37f8f05cc86d55fca2648bf5491d6a2218c2cad
[ "MIT" ]
1
2022-02-10T02:27:10.000Z
2022-02-10T02:27:10.000Z
round01/30_03_CNN_revived_baseline_complexer.py
phunc20/rlcomp2020
c37f8f05cc86d55fca2648bf5491d6a2218c2cad
[ "MIT" ]
null
null
null
import sys import numpy as np #import pandas as pd import datetime import json from array import * import os import math from random import randrange import random from tensorflow.keras.models import Sequential from tensorflow.keras.models import model_from_json from tensorflow.keras.layers import Dense, Activation from tensorflow.keras import optimizers import tensorflow.keras as keras #import tensorflow.compat.v1 as tf #from tensorflow.compat.v1.keras import backend as K #tf.disable_v2_behavior() import tensorflow as tf from tensorflow.keras import backend as K import constants import non_RL_agent import non_RL_agent02 import non_RL_agent03 import non_RL_agent04 import non_RL_agent05 import non_RL_agent06 n_episodes = 50_000_000 #n_epsilon_decay = int(n_episodes*.7) n_epsilon_decay = 10**6 / 0.99 n_episodes_buf_fill = 10_000 batch_size = 32 discount_rate = 0.95 lr_optimizer = 2.5e-4 loss_fn = keras.losses.mean_squared_error max_replay_len = 1_000_000 #loaded_h5 = None loaded_h5 = "models/30_CNN_revived_baseline_complexer/episode-41157-gold-300-step-15-20200822-2348.h5" #epsilon_start = 1 epsilon_start = 0.959 best_score = 300 #Classes in GAME_SOCKET_DUMMY.py class ObstacleInfo: # initial energy for obstacles: Land (key = 0): -1, Forest(key = -1): 0 (random), Trap(key = -2): -10, Swamp (key = -3): -5 types = {0: -1, -1: 0, -2: -10, -3: -5} def __init__(self): self.type = 0 self.posx = 0 self.posy = 0 self.value = 0 class GoldInfo: def __init__(self): self.posx = 0 self.posy = 0 self.amount = 0 def loads(self, data): golds = [] for gd in data: g = GoldInfo() g.posx = gd["posx"] g.posy = gd["posy"] g.amount = gd["amount"] golds.append(g) return golds class PlayerInfo: STATUS_PLAYING = 0 STATUS_ELIMINATED_WENT_OUT_MAP = 1 STATUS_ELIMINATED_OUT_OF_ENERGY = 2 STATUS_ELIMINATED_INVALID_ACTION = 3 STATUS_STOP_EMPTY_GOLD = 4 STATUS_STOP_END_STEP = 5 def __init__(self, id): self.playerId = id self.score = 0 self.energy = 0 self.posx = 0 self.posy = 0 self.lastAction = -1 self.status = PlayerInfo.STATUS_PLAYING self.freeCount = 0 class GameInfo: def __init__(self): self.numberOfPlayers = 1 self.width = 0 self.height = 0 self.steps = 100 self.golds = [] self.obstacles = [] def loads(self, data): m = GameInfo() m.width = data["width"] m.height = data["height"] m.golds = GoldInfo().loads(data["golds"]) m.obstacles = data["obstacles"] m.numberOfPlayers = data["numberOfPlayers"] m.steps = data["steps"] return m class UserMatch: def __init__(self): self.playerId = 1 self.posx = 0 self.posy = 0 self.energy = 50 self.gameinfo = GameInfo() def to_json(self): return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4) class StepState: def __init__(self): self.players = [] self.golds = [] self.changedObstacles = [] def to_json(self): return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4) #Main class in GAME_SOCKET_DUMMY.py class GameSocket: bog_energy_chain = {-5: -20, -20: -40, -40: -100, -100: -100} def __init__(self): self.stepCount = 0 self.maxStep = 0 self.mapdir = "Maps" # where to load all pre-defined maps self.mapid = "" self.userMatch = UserMatch() self.user = PlayerInfo(1) self.stepState = StepState() self.maps = {} # key: map file name, value: file content self.map = [] # running map info: 0->Land, -1->Forest, -2->Trap, -3:Swamp, >0:Gold self.energyOnMap = [] # self.energyOnMap[x][y]: <0, amount of energy which player will consume if it move into (x,y) self.E = 50 self.resetFlag = True self.craftUsers = [] # players that craft at current step - for 
calculating amount of gold self.bots = [] self.craftMap = {} # cells that players craft at current step, key: x_y, value: number of players that craft at (x,y) def init_bots(self): self.bots = [Bot1(2), Bot2(3), Bot3(4)] # use bot1(id=2), bot2(id=3), bot3(id=4) #for (bot) in self.bots: # at the beginning, all bots will have same position, energy as player for bot in self.bots: # at the beginning, all bots will have same position, energy as player bot.info.posx = self.user.posx bot.info.posy = self.user.posy bot.info.energy = self.user.energy bot.info.lastAction = -1 bot.info.status = PlayerInfo.STATUS_PLAYING bot.info.score = 0 self.stepState.players.append(bot.info) self.userMatch.gameinfo.numberOfPlayers = len(self.stepState.players) #print("numberOfPlayers: ", self.userMatch.gameinfo.numberOfPlayers) def reset(self, requests): # load new game by given request: [map id (filename), posx, posy, initial energy] # load new map self.reset_map(requests[0]) self.userMatch.posx = int(requests[1]) self.userMatch.posy = int(requests[2]) self.userMatch.energy = int(requests[3]) self.userMatch.gameinfo.steps = int(requests[4]) self.maxStep = self.userMatch.gameinfo.steps # init data for players self.user.posx = self.userMatch.posx # in self.user.posy = self.userMatch.posy self.user.energy = self.userMatch.energy self.user.status = PlayerInfo.STATUS_PLAYING self.user.score = 0 self.stepState.players = [self.user] self.E = self.userMatch.energy self.resetFlag = True self.init_bots() self.stepCount = 0 def reset_map(self, id): # load map info self.mapId = id self.map = json.loads(self.maps[self.mapId]) self.userMatch = self.map_info(self.map) self.stepState.golds = self.userMatch.gameinfo.golds self.map = json.loads(self.maps[self.mapId]) self.energyOnMap = json.loads(self.maps[self.mapId]) for x in range(len(self.map)): for y in range(len(self.map[x])): if self.map[x][y] > 0: # gold self.energyOnMap[x][y] = -4 else: # obstacles self.energyOnMap[x][y] = ObstacleInfo.types[self.map[x][y]] def connect(self): # simulate player's connect request print("Connected to server.") for mapid in range(len(Maps)): filename = "map" + str(mapid) print("Found: " + filename) self.maps[filename] = str(Maps[mapid]) def map_info(self, map): # get map info # print(map) userMatch = UserMatch() userMatch.gameinfo.height = len(map) userMatch.gameinfo.width = len(map[0]) i = 0 while i < len(map): j = 0 while j < len(map[i]): if map[i][j] > 0: # gold g = GoldInfo() g.posx = j g.posy = i g.amount = map[i][j] userMatch.gameinfo.golds.append(g) else: # obstacles o = ObstacleInfo() o.posx = j o.posy = i o.type = -map[i][j] o.value = ObstacleInfo.types[map[i][j]] userMatch.gameinfo.obstacles.append(o) j += 1 i += 1 return userMatch def receive(self): # send data to player (simulate player's receive request) if self.resetFlag: # for the first time -> send game info self.resetFlag = False data = self.userMatch.to_json() for (bot) in self.bots: bot.new_game(data) # print(data) return data else: # send step state self.stepCount = self.stepCount + 1 if self.stepCount >= self.maxStep: for player in self.stepState.players: player.status = PlayerInfo.STATUS_STOP_END_STEP data = self.stepState.to_json() #for (bot) in self.bots: # update bots' state for bot in self.bots: # update bots' state bot.new_state(data) # print(data) return data def send(self, message): # receive message from player (simulate send request from player) if message.isnumeric(): # player send action self.resetFlag = False self.stepState.changedObstacles = [] action = 
int(message) # print("Action = ", action) self.user.lastAction = action self.craftUsers = [] self.step_action(self.user, action) for bot in self.bots: if bot.info.status == PlayerInfo.STATUS_PLAYING: action = bot.next_action() bot.info.lastAction = action # print("Bot Action: ", action) self.step_action(bot.info, action) self.action_5_craft() for c in self.stepState.changedObstacles: self.map[c["posy"]][c["posx"]] = -c["type"] self.energyOnMap[c["posy"]][c["posx"]] = c["value"] else: # reset game requests = message.split(",") #print("Reset game: ", requests[:3], end='') self.reset(requests) def step_action(self, user, action): switcher = { 0: self.action_0_left, 1: self.action_1_right, 2: self.action_2_up, 3: self.action_3_down, 4: self.action_4_free, 5: self.action_5_craft_pre } func = switcher.get(action, self.invalidAction) func(user) def action_5_craft_pre(self, user): # collect players who craft at current step user.freeCount = 0 if self.map[user.posy][user.posx] <= 0: # craft at the non-gold cell user.energy -= 10 if user.energy <= 0: user.status = PlayerInfo.STATUS_ELIMINATED_OUT_OF_ENERGY user.lastAction = 6 #eliminated else: user.energy -= 5 if user.energy > 0: self.craftUsers.append(user) key = str(user.posx) + "_" + str(user.posy) if key in self.craftMap: count = self.craftMap[key] self.craftMap[key] = count + 1 else: self.craftMap[key] = 1 else: user.status = PlayerInfo.STATUS_ELIMINATED_OUT_OF_ENERGY user.lastAction = 6 #eliminated def action_0_left(self, user): # user go left user.freeCount = 0 user.posx = user.posx - 1 if user.posx < 0: user.status = PlayerInfo.STATUS_ELIMINATED_WENT_OUT_MAP user.lastAction = 6 #eliminated else: self.go_to_pos(user) def action_1_right(self, user): # user go right user.freeCount = 0 user.posx = user.posx + 1 if user.posx >= self.userMatch.gameinfo.width: user.status = PlayerInfo.STATUS_ELIMINATED_WENT_OUT_MAP user.lastAction = 6 #eliminated else: self.go_to_pos(user) def action_2_up(self, user): # user go up user.freeCount = 0 user.posy = user.posy - 1 if user.posy < 0: user.status = PlayerInfo.STATUS_ELIMINATED_WENT_OUT_MAP user.lastAction = 6 #eliminated else: self.go_to_pos(user) def action_3_down(self, user): # user go right user.freeCount = 0 user.posy = user.posy + 1 if user.posy >= self.userMatch.gameinfo.height: user.status = PlayerInfo.STATUS_ELIMINATED_WENT_OUT_MAP user.lastAction = 6 #eliminated else: self.go_to_pos(user) def action_4_free(self, user): # user free user.freeCount += 1 if user.freeCount == 1: user.energy += int(self.E / 4) elif user.freeCount == 2: user.energy += int(self.E / 3) elif user.freeCount == 3: user.energy += int(self.E / 2) else: user.energy = self.E if user.energy > self.E: user.energy = self.E def action_5_craft(self): craftCount = len(self.craftUsers) # print ("craftCount",craftCount) if (craftCount > 0): for user in self.craftUsers: x = user.posx y = user.posy key = str(user.posx) + "_" + str(user.posy) c = self.craftMap[key] m = min(math.ceil(self.map[y][x] / c), 50) user.score += m # print ("user", user.playerId, m) for user in self.craftUsers: x = user.posx y = user.posy key = str(user.posx) + "_" + str(user.posy) if key in self.craftMap: c = self.craftMap[key] del self.craftMap[key] m = min(math.ceil(self.map[y][x] / c), 50) self.map[y][x] -= m * c if self.map[y][x] < 0: self.map[y][x] = 0 self.energyOnMap[y][x] = ObstacleInfo.types[0] for g in self.stepState.golds: if g.posx == x and g.posy == y: g.amount = self.map[y][x] if g.amount == 0: self.stepState.golds.remove(g) 
self.add_changed_obstacle(x, y, 0, ObstacleInfo.types[0]) if len(self.stepState.golds) == 0: for player in self.stepState.players: player.status = PlayerInfo.STATUS_STOP_EMPTY_GOLD break; self.craftMap = {} def invalidAction(self, user): user.status = PlayerInfo.STATUS_ELIMINATED_INVALID_ACTION user.lastAction = 6 #eliminated def go_to_pos(self, user): # player move to cell(x,y) if self.map[user.posy][user.posx] == -1: user.energy -= randrange(16) + 5 elif self.map[user.posy][user.posx] == 0: user.energy += self.energyOnMap[user.posy][user.posx] elif self.map[user.posy][user.posx] == -2: user.energy += self.energyOnMap[user.posy][user.posx] self.add_changed_obstacle(user.posx, user.posy, 0, ObstacleInfo.types[0]) elif self.map[user.posy][user.posx] == -3: user.energy += self.energyOnMap[user.posy][user.posx] self.add_changed_obstacle(user.posx, user.posy, 3, self.bog_energy_chain[self.energyOnMap[user.posy][user.posx]]) else: user.energy -= 4 if user.energy <= 0: user.status = PlayerInfo.STATUS_ELIMINATED_OUT_OF_ENERGY user.lastAction = 6 #eliminated def add_changed_obstacle(self, x, y, t, v): added = False for o in self.stepState.changedObstacles: if o["posx"] == x and o["posy"] == y: added = True break if added == False: o = {} o["posx"] = x o["posy"] = y o["type"] = t o["value"] = v self.stepState.changedObstacles.append(o) def close(self): print("Close socket.") class Bot1: ACTION_GO_LEFT = 0 ACTION_GO_RIGHT = 1 ACTION_GO_UP = 2 ACTION_GO_DOWN = 3 ACTION_FREE = 4 ACTION_CRAFT = 5 def __init__(self, id): self.state = State() self.info = PlayerInfo(id) def get_state(self): view = np.zeros([self.state.mapInfo.max_y + 1, self.state.mapInfo.max_x + 1], dtype=int) for x in range(self.state.mapInfo.max_x + 1): for y in range(self.state.mapInfo.max_y + 1): if self.state.mapInfo.get_obstacle(x, y) == TreeID: # Tree view[y, x] = -TreeID if self.state.mapInfo.get_obstacle(x, y) == TrapID: # Trap view[y, x] = -TrapID if self.state.mapInfo.get_obstacle(x, y) == SwampID: # Swamp view[y, x] = -SwampID if self.state.mapInfo.gold_amount(x, y) > 0: view[y, x] = self.state.mapInfo.gold_amount(x, y) DQNState = view.flatten().tolist() #Flattening the map matrix to a vector #DQNState.append(self.state.x) #DQNState.append(self.state.y) #DQNState.append(self.state.energy) DQNState.append(self.info.posx) DQNState.append(self.info.posy) DQNState.append(self.info.energy) for player in self.state.players: # self.info.playerId is the id of the current bot if player["playerId"] != self.info.playerId: DQNState.append(player["posx"]) DQNState.append(player["posy"]) DQNState = np.array(DQNState) return DQNState def next_action(self): s = self.get_state() #return int(greedy_policy(s)) return int(non_RL_agent.greedy_policy(s)) def get_score(self): return [player["score"] for player in minerEnv.socket.bots[1].state.players if player["playerId"] == self.info.playerId][0] def new_game(self, data): try: self.state.init_state(data) except Exception as e: import traceback traceback.print_exc() def new_state(self, data): # action = self.next_action(); # self.socket.send(action) try: self.state.update_state(data) except Exception as e: import traceback traceback.print_exc() class Bot2: ACTION_GO_LEFT = 0 ACTION_GO_RIGHT = 1 ACTION_GO_UP = 2 ACTION_GO_DOWN = 3 ACTION_FREE = 4 ACTION_CRAFT = 5 def __init__(self, id): self.state = State() self.info = PlayerInfo(id) def get_state(self): view = np.zeros([self.state.mapInfo.max_y + 1, self.state.mapInfo.max_x + 1], dtype=int) for x in range(self.state.mapInfo.max_x + 1): for y in 
range(self.state.mapInfo.max_y + 1): if self.state.mapInfo.get_obstacle(x, y) == TreeID: # Tree view[y, x] = -TreeID if self.state.mapInfo.get_obstacle(x, y) == TrapID: # Trap view[y, x] = -TrapID if self.state.mapInfo.get_obstacle(x, y) == SwampID: # Swamp view[y, x] = -SwampID if self.state.mapInfo.gold_amount(x, y) > 0: view[y, x] = self.state.mapInfo.gold_amount(x, y) DQNState = view.flatten().tolist() #Flattening the map matrix to a vector #DQNState.append(self.state.x) #DQNState.append(self.state.y) #DQNState.append(self.state.energy) DQNState.append(self.info.posx) DQNState.append(self.info.posy) DQNState.append(self.info.energy) for player in self.state.players: # self.info.playerId is the id of the current bot if player["playerId"] != self.info.playerId: DQNState.append(player["posx"]) DQNState.append(player["posy"]) DQNState = np.array(DQNState) return DQNState def next_action(self): s = self.get_state() #return int(non_RL_agent03.greedy_policy(s)) return int(non_RL_agent.greedy_policy(s, how_gold=non_RL_agent.find_worthiest_gold)) #if self.state.mapInfo.gold_amount(self.info.posx, self.info.posy) > 0: # if self.info.energy >= 6: # return self.ACTION_CRAFT # else: # return self.ACTION_FREE #if self.info.energy < 5: # return self.ACTION_FREE #else: # action = np.random.randint(0, 4) # return action def new_game(self, data): try: self.state.init_state(data) except Exception as e: import traceback traceback.print_exc() def new_state(self, data): # action = self.next_action(); # self.socket.send(action) try: self.state.update_state(data) except Exception as e: import traceback traceback.print_exc() def get_score(self): return [player["score"] for player in minerEnv.socket.bots[1].state.players if player["playerId"] == self.info.playerId][0] class Bot3: ACTION_GO_LEFT = 0 ACTION_GO_RIGHT = 1 ACTION_GO_UP = 2 ACTION_GO_DOWN = 3 ACTION_FREE = 4 ACTION_CRAFT = 5 def __init__(self, id): self.state = State() self.info = PlayerInfo(id) def get_state(self): view = np.zeros([self.state.mapInfo.max_y + 1, self.state.mapInfo.max_x + 1], dtype=int) for x in range(self.state.mapInfo.max_x + 1): for y in range(self.state.mapInfo.max_y + 1): if self.state.mapInfo.get_obstacle(x, y) == TreeID: # Tree view[y, x] = -TreeID if self.state.mapInfo.get_obstacle(x, y) == TrapID: # Trap view[y, x] = -TrapID if self.state.mapInfo.get_obstacle(x, y) == SwampID: # Swamp view[y, x] = -SwampID if self.state.mapInfo.gold_amount(x, y) > 0: view[y, x] = self.state.mapInfo.gold_amount(x, y) DQNState = view.flatten().tolist() #Flattening the map matrix to a vector #DQNState.append(self.state.x) #DQNState.append(self.state.y) #DQNState.append(self.state.energy) DQNState.append(self.info.posx) DQNState.append(self.info.posy) DQNState.append(self.info.energy) for player in self.state.players: # self.info.playerId is the id of the current bot if player["playerId"] != self.info.playerId: DQNState.append(player["posx"]) DQNState.append(player["posy"]) DQNState = np.array(DQNState) return DQNState def next_action(self): s = self.get_state() return int(non_RL_agent02.greedy_policy(s)) #if self.state.mapInfo.gold_amount(self.info.posx, self.info.posy) > 0: # if self.info.energy >= 6: # return self.ACTION_CRAFT # else: # return self.ACTION_FREE #if self.info.energy < 5: # return self.ACTION_FREE #else: # action = self.ACTION_GO_LEFT # if self.info.posx % 2 == 0: # if self.info.posy < self.state.mapInfo.max_y: # action = self.ACTION_GO_DOWN # else: # if self.info.posy > 0: # action = self.ACTION_GO_UP # else: # action = 
self.ACTION_GO_RIGHT # return action def new_game(self, data): try: self.state.init_state(data) except Exception as e: import traceback traceback.print_exc() def new_state(self, data): # action = self.next_action(); # self.socket.send(action) try: self.state.update_state(data) except Exception as e: import traceback traceback.print_exc() def get_score(self): return [player["score"] for player in minerEnv.socket.bots[1].state.players if player["playerId"] == self.info.playerId][0] #MinerState.py def str_2_json(str): return json.loads(str, encoding="utf-8") class MapInfo: def __init__(self): self.max_x = 0 #Width of the map self.max_y = 0 #Height of the map self.golds = [] #List of the golds in the map self.obstacles = [] self.numberOfPlayers = 0 self.maxStep = 0 #The maximum number of step is set for this map def init_map(self, gameInfo): #Initialize the map at the begining of each episode self.max_x = gameInfo["width"] - 1 self.max_y = gameInfo["height"] - 1 self.golds = gameInfo["golds"] self.obstacles = gameInfo["obstacles"] self.maxStep = gameInfo["steps"] self.numberOfPlayers = gameInfo["numberOfPlayers"] def update(self, golds, changedObstacles): #Update the map after every step self.golds = golds for cob in changedObstacles: newOb = True for ob in self.obstacles: if cob["posx"] == ob["posx"] and cob["posy"] == ob["posy"]: newOb = False #print("cell(", cob["posx"], ",", cob["posy"], ") change type from: ", ob["type"], " -> ", # cob["type"], " / value: ", ob["value"], " -> ", cob["value"]) ob["type"] = cob["type"] ob["value"] = cob["value"] break if newOb: self.obstacles.append(cob) #print("new obstacle: ", cob["posx"], ",", cob["posy"], ", type = ", cob["type"], ", value = ", # cob["value"]) def get_min_x(self): return min([cell["posx"] for cell in self.golds]) def get_max_x(self): return max([cell["posx"] for cell in self.golds]) def get_min_y(self): return min([cell["posy"] for cell in self.golds]) def get_max_y(self): return max([cell["posy"] for cell in self.golds]) def is_row_has_gold(self, y): return y in [cell["posy"] for cell in self.golds] def is_column_has_gold(self, x): return x in [cell["posx"] for cell in self.golds] def gold_amount(self, x, y): #Get the amount of golds at cell (x,y) for cell in self.golds: if x == cell["posx"] and y == cell["posy"]: return cell["amount"] return 0 def get_obstacle(self, x, y): # Get the kind of the obstacle at cell(x,y) for cell in self.obstacles: if x == cell["posx"] and y == cell["posy"]: return cell["type"] return -1 # No obstacle at the cell (x,y) class State: STATUS_PLAYING = 0 STATUS_ELIMINATED_WENT_OUT_MAP = 1 STATUS_ELIMINATED_OUT_OF_ENERGY = 2 STATUS_ELIMINATED_INVALID_ACTION = 3 STATUS_STOP_EMPTY_GOLD = 4 STATUS_STOP_END_STEP = 5 def __init__(self): self.end = False self.score = 0 self.lastAction = None self.id = 0 self.x = 0 self.y = 0 self.energy = 0 self.energy_pre = 0 self.mapInfo = MapInfo() self.players = [] self.stepCount = 0 self.status = State.STATUS_PLAYING def init_state(self, data): #parse data from server into object game_info = str_2_json(data) self.end = False self.score = 0 self.lastAction = None self.id = game_info["playerId"] self.x = game_info["posx"] self.y = game_info["posy"] self.energy = game_info["energy"] self.mapInfo.init_map(game_info["gameinfo"]) self.stepCount = 0 self.status = State.STATUS_PLAYING self.players = [{"playerId": 2, "posx": self.x, "posy": self.y}, {"playerId": 3, "posx": self.x, "posy": self.y}, {"playerId": 4, "posx": self.x, "posy": self.y}] def update_state(self, data): new_state = 
str_2_json(data) for player in new_state["players"]: if player["playerId"] == self.id: self.x = player["posx"] self.y = player["posy"] self.energy_pre = self.energy self.energy = player["energy"] self.score = player["score"] self.lastAction = player["lastAction"] self.status = player["status"] self.mapInfo.update(new_state["golds"], new_state["changedObstacles"]) self.players = new_state["players"] for i in range(len(self.players), 4, 1): self.players.append({"playerId": i, "posx": self.x, "posy": self.y}) self.stepCount = self.stepCount + 1 #MinerEnv.py TreeID = 1 TrapID = 2 SwampID = 3 class MinerEnv: def __init__(self): self.socket = GameSocket() self.state = State() self.score_pre = self.state.score#Storing the last score for designing the reward function def start(self): #connect to server self.socket.connect() def end(self): #disconnect server self.socket.close() def send_map_info(self, request):#tell server which map to run self.socket.send(request) def reset(self): #start new game try: message = self.socket.receive() #receive game info from server self.state.init_state(message) #init state except Exception as e: import traceback traceback.print_exc() def step(self, action): #step process self.socket.send(action) #send action to server try: message = self.socket.receive() #receive new state from server self.state.update_state(message) #update to local state except Exception as e: import traceback traceback.print_exc() def get_state(self): """ Fuse `view` and `energyOnMap` into a single matrix to have a simple and concise state/observation. We want a matrix showing the following: `gold`: The amount of gold `all the others`: The energy that each type of terrain is going to take if being stepped into, e.g. `land` => -1, `trap` => -10, etc. """ view = np.zeros([self.state.mapInfo.max_y + 1, self.state.mapInfo.max_x + 1], dtype=int) for x in range(self.state.mapInfo.max_x + 1): for y in range(self.state.mapInfo.max_y + 1): if self.state.mapInfo.get_obstacle(x, y) == TreeID: # Tree view[y, x] = -TreeID if self.state.mapInfo.get_obstacle(x, y) == TrapID: # Trap view[y, x] = -TrapID if self.state.mapInfo.get_obstacle(x, y) == SwampID: # Swamp view[y, x] = -SwampID if self.state.mapInfo.gold_amount(x, y) > 0: view[y, x] = self.state.mapInfo.gold_amount(x, y) energyOnMap = np.array(self.socket.energyOnMap) # `view` will contribute only to the type of terrain of `gold` view[view <= 0] = -9999 # Just a dummy large negative number to be got rid of later # `energyOnMap` will contribute to the types of terrain of `land`, `trap`, `forest` and `swamp`. # Recall. `forest` was designated by BTC to the value of 0, to mean random integer btw [5..20]. 
energyOnMap[energyOnMap == 0] = - constants.forest_energy channel0 = np.maximum(view, energyOnMap) # Finish channel 0 # Channel 1 will contain the position of the agent channel1 = np.zeros_like(channel0) x_agent_out_of_map = self.state.x < 0 or self.state.x >= constants.width y_agent_out_of_map = self.state.y < 0 or self.state.y >= constants.height if x_agent_out_of_map or y_agent_out_of_map: pass else: channel1[self.state.y, self.state.x] = self.state.energy state = np.stack((channel0, channel1), axis=-1) return state def get_reward(self): # Initialize reward reward = 0 score_action = self.state.score - self.score_pre self.score_pre = self.state.score if score_action > 0: #reward += score_action*(100 - self.state.stepCount) reward += score_action #if self.state.mapInfo.get_obstacle(self.state.x, self.state.y) == TreeID: # Tree # reward -= TreeID #if self.state.mapInfo.get_obstacle(self.state.x, self.state.y) == TrapID: # Trap # reward -= TrapID #if self.state.mapInfo.get_obstacle(self.state.x, self.state.y) == SwampID: # Swamp # reward -= SwampID # if self.state.lastAction == 4: # reward -= 40 #if self.state.status == State.STATUS_ELIMINATED_WENT_OUT_MAP: if self.state.status == constants.agent_state_str2id["out_of_MAP"]: #if self.state.stepCount < 50: # reward += -5*(50 - self.state.stepCount) reward -= 2000 else: s = self.get_state() #print(f"self.state.x, self.state.y = {self.state.x}, {self.state.y} ") terrain_now = s[self.state.y, self.state.x, 0] if terrain_now < 0 and self.state.lastAction != constants.rest: # This substract the same amount of reward as energy when the agent steps into terrain_now, except for gold reward += terrain_now #if self.state.status == State.STATUS_STOP_END_STEP: if self.state.status == constants.agent_state_str2id["no_more_STEP"]: #reward += (self.state.score/total_gold) * 100 pass #if self.state.status == State.STATUS_ELIMINATED_OUT_OF_ENERGY: if self.state.status == constants.agent_state_str2id["no_more_ENERGY"]: if self.state.lastAction != constants.rest: reward -= 500 #if self.state.status == State.STATUS_PLAYING: if self.state.status == constants.agent_state_str2id["PLAYing"]: reward += 1 # We punish surplus `rest` if self.state.energy_pre == constants.max_energy and self.state.lastAction == constants.rest: reward -= 50 return reward def check_terminate(self): #Checking the status of the game #it indicates the game ends or is playing return self.state.status != State.STATUS_PLAYING Maps = [constants.maps[i] for i in range(1, 6)] env = MinerEnv() # Creating a communication environment between the DQN model and the game environment env.start() # Connect to the game #eliminated = [] #def pictorial_state(obs): # pictorial = np.zeros((constants.height, constants.width, 1+4), dtype=np.float32) # # 1+4 is +1 for map and +1 for each of the players = 5 channels # # dtype=np.float32 because pictorial will later be carried into tensorflow CNN # pictorial[..., 0] = obs[:constants.n_px].reshape((constants.height, constants.width)) # # position of agent: we put the energy value at the coordinate where stands the agent, the whole in channel 1, the channel for the agent. 
# x_agent, y_agent = obs[constants.n_px], obs[constants.n_px+1] # if x_agent >= constants.width or y_agent >= constants.height: # pass # else: # pictorial[y_agent, x_agent, 1] = obs[constants.n_px+2] # # position of bots: we put -1 on the coord of the bots # for i in range(1, 3+1): # if i in eliminated: # continue # y = obs[constants.n_px+(2*i+2)] # x = obs[constants.n_px+(2*i+1)] # if x >= constants.width or y >= constants.height: # eliminated.append(i) # continue # pictorial[y, x, i+1] = -1 # return pictorial from tensorflow.keras.layers import Dense, Conv2D, MaxPooling2D, Dropout, Flatten tf.random.set_seed(42) np.random.seed(42) #input_shape = [constants.height, constants.width, 1+4] input_shape = [constants.height, constants.width, 1+1] n_outputs = 6 model = keras.models.Sequential([ Conv2D(8, 3, activation="relu", padding="same", input_shape=input_shape), #MaxPooling2D(2), Conv2D(8, 3, activation="relu", padding="same"), #MaxPooling2D(2), #Conv2D(128, 3, activation="relu", padding="same"), #Conv2D(128, 3, activation="relu", padding="same"), #MaxPooling2D(2), Flatten(), #Dense(128, activation="elu"), Dense(128, activation="elu"), Dense(128, activation="elu"), Dense(64, activation="elu"), Dense(n_outputs) ]) from collections import deque replay_memory = deque(maxlen=max_replay_len) def sample_experiences(batch_size): indices = np.random.randint(len(replay_memory), size=batch_size) batch = [replay_memory[index] for index in indices] states, actions, rewards, next_states, dones = [ np.array([experience[field_index] for experience in batch]) for field_index in range(5)] return states, actions, rewards, next_states, dones def epsilon_greedy_policy(state, epsilon=0, n_actions=6): if np.random.rand() < epsilon: return np.random.randint(n_actions) else: #pictorial = pictorial_state(state) #Q_values = model.predict(pictorial[np.newaxis]) Q_values = model.predict(state[np.newaxis]) return np.argmax(Q_values[0]) def play_one_step(env, state, epsilon): action = epsilon_greedy_policy(state, epsilon) #next_state, reward, done, info = env.step(action) env.step(str(action)) next_state = env.get_state() reward = env.get_reward() done = env.check_terminate() replay_memory.append((state, action, reward, next_state, done)) return next_state, reward, done #optimizer = keras.optimizers.Adam(lr=1e-3) #optimizer = keras.optimizers.Adam(lr=2.5e-4) optimizer = keras.optimizers.Adam(lr=lr_optimizer) def training_step(batch_size): experiences = sample_experiences(batch_size) states, actions, rewards, next_states, dones = experiences #pictorials = np.array([pictorial_state(s) for s in states]) #next_pictorials = np.array([pictorial_state(next_s) for next_s in next_states]) #next_Q_values = model.predict(next_pictorials) next_Q_values = model.predict(next_states) max_next_Q_values = np.max(next_Q_values, axis=1) target_Q_values = rewards + (1 - dones) * discount_rate * max_next_Q_values mask = tf.one_hot(actions, n_outputs) with tf.GradientTape() as tape: #all_Q_values = model(pictorials) all_Q_values = model(states) Q_values = tf.reduce_sum(all_Q_values * mask, axis=1, keepdims=True) loss = tf.reduce_mean(loss_fn(target_Q_values, Q_values)) grads = tape.gradient(loss, model.trainable_variables) optimizer.apply_gradients(zip(grads, model.trainable_variables)) np.random.seed(42) tf.random.set_seed(42) scores = [] if loaded_h5: model = keras.models.load_model(loaded_h5) from constants import n_allowed_steps now = datetime.datetime.now() now_str = now.strftime("%Y%m%d-%H%M") script_name = __file__.split('.')[0] 
save_path = os.path.join("models", script_name) os.makedirs(save_path, exist_ok=True) with open(os.path.join(save_path, f"log-{now_str}.txt"), 'w') as log: for episode in range(n_episodes): eliminated = [] mapID = np.random.randint(0, 5) posID_x = np.random.randint(constants.width) posID_y = np.random.randint(constants.height) request = "map{},{},{},50,100".format(mapID, posID_x, posID_y) env.send_map_info(request) env.reset() obs = env.get_state() undiscounted_return = 0 for step in range(n_allowed_steps): #epsilon = max(1 - episode / n_epsilon_decay, 0.01) epsilon = max(epsilon_start - episode / n_epsilon_decay, 0.01) obs, reward, done = play_one_step(env, obs, epsilon) undiscounted_return += reward if done: break #score = env.state.score*(n_allowed_steps - step) score = env.state.score scores.append(score) if score > best_score: #best_weights = model.get_weights() best_score = score model.save(os.path.join(save_path, f"episode-{episode+1}-gold-{env.state.score}-step-{step+1}-{now_str}.h5")) message = "(Episode {: 5d}/{}) Gold: {: 4d} undiscounted_return: {: 6d} Steps: {: 3d} eps: {:.3f} ({})\n".format(episode+1, n_episodes, env.state.score, undiscounted_return, step + 1, epsilon, constants.agent_state_id2str[env.state.status]) print(message, end='') log.write(message) #if episode > 500: if episode > n_episodes_buf_fill: training_step(batch_size) np.save(f"scores-{now_str}", np.array(scores))
36.914337
256
0.577972
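The training_step function in the script above forms its regression target as rewards + (1 - dones) * discount_rate * max_next_Q_values, so terminal transitions contribute no bootstrap term. A toy numerical illustration of that one line (all values made up):

import numpy as np

rewards = np.array([1.0, -500.0])         # e.g. a "still playing" step vs. an out-of-energy step
dones = np.array([0.0, 1.0])              # 1 marks a terminal transition
max_next_Q_values = np.array([4.0, 7.0])  # max over actions of the next-state Q estimates
discount_rate = 0.95

target_Q_values = rewards + (1 - dones) * discount_rate * max_next_Q_values
print(target_Q_values)  # [4.8, -500.0]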
73cb26281f32da2701a55ab7fe5fc9eed0e2cae8
2,395
py
Python
operators/relu.py
ngiambla/nnflex
7c8bf46218ea70c6dad1efedf9e2069e41c4c3fa
[ "MIT" ]
null
null
null
operators/relu.py
ngiambla/nnflex
7c8bf46218ea70c6dad1efedf9e2069e41c4c3fa
[ "MIT" ]
null
null
null
operators/relu.py
ngiambla/nnflex
7c8bf46218ea70c6dad1efedf9e2069e41c4c3fa
[ "MIT" ]
null
null
null
'''
relu.py: Implement's the ReLU ONNX node as a flexnode (for use with any accelerator)
'''

import uuid
import numpy as np

from operators.flexnode import FlexNode
from core.defines import Operator
from core.messaging import Message


class ReLU(FlexNode):

    def __init__(self, onnx_node, inputs, outputs):
        FlexNode.__init__(self, onnx_node, inputs, outputs)
        self._in1_flat = None
        self._out_flat = None
        self._in1_offset = 0
        self._out_offset = 0
        self._operation = Operator.MAX

    def map(self, memory_mapper):
        in1 = self._inputs[0]
        self._in1_flat = in1.flatten()
        self._length = len(self._in1_flat)
        out = self._outputs[0]
        self._out_flat = out.flatten()
        self._in1_offset = memory_mapper.map(self._in1_flat)
        self._out_offset = memory_mapper.map(self._out_flat)
        self._inputs2mem(memory_mapper)

    def unmap(self, memory_mapper):
        self._mem2output(memory_mapper)
        memory_mapper.unmap(self._in1_flat)
        memory_mapper.unmap(self._out_flat)

    def _inputs2mem(self, memory_xfer_engine):
        memory_xfer_engine.sys2mem(self._in1_flat, self._in1_offset)

    def _mem2output(self, memory_xfer_engine):
        memory_xfer_engine.mem2sys(self._out_flat, self._out_offset)
        for i in range(len(self._out_flat)):
            multi_index = self.unravel_index(i, self._outputs[0].shape)
            self._outputs[0][multi_index] = self._out_flat[i]

    def compile(self, source, destinations):
        '''
        '''
        tile_commands = list()

        num_destinations = len(destinations)
        which_dest = 0

        for i in range(self._length):
            op1_addr = self._in1_offset+i
            res_addr = self._out_offset+i

            destination = destinations[which_dest]

            attributes = {
                "op1_addr" : op1_addr,
                "op2" : 0,
                "res_addr" : res_addr,
                "operation" : self._operation,
                "dtype" : self._out_flat.dtype
            }
            message_stamp = uuid.uuid4()
            tile_command = Message(source, destination, Message.TileCmd, message_stamp, attributes=attributes)
            tile_commands.append(tile_command)

            which_dest += 1
            which_dest = which_dest % num_destinations

        return tile_commands
28.511905
110
0.629645
73cb44585cf13ed9df47a8884836e6146d7ea7ab
2,800
py
Python
src/fortrace/utility/rc4.py
dasec/ForTrace
b8187522a2c83fb661e5a1a5f403da8f40a31ead
[ "MIT" ]
1
2022-03-31T14:01:51.000Z
2022-03-31T14:01:51.000Z
src/fortrace/utility/rc4.py
dasec/ForTrace
b8187522a2c83fb661e5a1a5f403da8f40a31ead
[ "MIT" ]
null
null
null
src/fortrace/utility/rc4.py
dasec/ForTrace
b8187522a2c83fb661e5a1a5f403da8f40a31ead
[ "MIT" ]
1
2022-03-31T14:02:30.000Z
2022-03-31T14:02:30.000Z
#!/usr/bin/env python
#
# rc4.py - RC4, ARC4, ARCFOUR algorithm with random salt
#
# Copyright (c) 2009 joonis new media
# Author: Thimo Kraemer <thimo.kraemer@joonis.de>
# Modified: 2016 Sascha Kopp <sascha.kopp@stud.h-da.de>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#

from __future__ import absolute_import
from __future__ import print_function

import random, base64
from hashlib import sha1
from six.moves import range

__all__ = ['crypt', 'encrypt', 'decrypt']


def crypt(data, key):
    """RC4 algorithm

    :param key: key used for crypto
    :param data: data to crypt
    """
    x = 0
    box = list(range(256))
    for i in range(256):
        x = (x + box[i] + ord(key[i % len(key)])) % 256
        box[i], box[x] = box[x], box[i]
    x = y = 0
    out = []
    for char in data:
        x = (x + 1) % 256
        y = (y + box[x]) % 256
        box[x], box[y] = box[y], box[x]
        out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256]))
    return ''.join(out)


def encrypt(data, key, encode=base64.b64encode, salt_length=16):
    """RC4 encryption with random salt and final encoding

    :param salt_length: length of salt
    :param encode: encoding algorithm
    :param key: key used for encryption
    :param data: data to encrypt
    """
    salt = ''
    for n in range(salt_length):
        salt += chr(random.randrange(256))
    data = salt + crypt(data, sha1(key + salt).digest())
    if encode:
        data = encode(data)
    return data


def decrypt(data, key, decode=base64.b64decode, salt_length=16):
    """RC4 decryption of encoded data

    :param salt_length: length of salt
    :param decode: decoding algorithm
    :param key: key used for decryption
    :param data: data to decrypt
    """
    if decode:
        data = decode(data)
    salt = data[:salt_length]
    return crypt(data[salt_length:], sha1(key + salt).digest())


if __name__ == '__main__':
    for i in range(10):
        data = encrypt('secret message', 'my-key')
        print(data)
        print(decrypt(data, 'my-key'))
30.434783
76
0.633571
73cb466c9021439a52f16ee56ff8f27004be819f
3,335
py
Python
reliabilly/components/web/http_requestor.py
corpseware/reliabilly
e8d310d43478407e2cf8b45cc746362124140385
[ "MIT" ]
1
2018-11-24T02:07:37.000Z
2018-11-24T02:07:37.000Z
reliabilly/components/web/http_requestor.py
corpseware/reliabilly
e8d310d43478407e2cf8b45cc746362124140385
[ "MIT" ]
1
2019-01-23T07:12:30.000Z
2019-01-23T07:12:30.000Z
reliabilly/components/web/http_requestor.py
corpseware/reliabilly
e8d310d43478407e2cf8b45cc746362124140385
[ "MIT" ]
1
2019-01-16T05:30:49.000Z
2019-01-16T05:30:49.000Z
import json
import pickle
import requests
from requests.auth import HTTPBasicAuth
from reliabilly.settings import Settings, Constants, get_service_url, get_raw_service_url


def save_items(items, json_file, pickle_file):  # pragma: no cover
    with open(pickle_file, 'wb') as handle:
        pickle.dump(items, handle, protocol=pickle.HIGHEST_PROTOCOL)
    with open(json_file, 'w') as file:
        json.dump(items, file)


class HttpRequestor:
    """ Generic web http(s) request client. """

    def __init__(self, **kwargs):
        self.client = kwargs.get(Constants.CLIENT, requests)
        self.save_fn = kwargs.get('save_fn', save_items)
        self.circuit_breaker = kwargs.get(Constants.CIRCUIT_BREAKER, None)

    def perform_web_request(self, verb, endpoint_url, **kwargs):
        if self.circuit_breaker is not None:
            return self.circuit_breaker.breaker.call(self._perform_web_request, verb, endpoint_url, **kwargs)
        return self._perform_web_request(verb, endpoint_url, **kwargs)

    def _perform_web_request(self, verb, endpoint_url, **kwargs):
        return self.client.request(verb, endpoint_url, **kwargs)

    def get(self, endpoint_url, **kwargs):
        return self._perform_web_request(Constants.GET_REQUEST, endpoint_url, **kwargs)

    def post(self, endpoint_url, **kwargs):
        return self._perform_web_request(Constants.POST_REQUEST, endpoint_url, **kwargs)

    def put(self, endpoint_url, **kwargs):
        return self._perform_web_request(Constants.PUT_REQUEST, endpoint_url, **kwargs)

    def delete(self, endpoint_url, **kwargs):
        return self._perform_web_request(Constants.DELETE_REQUEST, endpoint_url, **kwargs)

    def save_data_items(self, service_name, endpoint=Constants.EMPTY, paged=True):
        items = self.get_items(endpoint, paged, service_name)
        endpoint_name = endpoint.strip(Constants.FORWARD_SLASH)
        pickle_file = f'{service_name}{endpoint_name}.pickle'
        json_file = f'backup/{service_name}{endpoint_name}.json'
        self.save_fn(items, json_file, pickle_file)
        return True

    def get_items(self, endpoint, paged, service_name):
        if paged:
            return self.get_all_items(service_name)
        headers = self.get_request_headers()
        url = get_raw_service_url(service_name) + f'{endpoint}'
        return self.get(url, headers=headers).json()

    @staticmethod
    def get_request_headers():
        return {Constants.AUTHORIZATION_HEADER: Settings.AUTH_TOKEN, Constants.SERVICE_NAME_ENV: Settings.SERVICE_NAME}

    def get_all_items(self, service_name):
        url = get_service_url(service_name)
        results = list()
        current_offset = 0
        headers = self.get_request_headers()
        result = self._call_endpoint(url, current_offset, headers=headers)
        results.extend(result[service_name])
        while len(results) < result['total']:
            current_offset += result['limit']
            result = self._call_endpoint(url, current_offset, headers=headers)
            results.extend(result[service_name])
        return results

    def _call_endpoint(self, url, offset, headers=None):
        return self.get(url.format(offset=offset), headers=headers).json()

    @staticmethod
    def get_auth_value(user, password):
        return HTTPBasicAuth(user, password)
41.6875
119
0.705547
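A hedged usage sketch of the HttpRequestor client above; the URL and service name are placeholders, and the paged fetch assumes the target service returns 'total' and 'limit' fields as get_all_items expects:

from reliabilly.components.web.http_requestor import HttpRequestor

client = HttpRequestor()

# Plain GET; extra keyword arguments are passed straight through to requests.
response = client.get('https://example.org/health', timeout=5)
print(response.status_code)

# Fetch every page of a registered collection ('inventory' is an assumed service name).
items = client.get_all_items('inventory')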
73cb5d36706c30200fa829f7c2472f3a263da55e
890
py
Python
day1/day1.py
jmcph4/advent2017
f5f53e5a771b5b40b9fb8c5a2ea4e89d06279d32
[ "MIT" ]
null
null
null
day1/day1.py
jmcph4/advent2017
f5f53e5a771b5b40b9fb8c5a2ea4e89d06279d32
[ "MIT" ]
null
null
null
day1/day1.py
jmcph4/advent2017
f5f53e5a771b5b40b9fb8c5a2ea4e89d06279d32
[ "MIT" ]
null
null
null
def sum_matching_digits(digits):
    total = 0
    n = len(digits)

    if n == 0:
        return total

    if n == 1:
        return digits[0]

    for i in range(n):
        if i == n - 1:
            if digits[i] == digits[0]:
                total += digits[i]
        else:
            if digits[i] == digits[i+1]:
                total += digits[i]

    return total

# tests
print(sum_matching_digits([1, 1, 2, 2])) # 3
print(sum_matching_digits([1, 1, 1, 1])) # 4
print(sum_matching_digits([1, 2, 3, 4])) # 0
print(sum_matching_digits([9, 1, 2, 1, 2, 1, 2, 9])) # 9

INPUT_FILE_PATH = "input.txt"

def main():
    digits = []

    with open(INPUT_FILE_PATH) as f:
        while True:
            c = f.read(1)

            if not c: # check for EOF
                break

            if c != "\n":
                digits.append(int(c))

    print(sum_matching_digits(digits))
20.227273
56
0.494382
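The wrap-around comparison in sum_matching_digits above can also be written with a single modular index; a compact equivalent, shown here only as an illustration (it is not part of the original file):

def sum_matching_digits_compact(digits):
    # Compare each digit with its circular successor; empty input sums to 0.
    return sum(d for i, d in enumerate(digits) if d == digits[(i + 1) % len(digits)])

assert sum_matching_digits_compact([1, 1, 2, 2]) == 3
assert sum_matching_digits_compact([9, 1, 2, 1, 2, 1, 2, 9]) == 9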
73cb634673b15088eec79faf9fcded204d33dd92
15,929
py
Python
cogs/ReactionRoles.py
jakebacker/Gompei-Bot
dba3b11874ca19c22f170b8a01928af2b3fb2bd8
[ "MIT" ]
9
2020-02-14T02:07:39.000Z
2022-01-08T03:25:22.000Z
cogs/ReactionRoles.py
jakebacker/Gompei-Bot
dba3b11874ca19c22f170b8a01928af2b3fb2bd8
[ "MIT" ]
16
2020-02-14T02:10:29.000Z
2022-02-04T14:50:58.000Z
cogs/ReactionRoles.py
jakebacker/Gompei-Bot
dba3b11874ca19c22f170b8a01928af2b3fb2bd8
[ "MIT" ]
8
2020-02-21T14:28:07.000Z
2022-02-04T14:49:04.000Z
from cogs.Permissions import moderator_perms from collections import namedtuple from discord.ext import commands from datetime import datetime from config import Config import discord import typing class ReactionRoles(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command(pass_context=True, name="closeServer") @commands.check(moderator_perms) @commands.guild_only() async def close_server(self, ctx): """ Prevents users from picking up access roles """ if Config.guilds[ctx.guild]["closed"]: await ctx.send("The server is already closed") else: Config.set_guild_closed(ctx.guild, True) await ctx.send("Closed the server") @commands.command(pass_context=True, name="openServer") @commands.check(moderator_perms) @commands.guild_only() async def open_server(self, ctx): """ Allows users to pick up access roles """ if not Config.guilds[ctx.guild]["closed"]: await ctx.send("The server is already open") else: Config.set_guild_closed(ctx.guild, False) await ctx.send("Opened the server") @commands.command(pass_context=True, aliases=["addReactionRole", "rradd"]) @commands.check(moderator_perms) @commands.guild_only() async def add_reaction_role(self, ctx, message: discord.Message, emoji: typing.Union[discord.Emoji, str], role: discord.Role): """ Adds a reaction role to a message with the given emoji Usage: .addReactionRole <message> <role> <emote> :param ctx: context object :param message: message to add the reaction role to :param emoji: emoji to react with :param role: role tied to the reaction """ if message in Config.guilds[message.guild]["reaction_roles"]: if emoji in Config.guilds[message.guild]["reaction_roles"][message]["emojis"]: await ctx.send("This emoji is already being used for a reaction role on this message") return try: await message.add_reaction(emoji) except discord.NotFound: await ctx.send("Could not find emoji \"" + emoji + "\"") return Config.add_reaction_role(message, emoji, role) await ctx.send("Added reaction role") @commands.command(pass_context=True, aliases=["removeReactionRole", "rrdelete", "rremove"]) @commands.check(moderator_perms) @commands.guild_only() async def remove_reaction_role(self, ctx, message: discord.Message, emoji: typing.Union[discord.Emoji, str]): """ Removes a reaction role from a message Usage: .removeReactionRole <message> <emoji> :param ctx: context object :param message: message to remove from :param emoji: emoji to remove """ if message not in Config.guilds[message.guild]["reaction_roles"]: await ctx.send("There are no reaction roles on this message") return if emoji not in Config.guilds[message.guild]["reaction_roles"][message]["emojis"]: await ctx.send("This emote is not attached to a reaction role on the message") return Config.remove_reaction_role(message, emoji) await message.remove_reaction(emoji, self.bot.user) await ctx.send("Removed reaction role") @commands.command(pass_context=True, name="makeExclusive") @commands.check(moderator_perms) @commands.guild_only() async def make_exclusive(self, ctx, message: discord.Message): """ Makes it so a user can only pick up one of the roles in the reaction message :param ctx: Context object :param message: Message with reaction roles """ if message not in Config.guilds[message.guild]["reaction_roles"]: await ctx.send("There is no reaction role attached to this message") return elif Config.guilds[message.guild]["reaction_roles"][message]["exclusive"] is True: await ctx.send("This reaction role has already been set to exclusive") return Config.set_reaction_message_exclusivity(message, True) 
await ctx.send("Made reaction role exclusive") @commands.command(pass_context=True, name="makeInclusive") @commands.check(moderator_perms) @commands.guild_only() async def make_inclusive(self, ctx, message: discord.Message): """ Makes it so a user can pick up and have as many of the roles on the reaction message as they'd like :param ctx: Context object :param message: Message with reaction roles """ if message not in Config.guilds[message.guild]["reaction_roles"]: await ctx.send("There is no reaction role attached to this message") return elif Config.guilds[message.guild]["reaction_roles"][message]["exclusive"] is False: await ctx.send("This reaction role has already been set to inclusive") return Config.set_reaction_message_exclusivity(message, False) await ctx.send("Made reaction role inclusive") @commands.command(pass_context=True, name="setMessage") @commands.check(moderator_perms) @commands.guild_only() async def set_message(self, ctx, roleMsg: discord.Message, emoji: typing.Union[discord.Emoji, str], *, message: str): """ Adds a message to be DM'ed to user when using the given reaction role :param ctx: Context object :param roleMsg: Message that reaction role is attached too :param emoji: Emoji that is being used :param message: String message to send to the user on role pickup """ if roleMsg not in Config.guilds[roleMsg.guild]["reaction_roles"]: await ctx.send("There is no reaction role attached to this message") return if emoji not in Config.guilds[roleMsg.guild]["reaction_roles"][roleMsg]["emojis"]: await ctx.send("This emoji is not attached to the reaction role message") return Config.set_reaction_role_message(roleMsg, emoji, message) await ctx.send("Successfully set message") @commands.command(pass_context=True, name="addRoleRequirement") @commands.check(moderator_perms) @commands.guild_only() async def add_role_requirement(self, ctx, roleMsg: discord.Message, emoji: typing.Union[discord.Emoji, str], role: discord.Role): """ Adds a role to be required in order to use the reaction role :param ctx: Context object :param roleMsg: Message that the reaction role is attached to :param emoji: Emoji for the role :param role: Role required """ if roleMsg not in Config.guilds[roleMsg.guild]["reaction_roles"]: await ctx.send("There is no reaction role attached to this message") return if emoji not in Config.guilds[roleMsg.guild]["reaction_roles"][roleMsg]["emojis"]: await ctx.send("This emoji is not attached to the reaction role message") return if role in Config.guilds[roleMsg.guild]["reaction_roles"][roleMsg]["emojis"][emoji]["reqs"]: await ctx.send(f"{role.name} is already a requirement for this reaction role") Config.add_reaction_role_requirement(roleMsg, emoji, role) await ctx.send(f"Successfully added {role.name} as a requirement for the reaction role") @commands.command(pass_context=True, name="removeRoleRequirement") @commands.check(moderator_perms) @commands.guild_only() async def remove_role_requirement(self, ctx, roleMsg: discord.Message, emoji: typing.Union[discord.Emoji, str], role: discord.Role): """ Removes a role requirement in order to use the reaction role :param ctx: Context object :param roleMsg: Message that the reaction role is attached to :param emoji: Emoji for the role :param role: Role requirement to remove """ if roleMsg not in Config.guilds[roleMsg.guild]["reaction_roles"]: await ctx.send("There is no reaction role attached to this message") return if emoji not in Config.guilds[roleMsg.guild]["reaction_roles"][roleMsg]["emojis"]: await ctx.send("This emoji is 
not attached to the reaction role message") return if role not in Config.guilds[roleMsg.guild]["reaction_roles"][roleMsg]["emojis"][emoji]["reqs"]: await ctx.send(f"{role.name} is not a requirement for this reaction role") Config.remove_reaction_role_requirement(roleMsg, emoji, role) await ctx.send(f"Successfully added {role.name} as a requirement for the reaction role") @commands.command(pass_context=True, name="clearMessage") @commands.check(moderator_perms) @commands.guild_only() async def clear_message(self, ctx, roleMsg: discord.Message, emoji: typing.Union[discord.Emoji, str]): """ Clears a message from the given reaction role :param ctx: Context object :param roleMsg: Message that the reaction role is attached too :param emoji: Emoji that is being used """ if roleMsg not in Config.guilds[roleMsg.guild]["reaction_roles"]: await ctx.send("This is no reaction role attached to this message") return if not isinstance(emoji, str): emoji = str(emoji.id) if emoji not in Config.guilds[roleMsg.guild]["reaction_roles"][roleMsg]: await ctx.send("This emoji is not attached to the reaction role message") return Config.clear_reaction_role_message(roleMsg, emoji) await ctx.send(f"Successfully cleared message") @commands.Cog.listener() async def on_raw_reaction_add(self, payload): """ Checks for a react on a reaction role :param payload: """ # If react is in DMs if payload.guild_id is None: return # If a bot if payload.member.bot: return guild = self.bot.get_guild(payload.guild_id) message = await guild.get_channel(payload.channel_id).fetch_message(payload.message_id) if message in Config.guilds[guild]["reaction_roles"]: # Fake ctx for EmojiConverter ctx = namedtuple("Context", "bot guild", module=commands.context) ctx.bot = self.bot ctx.guild = guild if payload.emoji.is_custom_emoji(): emoji = await commands.EmojiConverter().convert(ctx, str(payload.emoji.id)) else: emoji = str(payload.emoji) # If the emoji is a reaction role emoji if emoji in Config.guilds[guild]["reaction_roles"][message]["emojis"]: reaction_role = Config.guilds[guild]["reaction_roles"][message]["emojis"][emoji]["role"] roles = payload.member.roles switched = False # If the reaction role is "exclusive" if Config.guilds[guild]["reaction_roles"][message]["exclusive"]: for emote in Config.guilds[guild]["reaction_roles"][message]["emojis"]: role = Config.guilds[guild]["reaction_roles"][message]["emojis"][emote]["role"] if role in payload.member.roles: try: message.remove_reaction(emote, payload.member) except discord.NotFound: pass roles.remove(role) switched = True break roles.append(reaction_role) if Config.guilds[guild]["closed"]: if reaction_role not in Config.guilds[guild]["access_roles"]: pass elif switched: pass else: logging = Config.guilds[guild]["logging"]["overwrite_channels"]["mod"] if logging is None: logging = Config.guilds[guild]["logging"]["channel"] if logging is not None: embed = discord.Embed(title="Server closed", color=0xbe4041) embed.set_author( name=payload.member.name + "#" + payload.member.discriminator, icon_url=payload.member.display_avatar.url ) embed.description = "<@" + str(payload.member.id) + \ "> failed to pick up a role due to server closure.\n\nRole: " + \ reaction_role.mention embed.set_footer(text="ID: " + str(payload.member.id)) embed.timestamp = discord.utils.utcnow() await logging.send(embed=embed) for req_role in Config.guilds[guild]["reaction_roles"][message]["emojis"][emoji]["reqs"]: if req_role not in payload.member.roles: try: await payload.member.send(f"You are missing the required role 
{req_role.name} to pick up this role") except discord.Forbidden: pass await message.remove_reaction(payload.emoji, payload.member) break else: # Config.reaction_role_add_reactor(message, emoji, payload.member) if reaction_role in payload.member.roles: pass else: await payload.member.edit(roles=roles, reason="Reaction role") if Config.guilds[guild]["reaction_roles"][message]["emojis"][emoji]["message"] is not None: try: await payload.member.send( Config.guilds[guild]["reaction_roles"][message]["emojis"][emoji]["message"] ) except discord.Forbidden: pass @commands.Cog.listener() async def on_raw_reaction_remove(self, payload): """ Checks for a reaction being removed from a reaction role :param payload: """ # If react is in DMs if payload.guild_id is None: return guild = self.bot.get_guild(payload.guild_id) message = await guild.get_channel(payload.channel_id).fetch_message(payload.message_id) member = guild.get_member(payload.user_id) if message in Config.guilds[guild]["reaction_roles"]: # If user is no longer in the server if member is None: return # Fake ctx for EmojiConverter ctx = namedtuple("Context", "bot guild", module=commands.context) ctx.bot = self.bot ctx.guild = guild if payload.emoji.is_custom_emoji(): emoji = await commands.EmojiConverter().convert(ctx, str(payload.emoji.id)) else: emoji = str(payload.emoji) # If the emoji is a reaction role emoji if emoji in Config.guilds[guild]["reaction_roles"][message]["emojis"]: reaction_role = Config.guilds[guild]["reaction_roles"][message]["emojis"][emoji]["role"] roles = member.roles if reaction_role in roles: await member.remove_roles(reaction_role, reason="Reaction role removal") def setup(bot): bot.add_cog(ReactionRoles(bot))
42.477333
136
0.604621
73cb664d3f850a0e83131b0c827cd37e24b6e206
6,053
py
Python
experimenter/models/HFGpt.py
jkhouja/experimenter
f9ff40b57b17837f04263d567966ea1e4634c03a
[ "MIT" ]
null
null
null
experimenter/models/HFGpt.py
jkhouja/experimenter
f9ff40b57b17837f04263d567966ea1e4634c03a
[ "MIT" ]
null
null
null
experimenter/models/HFGpt.py
jkhouja/experimenter
f9ff40b57b17837f04263d567966ea1e4634c03a
[ "MIT" ]
1
2021-04-23T21:52:31.000Z
2021-04-23T21:52:31.000Z
import logging import torch from transformers import GPT2Model from experimenter.models.base import BaseModel class HFGpt(BaseModel): """An encoder decoder model that works for dialgoue. Do not use for LM tasks""" def __init__(self, config): super(HFGpt, self).__init__(config) args = self.args self.hidden_dim = args["hidden_dim"] self.num_layers = 1 # Needed for initalize_h() self.batch_size = config["processor"]["params"]["batch_size"] self.encoders = config["processor"]["params"]["label_encoder"] self.num_classes = config["processor"]["params"]["num_classes"] self.num_outputs = len(self.num_classes) self.teacher_enforced = args["teacher_enforced"] self.in_seq_len = args["inp_seq_len"] self.out_seq_len = args["out_seq_len"] self.vocab_size = args["vocab_size"] self.model_name_or_path = args["model_name_or_path"] self.initializer_range = args["initializer_range"] self.logger.debug(self.args) # Shared for all input self.encoder_decoder = GPT2Model.from_pretrained(self.model_name_or_path) # For each output self.out_decoder = torch.nn.ModuleList() for i in range(self.num_outputs): clss = torch.nn.Linear(self.hidden_dim, self.num_classes[i]) # Common init way in most sota models clss.weight.data.normal_(mean=0.0, std=self.initializer_range) self.out_decoder.append(clss) # Print statistics self.initialize() def forward(self, input_batch, **kwargs): # Inputs are the the first sequence in input. We take all tokens except the last one # which will be used as a first token in decoder # inps shape: (batch_size, input_seq_len -1) inps = input_batch["inp"][0][:, :-1] # Encode using HuggingFace BERT outputs = self.encoder_decoder(input_ids=inps, use_cache=True) # outputs = self.bert_encoder(input_ids=inps) last_hidden_state = outputs.last_hidden_state past_key_values = outputs.past_key_values self.logger.debug(f"shape of last hidden: {last_hidden_state.shape}") # First version. class output only output = [] # TODO: Not working LM part for i in range(self.num_outputs): logging.debug(f"Shape of output layer for output number: {i}") # We will always use last tokens from input as the first decoder tokens last_input_tokens = input_batch["inp"][0][:, -1] # Expand to dimension (batch_size, 1) last_input_tokens = torch.unsqueeze(last_input_tokens, 1) if self.encoders[i] == "text": if self.teacher_enforced: # seq prediction task. 
Output for output_seq_len starting from last state teacher_labels = torch.cat( (last_input_tokens, input_batch["label"][i][:, :-1]), dim=1 ) outputs = self.encoder_decoder( teacher_labels, past_key_values=past_key_values ) lm_prediction = self.out_decoder[i](outputs.last_hidden_state) # assert teacher_labels.shape == inp_text.shape # lm_prediction = self.out_decoder[i](last_hidden_state) lm_prediction = lm_prediction.permute( 0, 2, 1 ) # batch x vocab x seq # lm_prediction = self.sm(lm_prediction) else: lm_predictions = [] # Use last input token as the first decoder token previ_token = last_input_tokens # Predict step by step using model's output at each timestep for k in range(input_batch["label"][i].shape[1]): # Run one pass outputs = self.encoder_decoder( input_ids=previ_token, past_key_values=past_key_values, use_cache=True, ) # Predict next word past_key_values = outputs.past_key_values out_probs = self.out_decoder[i](outputs.last_hidden_state) out_probs = out_probs.permute(0, 2, 1) # batch x vocab x seq previ_token = out_probs.argmax(dim=1, keepdim=False) # add words probabilities to predictions lm_predictions.append(out_probs) lm_prediction = torch.cat(lm_predictions, dim=2) logging.debug(lm_prediction.shape) output.append(lm_prediction) elif self.encoders[i] == "class": self.logger.debug(f"Device of last_state: {last_hidden_state.device}") # A single class prediction, we take the cls token but should pass that cls_output = self.out_decoder[i]( last_hidden_state[:, 0, :].squeeze() ).squeeze() # cls_output = self.sm(cls_output) # logging.info(cls_output.shape) output.append(cls_output) res = [] try: res.extend( [ s.argmax(dim=1, keepdim=False) if s.dim() > 2 else s.argmax(dim=1, keepdim=True) for s in output ] ) except IndexError: # batch_size = 1 or last batch res.extend([[s.argmax() for s in output]]) for i in range(len(res)): logging.debug(f"Shape of predictions for output number: {i}") logging.debug(res[i].shape) input_batch["out"] = output input_batch["pred"] = res input_batch["meta"] = [] return input_batch
39.822368
93
0.56154
73cbc0b8053bff46a9064d07cede055e70ebb7fb
735
py
Python
sqlalchemy_utils/types/bit.py
jd/sqlalchemy-utils
fa78e45f9bd38b46d5aface41914dad022c0099b
[ "BSD-3-Clause" ]
11
2016-09-14T21:59:55.000Z
2019-01-28T21:58:31.000Z
sqlalchemy_utils/types/bit.py
jd/sqlalchemy-utils
fa78e45f9bd38b46d5aface41914dad022c0099b
[ "BSD-3-Clause" ]
11
2019-12-26T17:21:03.000Z
2022-03-21T22:17:07.000Z
sqlalchemy_utils/types/bit.py
jd/sqlalchemy-utils
fa78e45f9bd38b46d5aface41914dad022c0099b
[ "BSD-3-Clause" ]
3
2021-03-22T14:24:40.000Z
2021-04-02T08:05:27.000Z
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import BIT


class BitType(sa.types.TypeDecorator):
    """
    BitType offers a way of saving BITs into the database.
    """
    impl = sa.types.BINARY

    def __init__(self, length=1, **kwargs):
        self.length = length
        sa.types.TypeDecorator.__init__(self, **kwargs)

    def load_dialect_impl(self, dialect):
        # Use the native BIT type for drivers that have it.
        if dialect.name == 'postgresql':
            return dialect.type_descriptor(BIT(self.length))
        elif dialect.name == 'sqlite':
            return dialect.type_descriptor(sa.String(self.length))
        else:
            return dialect.type_descriptor(type(self.impl)(self.length))
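# Illustrative usage sketch for BitType. The "flags" table and its column names
# are hypothetical; only BitType itself comes from the file above. Per
# load_dialect_impl, the column maps to BIT(n) on PostgreSQL, a string column on
# SQLite, and BINARY elsewhere.
import sqlalchemy as sa
from sqlalchemy_utils.types.bit import BitType

metadata = sa.MetaData()

flags = sa.Table(
    "flags",  # hypothetical table name for demonstration only
    metadata,
    sa.Column("id", sa.Integer, primary_key=True),
    sa.Column("bit_field", BitType(length=4)),
)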
31.956522
72
0.661224
73cbc9be1beae615cc4ac7f41d52cad03a1e61a2
1,540
py
Python
sample_request.py
NexarDeveloper/nexar-token-py
a542397aecb84a703a81c333e0aa7bbc43673ad3
[ "MIT" ]
4
2021-07-08T20:50:21.000Z
2022-02-15T12:14:18.000Z
sample_request.py
NexarDeveloper/nexar-token-py
a542397aecb84a703a81c333e0aa7bbc43673ad3
[ "MIT" ]
null
null
null
sample_request.py
NexarDeveloper/nexar-token-py
a542397aecb84a703a81c333e0aa7bbc43673ad3
[ "MIT" ]
5
2021-07-05T16:39:31.000Z
2022-03-30T21:48:18.000Z
"""Sample request for extracting GraphQL part data.""" import argparse import json import requests import pyperclip NEXAR_URL = "https://api.nexar.com/graphql" QUERY_MPN = """query ($mpn: String!) { supSearchMpn(q: $mpn) { results { part { category { parentId id name path } mpn manufacturer { name } shortDescription descriptions { text creditString } specs { attribute { name shortname } displayValue } } } } } """ def get_part_info_from_mpn(variables, token) -> dict: """Return Nexar response for the given mpn.""" try: r = requests.post( NEXAR_URL, json={"query": QUERY_MPN, "variables": variables}, headers={"token": token}, ) data = json.loads(r.text)["data"]["supSearchMpn"] except Exception: raise Exception("Error while getting Nexar response") return data if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument("mpn", help="The mpn for the part request.", type=str) args = parser.parse_args() token = pyperclip.paste() variables = {"mpn": args.mpn} response = get_part_info_from_mpn(variables, token) print(json.dumps(response, indent = 1))
24.0625
78
0.514935
73cbe4684b6ecfebf51e24544f9c80518d19d4c3
822
py
Python
quads/migrations/0001_initial.py
ChalkLab/SciFlow
5bf021007d6184402ebfe6cefc2111d99160cb69
[ "MIT" ]
1
2021-04-26T20:03:11.000Z
2021-04-26T20:03:11.000Z
quads/migrations/0001_initial.py
ChalkLab/SciFlow
5bf021007d6184402ebfe6cefc2111d99160cb69
[ "MIT" ]
17
2021-04-23T16:51:59.000Z
2021-12-13T21:17:41.000Z
quads/migrations/0001_initial.py
ChalkLab/SciFlow
5bf021007d6184402ebfe6cefc2111d99160cb69
[ "MIT" ]
null
null
null
# Generated by Django 3.2.5 on 2021-07-08 21:27

from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Quads',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sub', models.CharField(max_length=256)),
                ('prd', models.CharField(max_length=256)),
                ('obj', models.CharField(max_length=256)),
                ('gph', models.CharField(max_length=256)),
                ('updated', models.DateTimeField()),
            ],
            options={
                'db_table': 'quads',
                'managed': False,
            },
        ),
    ]
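# A model declaration consistent with this initial migration might look like the
# sketch below. Field names and lengths are taken from the migration itself; the
# placement in quads/models.py is an assumption.
from django.db import models


class Quads(models.Model):
    sub = models.CharField(max_length=256)
    prd = models.CharField(max_length=256)
    obj = models.CharField(max_length=256)
    gph = models.CharField(max_length=256)
    updated = models.DateTimeField()

    class Meta:
        db_table = 'quads'
        managed = False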
27.4
114
0.520681
73cc255b78fb1139c2e47bcdc435e5ca805cd1c5
1,068
py
Python
araucaria/xas/__init__.py
marcoalsina/araucaria
78039106ae27d3fdef9265503c33f33992199d8e
[ "BSD-2-Clause" ]
8
2021-07-11T22:54:21.000Z
2022-02-16T20:22:25.000Z
araucaria/xas/__init__.py
marcoalsina/araucaria
78039106ae27d3fdef9265503c33f33992199d8e
[ "BSD-2-Clause" ]
null
null
null
araucaria/xas/__init__.py
marcoalsina/araucaria
78039106ae27d3fdef9265503c33f33992199d8e
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
The :mod:`~araucaria.xas` module contains the main functions to manipulate XAFS spectra.

The following submodules are currently implemented:

- The :mod:`~araucaria.xas.merge` module contains functions to pre-process and merge spectra.
- The :mod:`~araucaria.xas.deglitch` module contains an algorithm to automatically deglitch a spectrum.
- The :mod:`~araucaria.xas.normalize` module contains functions to normalize a spectrum.
- The :mod:`~araucaria.xas.autobk` module contains the Autobk algorithm for background removal of a spectrum.
- The :mod:`~araucaria.xas.xasft` module contains functions to perform Fourier transforms on a spectrum.
- The :mod:`~araucaria.xas.xasutils` module contains utility functions to assist manipulation of spectra.
"""
from .merge import calibrate, align, merge
from .deglitch import deglitch
from .normalize import find_e0, guess_edge, pre_edge
from .autobk import autobk
from .xasft import ftwindow, xftf, xftr, xftf_kwin, xftr_kwin
from .xasutils import etok, ktoe, get_mapped_data
48.545455
109
0.781835
73cc3e39f21bf8e3c81a2f40e6b1676fdb5503e0
8,035
py
Python
mdp_irl.py
pragupta/Inverse-Reinforcement-Learning
e7bbb7bb0ad24ebc36d9e0d4b4e6c6788229fd9c
[ "MIT" ]
null
null
null
mdp_irl.py
pragupta/Inverse-Reinforcement-Learning
e7bbb7bb0ad24ebc36d9e0d4b4e6c6788229fd9c
[ "MIT" ]
null
null
null
mdp_irl.py
pragupta/Inverse-Reinforcement-Learning
e7bbb7bb0ad24ebc36d9e0d4b4e6c6788229fd9c
[ "MIT" ]
null
null
null
""" Run inverse reinforcement learning algorithms on the objectworld MDP. Matthew Alger, 2015 matthew.alger@anu.edu.au """ import argparse import numpy as np import matplotlib.pyplot as plt from irl.value_iteration import find_policy from irl.value_iteration import value from irl.value_iteration import optimal_value def normalize(data): return (data - np.min(data)) / (np.max(data) - np.min(data)) def main(grid_size, discount, n_objects, n_colours, n_trajectories, epochs, learning_rate, start_state, wind=0.0, algo="maxnet", mdp="gridworld"): """ Run inverse reinforcement learning on the objectworld MDP. Plots the reward function. grid_size: Grid size. int. discount: MDP discount factor. float. n_objects: Number of objects. int. n_colours: Number of colours. int. n_trajectories: Number of sampled trajectories. int. epochs: Gradient descent iterations. int. learning_rate: Gradient descent learning rate. float. start_state: start location to generate trajectory from algo: IRL algo to run (Currently, support maxnet and deep_maxnet) """ sx, sy = start_state trajectory_length = 8 if mdp == "objectworld": import irl.mdp.objectworld as objectworld ow = objectworld.Objectworld(grid_size, n_objects, n_colours, wind, discount) elif mdp == "gridworld": import irl.mdp.gridworld as gridworld ow = gridworld.Gridworld(grid_size, wind, discount) ground_r = np.array([ow.reward(s) for s in range(ow.n_states)]) policy = find_policy(ow.n_states, ow.n_actions, ow.transition_probability, ground_r, ow.discount, stochastic=False) optimal_v = optimal_value(ow.n_states, ow.n_actions, ow.transition_probability, normalize(ground_r), ow.discount) trajectories = ow.generate_trajectories(n_trajectories, trajectory_length, lambda s: policy[s], random_start=True) feature_matrix = ow.feature_matrix() print("trajectories = ", trajectories.shape) print("epochs = ", epochs) print("feature_matrix.shape = ", feature_matrix.shape) print("policy.shape = ", policy.shape) # ow.plot_grid("value_{}_t{}_e{}_w{}.png".format(algo, # n_trajectories, epochs, wind), value=optimal_v) ow.plot_grid("policy_{}_t{}_e{}_w{}.png".format(algo, n_trajectories, epochs, wind), policy=policy , value=optimal_v) r = [] ground_svf = [] if algo == "maxent": import irl.maxent as maxent ground_svf = maxent.find_svf(ow.n_states, trajectories) r = maxent.irl(feature_matrix, ow.n_actions, discount, ow.transition_probability, trajectories, epochs, learning_rate) elif algo == "deep_maxnet": import irl.deep_maxent as deep_maxent l1 = l2 = 0 structure = (3, 3) r = deep_maxent.irl((feature_matrix.shape[1],) + structure, feature_matrix, ow.n_actions, discount, ow.transition_probability, trajectories, epochs, learning_rate, l1=l1, l2=l2) recovered_policy = find_policy(ow.n_states, ow.n_actions, ow.transition_probability, normalize(r), ow.discount, stochastic=False) recovered_v = value(recovered_policy, ow.n_states, ow.transition_probability, normalize(r), ow.discount) new_trajectory = ow.generate_trajectories(n_trajectories, trajectory_length, lambda s: recovered_policy[s], True, (sx, sy)) recovered_svf = maxent.find_svf(ow.n_states, new_trajectory) # ow.plot_grid("recovered_value_{}_t{}_e{}_w{}.png".format(algo, # n_trajectories, epochs, wind), # value=recovered_v) ow.plot_grid("recovered_policy_{}_t{}_e{}_w{}.png".format(algo, n_trajectories, epochs, wind), policy=recovered_policy, value=recovered_v) # print("new trajectory") # for t in new_trajectory: # for s, a, rw in t: # print (ow.int_to_point(s), ow.actions[a], rw) # print ("---------") y, x = 
np.mgrid[-0.5:grid_size+0.5, -0.5:grid_size+0.5] plt.subplot(111) plt.pcolor(x, y, ground_svf.reshape((grid_size, grid_size))) plt.colorbar() plt.title("Groundtruth SVF") plt.savefig("ground_svf_{}_t{}_e{}_w{}.png".format(algo, n_trajectories, epochs, wind), format="png", dpi=150) plt.pcolor(x, y, recovered_svf.reshape((grid_size, grid_size))) plt.title("Recovered SVF") plt.savefig("recovered_svf_{}_t{}_e{}_w{}.png".format(algo, n_trajectories, epochs, wind), format="png", dpi=150) plt.pcolor(x, y, normalize(ground_r).reshape((grid_size, grid_size))) plt.title("Groundtruth reward") plt.savefig("ground_reward_{}_t{}_e{}_w{}.png".format(algo, n_trajectories, epochs, wind), format="png", dpi=150) plt.pcolor(x, y, normalize(r).reshape((grid_size, grid_size))) plt.title("Recovered reward") plt.savefig("recovered_reward_{}_t{}_e{}_w{}.png".format(algo, n_trajectories, epochs, wind), format="png", dpi=150) if __name__ == '__main__': parser = argparse.ArgumentParser(description='IRL - maxnet') parser.add_argument('--grid_size', dest='grid_size', type=int, default=10, help='size of the grid') parser.add_argument('--discount', dest='discount', type=float, default=0.9, help='discount factor') parser.add_argument('--n_objects', dest='n_objects', type=int, default=15, help='Number of objects to place on the grid') parser.add_argument('--n_colors', dest='n_colors', default=2, type=int, help='Number of different colors to use for objects on the grid') parser.add_argument('--n_trajectories', dest='n_trajectories', default=20, type=int, help='Number of trajectories to generate as input to IRL') parser.add_argument('--epochs', dest='epochs', default=50, type=int, help='Number of gradient descent steps') parser.add_argument('--lr', dest='lr', default=0.01, type=float, help='Gradient descent learning rate') parser.add_argument('--sx', dest='sx', default=0, type=int, help='x-value for the start state') parser.add_argument('--sy', dest='sy', default=0, type=int, help='x-value for the start state') parser.add_argument('--wind', dest='wind', default=0, type=float, help='randomness in expert behavior') parser.add_argument('--algo', dest='algo', default="maxent", type=str, help='IRL algo to run') parser.add_argument('--mdp', dest='mdp', default="gridworld", type=str, help='MDP problem to solve. Currently, only support gridworld and objectworld') args = parser.parse_args() main(args.grid_size, args.discount, args.n_objects, args.n_colors, args.n_trajectories, args.epochs, args.lr, (args.sx, args.sy), args.wind, args.algo, args.mdp)
40.175
96
0.574487
73cc3e427af50ea579c40ea2089d7dcafb5b716e
7,260
py
Python
directions/base.py
jwass/directions.py
c3734e4cb499fe80e27b2a26575a91ac4e834e95
[ "BSD-3-Clause" ]
11
2015-01-20T19:48:32.000Z
2019-04-27T07:51:03.000Z
directions/base.py
jwass/directions.py
c3734e4cb499fe80e27b2a26575a91ac4e834e95
[ "BSD-3-Clause" ]
2
2016-05-24T18:04:48.000Z
2016-06-03T00:48:32.000Z
directions/base.py
jwass/directions.py
c3734e4cb499fe80e27b2a26575a91ac4e834e95
[ "BSD-3-Clause" ]
11
2015-03-29T19:44:21.000Z
2021-07-01T19:59:08.000Z
import time class Waypoint: VIA = 1 STOP = 2 class Router: def __init__(self, name=None, rate_limit_dt=0): # Just a simple identifier if name is None: self.name = self.default_name # The min time delta in seconds between queries self._rate_limit_dt = rate_limit_dt # The time of the last query, None if it hasn't been hit yet self._last_query = None def raw_query(self, waypoints, **kwargs): return NotImplementedError() def rate_limit_wait(self): """ Sleep if rate limiting is required based on current time and last query. """ if self._rate_limit_dt and self._last_query is not None: dt = time.time() - self._last_query wait = self._rate_limit_dt - dt if wait > 0: time.sleep(wait) def format_output(self, data): return NotImplementedError() def route(self, arg, destination=None, waypoints=None, raw=False, **kwargs): """ Query a route. route(locations): points can be - a sequence of locations - a Shapely LineString route(origin, destination, waypoints=None) - origin and destination are a single destination - waypoints are the points to be inserted between the origin and destination If waypoints is specified, destination must also be specified Each location can be: - string (will be geocoded by the routing provider. Not all providers accept this as input) - (longitude, latitude) sequence (tuple, list, numpy array, etc.) - Shapely Point with x as longitude, y as latitude Additional parameters --------------------- raw : bool, default False Return the raw json dict response from the service Returns ------- list of Route objects If raw is True, returns the json dict instead of converting to Route objects Examples -------- mq = directions.Mapquest(key) routes = mq.route('1 magazine st. cambridge, ma', 'south station boston, ma') routes = mq.route('1 magazine st. cambridge, ma', 'south station boston, ma', waypoints=['700 commonwealth ave. boston, ma']) # Uses each point in the line as a waypoint. There is a limit to the # number of waypoints for each service. Consult the docs. line = LineString(...) 
routes = mq.route(line) # Feel free to mix different location types routes = mq.route(line.coords[0], 'south station boston, ma', waypoints=[(-71.103972, 42.349324)]) """ points = _parse_points(arg, destination, waypoints) if len(points) < 2: raise ValueError('You must specify at least 2 points') self.rate_limit_wait() data = self.raw_query(points, **kwargs) self._last_query = time.time() if raw: return data return self.format_output(data) def _parse_points(arg, destination=None, waypoints=None): # If destination is None, then arg is all the waypoints if destination is None: # waypoints must be None if waypoints is not None: raise ValueError('Cannot specify waypoints without destination') p = arg else: # arg is origin if waypoints is None: p = [arg, destination] else: p = [arg] + waypoints + [destination] points = _waypoints(p) return points def _waypoints(waypoints): if hasattr(waypoints, 'coords'): waypoints = waypoints.coords points = [] for wp in waypoints: if isinstance(wp, str): p = wp elif hasattr(wp, 'coords'): coords = wp.coords if len(coords) != 1: raise ValueError('Non-point like object used in waypoints') p = coords[0] elif len(wp) == 2: p = wp else: raise ValueError('Non 2-tuple used in waypoints') points.append(p) return points class Route: def __init__(self, coords, distance, duration, maneuvers=None, **kwargs): """ Simple class to represent a single returned route Parameters ---------- coords : sequence of (lon, lat) coordinates distance : length in meters of the route duration : estimated duration of the route in seconds kwargs : additional properties when converting to geojson """ self.coords = coords self.distance = distance self.duration = duration self.properties = kwargs.copy() if maneuvers is None: maneuvers = [] self.maneuvers = maneuvers @property def __geo_interface__(self): geom = {'type': 'LineString', 'coordinates': self.coords} properties = self.properties.copy() properties.update({'distance': self.distance, 'duration': self.duration}) f = {'type': 'Feature', 'geometry': geom, 'properties': properties} return f def geojson(self, include_maneuvers=True): if include_maneuvers: features = [self] + self.maneuvers else: features = [self] properties = self.properties.copy() properties.update({'distance': self.distance, 'duration': self.duration}) return {'type': 'FeatureCollection', 'properties': properties, 'features': [f.__geo_interface__ for f in features]} @classmethod def from_geojson(cls, data): """ Return a Route from a GeoJSON dictionary, as returned by Route.geojson() """ properties = data['properties'] distance = properties.pop('distance') duration = properties.pop('duration') maneuvers = [] for feature in data['features']: geom = feature['geometry'] if geom['type'] == 'LineString': coords = geom['coordinates'] else: maneuvers.append(Maneuver.from_geojson(feature)) return Route(coords, distance, duration, maneuvers, **properties) class Maneuver: def __init__(self, coords, **kwargs): """ Simple class to represent a maneuver. Todo: Add some remaining fields like maneuver text, type, etc. """ self.coords = coords self.properties = kwargs.copy() @property def __geo_interface__(self): geom = {'type': 'Point', 'coordinates': self.coords} f = {'type': 'Feature', 'geometry': geom, 'properties': self.properties} return f @classmethod def from_geojson(cls, data): """ Return a Maneuver from a GeoJSON dictionary """ coords = data['geometry']['coordinates'] return Maneuver(coords, **data['properties'])
29.876543
80
0.566391
73cc6246ab39c66c0920dbed09a8c00383fb278e
5,653
py
Python
openpype/hosts/maya/api/shader_definition_editor.py
yosuperdope/OpenPype
0c90df97ddb8cda291a4f66d35da58b3deb94a71
[ "MIT" ]
null
null
null
openpype/hosts/maya/api/shader_definition_editor.py
yosuperdope/OpenPype
0c90df97ddb8cda291a4f66d35da58b3deb94a71
[ "MIT" ]
null
null
null
openpype/hosts/maya/api/shader_definition_editor.py
yosuperdope/OpenPype
0c90df97ddb8cda291a4f66d35da58b3deb94a71
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Editor for shader definitions. Shader names are stored as simple text file over GridFS in mongodb. """ import os from Qt import QtWidgets, QtCore, QtGui from openpype.lib.mongo import OpenPypeMongoConnection from openpype import resources import gridfs DEFINITION_FILENAME = "{}/maya/shader_definition.txt".format( os.getenv("AVALON_PROJECT")) class ShaderDefinitionsEditor(QtWidgets.QWidget): """Widget serving as simple editor for shader name definitions.""" # name of the file used to store definitions def __init__(self, parent=None): super(ShaderDefinitionsEditor, self).__init__(parent) self._mongo = OpenPypeMongoConnection.get_mongo_client() self._gridfs = gridfs.GridFS( self._mongo[os.getenv("OPENPYPE_DATABASE_NAME")]) self._editor = None self._original_content = self._read_definition_file() self.setObjectName("shaderDefinitionEditor") self.setWindowTitle("OpenPype shader name definition editor") icon = QtGui.QIcon(resources.pype_icon_filepath()) self.setWindowIcon(icon) self.setWindowFlags(QtCore.Qt.Window) self.setParent(parent) self.setAttribute(QtCore.Qt.WA_DeleteOnClose) self.resize(750, 500) self._setup_ui() self._reload() def _setup_ui(self): """Setup UI of Widget.""" layout = QtWidgets.QVBoxLayout(self) label = QtWidgets.QLabel() label.setText("Put shader names here - one name per line:") layout.addWidget(label) self._editor = QtWidgets.QPlainTextEdit() self._editor.setStyleSheet("border: none;") layout.addWidget(self._editor) btn_layout = QtWidgets.QHBoxLayout() save_btn = QtWidgets.QPushButton("Save") save_btn.clicked.connect(self._save) reload_btn = QtWidgets.QPushButton("Reload") reload_btn.clicked.connect(self._reload) exit_btn = QtWidgets.QPushButton("Exit") exit_btn.clicked.connect(self._close) btn_layout.addWidget(reload_btn) btn_layout.addWidget(save_btn) btn_layout.addWidget(exit_btn) layout.addLayout(btn_layout) def _read_definition_file(self, file=None): """Read definition file from database. Args: file (gridfs.grid_file.GridOut, Optional): File to read. If not set, new query will be issued to find it. Returns: str: Content of the file or empty string if file doesn't exist. """ content = "" if not file: file = self._gridfs.find_one( {"filename": DEFINITION_FILENAME}) if not file: print(">>> [SNDE]: nothing in database yet") return content content = file.read() file.close() return content def _write_definition_file(self, content, force=False): """Write content as definition to file in database. Before file is writen, check is made if its content has not changed. If is changed, warning is issued to user if he wants it to overwrite. Note: GridFs doesn't allow changing file content. You need to delete existing file and create new one. Args: content (str): Content to write. Raises: ContentException: If file is changed in database while editor is running. 
""" file = self._gridfs.find_one( {"filename": DEFINITION_FILENAME}) if file: content_check = self._read_definition_file(file) if content == content_check: print(">>> [SNDE]: content not changed") return if self._original_content != content_check: if not force: raise ContentException("Content changed") print(">>> [SNDE]: overwriting data") file.close() self._gridfs.delete(file._id) file = self._gridfs.new_file( filename=DEFINITION_FILENAME, content_type='text/plain', encoding='utf-8') file.write(content) file.close() QtCore.QTimer.singleShot(200, self._reset_style) self._editor.setStyleSheet("border: 1px solid #33AF65;") self._original_content = content def _reset_style(self): """Reset editor style back. Used to visually indicate save. """ self._editor.setStyleSheet("border: none;") def _close(self): self.hide() def closeEvent(self, event): event.ignore() self.hide() def _reload(self): print(">>> [SNDE]: reloading") self._set_content(self._read_definition_file()) def _save(self): try: self._write_definition_file(content=self._editor.toPlainText()) except ContentException: # content has changed meanwhile print(">>> [SNDE]: content has changed") self._show_overwrite_warning() def _set_content(self, content): self._editor.setPlainText(content) def _show_overwrite_warning(self): reply = QtWidgets.QMessageBox.question( self, "Warning", ("Content you are editing was changed meanwhile in database.\n" "Please, reload and solve the conflict."), QtWidgets.QMessageBox.OK) if reply == QtWidgets.QMessageBox.OK: # do nothing pass class ContentException(Exception): """This is risen during save if file is changed in database.""" pass
31.937853
75
0.623563
73cc9917145f90f3a2b49987a2a2e7d0d2ca982b
9,785
py
Python
src/falconpy/sensor_visibility_exclusions.py
CrowdStrike/falconpy
e7245202224647a2c8d134e72f27d2f6c667a1ce
[ "Unlicense" ]
111
2020-11-19T00:44:18.000Z
2022-03-03T21:02:32.000Z
src/falconpy/sensor_visibility_exclusions.py
CrowdStrike/falconpy
e7245202224647a2c8d134e72f27d2f6c667a1ce
[ "Unlicense" ]
227
2020-12-05T03:02:27.000Z
2022-03-22T14:12:42.000Z
src/falconpy/sensor_visibility_exclusions.py
CrowdStrike/falconpy
e7245202224647a2c8d134e72f27d2f6c667a1ce
[ "Unlicense" ]
47
2020-11-23T21:00:14.000Z
2022-03-28T18:30:19.000Z
"""Falcon Sensor Visibility Exclusions API Interface Class _______ __ _______ __ __ __ | _ .----.-----.--.--.--.--| | _ | |_.----|__| |--.-----. |. 1___| _| _ | | | | _ | 1___| _| _| | <| -__| |. |___|__| |_____|________|_____|____ |____|__| |__|__|__|_____| |: 1 | |: 1 | |::.. . | CROWDSTRIKE FALCON |::.. . | FalconPy `-------' `-------' OAuth2 API - Customer SDK This is free and unencumbered software released into the public domain. Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. For more information, please refer to <https://unlicense.org> """ from ._util import force_default, process_service_request, handle_single_argument from ._payload import exclusion_payload from ._service_class import ServiceClass from ._endpoint._sensor_visibility_exclusions import _sensor_visibility_exclusions_endpoints as Endpoints class SensorVisibilityExclusions(ServiceClass): """The only requirement to instantiate an instance of this class is one of the following: - a valid client_id and client_secret provided as keywords. - a credential dictionary with client_id and client_secret containing valid API credentials { "client_id": "CLIENT_ID_HERE", "client_secret": "CLIENT_SECRET_HERE" } - a previously-authenticated instance of the authentication service class (oauth2.py) - a valid token provided by the authentication service class (OAuth2.token()) """ @force_default(defaults=["parameters"], default_types=["dict"]) def get_exclusions(self: object, *args, parameters: dict = None, **kwargs) -> dict: """Get a set of Sensor Visibility Exclusions by specifying their IDs Keyword arguments: ids -- List of exclusion IDs to retrieve. String or list of strings. parameters -- full parameters payload, not required if ids is provided as a keyword. Arguments: When not specified, the first argument to this method is assumed to be 'ids'. All others are ignored. Returns: dict object containing API response. HTTP Method: GET Swagger URL /sensor-visibility-exclusions/getSensorVisibilityExclusionsV1 """ return process_service_request( calling_object=self, endpoints=Endpoints, operation_id="getSensorVisibilityExclusionsV1", keywords=kwargs, params=handle_single_argument(args, parameters, "ids") ) @force_default(defaults=["body"], default_types=["dict"]) def create_exclusions(self: object, body: dict = None, **kwargs) -> dict: """Create the Sensor Visibility exclusions Keyword arguments: body -- full body payload, not required when ids keyword is provided. 
{ "comment": "string", "groups": [ "string" ], "value": "string" } comment -- String comment describing why the exclusion is entered. groups -- Group IDs to exclude. List of strings. value -- Value to exclude. String This method only supports keywords for providing arguments. Returns: dict object containing API response. HTTP Method: POST Swagger URL /sensor-visibility-exclusions/createSVExclusionsV1 """ if not body: body = exclusion_payload(passed_keywords=kwargs) return process_service_request( calling_object=self, endpoints=Endpoints, operation_id="createSVExclusionsV1", body=body ) @force_default(defaults=["parameters"], default_types=["dict"]) def delete_exclusions(self: object, *args, parameters: dict = None, **kwargs) -> dict: """Delete the Sensor Visibility exclusions by ID. Keyword arguments: comment -- Explains why this exclusions was deleted. String. ids -- List of exclusion IDs to delete. String or list of strings. parameters -- full parameters payload, not required if ids is provided as a keyword. Arguments: When not specified, the first argument to this method is assumed to be 'ids'. All others are ignored. Returns: dict object containing API response. HTTP Method: DELETE Swagger URL /sensor-visibility-exclusions/deleteSensorVisibilityExclusionsV1 """ return process_service_request( calling_object=self, endpoints=Endpoints, operation_id="deleteSensorVisibilityExclusionsV1", keywords=kwargs, params=handle_single_argument(args, parameters, "ids") ) @force_default(defaults=["body"], default_types=["dict"]) def update_exclusions(self: object, body: dict = None, **kwargs) -> dict: """Update the Sensor Visibility Exclusions Keyword arguments: body -- full body payload, not required when ids keyword is provided. { "comment": "string", "groups": [ "string" ], "id": "string", "value": "string" } comment -- String comment describing why the exclusion is entered. groups -- Group IDs to exclude. List of strings. id -- Exclusion ID to update. String. value -- Value to exclude. String This method only supports keywords for providing arguments. Returns: dict object containing API response. HTTP Method: POST Swagger URL /sensor-visibility-exclusions/updateSensorVisibilityExclusionsV1 """ if not body: body = exclusion_payload(passed_keywords=kwargs) if kwargs.get("id", None): body["id"] = kwargs.get("id", None) return process_service_request( calling_object=self, endpoints=Endpoints, operation_id="updateSensorVisibilityExclusionsV1", body=body ) @force_default(defaults=["parameters"], default_types=["dict"]) def query_exclusions(self: object, parameters: dict = None, **kwargs) -> dict: """Search for Sensor Visibility Exclusions. Keyword arguments: filter -- The filter expression that should be used to limit the results. FQL syntax. An asterisk wildcard '*' includes all results. AVAILABLE FILTERS applied_globally last_modified created_by modified_by created_on value limit -- The maximum number of detections to return in this response. [Integer, default: 100; max: 500] Use with the offset parameter to manage pagination of results. offset -- The first detection to return, where 0 is the latest detection. Use with the limit parameter to manage pagination of results. parameters - full parameters payload, not required if using other keywords. sort -- The property to sort by. FQL syntax (e.g. last_behavior|asc). Available sort fields: applied_globally last_modified created_by modified_by created_on value This method only supports keywords for providing arguments. 
Returns: dict object containing API response. HTTP Method: GET Swagger URL /sensor-visibility-exclusions/querySensorVisibilityExclusionsV1 """ return process_service_request( calling_object=self, endpoints=Endpoints, operation_id="querySensorVisibilityExclusionsV1", keywords=kwargs, params=parameters ) # These method names align to the operation IDs in the API but # do not conform to snake_case / PEP8 and are defined here for # backwards compatibility / ease of use purposes getSensorVisibilityExclusionsV1 = get_exclusions createSVExclusionsV1 = create_exclusions deleteSensorVisibilityExclusionsV1 = delete_exclusions updateSensorVisibilityExclusionsV1 = update_exclusions querySensorVisibilityExclusionsV1 = query_exclusions # The legacy name for this class does not conform to PascalCase / PEP8 # It is defined here for backwards compatibility purposes only. Sensor_Visibility_Exclusions = SensorVisibilityExclusions # pylint: disable=C0103
40.941423
105
0.640879
73ccbe73506b953dc6a6f39e526c71e758ea6af3
8,681
py
Python
quest/pycocoevalcap/bleu/bleu_scorer.py
shuokabe/deepQuest-mod
7140a57c30deedb0570bc835c6ad3c848f0039f4
[ "BSD-3-Clause" ]
2
2021-09-28T02:26:46.000Z
2021-09-28T04:47:55.000Z
pycocoevalcap/bleu/bleu_scorer.py
ruizhang-ai/GCP
7a0f30c6c3d732627fa269ce943c62a9005cc40f
[ "MIT" ]
null
null
null
pycocoevalcap/bleu/bleu_scorer.py
ruizhang-ai/GCP
7a0f30c6c3d732627fa269ce943c62a9005cc40f
[ "MIT" ]
null
null
null
#!/usr/bin/env python # bleu_scorer.py # David Chiang <chiang@isi.edu> # Copyright (c) 2004-2006 University of Maryland. All rights # reserved. Do not redistribute without permission from the # author. Not for commercial use. # Modified by: # Hao Fang <hfang@uw.edu> # Tsung-Yi Lin <tl483@cornell.edu> '''Provides: cook_refs(refs, n=4): Transform a list of reference hypothesis as strings into a form usable by cook_test(). cook_test(test, refs, n=4): Transform a test sentence as a string (together with the cooked reference hypothesis) into a form usable by score_cooked(). ''' import copy import math from collections import defaultdict def precook(s, n=4, out=False): """Takes a string as input and returns an object that can be given to either cook_refs or cook_test. This is optional: cook_refs and cook_test can take string arguments as well.""" words = s.split() counts = defaultdict(int) for k in xrange(1, n + 1): for i in xrange(len(words) - k + 1): ngram = tuple(words[i:i + k]) counts[ngram] += 1 return (len(words), counts) def cook_refs(refs, eff=None, n=4): ## lhuang: oracle will call with "average" '''Takes a list of reference hypothesis for a single segment and returns an object that encapsulates everything that BLEU needs to know about them.''' reflen = [] maxcounts = {} for ref in refs: rl, counts = precook(ref, n) reflen.append(rl) for (ngram, count) in counts.iteritems(): maxcounts[ngram] = max(maxcounts.get(ngram, 0), count) # Calculate effective reference sentence length. if eff == "shortest": reflen = min(reflen) elif eff == "average": reflen = float(sum(reflen)) / len(reflen) ## lhuang: N.B.: leave reflen computaiton to the very end!! ## lhuang: N.B.: in case of "closest", keep a list of reflens!! (bad design) return (reflen, maxcounts) def cook_test(test, (reflen, refmaxcounts), eff=None, n=4): '''Takes a test sentence and returns an object that encapsulates everything that BLEU needs to know about it.''' testlen, counts = precook(test, n, True) result = {} # Calculate effective reference sentence length. if eff == "closest": result["reflen"] = min((abs(l - testlen), l) for l in reflen)[1] else: ## i.e., "average" or "shortest" or None result["reflen"] = reflen result["testlen"] = testlen result["guess"] = [max(0, testlen - k + 1) for k in xrange(1, n + 1)] result['correct'] = [0] * n for (ngram, count) in counts.iteritems(): result["correct"][len(ngram) - 1] += min(refmaxcounts.get(ngram, 0), count) return result class BleuScorer(object): """Bleu scorer. """ __slots__ = "n", "crefs", "ctest", "_score", "_ratio", "_testlen", "_reflen", "special_reflen" # special_reflen is used in oracle (proportional effective ref len for a node). 
def copy(self): ''' copy the refs.''' new = BleuScorer(n=self.n) new.ctest = copy.copy(self.ctest) new.crefs = copy.copy(self.crefs) new._score = None return new def __init__(self, test=None, refs=None, n=4, special_reflen=None): ''' singular instance ''' self.n = n self.crefs = [] self.ctest = [] self.cook_append(test, refs) self.special_reflen = special_reflen def cook_append(self, test, refs): '''called by constructor and __iadd__ to avoid creating new instances.''' if refs is not None: self.crefs.append(cook_refs(refs)) if test is not None: cooked_test = cook_test(test, self.crefs[-1]) self.ctest.append(cooked_test) ## N.B.: -1 else: self.ctest.append(None) # lens of crefs and ctest have to match self._score = None ## need to recompute def ratio(self, option=None): self.compute_score(option=option) return self._ratio def score_ratio(self, option=None): '''return (bleu, len_ratio) pair''' return (self.fscore(option=option), self.ratio(option=option)) def score_ratio_str(self, option=None): return "%.4f (%.2f)" % self.score_ratio(option) def reflen(self, option=None): self.compute_score(option=option) return self._reflen def testlen(self, option=None): self.compute_score(option=option) return self._testlen def retest(self, new_test): if type(new_test) is str: new_test = [new_test] assert len(new_test) == len(self.crefs), new_test self.ctest = [] for t, rs in zip(new_test, self.crefs): self.ctest.append(cook_test(t, rs)) self._score = None return self def rescore(self, new_test): ''' replace test(s) with new test(s), and returns the new score.''' return self.retest(new_test).compute_score() def size(self): assert len(self.crefs) == len(self.ctest), "refs/test mismatch! %d<>%d" % (len(self.crefs), len(self.ctest)) return len(self.crefs) def __iadd__(self, other): '''add an instance (e.g., from another sentence).''' if type(other) is tuple: ## avoid creating new BleuScorer instances self.cook_append(other[0], other[1]) else: assert self.compatible(other), "incompatible BLEUs." self.ctest.extend(other.ctest) self.crefs.extend(other.crefs) self._score = None ## need to recompute return self def compatible(self, other): return isinstance(other, BleuScorer) and self.n == other.n def single_reflen(self, option="average"): return self._single_reflen(self.crefs[0][0], option) def _single_reflen(self, reflens, option=None, testlen=None): if option == "shortest": reflen = min(reflens) elif option == "average": reflen = float(sum(reflens)) / len(reflens) elif option == "closest": reflen = min((abs(l - testlen), l) for l in reflens)[1] else: assert False, "unsupported reflen option %s" % option return reflen def recompute_score(self, option=None, verbose=0): self._score = None return self.compute_score(option, verbose) def compute_score(self, option=None, verbose=0): n = self.n small = 1e-9 tiny = 1e-15 ## so that if guess is 0 still return 0 bleu_list = [[] for _ in range(n)] if self._score is not None: return self._score if option is None: option = "average" if len(self.crefs) == 1 else "closest" self._testlen = 0 self._reflen = 0 totalcomps = {'testlen': 0, 'reflen': 0, 'guess': [0] * n, 'correct': [0] * n} # for each sentence for comps in self.ctest: testlen = comps['testlen'] self._testlen += testlen if self.special_reflen is None: ## need computation reflen = self._single_reflen(comps['reflen'], option, testlen) else: reflen = self.special_reflen self._reflen += reflen for key in ['guess', 'correct']: for k in xrange(n): totalcomps[key][k] += comps[key][k] # append per image bleu score bleu = 1. 
for k in xrange(n): bleu *= (float(comps['correct'][k]) + tiny) \ / (float(comps['guess'][k]) + small) bleu_list[k].append(bleu ** (1. / (k + 1))) ratio = (testlen + tiny) / (reflen + small) ## N.B.: avoid zero division if ratio < 1: for k in xrange(n): bleu_list[k][-1] *= math.exp(1 - 1 / ratio) if verbose > 1: print comps, reflen totalcomps['reflen'] = self._reflen totalcomps['testlen'] = self._testlen bleus = [] bleu = 1. for k in xrange(n): bleu *= float(totalcomps['correct'][k] + tiny) \ / (totalcomps['guess'][k] + small) bleus.append(bleu ** (1. / (k + 1))) ratio = (self._testlen + tiny) / (self._reflen + small) ## N.B.: avoid zero division if ratio < 1: for k in xrange(n): bleus[k] *= math.exp(1 - 1 / ratio) if verbose > 1: print totalcomps print "ratio:", ratio self._score = bleus return self._score, bleu_list
32.271375
151
0.581615
73ccd87e7f6f4b65cb5f846083b5204e0de31b1a
6,078
py
Python
GeeProxy/validators/items_vaildate.py
geebytes/GeeProxy
6f2f57ef1e1e8ea9a295cf987577dab5f1cadfe5
[ "Apache-2.0" ]
2
2020-10-12T05:31:36.000Z
2020-11-30T07:39:06.000Z
GeeProxy/validators/items_vaildate.py
geebytes/GeeProxy
6f2f57ef1e1e8ea9a295cf987577dab5f1cadfe5
[ "Apache-2.0" ]
1
2021-04-19T11:14:59.000Z
2021-04-19T11:14:59.000Z
GeeProxy/validators/items_vaildate.py
geebytes/GeeProxy
6f2f57ef1e1e8ea9a295cf987577dab5f1cadfe5
[ "Apache-2.0" ]
1
2021-09-30T04:36:10.000Z
2021-09-30T04:36:10.000Z
""" @Author: qinzhonghe96@163.com @Date: 2020-03-09 16:48:20 @LastEditors: qinzhonghe96@163.com @LastEditTime: 2020-03-10 03:45:46 @Description: 校验爬取到的代理数据项的可用性和匿名程度 """ import time import asyncio import threading from GeeProxy.utils.redis_cli import client from GeeProxy.utils.logger import item_logger from GeeProxy.validators.validators import ProxyValidator from GeeProxy.client.client import AvailableProxy from GeeProxy.utils.tools import get_vaildator_task from GeeProxy.settings import ITEM_VAILDATE_SET class ItemAvalibleVaildate: """ 批量校验数据项,先校验匿名程度,如果该过程发生请求超时等异常就标记为不可用, 并跳过对特定站点校验的流程程,否则进入特定站点的校验流程 """ def __init__(self, items: list): self._items = items def append(self, item: dict): """ 添加数据项 :param item: 数据项 :return: """ self._items.append(item) async def start_check(self): """ 开始校验 :return: """ if not self._items: return try: # 构建任务列表 tasks = [asyncio.ensure_future(self._process_item(item))for item in self._items] # 异步处理 await asyncio.gather(*tasks) except Exception as e: item_logger.error("While start check proxy anonymous" " occurred an {} exception {}.".format(type(e), str(e))) async def _process_item(self, item: dict): """ 处理数据项 :param item: 单个数据项 :return: """ if not item: return item # 先校验匿名程度 result = await self._check_anonymous(item) # 是否可用 if result.get("available", ""): # 检测代理对所有特定站点的可用性 check_result = await self._check_item(item["url"]) for r in check_result: # 如果代理可用就入库 if r.available: item_logger.info("Add proxy {} to cache.".format( item["url"])) r = await AvailableProxy.add_proxy(r, item) if r: # 入库成功就删除临时记录 client.delete(item["url"]) return item async def _check_anonymous(self, item: dict)-> dict: """ 检测代理的匿名程度,在请求检测接口的过程中如果超时或发生其他异常就认为代理不可用 :param item: 待检测的数据项 :return: 返回检测后的数据项 """ if not item: return item item["available"] = 0 try: # 先判断一下库中有没有这条代理如果有就跳过 if not AvailableProxy.proxy_exist(item["url"]): item_logger.info("Checking proxy {} anonymous.".format(item["url"])) try: result = await ProxyValidator.check_anonymous(item["url"]) item["anonymous"] = int(result) item["available"] = 1 except Exception as e: item_logger.error( "While check proxy {} anonymous occurred an {} exception {}." .format(item["url"], type(e), str(e))) # 发生异常就直接删除临时记录 client.delete(item["url"]) # 代理标记为不可用 item["available"] = 0 except Exception as e: item_logger.error( "While check proxy {} anonymous occurred an {} exception {}.". 
format(item, type(e), str(e))) return item async def _check_item(self, proxy: str) -> list: """ 校验代理对可用性 :param proxy: 待校验的代理 :return: 结果列表 """ result = [] # 拿到代理校验任务列表 tasks = get_vaildator_task(proxy) # 批量处理 done = await asyncio.gather(*tasks) for d in done: check_result = d if check_result.available: result.append(check_result) return result def start_loop(loop): """ 设置当前线程的循环事件 :param loop: 循环事件 :return: """ asyncio.set_event_loop(loop) loop.run_forever() def item_vaildator_runner(): """ 采用多线程+协程的方式执行校验任务提高任务执行效率, 每批启用5个线程,每个线程启用5个携程, 分批次按每批5*5的数据量从待校验集合中读取数据。 """ pipe = client.pipeline() while True: try: items_number = client.scard(ITEM_VAILDATE_SET) if items_number: # 在当前线程下创建时间循环,(未启用),在start_loop里面启动它 new_loop = asyncio.new_event_loop() # 通过当前线程开启新的线程去启动事件循环 thread = threading.Thread(target=new_loop.run_forever) # 获取当前循环事件 loop = asyncio.get_event_loop() futs = [] thread.start() # 启用5个线程 try: for k in range(0, 5): # 读取数据 for i in range(0, 5): pipe.spop(ITEM_VAILDATE_SET) urls = list(pipe.execute()) items = [] for url in urls: if url is not None: pipe.hgetall(url) items = list(pipe.execute()) item_vaildator = ItemAvalibleVaildate(items=items) # 这几个是关键,代表在新线程中事件循环不断“游走”执行 futs.append( asyncio.run_coroutine_threadsafe( item_vaildator.start_check(), loop=new_loop)) except Exception as e: item_logger.error( "While create check proxies anonymous tasks occurred an {} exception {}." .format(type(e), str(e))) futs = [asyncio.wrap_future(f, loop=loop) for f in futs] loop.run_until_complete(asyncio.wait(futs)) new_loop.call_soon_threadsafe(new_loop.stop) thread.join() time.sleep(1) except Exception as e: item_logger.error( "While check proxies anonymous occurred an {} exception {}.". format(type(e), str(e)))
31.329897
97
0.51234
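The item_vaildator_runner function in the GeeProxy file above drives asyncio coroutines from an ordinary thread: it creates a fresh event loop, runs it on a background thread, schedules each batch's start_check() coroutine onto that loop with asyncio.run_coroutine_threadsafe, and finally stops the loop with call_soon_threadsafe before joining the thread. The following is a minimal, self-contained sketch of that pattern, not GeeProxy's actual code: probe(url) is a made-up stand-in for ItemAvalibleVaildate.start_check, and results are collected with Future.result() instead of wrapping the futures back into the caller's loop as the original does.

import asyncio
import threading


async def probe(url: str) -> str:
    # Hypothetical stand-in for ItemAvalibleVaildate.start_check: pretend to test a proxy.
    await asyncio.sleep(0.1)
    return "{} ok".format(url)


def run_batch(urls):
    # A fresh event loop that lives on a background thread, as in item_vaildator_runner.
    worker_loop = asyncio.new_event_loop()
    worker = threading.Thread(target=worker_loop.run_forever, daemon=True)
    worker.start()
    try:
        # Hand one coroutine per URL to the loop running in the worker thread.
        futures = [asyncio.run_coroutine_threadsafe(probe(u), worker_loop) for u in urls]
        # Each call returns a concurrent.futures.Future; result() blocks until done.
        return [f.result(timeout=10) for f in futures]
    finally:
        # Ask the loop to stop from its own thread, then wait for the worker to exit.
        worker_loop.call_soon_threadsafe(worker_loop.stop)
        worker.join()
        worker_loop.close()


if __name__ == "__main__":
    print(run_batch(["http://10.0.0.1:8080", "http://10.0.0.2:8080"]))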
73ccdec5d7ccfcb9edc410306476089de41cd8d3
765
py
Python
google/cloud/pubsublite_v1/services/cursor_service/__init__.py
LaudateCorpus1/python-pubsublite
f0d65ca7b1efec9eae2dda1481c4bfe978eaa2dd
[ "Apache-2.0" ]
15
2020-11-10T15:36:52.000Z
2022-03-06T15:00:25.000Z
google/cloud/pubsublite_v1/services/cursor_service/__init__.py
LaudateCorpus1/python-pubsublite
f0d65ca7b1efec9eae2dda1481c4bfe978eaa2dd
[ "Apache-2.0" ]
110
2020-11-11T18:14:31.000Z
2022-03-30T22:42:17.000Z
google/cloud/pubsublite_v1/services/cursor_service/__init__.py
LaudateCorpus1/python-pubsublite
f0d65ca7b1efec9eae2dda1481c4bfe978eaa2dd
[ "Apache-2.0" ]
6
2020-11-13T19:24:27.000Z
2022-01-29T08:13:14.000Z
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import CursorServiceClient
from .async_client import CursorServiceAsyncClient

__all__ = (
    "CursorServiceClient",
    "CursorServiceAsyncClient",
)
33.26087
74
0.756863
73cd0288a1b9e9ced9032f4aaf6cd91802d6adbd
511
py
Python
content/test/gpu/page_sets/__init__.py
Wzzzx/chromium-crosswalk
768dde8efa71169f1c1113ca6ef322f1e8c9e7de
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
2
2019-01-28T08:09:58.000Z
2021-11-15T15:32:10.000Z
content/test/gpu/page_sets/__init__.py
maidiHaitai/haitaibrowser
a232a56bcfb177913a14210e7733e0ea83a6b18d
[ "BSD-3-Clause" ]
null
null
null
content/test/gpu/page_sets/__init__.py
maidiHaitai/haitaibrowser
a232a56bcfb177913a14210e7733e0ea83a6b18d
[ "BSD-3-Clause" ]
6
2020-09-23T08:56:12.000Z
2021-11-18T03:40:49.000Z
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import os
import sys

from telemetry.core import discover
from telemetry.story import story_set

from page_sets.gpu_process_tests import GpuProcessTestsStorySet
from page_sets.gpu_rasterization_tests import GpuRasterizationTestsStorySet
from page_sets.memory_tests import MemoryTestsStorySet
from page_sets.pixel_tests import PixelTestsStorySet
36.5
75
0.851272
73cd08ef893764cef0eef53f936bc6f9ac62dc52
78,688
py
Python
sympy/utilities/runtests.py
lemmalearning/sympy
62ad387ed3f7b243c889dd342296afc9a32ad1ea
[ "BSD-3-Clause" ]
1
2015-08-31T06:55:47.000Z
2015-08-31T06:55:47.000Z
sympy/utilities/runtests.py
lemmalearning/sympy
62ad387ed3f7b243c889dd342296afc9a32ad1ea
[ "BSD-3-Clause" ]
null
null
null
sympy/utilities/runtests.py
lemmalearning/sympy
62ad387ed3f7b243c889dd342296afc9a32ad1ea
[ "BSD-3-Clause" ]
3
2015-04-18T22:33:32.000Z
2015-09-23T06:45:07.000Z
""" This is our testing framework. Goals: * it should be compatible with py.test and operate very similarly (or identically) * doesn't require any external dependencies * preferably all the functionality should be in this file only * no magic, just import the test file and execute the test functions, that's it * portable """ from __future__ import print_function, division import os import sys import platform import inspect import traceback import pdb import re import linecache from fnmatch import fnmatch from timeit import default_timer as clock import doctest as pdoctest # avoid clashing with our doctest() function from doctest import DocTestFinder, DocTestRunner import random import subprocess import signal import stat from inspect import isgeneratorfunction from sympy.core.cache import clear_cache from sympy.core.compatibility import (exec_, PY3, get_function_code, string_types, xrange) from sympy.utilities.misc import find_executable from sympy.external import import_module from sympy.utilities.exceptions import SymPyDeprecationWarning IS_WINDOWS = (os.name == 'nt') class Skipped(Exception): pass import __future__ # add more flags ?? future_flags = __future__.division.compiler_flag def _indent(s, indent=4): """ Add the given number of space characters to the beginning of every non-blank line in ``s``, and return the result. If the string ``s`` is Unicode, it is encoded using the stdout encoding and the ``backslashreplace`` error handler. """ # After a 2to3 run the below code is bogus, so wrap it with a version check if not PY3: if isinstance(s, unicode): s = s.encode(pdoctest._encoding, 'backslashreplace') # This regexp matches the start of non-blank lines: return re.sub('(?m)^(?!$)', indent*' ', s) pdoctest._indent = _indent # ovverride reporter to maintain windows and python3 def _report_failure(self, out, test, example, got): """ Report that the given example failed. """ s = self._checker.output_difference(example, got, self.optionflags) s = s.encode('raw_unicode_escape').decode('utf8', 'ignore') out(self._failure_header(test, example) + s) if PY3 and IS_WINDOWS: DocTestRunner.report_failure = _report_failure def convert_to_native_paths(lst): """ Converts a list of '/' separated paths into a list of native (os.sep separated) paths and converts to lowercase if the system is case insensitive. """ newlst = [] for i, rv in enumerate(lst): rv = os.path.join(*rv.split("/")) # on windows the slash after the colon is dropped if sys.platform == "win32": pos = rv.find(':') if pos != -1: if rv[pos + 1] != '\\': rv = rv[:pos + 1] + '\\' + rv[pos + 1:] newlst.append(sys_normcase(rv)) return newlst def get_sympy_dir(): """ Returns the root sympy directory and set the global value indicating whether the system is case sensitive or not. 
""" global sys_case_insensitive this_file = os.path.abspath(__file__) sympy_dir = os.path.join(os.path.dirname(this_file), "..", "..") sympy_dir = os.path.normpath(sympy_dir) sys_case_insensitive = (os.path.isdir(sympy_dir) and os.path.isdir(sympy_dir.lower()) and os.path.isdir(sympy_dir.upper())) return sys_normcase(sympy_dir) def sys_normcase(f): if sys_case_insensitive: # global defined after call to get_sympy_dir() return f.lower() return f def setup_pprint(): from sympy import pprint_use_unicode, init_printing # force pprint to be in ascii mode in doctests pprint_use_unicode(False) # hook our nice, hash-stable strprinter init_printing(pretty_print=False) def run_in_subprocess_with_hash_randomization(function, function_args=(), function_kwargs={}, command=sys.executable, module='sympy.utilities.runtests', force=False): """ Run a function in a Python subprocess with hash randomization enabled. If hash randomization is not supported by the version of Python given, it returns False. Otherwise, it returns the exit value of the command. The function is passed to sys.exit(), so the return value of the function will be the return value. The environment variable PYTHONHASHSEED is used to seed Python's hash randomization. If it is set, this function will return False, because starting a new subprocess is unnecessary in that case. If it is not set, one is set at random, and the tests are run. Note that if this environment variable is set when Python starts, hash randomization is automatically enabled. To force a subprocess to be created even if PYTHONHASHSEED is set, pass ``force=True``. This flag will not force a subprocess in Python versions that do not support hash randomization (see below), because those versions of Python do not support the ``-R`` flag. ``function`` should be a string name of a function that is importable from the module ``module``, like "_test". The default for ``module`` is "sympy.utilities.runtests". ``function_args`` and ``function_kwargs`` should be a repr-able tuple and dict, respectively. The default Python command is sys.executable, which is the currently running Python command. This function is necessary because the seed for hash randomization must be set by the environment variable before Python starts. Hence, in order to use a predetermined seed for tests, we must start Python in a separate subprocess. Hash randomization was added in the minor Python versions 2.6.8, 2.7.3, 3.1.5, and 3.2.3, and is enabled by default in all Python versions after and including 3.3.0. Examples ======== >>> from sympy.utilities.runtests import ( ... run_in_subprocess_with_hash_randomization) >>> # run the core tests in verbose mode >>> run_in_subprocess_with_hash_randomization("_test", ... function_args=("core",), ... function_kwargs={'verbose': True}) # doctest: +SKIP # Will return 0 if sys.executable supports hash randomization and tests # pass, 1 if they fail, and False if it does not support hash # randomization. """ # Note, we must return False everywhere, not None, as subprocess.call will # sometimes return None. 
# First check if the Python version supports hash randomization # If it doesn't have this support, it won't reconize the -R flag p = subprocess.Popen([command, "-RV"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) p.communicate() if p.returncode != 0: return False hash_seed = os.getenv("PYTHONHASHSEED") if not hash_seed: os.environ["PYTHONHASHSEED"] = str(random.randrange(2**32)) else: if not force: return False # Now run the command commandstring = ("import sys; from %s import %s;sys.exit(%s(*%s, **%s))" % (module, function, function, repr(function_args), repr(function_kwargs))) try: return subprocess.call([command, "-R", "-c", commandstring]) finally: # Put the environment variable back, so that it reads correctly for # the current Python process. if hash_seed is None: del os.environ["PYTHONHASHSEED"] else: os.environ["PYTHONHASHSEED"] = hash_seed def run_all_tests(test_args=(), test_kwargs={}, doctest_args=(), doctest_kwargs={}, examples_args=(), examples_kwargs={'quiet': True}): """ Run all tests. Right now, this runs the regular tests (bin/test), the doctests (bin/doctest), the examples (examples/all.py), and the sage tests (see sympy/external/tests/test_sage.py). This is what ``setup.py test`` uses. You can pass arguments and keyword arguments to the test functions that support them (for now, test, doctest, and the examples). See the docstrings of those functions for a description of the available options. For example, to run the solvers tests with colors turned off: >>> from sympy.utilities.runtests import run_all_tests >>> run_all_tests(test_args=("solvers",), ... test_kwargs={"colors:False"}) # doctest: +SKIP """ tests_successful = True try: # Regular tests if not test(*test_args, **test_kwargs): # some regular test fails, so set the tests_successful # flag to false and continue running the doctests tests_successful = False # Doctests print() if not doctest(*doctest_args, **doctest_kwargs): tests_successful = False # Examples print() sys.path.append("examples") from all import run_examples # examples/all.py if not run_examples(*examples_args, **examples_kwargs): tests_successful = False # Sage tests if not (sys.platform == "win32" or PY3): # run Sage tests; Sage currently doesn't support Windows or Python 3 dev_null = open(os.devnull, 'w') if subprocess.call("sage -v", shell=True, stdout=dev_null, stderr=dev_null) == 0: if subprocess.call("sage -python bin/test " "sympy/external/tests/test_sage.py", shell=True) != 0: tests_successful = False if tests_successful: return else: # Return nonzero exit code sys.exit(1) except KeyboardInterrupt: print() print("DO *NOT* COMMIT!") sys.exit(1) def test(*paths, **kwargs): """ Run tests in the specified test_*.py files. Tests in a particular test_*.py file are run if any of the given strings in ``paths`` matches a part of the test file's path. If ``paths=[]``, tests in all test_*.py files are run. Notes: - If sort=False, tests are run in random order (not default). - Paths can be entered in native system format or in unix, forward-slash format. - Files that are on the blacklist can be tested by providing their path; they are only excluded if no paths are given. **Explanation of test results** ====== =============================================================== Output Meaning ====== =============================================================== . 
passed F failed X XPassed (expected to fail but passed) f XFAILed (expected to fail and indeed failed) s skipped w slow T timeout (e.g., when ``--timeout`` is used) K KeyboardInterrupt (when running the slow tests with ``--slow``, you can interrupt one of them without killing the test runner) ====== =============================================================== Colors have no additional meaning and are used just to facilitate interpreting the output. Examples ======== >>> import sympy Run all tests: >>> sympy.test() # doctest: +SKIP Run one file: >>> sympy.test("sympy/core/tests/test_basic.py") # doctest: +SKIP >>> sympy.test("_basic") # doctest: +SKIP Run all tests in sympy/functions/ and some particular file: >>> sympy.test("sympy/core/tests/test_basic.py", ... "sympy/functions") # doctest: +SKIP Run all tests in sympy/core and sympy/utilities: >>> sympy.test("/core", "/util") # doctest: +SKIP Run specific test from a file: >>> sympy.test("sympy/core/tests/test_basic.py", ... kw="test_equality") # doctest: +SKIP Run specific test from any file: >>> sympy.test(kw="subs") # doctest: +SKIP Run the tests with verbose mode on: >>> sympy.test(verbose=True) # doctest: +SKIP Don't sort the test output: >>> sympy.test(sort=False) # doctest: +SKIP Turn on post-mortem pdb: >>> sympy.test(pdb=True) # doctest: +SKIP Turn off colors: >>> sympy.test(colors=False) # doctest: +SKIP Force colors, even when the output is not to a terminal (this is useful, e.g., if you are piping to ``less -r`` and you still want colors) >>> sympy.test(force_colors=False) # doctest: +SKIP The traceback verboseness can be set to "short" or "no" (default is "short") >>> sympy.test(tb='no') # doctest: +SKIP The ``split`` option can be passed to split the test run into parts. The split currently only splits the test files, though this may change in the future. ``split`` should be a string of the form 'a/b', which will run part ``a`` of ``b``. For instance, to run the first half of the test suite: >>> sympy.test(split='1/2') # doctest: +SKIP You can disable running the tests in a separate subprocess using ``subprocess=False``. This is done to support seeding hash randomization, which is enabled by default in the Python versions where it is supported. If subprocess=False, hash randomization is enabled/disabled according to whether it has been enabled or not in the calling Python process. However, even if it is enabled, the seed cannot be printed unless it is called from a new Python process. Hash randomization was added in the minor Python versions 2.6.8, 2.7.3, 3.1.5, and 3.2.3, and is enabled by default in all Python versions after and including 3.3.0. If hash randomization is not supported ``subprocess=False`` is used automatically. >>> sympy.test(subprocess=False) # doctest: +SKIP To set the hash randomization seed, set the environment variable ``PYTHONHASHSEED`` before running the tests. This can be done from within Python using >>> import os >>> os.environ['PYTHONHASHSEED'] = '42' # doctest: +SKIP Or from the command line using $ PYTHONHASHSEED=42 ./bin/test If the seed is not set, a random seed will be chosen. Note that to reproduce the same hash values, you must use both the same seed as well as the same architecture (32-bit vs. 64-bit). 
""" subprocess = kwargs.pop("subprocess", True) rerun = kwargs.pop("rerun", 0) if subprocess: # loop backwards so last i is 0 for i in xrange(rerun, -1, -1): ret = run_in_subprocess_with_hash_randomization("_test", function_args=paths, function_kwargs=kwargs) if ret is False: break val = not bool(ret) # exit on the first failure or if done if not val or i == 0: return val # rerun even if hash randomization is not supported for i in xrange(rerun, -1, -1): val = not bool(_test(*paths, **kwargs)) if not val or i == 0: return val def _test(*paths, **kwargs): """ Internal function that actually runs the tests. All keyword arguments from ``test()`` are passed to this function except for ``subprocess``. Returns 0 if tests passed and 1 if they failed. See the docstring of ``test()`` for more information. """ verbose = kwargs.get("verbose", False) tb = kwargs.get("tb", "short") kw = kwargs.get("kw", "") post_mortem = kwargs.get("pdb", False) colors = kwargs.get("colors", True) force_colors = kwargs.get("force_colors", False) sort = kwargs.get("sort", True) seed = kwargs.get("seed", None) if seed is None: seed = random.randrange(100000000) timeout = kwargs.get("timeout", False) slow = kwargs.get("slow", False) enhance_asserts = kwargs.get("enhance_asserts", False) split = kwargs.get('split', None) blacklist = kwargs.get('blacklist', []) blacklist.extend([ "sympy/mpmath", # needs to be fixed upstream ]) blacklist = convert_to_native_paths(blacklist) r = PyTestReporter(verbose=verbose, tb=tb, colors=colors, force_colors=force_colors, split=split) t = SymPyTests(r, kw, post_mortem, seed) # Disable warnings for external modules import sympy.external sympy.external.importtools.WARN_OLD_VERSION = False sympy.external.importtools.WARN_NOT_INSTALLED = False # Show deprecation warnings import warnings warnings.simplefilter("error", SymPyDeprecationWarning) test_files = t.get_test_files('sympy') not_blacklisted = [f for f in test_files if not any(b in f for b in blacklist)] if len(paths) == 0: matched = not_blacklisted else: paths = convert_to_native_paths(paths) matched = [] for f in not_blacklisted: basename = os.path.basename(f) for p in paths: if p in f or fnmatch(basename, p): matched.append(f) break if split: matched = split_list(matched, split) t._testfiles.extend(matched) return int(not t.test(sort=sort, timeout=timeout, slow=slow, enhance_asserts=enhance_asserts)) def doctest(*paths, **kwargs): """ Runs doctests in all \*.py files in the sympy directory which match any of the given strings in ``paths`` or all tests if paths=[]. Notes: - Paths can be entered in native system format or in unix, forward-slash format. - Files that are on the blacklist can be tested by providing their path; they are only excluded if no paths are given. Examples ======== >>> import sympy Run all tests: >>> sympy.doctest() # doctest: +SKIP Run one file: >>> sympy.doctest("sympy/core/basic.py") # doctest: +SKIP >>> sympy.doctest("polynomial.rst") # doctest: +SKIP Run all tests in sympy/functions/ and some particular file: >>> sympy.doctest("/functions", "basic.py") # doctest: +SKIP Run any file having polynomial in its name, doc/src/modules/polynomial.rst, sympy/functions/special/polynomials.py, and sympy/polys/polynomial.py: >>> sympy.doctest("polynomial") # doctest: +SKIP The ``split`` option can be passed to split the test run into parts. The split currently only splits the test files, though this may change in the future. ``split`` should be a string of the form 'a/b', which will run part ``a`` of ``b``. 
Note that the regular doctests and the Sphinx doctests are split independently. For instance, to run the first half of the test suite: >>> sympy.doctest(split='1/2') # doctest: +SKIP The ``subprocess`` and ``verbose`` options are the same as with the function ``test()``. See the docstring of that function for more information. """ subprocess = kwargs.pop("subprocess", True) if subprocess: ret = run_in_subprocess_with_hash_randomization("_doctest", function_args=paths, function_kwargs=kwargs) if ret is not False: return not bool(ret) return not bool(_doctest(*paths, **kwargs)) def _doctest(*paths, **kwargs): """ Internal function that actually runs the doctests. All keyword arguments from ``doctest()`` are passed to this function except for ``subprocess``. Returns 0 if tests passed and 1 if they failed. See the docstrings of ``doctest()`` and ``test()`` for more information. """ normal = kwargs.get("normal", False) verbose = kwargs.get("verbose", False) blacklist = kwargs.get("blacklist", []) split = kwargs.get('split', None) blacklist.extend([ "doc/src/modules/mpmath", # needs to be fixed upstream "sympy/mpmath", # needs to be fixed upstream "doc/src/modules/plotting.rst", # generates live plots "sympy/utilities/compilef.py", # needs tcc "sympy/physics/gaussopt.py", # raises deprecation warning ]) if import_module('numpy') is None: blacklist.extend([ "sympy/plotting/experimental_lambdify.py", "sympy/plotting/plot_implicit.py", "examples/advanced/autowrap_integrators.py", "examples/advanced/autowrap_ufuncify.py", "examples/intermediate/sample.py", "examples/intermediate/mplot2d.py", "examples/intermediate/mplot3d.py", "doc/src/modules/numeric-computation.rst" ]) else: if import_module('matplotlib') is None: blacklist.extend([ "examples/intermediate/mplot2d.py", "examples/intermediate/mplot3d.py" ]) else: # don't display matplotlib windows from sympy.plotting.plot import unset_show unset_show() if import_module('pyglet') is None: blacklist.extend(["sympy/plotting/pygletplot"]) if import_module('theano') is None: blacklist.extend(["doc/src/modules/numeric-computation.rst"]) # disabled because of doctest failures in asmeurer's bot blacklist.extend([ "sympy/utilities/autowrap.py", "examples/advanced/autowrap_integrators.py", "examples/advanced/autowrap_ufuncify.py" ]) # pytest = import_module('pytest') # py = import_module('py') # if py is None or pytest is None: # blacklist.extend([ # "sympy/conftest.py", # "sympy/utilities/benchmarking.py" # ]) # blacklist these modules until issue 4840 is resolved blacklist.extend([ "sympy/conftest.py", "sympy/utilities/benchmarking.py" ]) blacklist = convert_to_native_paths(blacklist) # Disable warnings for external modules import sympy.external sympy.external.importtools.WARN_OLD_VERSION = False sympy.external.importtools.WARN_NOT_INSTALLED = False # Show deprecation warnings import warnings warnings.simplefilter("error", SymPyDeprecationWarning) r = PyTestReporter(verbose, split=split) t = SymPyDocTests(r, normal) test_files = t.get_test_files('sympy') test_files.extend(t.get_test_files('examples', init_only=False)) not_blacklisted = [f for f in test_files if not any(b in f for b in blacklist)] if len(paths) == 0: matched = not_blacklisted else: # take only what was requested...but not blacklisted items # and allow for partial match anywhere or fnmatch of name paths = convert_to_native_paths(paths) matched = [] for f in not_blacklisted: basename = os.path.basename(f) for p in paths: if p in f or fnmatch(basename, p): matched.append(f) break if split: 
matched = split_list(matched, split) t._testfiles.extend(matched) # run the tests and record the result for this *py portion of the tests if t._testfiles: failed = not t.test() else: failed = False # N.B. # -------------------------------------------------------------------- # Here we test *.rst files at or below doc/src. Code from these must # be self supporting in terms of imports since there is no importing # of necessary modules by doctest.testfile. If you try to pass *.py # files through this they might fail because they will lack the needed # imports and smarter parsing that can be done with source code. # test_files = t.get_test_files('doc/src', '*.rst', init_only=False) test_files.sort() not_blacklisted = [f for f in test_files if not any(b in f for b in blacklist)] if len(paths) == 0: matched = not_blacklisted else: # Take only what was requested as long as it's not on the blacklist. # Paths were already made native in *py tests so don't repeat here. # There's no chance of having a *py file slip through since we # only have *rst files in test_files. matched = [] for f in not_blacklisted: basename = os.path.basename(f) for p in paths: if p in f or fnmatch(basename, p): matched.append(f) break if split: matched = split_list(matched, split) setup_pprint() first_report = True for rst_file in matched: if not os.path.isfile(rst_file): continue old_displayhook = sys.displayhook try: # out = pdoctest.testfile( # rst_file, module_relative=False, encoding='utf-8', # optionflags=pdoctest.ELLIPSIS | pdoctest.NORMALIZE_WHITESPACE) out = sympytestfile( rst_file, module_relative=False, encoding='utf-8', optionflags=pdoctest.ELLIPSIS | pdoctest.NORMALIZE_WHITESPACE | pdoctest.IGNORE_EXCEPTION_DETAIL) finally: # make sure we return to the original displayhook in case some # doctest has changed that sys.displayhook = old_displayhook rstfailed, tested = out if tested: failed = rstfailed or failed if first_report: first_report = False msg = 'rst doctests start' if not t._testfiles: r.start(msg=msg) else: r.write_center(msg) print() # use as the id, everything past the first 'sympy' file_id = rst_file[rst_file.find('sympy') + len('sympy') + 1:] print(file_id, end=" ") # get at least the name out so it is know who is being tested wid = r.terminal_width - len(file_id) - 1 # update width test_file = '[%s]' % (tested) report = '[%s]' % (rstfailed or 'OK') print(''.join( [test_file, ' '*(wid - len(test_file) - len(report)), report]) ) # the doctests for *py will have printed this message already if there was # a failure, so now only print it if there was intervening reporting by # testing the *rst as evidenced by first_report no longer being True. if not first_report and failed: print() print("DO *NOT* COMMIT!") return int(failed) sp = re.compile(r'([0-9]+)/([1-9][0-9]*)') def split_list(l, split): """ Splits a list into part a of b split should be a string of the form 'a/b'. For instance, '1/3' would give the split one of three. If the length of the list is not divisible by the number of splits, the last split will have more items. 
>>> from sympy.utilities.runtests import split_list >>> a = list(range(10)) >>> split_list(a, '1/3') [0, 1, 2] >>> split_list(a, '2/3') [3, 4, 5] >>> split_list(a, '3/3') [6, 7, 8, 9] """ m = sp.match(split) if not m: raise ValueError("split must be a string of the form a/b where a and b are ints") i, t = map(int, m.groups()) return l[(i - 1)*len(l)//t:i*len(l)//t] from collections import namedtuple SymPyTestResults = namedtuple('TestResults', 'failed attempted') def sympytestfile(filename, module_relative=True, name=None, package=None, globs=None, verbose=None, report=True, optionflags=0, extraglobs=None, raise_on_error=False, parser=pdoctest.DocTestParser(), encoding=None): """ Test examples in the given file. Return (#failures, #tests). Optional keyword arg ``module_relative`` specifies how filenames should be interpreted: - If ``module_relative`` is True (the default), then ``filename`` specifies a module-relative path. By default, this path is relative to the calling module's directory; but if the ``package`` argument is specified, then it is relative to that package. To ensure os-independence, ``filename`` should use "/" characters to separate path segments, and should not be an absolute path (i.e., it may not begin with "/"). - If ``module_relative`` is False, then ``filename`` specifies an os-specific path. The path may be absolute or relative (to the current working directory). Optional keyword arg ``name`` gives the name of the test; by default use the file's basename. Optional keyword argument ``package`` is a Python package or the name of a Python package whose directory should be used as the base directory for a module relative filename. If no package is specified, then the calling module's directory is used as the base directory for module relative filenames. It is an error to specify ``package`` if ``module_relative`` is False. Optional keyword arg ``globs`` gives a dict to be used as the globals when executing examples; by default, use {}. A copy of this dict is actually used for each docstring, so that each docstring's examples start with a clean slate. Optional keyword arg ``extraglobs`` gives a dictionary that should be merged into the globals that are used to execute examples. By default, no extra globals are used. Optional keyword arg ``verbose`` prints lots of stuff if true, prints only failures if false; by default, it's true iff "-v" is in sys.argv. Optional keyword arg ``report`` prints a summary at the end when true, else prints nothing at the end. In verbose mode, the summary is detailed, else very brief (in fact, empty if all tests passed). Optional keyword arg ``optionflags`` or's together module constants, and defaults to 0. Possible values (see the docs for details): - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - SKIP - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE Optional keyword arg ``raise_on_error`` raises an exception on the first unexpected exception or failure. This allows failures to be post-mortem debugged. Optional keyword arg ``parser`` specifies a DocTestParser (or subclass) that should be used to extract tests from the files. Optional keyword arg ``encoding`` specifies an encoding that should be used to convert the file to unicode. Advanced tomfoolery: testmod runs methods of a local instance of class doctest.Tester, then merges the results into (or creates) global Tester instance doctest.master. 
Methods of doctest.master can be called directly too, if you want to do something unusual. Passing report=0 to testmod is especially useful then, to delay displaying a summary. Invoke doctest.master.summarize(verbose) when you're done fiddling. """ if package and not module_relative: raise ValueError("Package may only be specified for module-" "relative paths.") # Relativize the path if not PY3: text, filename = pdoctest._load_testfile( filename, package, module_relative) if encoding is not None: text = text.decode(encoding) else: text, filename = pdoctest._load_testfile( filename, package, module_relative, encoding) # If no name was given, then use the file's name. if name is None: name = os.path.basename(filename) # Assemble the globals. if globs is None: globs = {} else: globs = globs.copy() if extraglobs is not None: globs.update(extraglobs) if '__name__' not in globs: globs['__name__'] = '__main__' if raise_on_error: runner = pdoctest.DebugRunner(verbose=verbose, optionflags=optionflags) else: runner = SymPyDocTestRunner(verbose=verbose, optionflags=optionflags) runner._checker = SymPyOutputChecker() # Read the file, convert it to a test, and run it. test = parser.get_doctest(text, globs, name, filename, 0) runner.run(test, compileflags=future_flags) if report: runner.summarize() if pdoctest.master is None: pdoctest.master = runner else: pdoctest.master.merge(runner) return SymPyTestResults(runner.failures, runner.tries) class SymPyTests(object): def __init__(self, reporter, kw="", post_mortem=False, seed=None): self._post_mortem = post_mortem self._kw = kw self._count = 0 self._root_dir = sympy_dir self._reporter = reporter self._reporter.root_dir(self._root_dir) self._testfiles = [] self._seed = seed if seed is not None else random.random() def test(self, sort=False, timeout=False, slow=False, enhance_asserts=False): """ Runs the tests returning True if all tests pass, otherwise False. If sort=False run tests in random order. 
""" if sort: self._testfiles.sort() else: from random import shuffle random.seed(self._seed) shuffle(self._testfiles) self._reporter.start(self._seed) for f in self._testfiles: try: self.test_file(f, sort, timeout, slow, enhance_asserts) except KeyboardInterrupt: print(" interrupted by user") self._reporter.finish() raise return self._reporter.finish() def _enhance_asserts(self, source): from ast import (NodeTransformer, Compare, Name, Store, Load, Tuple, Assign, BinOp, Str, Mod, Assert, parse, fix_missing_locations) ops = {"Eq": '==', "NotEq": '!=', "Lt": '<', "LtE": '<=', "Gt": '>', "GtE": '>=', "Is": 'is', "IsNot": 'is not', "In": 'in', "NotIn": 'not in'} class Transform(NodeTransformer): def visit_Assert(self, stmt): if isinstance(stmt.test, Compare): compare = stmt.test values = [compare.left] + compare.comparators names = [ "_%s" % i for i, _ in enumerate(values) ] names_store = [ Name(n, Store()) for n in names ] names_load = [ Name(n, Load()) for n in names ] target = Tuple(names_store, Store()) value = Tuple(values, Load()) assign = Assign([target], value) new_compare = Compare(names_load[0], compare.ops, names_load[1:]) msg_format = "\n%s " + "\n%s ".join([ ops[op.__class__.__name__] for op in compare.ops ]) + "\n%s" msg = BinOp(Str(msg_format), Mod(), Tuple(names_load, Load())) test = Assert(new_compare, msg, lineno=stmt.lineno, col_offset=stmt.col_offset) return [assign, test] else: return stmt tree = parse(source) new_tree = Transform().visit(tree) return fix_missing_locations(new_tree) def test_file(self, filename, sort=True, timeout=False, slow=False, enhance_asserts=False): funcs = [] try: clear_cache() self._count += 1 gl = {'__file__': filename} random.seed(self._seed) try: if PY3: open_file = lambda: open(filename, encoding="utf8") else: open_file = lambda: open(filename) with open_file() as f: source = f.read() if enhance_asserts: try: source = self._enhance_asserts(source) except ImportError: pass code = compile(source, filename, "exec") exec_(code, gl) except (SystemExit, KeyboardInterrupt): raise except ImportError: self._reporter.import_error(filename, sys.exc_info()) return pytestfile = "" if "XFAIL" in gl: pytestfile = inspect.getsourcefile(gl["XFAIL"]) pytestfile2 = "" if "slow" in gl: pytestfile2 = inspect.getsourcefile(gl["slow"]) disabled = gl.get("disabled", False) if not disabled: # we need to filter only those functions that begin with 'test_' # that are defined in the testing file or in the file where # is defined the XFAIL decorator funcs = [gl[f] for f in gl.keys() if f.startswith("test_") and (inspect.isfunction(gl[f]) or inspect.ismethod(gl[f])) and (inspect.getsourcefile(gl[f]) == filename or inspect.getsourcefile(gl[f]) == pytestfile or inspect.getsourcefile(gl[f]) == pytestfile2)] if slow: funcs = [f for f in funcs if getattr(f, '_slow', False)] # Sorting of XFAILed functions isn't fixed yet :-( funcs.sort(key=lambda x: inspect.getsourcelines(x)[1]) i = 0 while i < len(funcs): if isgeneratorfunction(funcs[i]): # some tests can be generators, that return the actual # test functions. 
We unpack it below: f = funcs.pop(i) for fg in f(): func = fg[0] args = fg[1:] fgw = lambda: func(*args) funcs.insert(i, fgw) i += 1 else: i += 1 # drop functions that are not selected with the keyword expression: funcs = [x for x in funcs if self.matches(x)] if not funcs: return except Exception: self._reporter.entering_filename(filename, len(funcs)) raise self._reporter.entering_filename(filename, len(funcs)) if not sort: random.shuffle(funcs) for f in funcs: self._reporter.entering_test(f) try: if getattr(f, '_slow', False) and not slow: raise Skipped("Slow") if timeout: self._timeout(f, timeout) else: random.seed(self._seed) f() except KeyboardInterrupt: if getattr(f, '_slow', False): self._reporter.test_skip("KeyboardInterrupt") else: raise except Exception: if timeout: signal.alarm(0) # Disable the alarm. It could not be handled before. t, v, tr = sys.exc_info() if t is AssertionError: self._reporter.test_fail((t, v, tr)) if self._post_mortem: pdb.post_mortem(tr) elif t.__name__ == "Skipped": self._reporter.test_skip(v) elif t.__name__ == "XFail": self._reporter.test_xfail() elif t.__name__ == "XPass": self._reporter.test_xpass(v) else: self._reporter.test_exception((t, v, tr)) if self._post_mortem: pdb.post_mortem(tr) else: self._reporter.test_pass() self._reporter.leaving_filename() def _timeout(self, function, timeout): def callback(x, y): signal.alarm(0) raise Skipped("Timeout") signal.signal(signal.SIGALRM, callback) signal.alarm(timeout) # Set an alarm with a given timeout function() signal.alarm(0) # Disable the alarm def matches(self, x): """ Does the keyword expression self._kw match "x"? Returns True/False. Always returns True if self._kw is "". """ if self._kw == "": return True return x.__name__.find(self._kw) != -1 def get_test_files(self, dir, pat='test_*.py'): """ Returns the list of test_*.py (default) files at or below directory ``dir`` relative to the sympy home directory. """ dir = os.path.join(self._root_dir, convert_to_native_paths([dir])[0]) g = [] for path, folders, files in os.walk(dir): g.extend([os.path.join(path, f) for f in files if fnmatch(f, pat)]) return sorted([sys_normcase(gi) for gi in g]) class SymPyDocTests(object): def __init__(self, reporter, normal): self._count = 0 self._root_dir = sympy_dir self._reporter = reporter self._reporter.root_dir(self._root_dir) self._normal = normal self._testfiles = [] def test(self): """ Runs the tests and returns True if all tests pass, otherwise False. """ self._reporter.start() for f in self._testfiles: try: self.test_file(f) except KeyboardInterrupt: print(" interrupted by user") self._reporter.finish() raise return self._reporter.finish() def test_file(self, filename): clear_cache() from sympy.core.compatibility import StringIO rel_name = filename[len(self._root_dir) + 1:] dirname, file = os.path.split(filename) module = rel_name.replace(os.sep, '.')[:-3] if rel_name.startswith("examples"): # Examples files do not have __init__.py files, # So we have to temporarily extend sys.path to import them sys.path.insert(0, dirname) module = file[:-3] # remove ".py" setup_pprint() try: module = pdoctest._normalize_module(module) tests = SymPyDocTestFinder().find(module) except (SystemExit, KeyboardInterrupt): raise except ImportError: self._reporter.import_error(filename, sys.exc_info()) return finally: if rel_name.startswith("examples"): del sys.path[0] tests = [test for test in tests if len(test.examples) > 0] # By default tests are sorted by alphabetical order by function name. 
# We sort by line number so one can edit the file sequentially from # bottom to top. However, if there are decorated functions, their line # numbers will be too large and for now one must just search for these # by text and function name. tests.sort(key=lambda x: -x.lineno) if not tests: return self._reporter.entering_filename(filename, len(tests)) for test in tests: assert len(test.examples) != 0 # check if there are external dependencies which need to be met if '_doctest_depends_on' in test.globs: if not self._process_dependencies(test.globs['_doctest_depends_on']): self._reporter.test_skip() continue runner = SymPyDocTestRunner(optionflags=pdoctest.ELLIPSIS | pdoctest.NORMALIZE_WHITESPACE | pdoctest.IGNORE_EXCEPTION_DETAIL) runner._checker = SymPyOutputChecker() old = sys.stdout new = StringIO() sys.stdout = new # If the testing is normal, the doctests get importing magic to # provide the global namespace. If not normal (the default) then # then must run on their own; all imports must be explicit within # a function's docstring. Once imported that import will be # available to the rest of the tests in a given function's # docstring (unless clear_globs=True below). if not self._normal: test.globs = {} # if this is uncommented then all the test would get is what # comes by default with a "from sympy import *" #exec('from sympy import *') in test.globs test.globs['print_function'] = print_function try: f, t = runner.run(test, compileflags=future_flags, out=new.write, clear_globs=False) except KeyboardInterrupt: raise finally: sys.stdout = old if f > 0: self._reporter.doctest_fail(test.name, new.getvalue()) else: self._reporter.test_pass() self._reporter.leaving_filename() def get_test_files(self, dir, pat='*.py', init_only=True): """ Returns the list of \*.py files (default) from which docstrings will be tested which are at or below directory ``dir``. By default, only those that have an __init__.py in their parent directory and do not start with ``test_`` will be included. """ def importable(x): """ Checks if given pathname x is an importable module by checking for __init__.py file. Returns True/False. Currently we only test if the __init__.py file exists in the directory with the file "x" (in theory we should also test all the parent dirs). """ init_py = os.path.join(os.path.dirname(x), "__init__.py") return os.path.exists(init_py) dir = os.path.join(self._root_dir, convert_to_native_paths([dir])[0]) g = [] for path, folders, files in os.walk(dir): g.extend([os.path.join(path, f) for f in files if not f.startswith('test_') and fnmatch(f, pat)]) if init_only: # skip files that are not importable (i.e. missing __init__.py) g = [x for x in g if importable(x)] return [sys_normcase(gi) for gi in g] def _process_dependencies(self, deps): """ Returns ``False`` if some dependencies are not met and the test should be skipped otherwise returns ``True``. 
""" executables = deps.get('exe', None) moduledeps = deps.get('modules', None) viewers = deps.get('disable_viewers', None) pyglet = deps.get('pyglet', None) # print deps if executables is not None: for ex in executables: found = find_executable(ex) # print "EXE %s found %s" %(ex, found) if found is None: return False if moduledeps is not None: for extmod in moduledeps: if extmod == 'matplotlib': matplotlib = import_module( 'matplotlib', __import__kwargs={'fromlist': ['pyplot', 'cm', 'collections']}, min_module_version='1.0.0', catch=(RuntimeError,)) if matplotlib is not None: pass # print "EXTMODULE matplotlib version %s found" % \ # matplotlib.__version__ else: # print "EXTMODULE matplotlib > 1.0.0 not found" return False else: # TODO min version support mod = import_module(extmod) if mod is not None: version = "unknown" if hasattr(mod, '__version__'): version = mod.__version__ # print "EXTMODULE %s version %s found" %(extmod, version) else: # print "EXTMODULE %s not found" %(extmod) return False if viewers is not None: import tempfile tempdir = tempfile.mkdtemp() os.environ['PATH'] = '%s:%s' % (tempdir, os.environ['PATH']) if PY3: vw = '#!/usr/bin/env python3\n' \ 'import sys\n' \ 'if len(sys.argv) <= 1:\n' \ ' exit("wrong number of args")\n' else: vw = '#!/usr/bin/env python\n' \ 'import sys\n' \ 'if len(sys.argv) <= 1:\n' \ ' exit("wrong number of args")\n' for viewer in viewers: with open(os.path.join(tempdir, viewer), 'w') as fh: fh.write(vw) # make the file executable os.chmod(os.path.join(tempdir, viewer), stat.S_IREAD | stat.S_IWRITE | stat.S_IXUSR) if pyglet: # monkey-patch pyglet s.t. it does not open a window during # doctesting import pyglet class DummyWindow(object): def __init__(self, *args, **kwargs): self.has_exit=True self.width = 600 self.height = 400 def set_vsync(self, x): pass def switch_to(self): pass def push_handlers(self, x): pass def close(self): pass pyglet.window.Window = DummyWindow return True class SymPyDocTestFinder(DocTestFinder): """ A class used to extract the DocTests that are relevant to a given object, from its docstring and the docstrings of its contained objects. Doctests can currently be extracted from the following object types: modules, functions, classes, methods, staticmethods, classmethods, and properties. Modified from doctest's version by looking harder for code in the case that it looks like the the code comes from a different module. In the case of decorated functions (e.g. @vectorize) they appear to come from a different module (e.g. multidemensional) even though their code is not there. """ def _find(self, tests, obj, name, module, source_lines, globs, seen): """ Find tests for the given object and any contained objects, and add them to ``tests``. """ if self._verbose: print('Finding tests in %s' % name) # If we've already processed this object, then ignore it. if id(obj) in seen: return seen[id(obj)] = 1 # Make sure we don't run doctests for classes outside of sympy, such # as in numpy or scipy. if inspect.isclass(obj): if obj.__module__.split('.')[0] != 'sympy': return # Find a test for this object, and add it to the list of tests. test = self._get_test(obj, name, module, globs, source_lines) if test is not None: tests.append(test) if not self._recurse: return # Look for tests in a module's contained objects. if inspect.ismodule(obj): for rawname, val in obj.__dict__.items(): # Recurse to functions & classes. 
if inspect.isfunction(val) or inspect.isclass(val): # Make sure we don't run doctests functions or classes # from different modules if val.__module__ != module.__name__: continue assert self._from_module(module, val), \ "%s is not in module %s (rawname %s)" % (val, module, rawname) try: valname = '%s.%s' % (name, rawname) self._find(tests, val, valname, module, source_lines, globs, seen) except KeyboardInterrupt: raise # Look for tests in a module's __test__ dictionary. for valname, val in getattr(obj, '__test__', {}).items(): if not isinstance(valname, string_types): raise ValueError("SymPyDocTestFinder.find: __test__ keys " "must be strings: %r" % (type(valname),)) if not (inspect.isfunction(val) or inspect.isclass(val) or inspect.ismethod(val) or inspect.ismodule(val) or isinstance(val, string_types)): raise ValueError("SymPyDocTestFinder.find: __test__ values " "must be strings, functions, methods, " "classes, or modules: %r" % (type(val),)) valname = '%s.__test__.%s' % (name, valname) self._find(tests, val, valname, module, source_lines, globs, seen) # Look for tests in a class's contained objects. if inspect.isclass(obj): for valname, val in obj.__dict__.items(): # Special handling for staticmethod/classmethod. if isinstance(val, staticmethod): val = getattr(obj, valname) if isinstance(val, classmethod): val = getattr(obj, valname).__func__ # Recurse to methods, properties, and nested classes. if (inspect.isfunction(val) or inspect.isclass(val) or isinstance(val, property)): # Make sure we don't run doctests functions or classes # from different modules if isinstance(val, property): if hasattr(val.fget, '__module__'): if val.fget.__module__ != module.__name__: continue else: if val.__module__ != module.__name__: continue assert self._from_module(module, val), \ "%s is not in module %s (valname %s)" % ( val, module, valname) valname = '%s.%s' % (name, valname) self._find(tests, val, valname, module, source_lines, globs, seen) def _get_test(self, obj, name, module, globs, source_lines): """ Return a DocTest for the given object, if it defines a docstring; otherwise, return None. """ lineno = None # Extract the object's docstring. If it doesn't have one, # then return None (no test for this object). if isinstance(obj, string_types): # obj is a string in the case for objects in the polys package. # Note that source_lines is a binary string (compiled polys # modules), which can't be handled by _find_lineno so determine # the line number here. docstring = obj matches = re.findall("line \d+", name) assert len(matches) == 1, \ "string '%s' does not contain lineno " % name # NOTE: this is not the exact linenumber but its better than no # lineno ;) lineno = int(matches[0][5:]) else: try: if obj.__doc__ is None: docstring = '' else: docstring = obj.__doc__ if not isinstance(docstring, string_types): docstring = str(docstring) except (TypeError, AttributeError): docstring = '' # Don't bother if the docstring is empty. if self._exclude_empty and not docstring: return None # check that properties have a docstring because _find_lineno # assumes it if isinstance(obj, property): if obj.fget.__doc__ is None: return None # Find the docstring's location in the file. 
if lineno is None: # handling of properties is not implemented in _find_lineno so do # it here if hasattr(obj, 'func_closure') and obj.func_closure is not None: tobj = obj.func_closure[0].cell_contents elif isinstance(obj, property): tobj = obj.fget else: tobj = obj lineno = self._find_lineno(tobj, source_lines) if lineno is None: return None # Return a DocTest for this object. if module is None: filename = None else: filename = getattr(module, '__file__', module.__name__) if filename[-4:] in (".pyc", ".pyo"): filename = filename[:-1] if hasattr(obj, '_doctest_depends_on'): globs['_doctest_depends_on'] = obj._doctest_depends_on else: globs['_doctest_depends_on'] = {} return self._parser.get_doctest(docstring, globs, name, filename, lineno) class SymPyDocTestRunner(DocTestRunner): """ A class used to run DocTest test cases, and accumulate statistics. The ``run`` method is used to process a single DocTest case. It returns a tuple ``(f, t)``, where ``t`` is the number of test cases tried, and ``f`` is the number of test cases that failed. Modified from the doctest version to not reset the sys.displayhook (see issue 5140). See the docstring of the original DocTestRunner for more information. """ def run(self, test, compileflags=None, out=None, clear_globs=True): """ Run the examples in ``test``, and display the results using the writer function ``out``. The examples are run in the namespace ``test.globs``. If ``clear_globs`` is true (the default), then this namespace will be cleared after the test runs, to help with garbage collection. If you would like to examine the namespace after the test completes, then use ``clear_globs=False``. ``compileflags`` gives the set of flags that should be used by the Python compiler when running the examples. If not specified, then it will default to the set of future-import flags that apply to ``globs``. The output of each example is checked using ``SymPyDocTestRunner.check_output``, and the results are formatted by the ``SymPyDocTestRunner.report_*`` methods. """ self.test = test if compileflags is None: compileflags = pdoctest._extract_future_flags(test.globs) save_stdout = sys.stdout if out is None: out = save_stdout.write sys.stdout = self._fakeout # Patch pdb.set_trace to restore sys.stdout during interactive # debugging (so it's not still redirected to self._fakeout). # Note that the interactive output will go to *our* # save_stdout, even if that's not the real sys.stdout; this # allows us to write test cases for the set_trace behavior. save_set_trace = pdb.set_trace self.debugger = pdoctest._OutputRedirectingPdb(save_stdout) self.debugger.reset() pdb.set_trace = self.debugger.set_trace # Patch linecache.getlines, so we can see the example's source # when we're inside the debugger. self.save_linecache_getlines = pdoctest.linecache.getlines linecache.getlines = self.__patched_linecache_getlines try: test.globs['print_function'] = print_function return self.__run(test, compileflags, out) finally: sys.stdout = save_stdout pdb.set_trace = save_set_trace linecache.getlines = self.save_linecache_getlines if clear_globs: test.globs.clear() # We have to override the name mangled methods. 
SymPyDocTestRunner._SymPyDocTestRunner__patched_linecache_getlines = \ DocTestRunner._DocTestRunner__patched_linecache_getlines SymPyDocTestRunner._SymPyDocTestRunner__run = DocTestRunner._DocTestRunner__run SymPyDocTestRunner._SymPyDocTestRunner__record_outcome = \ DocTestRunner._DocTestRunner__record_outcome class SymPyOutputChecker(pdoctest.OutputChecker): """ Compared to the OutputChecker from the stdlib our OutputChecker class supports numerical comparison of floats occuring in the output of the doctest examples """ def __init__(self): # NOTE OutputChecker is an old-style class with no __init__ method, # so we can't call the base class version of __init__ here got_floats = r'(\d+\.\d*|\.\d+)' # floats in the 'want' string may contain ellipses want_floats = got_floats + r'(\.{3})?' front_sep = r'\s|\+|\-|\*|,' back_sep = front_sep + r'|j|e' fbeg = r'^%s(?=%s|$)' % (got_floats, back_sep) fmidend = r'(?<=%s)%s(?=%s|$)' % (front_sep, got_floats, back_sep) self.num_got_rgx = re.compile(r'(%s|%s)' %(fbeg, fmidend)) fbeg = r'^%s(?=%s|$)' % (want_floats, back_sep) fmidend = r'(?<=%s)%s(?=%s|$)' % (front_sep, want_floats, back_sep) self.num_want_rgx = re.compile(r'(%s|%s)' %(fbeg, fmidend)) def check_output(self, want, got, optionflags): """ Return True iff the actual output from an example (`got`) matches the expected output (`want`). These strings are always considered to match if they are identical; but depending on what option flags the test runner is using, several non-exact match types are also possible. See the documentation for `TestRunner` for more information about option flags. """ # Handle the common case first, for efficiency: # if they're string-identical, always return true. if got == want: return True # TODO parse integers as well ? # Parse floats and compare them. If some of the parsed floats contain # ellipses, skip the comparison. matches = self.num_got_rgx.finditer(got) numbers_got = [match.group(1) for match in matches] # list of strs matches = self.num_want_rgx.finditer(want) numbers_want = [match.group(1) for match in matches] # list of strs if len(numbers_got) != len(numbers_want): return False if len(numbers_got) > 0: nw_ = [] for ng, nw in zip(numbers_got, numbers_want): if '...' in nw: nw_.append(ng) continue else: nw_.append(nw) if abs(float(ng)-float(nw)) > 1e-5: return False got = self.num_got_rgx.sub(r'%s', got) got = got % tuple(nw_) # <BLANKLINE> can be used as a special sequence to signify a # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used. if not (optionflags & pdoctest.DONT_ACCEPT_BLANKLINE): # Replace <BLANKLINE> in want with a blank line. want = re.sub('(?m)^%s\s*?$' % re.escape(pdoctest.BLANKLINE_MARKER), '', want) # If a line in got contains only spaces, then remove the # spaces. got = re.sub('(?m)^\s*?$', '', got) if got == want: return True # This flag causes doctest to ignore any differences in the # contents of whitespace strings. Note that this can be used # in conjunction with the ELLIPSIS flag. if optionflags & pdoctest.NORMALIZE_WHITESPACE: got = ' '.join(got.split()) want = ' '.join(want.split()) if got == want: return True # The ELLIPSIS flag says to let the sequence "..." in `want` # match any substring in `got`. if optionflags & pdoctest.ELLIPSIS: if pdoctest._ellipsis_match(want, got): return True # We didn't find any match; return false. return False class Reporter(object): """ Parent class for all reporters. """ pass class PyTestReporter(Reporter): """ Py.test like reporter. Should produce output identical to py.test. 
""" def __init__(self, verbose=False, tb="short", colors=True, force_colors=False, split=None): self._verbose = verbose self._tb_style = tb self._colors = colors self._force_colors = force_colors self._xfailed = 0 self._xpassed = [] self._failed = [] self._failed_doctest = [] self._passed = 0 self._skipped = 0 self._exceptions = [] self._terminal_width = None self._default_width = 80 self._split = split # this tracks the x-position of the cursor (useful for positioning # things on the screen), without the need for any readline library: self._write_pos = 0 self._line_wrap = False def root_dir(self, dir): self._root_dir = dir @property def terminal_width(self): if self._terminal_width is not None: return self._terminal_width def findout_terminal_width(): if sys.platform == "win32": # Windows support is based on: # # http://code.activestate.com/recipes/ # 440694-determine-size-of-console-window-on-windows/ from ctypes import windll, create_string_buffer h = windll.kernel32.GetStdHandle(-12) csbi = create_string_buffer(22) res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi) if res: import struct (_, _, _, _, _, left, _, right, _, _, _) = \ struct.unpack("hhhhHhhhhhh", csbi.raw) return right - left else: return self._default_width if hasattr(sys.stdout, 'isatty') and not sys.stdout.isatty(): return self._default_width # leave PIPEs alone try: process = subprocess.Popen(['stty', '-a'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout = process.stdout.read() if PY3: stdout = stdout.decode("utf-8") except (OSError, IOError): pass else: # We support the following output formats from stty: # # 1) Linux -> columns 80 # 2) OS X -> 80 columns # 3) Solaris -> columns = 80 re_linux = r"columns\s+(?P<columns>\d+);" re_osx = r"(?P<columns>\d+)\s*columns;" re_solaris = r"columns\s+=\s+(?P<columns>\d+);" for regex in (re_linux, re_osx, re_solaris): match = re.search(regex, stdout) if match is not None: columns = match.group('columns') try: return int(columns) except ValueError: pass return self._default_width width = findout_terminal_width() self._terminal_width = width return width def write(self, text, color="", align="left", width=None, force_colors=False): """ Prints a text on the screen. It uses sys.stdout.write(), so no readline library is necessary. Parameters ========== color : choose from the colors below, "" means default color align : "left"/"right", "left" is a normal print, "right" is aligned on the right-hand side of the screen, filled with spaces if necessary width : the screen width """ color_templates = ( ("Black", "0;30"), ("Red", "0;31"), ("Green", "0;32"), ("Brown", "0;33"), ("Blue", "0;34"), ("Purple", "0;35"), ("Cyan", "0;36"), ("LightGray", "0;37"), ("DarkGray", "1;30"), ("LightRed", "1;31"), ("LightGreen", "1;32"), ("Yellow", "1;33"), ("LightBlue", "1;34"), ("LightPurple", "1;35"), ("LightCyan", "1;36"), ("White", "1;37"), ) colors = {} for name, value in color_templates: colors[name] = value c_normal = '\033[0m' c_color = '\033[%sm' if width is None: width = self.terminal_width if align == "right": if self._write_pos + len(text) > width: # we don't fit on the current line, create a new line self.write("\n") self.write(" "*(width - self._write_pos - len(text))) if not self._force_colors and hasattr(sys.stdout, 'isatty') and not \ sys.stdout.isatty(): # the stdout is not a terminal, this for example happens if the # output is piped to less, e.g. "bin/test | less". In this case, # the terminal control sequences would be printed verbatim, so # don't use any colors. 
color = "" elif sys.platform == "win32": # Windows consoles don't support ANSI escape sequences color = "" elif not self._colors: color = "" if self._line_wrap: if text[0] != "\n": sys.stdout.write("\n") # Avoid UnicodeEncodeError when printing out test failures if PY3 and IS_WINDOWS: text = text.encode('raw_unicode_escape').decode('utf8', 'ignore') elif PY3 and not sys.stdout.encoding.lower().startswith('utf'): text = text.encode(sys.stdout.encoding, 'backslashreplace' ).decode(sys.stdout.encoding) if color == "": sys.stdout.write(text) else: sys.stdout.write("%s%s%s" % (c_color % colors[color], text, c_normal)) sys.stdout.flush() l = text.rfind("\n") if l == -1: self._write_pos += len(text) else: self._write_pos = len(text) - l - 1 self._line_wrap = self._write_pos >= width self._write_pos %= width def write_center(self, text, delim="="): width = self.terminal_width if text != "": text = " %s " % text idx = (width - len(text)) // 2 t = delim*idx + text + delim*(width - idx - len(text)) self.write(t + "\n") def write_exception(self, e, val, tb): t = traceback.extract_tb(tb) # remove the first item, as that is always runtests.py t = t[1:] t = traceback.format_list(t) self.write("".join(t)) t = traceback.format_exception_only(e, val) self.write("".join(t)) def start(self, seed=None, msg="test process starts"): self.write_center(msg) executable = sys.executable v = tuple(sys.version_info) python_version = "%s.%s.%s-%s-%s" % v implementation = platform.python_implementation() if implementation == 'PyPy': implementation += " %s.%s.%s-%s-%s" % sys.pypy_version_info self.write("executable: %s (%s) [%s]\n" % (executable, python_version, implementation)) from .misc import ARCH self.write("architecture: %s\n" % ARCH) from sympy.core.cache import USE_CACHE self.write("cache: %s\n" % USE_CACHE) from sympy.core.compatibility import GROUND_TYPES, HAS_GMPY version = '' if GROUND_TYPES =='gmpy': if HAS_GMPY == 1: import gmpy elif HAS_GMPY == 2: import gmpy2 as gmpy version = gmpy.version() self.write("ground types: %s %s\n" % (GROUND_TYPES, version)) if seed is not None: self.write("random seed: %d\n" % seed) from .misc import HASH_RANDOMIZATION self.write("hash randomization: ") hash_seed = os.getenv("PYTHONHASHSEED") or '0' if HASH_RANDOMIZATION and (hash_seed == "random" or int(hash_seed)): self.write("on (PYTHONHASHSEED=%s)\n" % hash_seed) else: self.write("off\n") if self._split: self.write("split: %s\n" % self._split) self.write('\n') self._t_start = clock() def finish(self): self._t_end = clock() self.write("\n") global text, linelen text = "tests finished: %d passed, " % self._passed linelen = len(text) def add_text(mytext): global text, linelen """Break new text if too long.""" if linelen + len(mytext) > self.terminal_width: text += '\n' linelen = 0 text += mytext linelen += len(mytext) if len(self._failed) > 0: add_text("%d failed, " % len(self._failed)) if len(self._failed_doctest) > 0: add_text("%d failed, " % len(self._failed_doctest)) if self._skipped > 0: add_text("%d skipped, " % self._skipped) if self._xfailed > 0: add_text("%d expected to fail, " % self._xfailed) if len(self._xpassed) > 0: add_text("%d expected to fail but passed, " % len(self._xpassed)) if len(self._exceptions) > 0: add_text("%d exceptions, " % len(self._exceptions)) add_text("in %.2f seconds" % (self._t_end - self._t_start)) if len(self._xpassed) > 0: self.write_center("xpassed tests", "_") for e in self._xpassed: self.write("%s: %s\n" % (e[0], e[1])) self.write("\n") if self._tb_style != "no" and 
len(self._exceptions) > 0: #self.write_center("These tests raised an exception", "_") for e in self._exceptions: filename, f, (t, val, tb) = e self.write_center("", "_") if f is None: s = "%s" % filename else: s = "%s:%s" % (filename, f.__name__) self.write_center(s, "_") self.write_exception(t, val, tb) self.write("\n") if self._tb_style != "no" and len(self._failed) > 0: #self.write_center("Failed", "_") for e in self._failed: filename, f, (t, val, tb) = e self.write_center("", "_") self.write_center("%s:%s" % (filename, f.__name__), "_") self.write_exception(t, val, tb) self.write("\n") if self._tb_style != "no" and len(self._failed_doctest) > 0: #self.write_center("Failed", "_") for e in self._failed_doctest: filename, msg = e self.write_center("", "_") self.write_center("%s" % filename, "_") self.write(msg) self.write("\n") self.write_center(text) ok = len(self._failed) == 0 and len(self._exceptions) == 0 and \ len(self._failed_doctest) == 0 if not ok: self.write("DO *NOT* COMMIT!\n") return ok def entering_filename(self, filename, n): rel_name = filename[len(self._root_dir) + 1:] self._active_file = rel_name self._active_file_error = False self.write(rel_name) self.write("[%d] " % n) def leaving_filename(self): self.write(" ") if self._active_file_error: self.write("[FAIL]", "Red", align="right") else: self.write("[OK]", "Green", align="right") self.write("\n") if self._verbose: self.write("\n") def entering_test(self, f): self._active_f = f if self._verbose: self.write("\n" + f.__name__ + " ") def test_xfail(self): self._xfailed += 1 self.write("f", "Green") def test_xpass(self, v): message = str(v) self._xpassed.append((self._active_file, message)) self.write("X", "Green") def test_fail(self, exc_info): self._failed.append((self._active_file, self._active_f, exc_info)) self.write("F", "Red") self._active_file_error = True def doctest_fail(self, name, error_msg): # the first line contains "******", remove it: error_msg = "\n".join(error_msg.split("\n")[1:]) self._failed_doctest.append((name, error_msg)) self.write("F", "Red") self._active_file_error = True def test_pass(self, char="."): self._passed += 1 if self._verbose: self.write("ok", "Green") else: self.write(char, "Green") def test_skip(self, v=None): char = "s" self._skipped += 1 if v is not None: message = str(v) if message == "KeyboardInterrupt": char = "K" elif message == "Timeout": char = "T" elif message == "Slow": char = "w" self.write(char, "Blue") if self._verbose: self.write(" - ", "Blue") if v is not None: self.write(message, "Blue") def test_exception(self, exc_info): self._exceptions.append((self._active_file, self._active_f, exc_info)) self.write("E", "Red") self._active_file_error = True def import_error(self, filename, exc_info): self._exceptions.append((filename, None, exc_info)) rel_name = filename[len(self._root_dir) + 1:] self.write(rel_name) self.write("[?] Failed to import", "Red") self.write(" ") self.write("[FAIL]", "Red", align="right") self.write("\n") sympy_dir = get_sympy_dir()
36.925387
118
0.574217
73cd2839e8980df0fbeacb2a5c17ef09c920938f
1,716
py
Python
detectron/utils/coordinator.py
singhnarotam1997/Detectron
ecc6b25fc8869486126f1384b4e6e042a718bd5b
[ "Apache-2.0" ]
null
null
null
detectron/utils/coordinator.py
singhnarotam1997/Detectron
ecc6b25fc8869486126f1384b4e6e042a718bd5b
[ "Apache-2.0" ]
null
null
null
detectron/utils/coordinator.py
singhnarotam1997/Detectron
ecc6b25fc8869486126f1384b4e6e042a718bd5b
[ "Apache-2.0" ]
null
null
null
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
##############################################################################

"""Coordinated access to a shared multithreading/processing queue."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import contextlib
import logging
import queue as Queue
import threading
import traceback

log = logging.getLogger(__name__)


class Coordinator(object):
    def __init__(self):
        self._event = threading.Event()

    def request_stop(self):
        log.debug('Coordinator stopping')
        self._event.set()

    def should_stop(self):
        return self._event.is_set()

    def wait_for_stop(self):
        return self._event.wait()

    @contextlib.contextmanager
    def stop_on_exception(self):
        try:
            yield
        except Exception:
            if not self.should_stop():
                traceback.print_exc()
                self.request_stop()


def coordinated_get(coordinator, queue):
    while not coordinator.should_stop():
        try:
            return queue.get(block=True, timeout=1.0)
        except Queue.Empty:
            continue
    raise Exception('Coordinator stopped during get()')


def coordinated_put(coordinator, queue, element):
    while not coordinator.should_stop():
        try:
            queue.put(element, block=True, timeout=1.0)
            return
        except Queue.Full:
            continue
    raise Exception('Coordinator stopped during put()')
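A brief, hypothetical usage sketch for the coordinator record above (it is not part of the original Detectron file): a producer thread feeds a bounded queue through coordinated_put while the main thread drains it with coordinated_get, and request_stop() ends both sides. The queue size and item values are illustrative assumptions.

# Hypothetical usage sketch, assuming Coordinator, coordinated_get and
# coordinated_put from the record above are in scope.
import queue
import threading

coord = Coordinator()
work_queue = queue.Queue(maxsize=4)  # assumed capacity, for illustration only

def producer():
    # Any exception here prints a traceback and asks everyone to stop.
    with coord.stop_on_exception():
        for item in range(10):
            coordinated_put(coord, work_queue, item)

worker = threading.Thread(target=producer)
worker.start()
for _ in range(10):
    print(coordinated_get(coord, work_queue))  # blocks in 1s slices until data arrives
coord.request_stop()
worker.join()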
26
78
0.644522
73cd365f99f40a27fa9ad1f4eff2257ef4db734e
23,781
py
Python
justap_server_sdk_python/models/v1_extra_alipay_page.py
justapnet/justap-server-sdk-python
2d3110c6447833334fa2f7e93ffa63e06913df17
[ "Apache-2.0" ]
null
null
null
justap_server_sdk_python/models/v1_extra_alipay_page.py
justapnet/justap-server-sdk-python
2d3110c6447833334fa2f7e93ffa63e06913df17
[ "Apache-2.0" ]
null
null
null
justap_server_sdk_python/models/v1_extra_alipay_page.py
justapnet/justap-server-sdk-python
2d3110c6447833334fa2f7e93ffa63e06913df17
[ "Apache-2.0" ]
null
null
null
# coding: utf-8 """ Justap API 欢迎阅读 Justap Api 文档 Justap 是为移动端应用和PC端应用打造的下一代聚合支付SAAS服务平台,通过一个 SDK 即可快速的支持各种形式的应用,并且一次接口完成多个不同支付渠道的接入。平台除了支持服务商子商户模式,同时还对商家自有商户(即自己前往微信、支付宝等机构开户)提供了完整的支持。 感谢您的支持,我们将不断探索,为您提供更优质的服务!如需技术支持可前往商户中心提交工单,支持工程师会尽快与您取得联系! # 文档说明 采用 REST 风格设计。所有接口请求地址都是可预期的以及面向资源的。使用规范的 HTTP 响应代码来表示请求结果的正确或错误信息。使用 HTTP 内置的特性,如 HTTP Authentication 和 HTTP 请求方法让接口易于理解。 ## HTTP 状态码 HTTP 状态码可以用于表明服务的状态。服务器返回的 HTTP 状态码遵循 [RFC 7231](http://tools.ietf.org/html/rfc7231#section-6) 和 [IANA Status Code Registry](http://www.iana.org/assignments/http-status-codes/http-status-codes.xhtml) 标准。 ## 认证 在调用 API 时,必须提供 API Key 作为每个请求的身份验证。你可以在管理平台内管理你的 API Key。API Key 是商户在系统中的身份标识,请安全存储,确保其不要被泄露。如需获取或更新 API Key ,也可以在商户中心内进行操作。 Api Key 在使用自定义的 HTTP Header 进行传递。 ``` X-Justap-Api-Key ``` API Key 分为 live 和 test 两种模式。分别对应真实交易环境和模拟测试交易环境并且可以实时切换。 测试模式下的 API Key 会模拟交易等请求,但是不会产生任何真实交易行为和费用,便于调试和接入。 **⚠️ 注意**:在使用 live 模式前,需要先前往 `商户中心 -> 应用设置 -> 开发参数` 开启 live 模式。 <SecurityDefinitions /> ## 请求类型 所有的 API 请求只支持 HTTPS 方式调用。 ## 路由参数 路由参数是指出现在 URL 路径中的可变变量。在本文档中,使用 `{}` 包裹的部分。 例如: `{charge_id}`,在实际使用是,需要将 `{charge_id}` 替换为实际值 `charge_8a8sdf888888` ## MIME Type MIME 类型用于指示服务器返回的数据格式。服务器目前默认采用 `application/json`。 例如: ``` application/json ``` ## 错误 服务器使用 HTTP 状态码 (status code) 来表明一个 API 请求的成功或失败状态。返回 HTTP 2XX 表明 API 请求成功。返回 HTTP 4XX 表明在请求 API 时提供了错误信息,例如参数缺失、参数错误、支付渠道错误等。返回 HTTP 5XX 表明 API 请求时,服务器发生了错误。 在返回错误的状态码时,回同时返回一些错误信息提示出错原因。 具体的错误码我们正在整理当中。 ## 分页 所有的 Justap 资源都可以被 list API 方法支持,例如分页 charges 和 refunds。这些 list API 方法拥有相同的数据结构。Justap 是基于 cursor 的分页机制,使用参数 starting_after 来决定列表从何处开始,使用参数 ending_before 来决定列表从何处结束。 ## 参数说明 请求参数中包含的以下字段释义请参考: - REQUIRED: 必填参数 - OPTIONAL: 可选参数,可以在请求当前接口时按需传入 - CONDITIONAL: 在某些条件下必传 - RESPONSE-ONLY: 标示该参数仅在接口返回参数中出现,调用 API 时无需传入 # 如何保证幂等性 如果发生请求超时或服务器内部错误,客户端可能会尝试重发请求。您可以在请求中设置 ClientToken 参数避免多次重试带来重复操作的问题。 ## 什么是幂等性 在数学计算或者计算机科学中,幂等性(idempotence)是指相同操作或资源在一次或多次请求中具有同样效果的作用。幂等性是在分布式系统设计中具有十分重要的地位。 ## 保证幂等性 通常情况下,客户端只需要在500(InternalErrorInternalError)或503(ServiceUnavailable)错误,或者无法获取响应结果时重试。充实时您可以从客户端生成一个参数值不超过64个的ASCII字符,并将值赋予 ClientToken,保证重试请求的幂等性。 ## ClientToken 详解 ClientToken参数的详细信息如下所示。 - ClientToken 是一个由客户端生成的唯一的、大小写敏感、不超过64个ASCII字符的字符串。例如,`ClientToken=123e4567-e89b-12d3-a456-426655440000`。 - 如果您提供了一个已经使用过的 ClientToken,但其他请求参数**有变化**,则服务器会返回 IdempotentParameterMismatch 的错误代码。 - 如果您提供了一个已经使用过的 ClientToken,且其他请求参数**不变**,则服务器会尝试返回 ClientToken 对应的记录。 ## API列表 以下为部分包含了 ClientToken 参数的API,供您参考。具体哪些API支持 ClientToken 参数请以各 API 文档为准,此处不一一列举。 - [申请退款接口](https://www.justap.cn/docs#operation/TradeService_Refunds) # 签名 为保证安全,JUSTAP 所有接口均需要对请求进行签名。服务器收到请求后进行签名的验证。如果签名验证不通过,将会拒绝处理请求,并返回 401 Unauthorized。 签名算法: ``` base64Encode(hamc-sha256(md5(请求 body + 请求时间戳 + 一次性随机字符串) + 一次性随机字符串)) ``` ## 准备 首先需要在 Justap 创建一个应用,商户需要生成一对 RSA 密钥对,并将公钥配置到 `商户中心 -> 开发配置`。 RSA 可以使用支付宝提供的 [密钥生成工具](https://opendocs.alipay.com/common/02kipl) 来生成。 商户在使用时,可以按照下述步骤生成请求的签名。 ## 算法描述: - 在请求发送前,取完整的**请求 body** - 生成一个随机的32位字符串,得到 **一次性随机字符串** - 获取当前时间的时间戳,得到 **请求时间戳** - 在请求字符串后面拼接上 **请求时间戳** 和 **一次性随机字符串**,得到 **待 Hash 字符串** - 对 **待 Hash 字符串** 计算 md5,得到 **待签名字符串** - **待签名字符串** 后面拼接上 一次性随机字符串,得到完整的 **待签名字符串** - 使用商户 RSA 私钥,对 **待签名字符串** 计算签名,并对 结果 进行 base64 编码,即可得到 **签名** ## 设置HTTP头 Justap 要求请求通过 自定义头部 来传递签名。具体定义如下: ``` X-Justap-Signature: 签名 X-Justap-Request-Time: 请求时间戳 X-Justap-Nonce: 一次性随机字符串 X-Justap-Body-Hash: 待签名字符串 ``` 具体的签名算法实现,可参考我们提供的各语言 SDK。 # WebHooks # noqa: E501 OpenAPI spec version: 1.0 Contact: support@justap.net Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 
import six from justap_server_sdk_python.configuration import Configuration class V1ExtraAlipayPage(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'agreement_sign_params': 'V1ExtraAlipayAgreementSignParams', 'business_params': 'int', 'disable_pay_channels': 'str', 'enable_pay_channels': 'str', 'ext_user_info': 'V1ExtraAlipayExtUserInfo', 'extend_params': 'V1ExtraAlipayExtendParams', 'goods_detail': 'list[V1ExtraAlipayGoodsDetail]', 'goods_type': 'str', 'integration_type': 'int', 'invoice_info': 'V1ExtraAlipayInvoiceInfo', 'merchant_trade_id': 'str', 'pay_url': 'str', 'promo_params': 'str', 'qr_pay_mode': 'str', 'qrcode_width': 'int', 'request_from_url': 'int', 'royalty_info': 'V1ExtraAlipayRoyaltyInfo', 'seller_id': 'str', 'settle_info': 'V1ExtraAlipaySettleInfo', 'store_id': 'str', 'sub_merchant': 'V1ExtraAlipaySubMerchant', 'time_expire': 'int' } attribute_map = { 'agreement_sign_params': 'agreement_sign_params', 'business_params': 'business_params', 'disable_pay_channels': 'disable_pay_channels', 'enable_pay_channels': 'enable_pay_channels', 'ext_user_info': 'ext_user_info', 'extend_params': 'extend_params', 'goods_detail': 'goods_detail', 'goods_type': 'goods_type', 'integration_type': 'integration_type', 'invoice_info': 'invoice_info', 'merchant_trade_id': 'merchant_trade_id', 'pay_url': 'pay_url', 'promo_params': 'promo_params', 'qr_pay_mode': 'qr_pay_mode', 'qrcode_width': 'qrcode_width', 'request_from_url': 'request_from_url', 'royalty_info': 'royalty_info', 'seller_id': 'seller_id', 'settle_info': 'settle_info', 'store_id': 'store_id', 'sub_merchant': 'sub_merchant', 'time_expire': 'time_expire' } def __init__(self, agreement_sign_params=None, business_params=None, disable_pay_channels=None, enable_pay_channels=None, ext_user_info=None, extend_params=None, goods_detail=None, goods_type=None, integration_type=None, invoice_info=None, merchant_trade_id=None, pay_url=None, promo_params=None, qr_pay_mode=None, qrcode_width=None, request_from_url=None, royalty_info=None, seller_id=None, settle_info=None, store_id=None, sub_merchant=None, time_expire=None, _configuration=None): # noqa: E501 """V1ExtraAlipayPage - a model defined in Swagger""" # noqa: E501 if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._agreement_sign_params = None self._business_params = None self._disable_pay_channels = None self._enable_pay_channels = None self._ext_user_info = None self._extend_params = None self._goods_detail = None self._goods_type = None self._integration_type = None self._invoice_info = None self._merchant_trade_id = None self._pay_url = None self._promo_params = None self._qr_pay_mode = None self._qrcode_width = None self._request_from_url = None self._royalty_info = None self._seller_id = None self._settle_info = None self._store_id = None self._sub_merchant = None self._time_expire = None self.discriminator = None if agreement_sign_params is not None: self.agreement_sign_params = agreement_sign_params if business_params is not None: self.business_params = business_params if disable_pay_channels is not None: self.disable_pay_channels = disable_pay_channels if enable_pay_channels is not None: self.enable_pay_channels = enable_pay_channels if ext_user_info is not 
None: self.ext_user_info = ext_user_info if extend_params is not None: self.extend_params = extend_params if goods_detail is not None: self.goods_detail = goods_detail if goods_type is not None: self.goods_type = goods_type if integration_type is not None: self.integration_type = integration_type if invoice_info is not None: self.invoice_info = invoice_info if merchant_trade_id is not None: self.merchant_trade_id = merchant_trade_id if pay_url is not None: self.pay_url = pay_url if promo_params is not None: self.promo_params = promo_params if qr_pay_mode is not None: self.qr_pay_mode = qr_pay_mode if qrcode_width is not None: self.qrcode_width = qrcode_width if request_from_url is not None: self.request_from_url = request_from_url if royalty_info is not None: self.royalty_info = royalty_info if seller_id is not None: self.seller_id = seller_id if settle_info is not None: self.settle_info = settle_info if store_id is not None: self.store_id = store_id if sub_merchant is not None: self.sub_merchant = sub_merchant if time_expire is not None: self.time_expire = time_expire @property def agreement_sign_params(self): """Gets the agreement_sign_params of this V1ExtraAlipayPage. # noqa: E501 :return: The agreement_sign_params of this V1ExtraAlipayPage. # noqa: E501 :rtype: V1ExtraAlipayAgreementSignParams """ return self._agreement_sign_params @agreement_sign_params.setter def agreement_sign_params(self, agreement_sign_params): """Sets the agreement_sign_params of this V1ExtraAlipayPage. :param agreement_sign_params: The agreement_sign_params of this V1ExtraAlipayPage. # noqa: E501 :type: V1ExtraAlipayAgreementSignParams """ self._agreement_sign_params = agreement_sign_params @property def business_params(self): """Gets the business_params of this V1ExtraAlipayPage. # noqa: E501 :return: The business_params of this V1ExtraAlipayPage. # noqa: E501 :rtype: int """ return self._business_params @business_params.setter def business_params(self, business_params): """Sets the business_params of this V1ExtraAlipayPage. :param business_params: The business_params of this V1ExtraAlipayPage. # noqa: E501 :type: int """ self._business_params = business_params @property def disable_pay_channels(self): """Gets the disable_pay_channels of this V1ExtraAlipayPage. # noqa: E501 :return: The disable_pay_channels of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._disable_pay_channels @disable_pay_channels.setter def disable_pay_channels(self, disable_pay_channels): """Sets the disable_pay_channels of this V1ExtraAlipayPage. :param disable_pay_channels: The disable_pay_channels of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._disable_pay_channels = disable_pay_channels @property def enable_pay_channels(self): """Gets the enable_pay_channels of this V1ExtraAlipayPage. # noqa: E501 :return: The enable_pay_channels of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._enable_pay_channels @enable_pay_channels.setter def enable_pay_channels(self, enable_pay_channels): """Sets the enable_pay_channels of this V1ExtraAlipayPage. :param enable_pay_channels: The enable_pay_channels of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._enable_pay_channels = enable_pay_channels @property def ext_user_info(self): """Gets the ext_user_info of this V1ExtraAlipayPage. # noqa: E501 :return: The ext_user_info of this V1ExtraAlipayPage. 
# noqa: E501 :rtype: V1ExtraAlipayExtUserInfo """ return self._ext_user_info @ext_user_info.setter def ext_user_info(self, ext_user_info): """Sets the ext_user_info of this V1ExtraAlipayPage. :param ext_user_info: The ext_user_info of this V1ExtraAlipayPage. # noqa: E501 :type: V1ExtraAlipayExtUserInfo """ self._ext_user_info = ext_user_info @property def extend_params(self): """Gets the extend_params of this V1ExtraAlipayPage. # noqa: E501 :return: The extend_params of this V1ExtraAlipayPage. # noqa: E501 :rtype: V1ExtraAlipayExtendParams """ return self._extend_params @extend_params.setter def extend_params(self, extend_params): """Sets the extend_params of this V1ExtraAlipayPage. :param extend_params: The extend_params of this V1ExtraAlipayPage. # noqa: E501 :type: V1ExtraAlipayExtendParams """ self._extend_params = extend_params @property def goods_detail(self): """Gets the goods_detail of this V1ExtraAlipayPage. # noqa: E501 :return: The goods_detail of this V1ExtraAlipayPage. # noqa: E501 :rtype: list[V1ExtraAlipayGoodsDetail] """ return self._goods_detail @goods_detail.setter def goods_detail(self, goods_detail): """Sets the goods_detail of this V1ExtraAlipayPage. :param goods_detail: The goods_detail of this V1ExtraAlipayPage. # noqa: E501 :type: list[V1ExtraAlipayGoodsDetail] """ self._goods_detail = goods_detail @property def goods_type(self): """Gets the goods_type of this V1ExtraAlipayPage. # noqa: E501 :return: The goods_type of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._goods_type @goods_type.setter def goods_type(self, goods_type): """Sets the goods_type of this V1ExtraAlipayPage. :param goods_type: The goods_type of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._goods_type = goods_type @property def integration_type(self): """Gets the integration_type of this V1ExtraAlipayPage. # noqa: E501 :return: The integration_type of this V1ExtraAlipayPage. # noqa: E501 :rtype: int """ return self._integration_type @integration_type.setter def integration_type(self, integration_type): """Sets the integration_type of this V1ExtraAlipayPage. :param integration_type: The integration_type of this V1ExtraAlipayPage. # noqa: E501 :type: int """ self._integration_type = integration_type @property def invoice_info(self): """Gets the invoice_info of this V1ExtraAlipayPage. # noqa: E501 :return: The invoice_info of this V1ExtraAlipayPage. # noqa: E501 :rtype: V1ExtraAlipayInvoiceInfo """ return self._invoice_info @invoice_info.setter def invoice_info(self, invoice_info): """Sets the invoice_info of this V1ExtraAlipayPage. :param invoice_info: The invoice_info of this V1ExtraAlipayPage. # noqa: E501 :type: V1ExtraAlipayInvoiceInfo """ self._invoice_info = invoice_info @property def merchant_trade_id(self): """Gets the merchant_trade_id of this V1ExtraAlipayPage. # noqa: E501 :return: The merchant_trade_id of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._merchant_trade_id @merchant_trade_id.setter def merchant_trade_id(self, merchant_trade_id): """Sets the merchant_trade_id of this V1ExtraAlipayPage. :param merchant_trade_id: The merchant_trade_id of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._merchant_trade_id = merchant_trade_id @property def pay_url(self): """Gets the pay_url of this V1ExtraAlipayPage. # noqa: E501 :return: The pay_url of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._pay_url @pay_url.setter def pay_url(self, pay_url): """Sets the pay_url of this V1ExtraAlipayPage. 
:param pay_url: The pay_url of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._pay_url = pay_url @property def promo_params(self): """Gets the promo_params of this V1ExtraAlipayPage. # noqa: E501 :return: The promo_params of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._promo_params @promo_params.setter def promo_params(self, promo_params): """Sets the promo_params of this V1ExtraAlipayPage. :param promo_params: The promo_params of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._promo_params = promo_params @property def qr_pay_mode(self): """Gets the qr_pay_mode of this V1ExtraAlipayPage. # noqa: E501 :return: The qr_pay_mode of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._qr_pay_mode @qr_pay_mode.setter def qr_pay_mode(self, qr_pay_mode): """Sets the qr_pay_mode of this V1ExtraAlipayPage. :param qr_pay_mode: The qr_pay_mode of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._qr_pay_mode = qr_pay_mode @property def qrcode_width(self): """Gets the qrcode_width of this V1ExtraAlipayPage. # noqa: E501 :return: The qrcode_width of this V1ExtraAlipayPage. # noqa: E501 :rtype: int """ return self._qrcode_width @qrcode_width.setter def qrcode_width(self, qrcode_width): """Sets the qrcode_width of this V1ExtraAlipayPage. :param qrcode_width: The qrcode_width of this V1ExtraAlipayPage. # noqa: E501 :type: int """ self._qrcode_width = qrcode_width @property def request_from_url(self): """Gets the request_from_url of this V1ExtraAlipayPage. # noqa: E501 :return: The request_from_url of this V1ExtraAlipayPage. # noqa: E501 :rtype: int """ return self._request_from_url @request_from_url.setter def request_from_url(self, request_from_url): """Sets the request_from_url of this V1ExtraAlipayPage. :param request_from_url: The request_from_url of this V1ExtraAlipayPage. # noqa: E501 :type: int """ self._request_from_url = request_from_url @property def royalty_info(self): """Gets the royalty_info of this V1ExtraAlipayPage. # noqa: E501 :return: The royalty_info of this V1ExtraAlipayPage. # noqa: E501 :rtype: V1ExtraAlipayRoyaltyInfo """ return self._royalty_info @royalty_info.setter def royalty_info(self, royalty_info): """Sets the royalty_info of this V1ExtraAlipayPage. :param royalty_info: The royalty_info of this V1ExtraAlipayPage. # noqa: E501 :type: V1ExtraAlipayRoyaltyInfo """ self._royalty_info = royalty_info @property def seller_id(self): """Gets the seller_id of this V1ExtraAlipayPage. # noqa: E501 :return: The seller_id of this V1ExtraAlipayPage. # noqa: E501 :rtype: str """ return self._seller_id @seller_id.setter def seller_id(self, seller_id): """Sets the seller_id of this V1ExtraAlipayPage. :param seller_id: The seller_id of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._seller_id = seller_id @property def settle_info(self): """Gets the settle_info of this V1ExtraAlipayPage. # noqa: E501 :return: The settle_info of this V1ExtraAlipayPage. # noqa: E501 :rtype: V1ExtraAlipaySettleInfo """ return self._settle_info @settle_info.setter def settle_info(self, settle_info): """Sets the settle_info of this V1ExtraAlipayPage. :param settle_info: The settle_info of this V1ExtraAlipayPage. # noqa: E501 :type: V1ExtraAlipaySettleInfo """ self._settle_info = settle_info @property def store_id(self): """Gets the store_id of this V1ExtraAlipayPage. # noqa: E501 :return: The store_id of this V1ExtraAlipayPage. 
# noqa: E501 :rtype: str """ return self._store_id @store_id.setter def store_id(self, store_id): """Sets the store_id of this V1ExtraAlipayPage. :param store_id: The store_id of this V1ExtraAlipayPage. # noqa: E501 :type: str """ self._store_id = store_id @property def sub_merchant(self): """Gets the sub_merchant of this V1ExtraAlipayPage. # noqa: E501 :return: The sub_merchant of this V1ExtraAlipayPage. # noqa: E501 :rtype: V1ExtraAlipaySubMerchant """ return self._sub_merchant @sub_merchant.setter def sub_merchant(self, sub_merchant): """Sets the sub_merchant of this V1ExtraAlipayPage. :param sub_merchant: The sub_merchant of this V1ExtraAlipayPage. # noqa: E501 :type: V1ExtraAlipaySubMerchant """ self._sub_merchant = sub_merchant @property def time_expire(self): """Gets the time_expire of this V1ExtraAlipayPage. # noqa: E501 :return: The time_expire of this V1ExtraAlipayPage. # noqa: E501 :rtype: int """ return self._time_expire @time_expire.setter def time_expire(self, time_expire): """Sets the time_expire of this V1ExtraAlipayPage. :param time_expire: The time_expire of this V1ExtraAlipayPage. # noqa: E501 :type: int """ self._time_expire = time_expire def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(V1ExtraAlipayPage, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, V1ExtraAlipayPage): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, V1ExtraAlipayPage): return True return self.to_dict() != other.to_dict()
35.49403
3,404
0.660233
73cd3984a69a0edde2fc050d3dc2629295364576
24,234
py
Python
recognition/partial_fc/mxnet/symbol/symbol_utils.py
dwhite54/insightface
ea172e4921c3960c0684404afff6d0d862447eae
[ "MIT" ]
12,377
2017-12-04T02:46:57.000Z
2022-03-31T16:48:31.000Z
recognition/partial_fc/mxnet/symbol/symbol_utils.py
dwhite54/insightface
ea172e4921c3960c0684404afff6d0d862447eae
[ "MIT" ]
1,851
2017-12-05T05:41:23.000Z
2022-03-30T13:06:22.000Z
recognition/partial_fc/mxnet/symbol/symbol_utils.py
dwhite54/insightface
ea172e4921c3960c0684404afff6d0d862447eae
[ "MIT" ]
4,198
2017-12-05T02:57:19.000Z
2022-03-30T10:29:37.000Z
import sys import os import mxnet as mx sys.path.append(os.path.join(os.path.dirname(__file__), '../..')) from default import config def Conv(**kwargs): # name = kwargs.get('name') # _weight = mx.symbol.Variable(name+'_weight') # _bias = mx.symbol.Variable(name+'_bias', lr_mult=2.0, wd_mult=0.0) # body = mx.sym.Convolution(weight = _weight, bias = _bias, **kwargs) body = mx.sym.Convolution(**kwargs) return body def Act(data, act_type, name): # ignore param act_type, set it in this function if act_type == 'prelu': body = mx.sym.LeakyReLU(data=data, act_type='prelu', name=name) else: body = mx.sym.Activation(data=data, act_type=act_type, name=name) return body bn_mom = config.bn_mom def Linear(data, num_filter=1, kernel=(1, 1), stride=(1, 1), pad=(0, 0), num_group=1, name=None, suffix=''): conv = mx.sym.Convolution(data=data, num_filter=num_filter, kernel=kernel, num_group=num_group, stride=stride, pad=pad, no_bias=True, name='%s%s_conv2d' % (name, suffix)) bn = mx.sym.BatchNorm(data=conv, name='%s%s_batchnorm' % (name, suffix), fix_gamma=False, momentum=bn_mom) return bn def get_fc1(last_conv, num_classes, fc_type, input_channel=512): body = last_conv if fc_type == 'Z': body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') body = mx.symbol.Dropout(data=body, p=0.4) fc1 = body elif fc_type == 'E': body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') body = mx.symbol.Dropout(data=body, p=0.4) fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='pre_fc1') fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='fc1') elif fc_type == 'FC': body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='pre_fc1') fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='fc1') elif fc_type == 'SFC': body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') body = Conv(data=body, num_filter=input_channel, kernel=(3, 3), stride=(2, 2), pad=(1, 1), no_bias=True, name="convf", num_group=input_channel) body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bnf') body = Act(data=body, act_type=config.net_act, name='reluf') body = Conv(data=body, num_filter=input_channel, kernel=(1, 1), pad=(0, 0), stride=(1, 1), name="convf2") body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bnf2') body = Act(data=body, act_type=config.net_act, name='reluf2') fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='pre_fc1') fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='fc1') elif fc_type == 'GAP': bn1 = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') relu1 = Act(data=bn1, act_type=config.net_act, name='relu1') # Although kernel is not used here when global_pool=True, we should put one pool1 = mx.sym.Pooling(data=relu1, global_pool=True, kernel=(7, 7), pool_type='avg', name='pool1') flat = mx.sym.Flatten(data=pool1) fc1 = mx.sym.FullyConnected(data=flat, num_hidden=num_classes, name='pre_fc1') fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='fc1') elif fc_type == 'GNAP': # mobilefacenet++ filters_in = 512 # param in mobilefacenet if num_classes > filters_in: body = mx.sym.Convolution(data=last_conv, num_filter=num_classes, kernel=(1, 1), stride=(1, 1), pad=(0, 0), 
no_bias=True, name='convx') body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=0.9, name='convx_bn') body = Act(data=body, act_type=config.net_act, name='convx_relu') filters_in = num_classes else: body = last_conv body = mx.sym.BatchNorm(data=body, fix_gamma=True, eps=2e-5, momentum=0.9, name='bn6f') spatial_norm = body * body spatial_norm = mx.sym.sum(data=spatial_norm, axis=1, keepdims=True) spatial_sqrt = mx.sym.sqrt(spatial_norm) # spatial_mean=mx.sym.mean(spatial_sqrt, axis=(1,2,3), keepdims=True) spatial_mean = mx.sym.mean(spatial_sqrt) spatial_div_inverse = mx.sym.broadcast_div(spatial_mean, spatial_sqrt) spatial_attention_inverse = mx.symbol.tile(spatial_div_inverse, reps=(1, filters_in, 1, 1)) body = body * spatial_attention_inverse # body = mx.sym.broadcast_mul(body, spatial_div_inverse) fc1 = mx.sym.Pooling(body, kernel=(7, 7), global_pool=True, pool_type='avg') if num_classes < filters_in: fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=0.9, name='bn6w') fc1 = mx.sym.FullyConnected(data=fc1, num_hidden=num_classes, name='pre_fc1') else: fc1 = mx.sym.Flatten(data=fc1) fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=0.9, name='fc1') elif fc_type == "GDC": # mobilefacenet_v1 conv_6_dw = Linear(last_conv, num_filter=input_channel, num_group=input_channel, kernel=(7, 7), pad=(0, 0), stride=(1, 1), name="conv_6dw7_7") conv_6_f = mx.sym.FullyConnected(data=conv_6_dw, num_hidden=num_classes, name='pre_fc1') fc1 = mx.sym.BatchNorm(data=conv_6_f, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='fc1') elif fc_type == 'F': body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') body = mx.symbol.Dropout(data=body, p=0.4) fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='fc1') elif fc_type == 'G': body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='fc1') elif fc_type == 'H': fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='fc1') elif fc_type == 'I': body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn1') fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='pre_fc1') fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='fc1') elif fc_type == 'J': fc1 = mx.sym.FullyConnected(data=body, num_hidden=num_classes, name='pre_fc1') fc1 = mx.sym.BatchNorm(data=fc1, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='fc1') return fc1 def residual_unit_v3(data, num_filter, stride, dim_match, name, **kwargs): """Return ResNet Unit symbol for building ResNet Parameters ---------- data : str Input data num_filter : int Number of output channels bnf : int Bottle neck channels factor with regard to num_filter stride : tuple Stride used in convolution dim_match : Boolean True means channel number between input and output is the same, otherwise means differ name : str Base name of the operators workspace : int Workspace used in convolution operator """ bn_mom = kwargs.get('bn_mom', 0.9) workspace = kwargs.get('workspace', 256) memonger = kwargs.get('memonger', False) # print('in unit3') bn1 = mx.sym.BatchNorm(data=data, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn1') conv1 = Conv(data=bn1, num_filter=num_filter, kernel=(3, 3), stride=(1, 1), pad=(1, 1), no_bias=True, workspace=workspace, name=name + '_conv1') bn2 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, 
momentum=bn_mom, name=name + '_bn2') act1 = Act(data=bn2, act_type=config.net_act, name=name + '_relu1') conv2 = Conv(data=act1, num_filter=num_filter, kernel=(3, 3), stride=stride, pad=(1, 1), no_bias=True, workspace=workspace, name=name + '_conv2') bn3 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn3') if dim_match: shortcut = data else: conv1sc = Conv(data=data, num_filter=num_filter, kernel=(1, 1), stride=stride, no_bias=True, workspace=workspace, name=name + '_conv1sc') shortcut = mx.sym.BatchNorm(data=conv1sc, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_sc') if memonger: shortcut._set_attr(mirror_stage='True') return bn3 + shortcut def residual_unit_v1l(data, num_filter, stride, dim_match, name, bottle_neck): """Return ResNet Unit symbol for building ResNet Parameters ---------- data : str Input data num_filter : int Number of output channels bnf : int Bottle neck channels factor with regard to num_filter stride : tuple Stride used in convolution dim_match : Boolean True means channel number between input and output is the same, otherwise means differ name : str Base name of the operators workspace : int Workspace used in convolution operator """ workspace = config.workspace bn_mom = config.bn_mom memonger = False use_se = config.net_se act_type = config.net_act # print('in unit1') if bottle_neck: conv1 = Conv(data=data, num_filter=int(num_filter * 0.25), kernel=(1, 1), stride=(1, 1), pad=(0, 0), no_bias=True, workspace=workspace, name=name + '_conv1') bn1 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn1') act1 = Act(data=bn1, act_type=act_type, name=name + '_relu1') conv2 = Conv(data=act1, num_filter=int(num_filter * 0.25), kernel=(3, 3), stride=(1, 1), pad=(1, 1), no_bias=True, workspace=workspace, name=name + '_conv2') bn2 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn2') act2 = Act(data=bn2, act_type=act_type, name=name + '_relu2') conv3 = Conv(data=act2, num_filter=num_filter, kernel=(1, 1), stride=stride, pad=(0, 0), no_bias=True, workspace=workspace, name=name + '_conv3') bn3 = mx.sym.BatchNorm(data=conv3, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_bn3') if use_se: # se begin body = mx.sym.Pooling(data=bn3, global_pool=True, kernel=(7, 7), pool_type='avg', name=name + '_se_pool1') body = Conv(data=body, num_filter=num_filter // 16, kernel=(1, 1), stride=(1, 1), pad=(0, 0), name=name + "_se_conv1", workspace=workspace) body = Act(data=body, act_type=act_type, name=name + '_se_relu1') body = Conv(data=body, num_filter=num_filter, kernel=(1, 1), stride=(1, 1), pad=(0, 0), name=name + "_se_conv2", workspace=workspace) body = mx.symbol.Activation(data=body, act_type='sigmoid', name=name + "_se_sigmoid") bn3 = mx.symbol.broadcast_mul(bn3, body) # se end if dim_match: shortcut = data else: conv1sc = Conv(data=data, num_filter=num_filter, kernel=(1, 1), stride=stride, no_bias=True, workspace=workspace, name=name + '_conv1sc') shortcut = mx.sym.BatchNorm(data=conv1sc, fix_gamma=False, eps=2e-5, momentum=bn_mom, name=name + '_sc') if memonger: shortcut._set_attr(mirror_stage='True') return Act(data=bn3 + shortcut, act_type=act_type, name=name + '_relu3') else: conv1 = Conv(data=data, num_filter=num_filter, kernel=(3, 3), stride=(1, 1), pad=(1, 1), no_bias=True, workspace=workspace, name=name + '_conv1') bn1 = mx.sym.BatchNorm(data=conv1, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn1') act1 = Act(data=bn1, 
act_type=act_type, name=name + '_relu1') conv2 = Conv(data=act1, num_filter=num_filter, kernel=(3, 3), stride=stride, pad=(1, 1), no_bias=True, workspace=workspace, name=name + '_conv2') bn2 = mx.sym.BatchNorm(data=conv2, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_bn2') if use_se: # se begin body = mx.sym.Pooling(data=bn2, global_pool=True, kernel=(7, 7), pool_type='avg', name=name + '_se_pool1') body = Conv(data=body, num_filter=num_filter // 16, kernel=(1, 1), stride=(1, 1), pad=(0, 0), name=name + "_se_conv1", workspace=workspace) body = Act(data=body, act_type=act_type, name=name + '_se_relu1') body = Conv(data=body, num_filter=num_filter, kernel=(1, 1), stride=(1, 1), pad=(0, 0), name=name + "_se_conv2", workspace=workspace) body = mx.symbol.Activation(data=body, act_type='sigmoid', name=name + "_se_sigmoid") bn2 = mx.symbol.broadcast_mul(bn2, body) # se end if dim_match: shortcut = data else: conv1sc = Conv(data=data, num_filter=num_filter, kernel=(1, 1), stride=stride, no_bias=True, workspace=workspace, name=name + '_conv1sc') shortcut = mx.sym.BatchNorm(data=conv1sc, fix_gamma=False, momentum=bn_mom, eps=2e-5, name=name + '_sc') if memonger: shortcut._set_attr(mirror_stage='True') return Act(data=bn2 + shortcut, act_type=act_type, name=name + '_relu3') def get_head(data, version_input, num_filter): bn_mom = config.bn_mom workspace = config.workspace kwargs = {'bn_mom': bn_mom, 'workspace': workspace} data = data - 127.5 data = data * 0.0078125 # data = mx.sym.BatchNorm(data=data, fix_gamma=True, eps=2e-5, momentum=bn_mom, name='bn_data') if version_input == 0: body = Conv(data=data, num_filter=num_filter, kernel=(7, 7), stride=(2, 2), pad=(3, 3), no_bias=True, name="conv0", workspace=workspace) body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn0') body = Act(data=body, act_type=config.net_act, name='relu0') body = mx.sym.Pooling(data=body, kernel=(3, 3), stride=(2, 2), pad=(1, 1), pool_type='max') else: body = data _num_filter = min(num_filter, 64) body = Conv(data=body, num_filter=_num_filter, kernel=(3, 3), stride=(1, 1), pad=(1, 1), no_bias=True, name="conv0", workspace=workspace) body = mx.sym.BatchNorm(data=body, fix_gamma=False, eps=2e-5, momentum=bn_mom, name='bn0') body = Act(data=body, act_type=config.net_act, name='relu0') # body = residual_unit_v3(body, _num_filter, (2, 2), False, name='head', **kwargs) body = residual_unit_v1l(body, _num_filter, (2, 2), False, name='head', bottle_neck=False) return body
40.592965
99
0.397417
73cd41373c33690b79d1d5a06d00cfe98b9006d5
4,342
py
Python
Instance_Segmentation/SketchDataset.py
yokoro13/SketchyScene
1301a9d3ee2fd91b6c19fe8b1e036a39f514ef4a
[ "MIT" ]
null
null
null
Instance_Segmentation/SketchDataset.py
yokoro13/SketchyScene
1301a9d3ee2fd91b6c19fe8b1e036a39f514ef4a
[ "MIT" ]
null
null
null
Instance_Segmentation/SketchDataset.py
yokoro13/SketchyScene
1301a9d3ee2fd91b6c19fe8b1e036a39f514ef4a
[ "MIT" ]
null
null
null
import os import sys import numpy as np import scipy.io from PIL import Image sys.path.append('libs') import utils nImgs_map = {'train': 11264, 'val': 2273, 'test': 541} cate = ["bicycle", "car", "motorcycle", "airplane", "fire hydrant", "traffic light", "cat", "dog", "horse", "sheep", "cow", "elephant", "other", "zebra", "giraffe", "cloud", "grass", "cloud", "tree"] class SketchDataset(utils.Dataset): """Generates the sketchyscene dataset.""" def __init__(self, dataset_base_dir): self.dataset_base_dir = dataset_base_dir super(SketchDataset, self).__init__() def load_sketches(self, mode): assert mode in ["train", "val", "test"] # Add classes for i in range(len(cate)): cat_name = cate[i] self.add_class("sketchyscene", i + 1, cat_name) # Add images nImgs = nImgs_map[mode] for i in range(nImgs): self.add_image("sketchyscene", image_id=i, path="", mode=mode) def load_image(self, image_id): """Load the specified image and return a [H,W,3] Numpy array. """ info = self.image_info[image_id] mode = info['mode'] image_name = str(image_id + 1) + '.png' # e.g. L0_sample5564.png images_base_dir = os.path.join(self.dataset_base_dir, mode, 'DRAWING_GT') image_path = os.path.join(images_base_dir, image_name) # print(image_path) image = Image.open(image_path) image = image.convert("RGB") image = np.array(image, dtype=np.float32) # shape = [H, W, 3] # plt.imshow(image.astype(np.uint8)) # # plt.show() return image def image_reference(self, image_id): """Return the shapes data of the image.""" info = self.image_info[image_id] if info["source"] == "sketchyscene": return info['mode'] else: super(self.__class__).image_reference(self, image_id) def load_mask(self, image_id): """Load instance masks for the given image. Returns: masks: A bool array of shape [height, width, instance count] with a binary mask per instance. class_ids: a 1D array of class IDs of the instance masks. """ info = self.image_info[image_id] mode = info['mode'] mask_class_name = str(image_id + 1) + '.mat' mask_instance_name = str(image_id + 1) + '.mat' class_base_dir = os.path.join(self.dataset_base_dir, mode, 'CLASS_GT') instance_base_dir = os.path.join(self.dataset_base_dir, mode, 'INSTANCE_GT') mask_class_path = os.path.join(class_base_dir, mask_class_name) mask_instance_path = os.path.join(instance_base_dir, mask_instance_name) INSTANCE_GT = scipy.io.loadmat(mask_instance_path)['INSTANCE_GT'] INSTANCE_GT = np.array(INSTANCE_GT, dtype=np.uint8) # shape=(750, 750) CLASS_GT = scipy.io.loadmat(mask_class_path)['CLASS_GT'] # (750, 750) # print(np.max(INSTANCE_GT)) # e.g. 101 instance_count = np.bincount(INSTANCE_GT.flatten()) # print(instance_count.shape) # e.g. shape=(102,) instance_count = instance_count[1:] # e.g. shape=(101,) nonzero_count = np.count_nonzero(instance_count) # e.g. 16 # print("nonzero_count", nonzero_count) # e.g. 
shape=(102,) mask_set = np.zeros([nonzero_count, INSTANCE_GT.shape[0], INSTANCE_GT.shape[1]], dtype=np.uint8) class_id_set = np.zeros([nonzero_count], dtype=np.uint8) real_instanceIdx = 0 for i in range(instance_count.shape[0]): if instance_count[i] == 0: continue instanceIdx = i + 1 ## mask mask = np.zeros([INSTANCE_GT.shape[0], INSTANCE_GT.shape[1]], dtype=np.uint8) mask[INSTANCE_GT == instanceIdx] = 1 mask_set[real_instanceIdx] = mask class_gt_filtered = CLASS_GT * mask class_gt_filtered = np.bincount(class_gt_filtered.flatten()) class_gt_filtered = class_gt_filtered[1:] class_id = np.argmax(class_gt_filtered) + 1 class_id_set[real_instanceIdx] = class_id real_instanceIdx += 1 mask_set = np.transpose(mask_set, (1, 2, 0)) return mask_set, class_id_set
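A short, hypothetical usage sketch for the SketchDataset record above; the dataset root path is an assumption, and only methods defined in the file are called.

# Hypothetical usage sketch (path is an assumed local SketchyScene root).
dataset = SketchDataset('/path/to/SketchyScene')
dataset.load_sketches('val')                 # registers the 2273 validation images
image = dataset.load_image(0)                # (H, W, 3) float32 drawing
masks, class_ids = dataset.load_mask(0)      # (H, W, N) binary masks + N class ids
print(image.shape, masks.shape, class_ids)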
36.183333
108
0.613312
73cda10667d4e2038d273198cdf8845e775d8fc3
795
py
Python
Task1B.py
SweetTomato111/flood-warning-system
b33bbc90899b18786227933ebf5f1aa2b91074f6
[ "MIT" ]
null
null
null
Task1B.py
SweetTomato111/flood-warning-system
b33bbc90899b18786227933ebf5f1aa2b91074f6
[ "MIT" ]
null
null
null
Task1B.py
SweetTomato111/flood-warning-system
b33bbc90899b18786227933ebf5f1aa2b91074f6
[ "MIT" ]
null
null
null
# Copyright (C) 2018 Garth N. Wells
#
# SPDX-License-Identifier: MIT
from floodsystem.stationdata import build_station_list
from floodsystem.geo import station_by_distance


def run():
    """Requirements for Task 1B"""

    # Build list of stations
    stations = build_station_list()

    # Example Code
    station_by_distance_list = station_by_distance(stations, (52.2053, 0.1218))

    nearest_10 = []
    print("Nearest")
    for i in station_by_distance_list[:10]:
        nearest_10.append((i[0].name, i[1]))
    print(nearest_10)

    print("Furthest")
    furthest_10 = []
    for i in station_by_distance_list[-10:]:
        furthest_10.append((i[0].name, i[1]))
    print(furthest_10)


if __name__ == "__main__":
    print("*** Task 1B: CUED Part IA Flood Warning System ***")
    run()
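For context, a hypothetical sketch of the shape of station_by_distance that this Task1B script assumes: a list of (station, distance) pairs sorted nearest-first. The haversine import and the station .coord attribute are assumptions, not taken from the file.

# Hypothetical sketch only; the real implementation lives in floodsystem.geo.
from haversine import haversine  # third-party great-circle distance, in km

def station_by_distance_sketch(stations, p):
    """Return (station, distance_km) pairs sorted nearest-first from point p."""
    pairs = [(station, haversine(station.coord, p)) for station in stations]
    return sorted(pairs, key=lambda pair: pair[1])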
26.5
78
0.675472
73cdaaac2067a350a1a11afa1bb3aba96aeeff4d
666
py
Python
swcms_social/faq/migrations/0002_auto_20180302_1639.py
ivanff/swcms
20d121003243abcc26e41409bc44f1c0ef3c6c2a
[ "MIT" ]
null
null
null
swcms_social/faq/migrations/0002_auto_20180302_1639.py
ivanff/swcms
20d121003243abcc26e41409bc44f1c0ef3c6c2a
[ "MIT" ]
1
2019-06-25T11:17:35.000Z
2019-06-25T11:17:54.000Z
swcms_social/faq/migrations/0002_auto_20180302_1639.py
ivanff/swcms-social
20d121003243abcc26e41409bc44f1c0ef3c6c2a
[ "MIT" ]
null
null
null
# Generated by Django 2.0.2 on 2018-03-02 13:39

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('faq', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='faq',
            name='subject',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='faq.Subject', verbose_name='Тема'),
        ),
        migrations.AlterField(
            model_name='subject',
            name='h1',
            field=models.CharField(max_length=250, verbose_name='H1'),
        ),
    ]
26.64
144
0.608108
73cdc35313c5c0af6eb6776e73b36285c9d8fff2
3,615
py
Python
hood/migrations/0001_initial.py
Hillarydalie/jiraniwatch
4d531c066a641cfc6bbeb4f2ade2f71f706aedd3
[ "MIT" ]
null
null
null
hood/migrations/0001_initial.py
Hillarydalie/jiraniwatch
4d531c066a641cfc6bbeb4f2ade2f71f706aedd3
[ "MIT" ]
7
2020-06-06T00:18:19.000Z
2022-03-12T00:03:41.000Z
hood/migrations/0001_initial.py
Hillarydalie/jiraniwatch
4d531c066a641cfc6bbeb4f2ade2f71f706aedd3
[ "MIT" ]
null
null
null
# Generated by Django 2.2.6 on 2019-10-30 07:17 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Hood', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('hoodName', models.CharField(max_length=100)), ('hoodLocation', models.CharField(max_length=50, null=True)), ('occupantsCount', models.PositiveSmallIntegerField(null=True)), ('admin', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Profile', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(blank=True, max_length=500)), ('avatar', models.ImageField(upload_to='profilepic/')), ('generalLocation', models.TextField(blank=True, max_length=500)), ('email', models.EmailField(max_length=254)), ('hood', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='hood.Hood')), ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Post', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('title', models.CharField(max_length=100)), ('description', models.CharField(max_length=2000)), ('postDate', models.DateTimeField(auto_now_add=True)), ('hood', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='hood.Hood')), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'ordering': ['-postDate'], }, ), migrations.CreateModel( name='Join', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('hood_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='hood.Hood')), ('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.CreateModel( name='Business', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('business_name', models.CharField(max_length=100, null=True)), ('description_of_biz', models.TextField(null=True)), ('location', models.CharField(max_length=1000, null=True)), ('email', models.EmailField(max_length=254)), ('hood', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='hood.Hood')), ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), ]
48.851351
130
0.59834
73cdc74410085b008d264c4ad5eca3ef220b29fb
3,235
py
Python
tests/datastructures/test_second_largest_item_in_bst.py
JASTYN/pythonmaster
46638ab09d28b65ce5431cd0759fe6df272fb85d
[ "Apache-2.0", "MIT" ]
3
2017-05-02T10:28:13.000Z
2019-02-06T09:10:11.000Z
tests/datastructures/test_second_largest_item_in_bst.py
JASTYN/pythonmaster
46638ab09d28b65ce5431cd0759fe6df272fb85d
[ "Apache-2.0", "MIT" ]
2
2017-06-21T20:39:14.000Z
2020-02-25T10:28:57.000Z
tests/datastructures/test_second_largest_item_in_bst.py
JASTYN/pythonmaster
46638ab09d28b65ce5431cd0759fe6df272fb85d
[ "Apache-2.0", "MIT" ]
2
2016-07-29T04:35:22.000Z
2017-01-18T17:05:36.000Z
import unittest

# from datastructures.trees.binary_search_tree import find_second_largest


class Test(unittest.TestCase):
    class BinaryTreeNode(object):
        def __init__(self, value):
            self.value = value
            self.left = None
            self.right = None

        def insert_left(self, value):
            self.left = Test.BinaryTreeNode(value)
            return self.left

        def insert_right(self, value):
            self.right = Test.BinaryTreeNode(value)
            return self.right

    # def test_full_tree(self):
    #     tree = Test.BinaryTreeNode(50)
    #     left = tree.insert_left(30)
    #     right = tree.insert_right(70)
    #     left.insert_left(10)
    #     left.insert_right(40)
    #     right.insert_left(60)
    #     right.insert_right(80)
    #     actual = find_second_largest(tree)
    #     expected = 70
    #     self.assertEqual(actual, expected)

    # def test_largest_has_a_left_child(self):
    #     tree = Test.BinaryTreeNode(50)
    #     left = tree.insert_left(30)
    #     right = tree.insert_right(70)
    #     left.insert_left(10)
    #     left.insert_right(40)
    #     right.insert_left(60)
    #     actual = find_second_largest(tree)
    #     expected = 60
    #     self.assertEqual(actual, expected)

    # def test_largest_has_a_left_subtree(self):
    #     tree = Test.BinaryTreeNode(50)
    #     left = tree.insert_left(30)
    #     right = tree.insert_right(70)
    #     left.insert_left(10)
    #     left.insert_right(40)
    #     right_left = right.insert_left(60)
    #     right_left_left = right_left.insert_left(55)
    #     right_left.insert_right(65)
    #     right_left_left.insert_right(58)
    #     actual = find_second_largest(tree)
    #     expected = 65
    #     self.assertEqual(actual, expected)

    # def test_second_largest_is_root_node(self):
    #     tree = Test.BinaryTreeNode(50)
    #     left = tree.insert_left(30)
    #     tree.insert_right(70)
    #     left.insert_left(10)
    #     left.insert_right(40)
    #     actual = find_second_largest(tree)
    #     expected = 50
    #     self.assertEqual(actual, expected)

    # def test_descending_linked_list(self):
    #     tree = Test.BinaryTreeNode(50)
    #     left = tree.insert_left(40)
    #     left_left = left.insert_left(30)
    #     left_left_left = left_left.insert_left(20)
    #     left_left_left.insert_left(10)
    #     actual = find_second_largest(tree)
    #     expected = 40
    #     self.assertEqual(actual, expected)

    # def test_ascending_linked_list(self):
    #     tree = Test.BinaryTreeNode(50)
    #     right = tree.insert_right(60)
    #     right_right = right.insert_right(70)
    #     right_right.insert_right(80)
    #     actual = find_second_largest(tree)
    #     expected = 70
    #     self.assertEqual(actual, expected)

    # def test_error_when_tree_has_one_node(self):
    #     tree = Test.BinaryTreeNode(50)
    #     with self.assertRaises(Exception):
    #         find_second_largest(tree)

    # def test_error_when_tree_is_empty(self):
    #     with self.assertRaises(Exception):
    #         find_second_largest(None)


if __name__ == '__main__':
    unittest.main(verbosity=2)
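For reference, a hypothetical sketch of a find_second_largest consistent with the commented-out cases above; the real implementation is imported from datastructures.trees.binary_search_tree and is not shown in this record.

# Hypothetical sketch only, not the repository's implementation.
def find_second_largest_sketch(root):
    """Second-largest value in a BST, per the commented-out tests above."""
    if root is None or (root.left is None and root.right is None):
        raise ValueError('Tree must have at least 2 nodes')
    current = root
    while current:
        # Largest node has a left subtree: the answer is that subtree's maximum.
        if current.left and not current.right:
            node = current.left
            while node.right:
                node = node.right
            return node.value
        # Current is the parent of the largest node (a right-leaf child).
        if current.right and not current.right.left and not current.right.right:
            return current.value
        current = current.right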
32.029703
73
0.61762
73cdcd6712af1781c16dad789e6e72bcac6d9ae8
10,029
py
Python
test/registry/fixtures.py
jakedt/quay
424c1a19d744be444ed27aa1718fd74af311d863
[ "Apache-2.0" ]
null
null
null
test/registry/fixtures.py
jakedt/quay
424c1a19d744be444ed27aa1718fd74af311d863
[ "Apache-2.0" ]
null
null
null
test/registry/fixtures.py
jakedt/quay
424c1a19d744be444ed27aa1718fd74af311d863
[ "Apache-2.0" ]
null
null
null
import copy import logging.config import json import os import shutil from tempfile import NamedTemporaryFile import pytest from Crypto import Random from flask import jsonify, g from flask_principal import Identity from app import storage from data.database import ( close_db_filter, configure, DerivedStorageForImage, QueueItem, Image, TagManifest, TagManifestToManifest, Manifest, ManifestLegacyImage, ManifestBlob, NamespaceGeoRestriction, User, ) from data import model from data.registry_model import registry_model from endpoints.csrf import generate_csrf_token from util.log import logfile_path from test.registry.liveserverfixture import LiveServerExecutor @pytest.fixture() def registry_server_executor(app): def generate_csrf(): return generate_csrf_token() def set_supports_direct_download(enabled): storage.put_content( ["local_us"], "supports_direct_download", "true" if enabled else "false" ) return "OK" def delete_image(image_id): image = Image.get(docker_image_id=image_id) image.docker_image_id = "DELETED" image.save() return "OK" def get_storage_replication_entry(image_id): image = Image.get(docker_image_id=image_id) QueueItem.select().where(QueueItem.queue_name ** ("%" + image.storage.uuid + "%")).get() return "OK" def set_feature(feature_name, value): import features from app import app old_value = features._FEATURES[feature_name].value features._FEATURES[feature_name].value = value app.config["FEATURE_%s" % feature_name] = value return jsonify({"old_value": old_value}) def set_config_key(config_key, value): from app import app as current_app old_value = app.config.get(config_key) app.config[config_key] = value current_app.config[config_key] = value # Close any existing connection. close_db_filter(None) # Reload the database config. configure(app.config) return jsonify({"old_value": old_value}) def clear_derived_cache(): DerivedStorageForImage.delete().execute() return "OK" def clear_uncompressed_size(image_id): image = model.image.get_image_by_id("devtable", "newrepo", image_id) image.storage.uncompressed_size = None image.storage.save() return "OK" def add_token(): another_token = model.token.create_delegate_token( "devtable", "newrepo", "my-new-token", "write" ) return model.token.get_full_token_string(another_token) def break_database(): # Close any existing connection. close_db_filter(None) # Reload the database config with an invalid connection. config = copy.copy(app.config) config["DB_URI"] = "sqlite:///not/a/valid/database" configure(config) return "OK" def reload_app(server_hostname): # Close any existing connection. close_db_filter(None) # Reload the database config. app.config["SERVER_HOSTNAME"] = server_hostname[len("http://") :] configure(app.config) # Reload random after the process split, as it cannot be used uninitialized across forks. Random.atfork() # Required for anonymous calls to not exception. 
g.identity = Identity(None, "none") if os.environ.get("DEBUGLOG") == "true": logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False) return "OK" def create_app_repository(namespace, name): user = model.user.get_user(namespace) model.repository.create_repository(namespace, name, user, repo_kind="application") return "OK" def disable_namespace(namespace): namespace_obj = model.user.get_namespace_user(namespace) namespace_obj.enabled = False namespace_obj.save() return "OK" def delete_manifests(): ManifestLegacyImage.delete().execute() ManifestBlob.delete().execute() Manifest.delete().execute() TagManifestToManifest.delete().execute() TagManifest.delete().execute() return "OK" def set_geo_block_for_namespace(namespace_name, iso_country_code): NamespaceGeoRestriction.create( namespace=User.get(username=namespace_name), description="", unstructured_json={}, restricted_region_iso_code=iso_country_code, ) return "OK" executor = LiveServerExecutor() executor.register("generate_csrf", generate_csrf) executor.register("set_supports_direct_download", set_supports_direct_download) executor.register("delete_image", delete_image) executor.register("get_storage_replication_entry", get_storage_replication_entry) executor.register("set_feature", set_feature) executor.register("set_config_key", set_config_key) executor.register("clear_derived_cache", clear_derived_cache) executor.register("clear_uncompressed_size", clear_uncompressed_size) executor.register("add_token", add_token) executor.register("break_database", break_database) executor.register("reload_app", reload_app) executor.register("create_app_repository", create_app_repository) executor.register("disable_namespace", disable_namespace) executor.register("delete_manifests", delete_manifests) executor.register("set_geo_block_for_namespace", set_geo_block_for_namespace) return executor @pytest.fixture(params=["pre_oci_model", "oci_model"]) def data_model(request): return request.param @pytest.fixture() def liveserver_app(app, registry_server_executor, init_db_path, data_model): # Change the data model being used. registry_model.set_for_testing(data_model == "oci_model") registry_server_executor.apply_blueprint_to_app(app) if os.environ.get("DEBUG", "false").lower() == "true": app.config["DEBUG"] = True # Copy the clean database to a new path. We cannot share the DB created by the # normal app fixture, as it is already open in the local process. local_db_file = NamedTemporaryFile(delete=True) local_db_file.close() shutil.copy2(init_db_path, local_db_file.name) app.config["DB_URI"] = "sqlite:///{0}".format(local_db_file.name) return app @pytest.fixture() def app_reloader(request, liveserver, registry_server_executor): registry_server_executor.on(liveserver).reload_app(liveserver.url) yield class FeatureFlagValue(object): """ Helper object which temporarily sets the value of a feature flag. Usage: with FeatureFlagValue('ANONYMOUS_ACCESS', False, registry_server_executor.on(liveserver)): ... Features.ANONYMOUS_ACCESS is False in this context ... 
""" def __init__(self, feature_flag, test_value, executor): self.feature_flag = feature_flag self.test_value = test_value self.executor = executor self.old_value = None def __enter__(self): result = self.executor.set_feature(self.feature_flag, self.test_value) self.old_value = result.json()["old_value"] def __exit__(self, type, value, traceback): self.executor.set_feature(self.feature_flag, self.old_value) class ConfigChange(object): """ Helper object which temporarily sets the value of a config key. Usage: with ConfigChange('SOMEKEY', 'value', registry_server_executor.on(liveserver)): ... app.config['SOMEKEY'] is 'value' in this context ... """ def __init__(self, config_key, test_value, executor, liveserver): self.config_key = config_key self.test_value = test_value self.executor = executor self.liveserver = liveserver self.old_value = None def __enter__(self): result = self.executor.set_config_key(self.config_key, self.test_value) self.old_value = result.json()["old_value"] def __exit__(self, type, value, traceback): self.executor.set_config_key(self.config_key, self.old_value) class ApiCaller(object): def __init__(self, liveserver_session, registry_server_executor): self.liveserver_session = liveserver_session self.registry_server_executor = registry_server_executor def conduct_auth(self, username, password): r = self.post( "/api/v1/signin", data=json.dumps(dict(username=username, password=password)), headers={"Content-Type": "application/json"}, ) assert r.status_code == 200 def _adjust_params(self, kwargs): csrf_token = self.registry_server_executor.on_session( self.liveserver_session ).generate_csrf() if "params" not in kwargs: kwargs["params"] = {} kwargs["params"].update( {"_csrf_token": csrf_token,} ) return kwargs def get(self, url, **kwargs): kwargs = self._adjust_params(kwargs) return self.liveserver_session.get(url, **kwargs) def post(self, url, **kwargs): kwargs = self._adjust_params(kwargs) return self.liveserver_session.post(url, **kwargs) def put(self, url, **kwargs): kwargs = self._adjust_params(kwargs) return self.liveserver_session.put(url, **kwargs) def delete(self, url, **kwargs): kwargs = self._adjust_params(kwargs) return self.liveserver_session.delete(url, **kwargs) def change_repo_visibility(self, namespace, repository, visibility): self.post( "/api/v1/repository/%s/%s/changevisibility" % (namespace, repository), data=json.dumps(dict(visibility=visibility)), headers={"Content-Type": "application/json"}, ) @pytest.fixture(scope="function") def api_caller(liveserver, registry_server_executor): return ApiCaller(liveserver.new_session(), registry_server_executor)
32.144231
97
0.687207
73cdd99f1117d2c9aba83bde68e7ccdacab9d91c
2,468
py
Python
Tools/resultsdbpy/resultsdbpy/example/main.py
jacadcaps/webkitty
9aebd2081349f9a7b5d168673c6f676a1450a66d
[ "BSD-2-Clause" ]
6
2021-07-05T16:09:39.000Z
2022-03-06T22:44:42.000Z
Tools/resultsdbpy/resultsdbpy/example/main.py
jacadcaps/webkitty
9aebd2081349f9a7b5d168673c6f676a1450a66d
[ "BSD-2-Clause" ]
7
2022-03-15T13:25:39.000Z
2022-03-15T13:25:44.000Z
Tools/resultsdbpy/resultsdbpy/example/main.py
jacadcaps/webkitty
9aebd2081349f9a7b5d168673c6f676a1450a66d
[ "BSD-2-Clause" ]
null
null
null
# Copyright (C) 2020 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1.  Redistributions of source code must retain the above copyright
#     notice, this list of conditions and the following disclaimer.
# 2.  Redistributions in binary form must reproduce the above copyright
#     notice, this list of conditions and the following disclaimer in the
#     documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import hashlib
import json

from example.environment import Environment, ModelFromEnvironment
from flask import abort, Flask, request
from resultsdbpy.controller.api_routes import APIRoutes
from resultsdbpy.view.view_routes import ViewRoutes

environment = Environment()
print(f'Environment for web-app:\n{environment}')

model = ModelFromEnvironment(environment)
app = Flask(__name__)
api_routes = APIRoutes(model=model, import_name=__name__)
view_routes = ViewRoutes(
    title='Example Results Database',
    model=model,
    controller=api_routes,
    import_name=__name__,
)


@app.route('/__health', methods=('GET',))
def health():
    if not model.healthy(writable=True):
        abort(503, description='Health check failed, invalid database connections')
    return 'ok'


@app.errorhandler(401)
@app.errorhandler(404)
@app.errorhandler(405)
def handle_errors(error):
    if request.path.startswith('/api/'):
        return api_routes.error_response(error)
    return view_routes.error(error=error)


app.register_blueprint(api_routes)
app.register_blueprint(view_routes)


def main():
    app.run(host='0.0.0.0', port=environment.port)
36.294118
83
0.773096
73cded750aa8f5e2e1a05c840b2b09b29539275f
6,703
py
Python
train_pose.py
gosticks/body-pose-animation
eb1b5876a845f277d43bfc18dcd48c4a9c694c06
[ "MIT" ]
null
null
null
train_pose.py
gosticks/body-pose-animation
eb1b5876a845f277d43bfc18dcd48c4a9c694c06
[ "MIT" ]
null
null
null
train_pose.py
gosticks/body-pose-animation
eb1b5876a845f277d43bfc18dcd48c4a9c694c06
[ "MIT" ]
null
null
null
from modules.distance_loss import WeightedMSELoss from modules.utils import get_loss_layers from camera_estimation import TorchCameraEstimate import smplx import torch from tqdm import tqdm import torchgeometry as tgm # internal imports from modules.pose import BodyPose from modules.filter import JointFilter from modules.camera import SimpleCamera from renderer import Renderer def train_pose( model: smplx.SMPL, # current datapoints keypoints, keypoint_conf, # 3D to 2D camera layer camera: SimpleCamera, # model type model_type="smplx", # pytorch config device=torch.device('cuda'), dtype=torch.float32, # optimizer settings optimizer=None, optimizer_type="LBFGS", learning_rate=1e-3, iterations=60, patience=10, # renderer options renderer: Renderer = None, render_steps=True, extra_loss_layers=[], use_progress_bar=True, use_openpose_conf_loss=True, loss_analysis=True ): if use_progress_bar: print("[pose] starting training") print("[pose] dtype=", dtype, device) offscreen_step_output = [] # is enabled will use openpose keypoint confidence # as weights on the loss components if use_openpose_conf_loss: loss_layer = WeightedMSELoss( weights=keypoint_conf, device=device, dtype=dtype ) else: loss_layer = torch.nn.MSELoss(reduction="sum").to( device=device, dtype=dtype ) # make sure camera module is on the correct device camera = camera.to(device=device, dtype=dtype) # setup keypoint data keypoints = torch.tensor(keypoints).to(device=device, dtype=dtype) keypoint_filter = JointFilter( model_type=model_type, filter_dims=3).to(device=device, dtype=dtype) # filter keypoints keypoints = keypoint_filter(keypoints) # create filter layer to ignore unused joints, keypoints during optimization filter_layer = JointFilter( model_type=model_type, filter_dims=3).to(device=device, dtype=dtype) # setup torch modules pose_layer = BodyPose(model, dtype=dtype, device=device, useBodyMeanAngles=False).to(device=device, dtype=dtype) parameters = [pose_layer.body_pose] # setup all loss layers for l in extra_loss_layers: # make sure layer is running on the correct device l.to(device=device, dtype=dtype) # register parameters if present if l.has_parameters: parameters = parameters + list(l.parameters()) if optimizer is None: if optimizer_type.lower() == "lbfgs": optimizer = torch.optim.LBFGS elif optimizer_type.lower() == "adam": optimizer = torch.optim.Adam optimizer = optimizer(parameters, learning_rate) if use_progress_bar: pbar = tqdm(total=iterations) # store results for optional plotting cur_patience = patience best_loss = None best_output = None # setup loss history data gathergin loss_history = [] if loss_analysis: loss_components = {"points": []} for l in extra_loss_layers: loss_components[l.__class__.__name__] = [] # prediction and loss computation closere def predict(): # return joints based on current model state body_joints, cur_pose = pose_layer() # compute homogeneous coordinates and project them to 2D space points = tgm.convert_points_to_homogeneous(body_joints) points = camera(points).squeeze() points = filter_layer(points) # compute loss between 2D joint projection and OpenPose keypoints loss = loss_layer(points, keypoints) if loss_analysis: loss_components['points'].append(loss.item()) # apply extra losses for l in extra_loss_layers: cur_loss = l(cur_pose, body_joints, points, keypoints, pose_layer.cur_out) if loss_analysis: loss_components[l.__class__.__name__].append(cur_loss.item()) loss = loss + cur_loss return loss # main optimizer closure def optim_closure(): if torch.is_grad_enabled(): 
optimizer.zero_grad() loss = predict() if loss.requires_grad: loss.backward() return loss # camera translation R = camera.trans.detach().cpu().numpy().squeeze() # main optimization loop for t in range(iterations): loss = optimizer.step(optim_closure) # compute loss cur_loss = loss.item() loss_history.append(loss) if best_loss is None: best_loss = cur_loss elif cur_loss < best_loss: best_loss = cur_loss best_output = pose_layer.cur_out else: cur_patience = cur_patience - 1 if patience == 0: print("[train] aborted due to patience limit reached") if use_progress_bar: pbar.set_description("Error %f" % cur_loss) pbar.update(1) if renderer is not None and render_steps: renderer.render_model( model=model, model_out=pose_layer.cur_out, transform=R ) if renderer.use_offscreen: offscreen_step_output.append(renderer.get_snapshot()) if use_progress_bar: pbar.close() print("Final result:", loss.item()) return best_output, loss_history, offscreen_step_output, loss_components def train_pose_with_conf( config, pose_camera, model: smplx.SMPL, keypoints, keypoint_conf, device=torch.device('cpu'), dtype=torch.float32, renderer: Renderer = None, render_steps=True, use_progress_bar=True, print_loss_layers=False ): loss_layers = get_loss_layers(config, model, device, dtype) if print_loss_layers: print(loss_layers) best_output, loss_history, offscreen_step_output, loss_components = train_pose( model=model.to(dtype=dtype), keypoints=keypoints, keypoint_conf=keypoint_conf, camera=pose_camera, device=device, dtype=dtype, renderer=renderer, optimizer_type=config['pose']['optimizer'], iterations=config['pose']['iterations'], learning_rate=config['pose']['lr'], render_steps=render_steps, use_openpose_conf_loss=config['pose']['useOpenPoseConf'], use_progress_bar=use_progress_bar, extra_loss_layers=loss_layers ) return best_output, loss_history, offscreen_step_output, loss_components
28.645299
83
0.649262
73cdf1446158cc844a9dcaae11cff77dbe261088
676
py
Python
empower/core/service.py
imec-idlab/empower-runtime
eda52649f855722fdec1d02e25a28c61a8fbda06
[ "Apache-2.0" ]
2
2020-02-28T15:54:01.000Z
2020-11-24T08:45:11.000Z
empower/core/service.py
imec-idlab/empower-runtime
eda52649f855722fdec1d02e25a28c61a8fbda06
[ "Apache-2.0" ]
null
null
null
empower/core/service.py
imec-idlab/empower-runtime
eda52649f855722fdec1d02e25a28c61a8fbda06
[ "Apache-2.0" ]
1
2020-03-02T16:41:20.000Z
2020-03-02T16:41:20.000Z
#!/usr/bin/env python3
#
# Copyright (c) 2018 Roberto Riggio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""EmPOWER base service class."""


class EmpowerService:
    pass
29.391304
66
0.747041
73ce151d89aa7e29ce42dbfb9de8cf3761484a5b
4,590
py
Python
sdk/oep/azure-mgmt-oep/azure/mgmt/oep/aio/_open_energy_platform_management_service_apis.py
vincenttran-msft/azure-sdk-for-python
348b56f9f03eeb3f7b502eed51daf494ffff874d
[ "MIT" ]
1
2022-03-09T08:59:13.000Z
2022-03-09T08:59:13.000Z
sdk/oep/azure-mgmt-oep/azure/mgmt/oep/aio/_open_energy_platform_management_service_apis.py
vincenttran-msft/azure-sdk-for-python
348b56f9f03eeb3f7b502eed51daf494ffff874d
[ "MIT" ]
null
null
null
sdk/oep/azure-mgmt-oep/azure/mgmt/oep/aio/_open_energy_platform_management_service_apis.py
vincenttran-msft/azure-sdk-for-python
348b56f9f03eeb3f7b502eed51daf494ffff874d
[ "MIT" ]
1
2022-03-04T06:21:56.000Z
2022-03-04T06:21:56.000Z
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from copy import deepcopy from typing import Any, Awaitable, Optional, TYPE_CHECKING from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient from msrest import Deserializer, Serializer from .. import models from ._configuration import OpenEnergyPlatformManagementServiceAPIsConfiguration from .operations import EnergyServicesOperations, LocationsOperations, Operations if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential class OpenEnergyPlatformManagementServiceAPIs: """Open Energy Platform Management Service APIs. :ivar locations: LocationsOperations operations :vartype locations: open_energy_platform_management_service_apis.aio.operations.LocationsOperations :ivar energy_services: EnergyServicesOperations operations :vartype energy_services: open_energy_platform_management_service_apis.aio.operations.EnergyServicesOperations :ivar operations: Operations operations :vartype operations: open_energy_platform_management_service_apis.aio.operations.Operations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The ID of the target subscription. :type subscription_id: str :param base_url: Service URL. Default value is 'https://management.azure.com'. :type base_url: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ def __init__( self, credential: "AsyncTokenCredential", subscription_id: str, base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: self._config = OpenEnergyPlatformManagementServiceAPIsConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False self.locations = LocationsOperations(self._client, self._config, self._serialize, self._deserialize) self.energy_services = EnergyServicesOperations(self._client, self._config, self._serialize, self._deserialize) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) def _send_request( self, request: HttpRequest, **kwargs: Any ) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest >>> request = HttpRequest("GET", "https://www.example.org/") <HttpRequest [GET], url: 'https://www.example.org/'> >>> response = await client._send_request(request) <AsyncHttpResponse: 200 OK> For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart :param request: The network request you want to make. Required. 
:type request: ~azure.core.rest.HttpRequest :keyword bool stream: Whether the response payload will be streamed. Defaults to False. :return: The response of your network call. Does not do error handling on your response. :rtype: ~azure.core.rest.AsyncHttpResponse """ request_copy = deepcopy(request) request_copy.url = self._client.format_url(request_copy.url) return self._client.send_request(request_copy, **kwargs) async def close(self) -> None: await self._client.close() async def __aenter__(self) -> "OpenEnergyPlatformManagementServiceAPIs": await self._client.__aenter__() return self async def __aexit__(self, *exc_details) -> None: await self._client.__aexit__(*exc_details)
46.363636
141
0.715904
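A brief, hedged construction example for the client above. DefaultAzureCredential and the placeholder subscription id are assumptions; only the attributes and the async context-manager support visible in the class definition are relied on.

import asyncio
from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.oep.aio import OpenEnergyPlatformManagementServiceAPIs

async def main():
    async with DefaultAzureCredential() as credential:
        # __aenter__/__aexit__ defined above make the client usable as an
        # async context manager.
        async with OpenEnergyPlatformManagementServiceAPIs(
            credential=credential,
            subscription_id="<subscription-id>",  # placeholder value
        ) as client:
            # client.locations, client.energy_services and client.operations
            # are the operation groups wired up in __init__ above.
            pass

asyncio.run(main())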
73ce26cc8ba81e189b3f8e33e00f0647fddb4e06
10,825
py
Python
silx/io/h5py_utils.py
woutdenolf/silx
b590bece15588792a0ff41a71301975a63be8e09
[ "CC0-1.0", "MIT" ]
1
2019-12-11T14:11:03.000Z
2019-12-11T14:11:03.000Z
silx/io/h5py_utils.py
juliagarriga/silx
f0940c701fb6e625f9f6014840f0890011e7a562
[ "CC0-1.0", "MIT" ]
null
null
null
silx/io/h5py_utils.py
juliagarriga/silx
f0940c701fb6e625f9f6014840f0890011e7a562
[ "CC0-1.0", "MIT" ]
null
null
null
# coding: utf-8 # /*########################################################################## # Copyright (C) 2016-2017 European Synchrotron Radiation Facility # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # # ############################################################################*/ """ This module provides utility methods on top of h5py, mainly to handle parallel writing and reading. """ __authors__ = ["W. de Nolf"] __license__ = "MIT" __date__ = "27/01/2020" import os import traceback import h5py from .._version import calc_hexversion from ..utils import retry as retry_mod H5PY_HEX_VERSION = calc_hexversion(*h5py.version.version_tuple[:3]) HDF5_HEX_VERSION = calc_hexversion(*h5py.version.hdf5_version_tuple[:3]) HDF5_SWMR_VERSION = calc_hexversion(*h5py.get_config().swmr_min_hdf5_version[:3]) HDF5_TRACK_ORDER_VERSION = calc_hexversion(2, 9, 0) HAS_SWMR = HDF5_HEX_VERSION >= HDF5_SWMR_VERSION HAS_TRACK_ORDER = H5PY_HEX_VERSION >= HDF5_TRACK_ORDER_VERSION def _is_h5py_exception(e): for frame in traceback.walk_tb(e.__traceback__): if frame[0].f_locals.get("__package__", None) == "h5py": return True return False def _retry_h5py_error(e): """ :param BaseException e: :returns bool: """ if _is_h5py_exception(e): if isinstance(e, (OSError, RuntimeError)): return True elif isinstance(e, KeyError): # For example this needs to be retried: # KeyError: 'Unable to open object (bad object header version number)' return "Unable to open object" in str(e) elif isinstance(e, retry_mod.RetryError): return True return False def retry(**kw): """Decorator for a method that needs to be executed until it not longer fails on HDF5 IO. Mainly used for reading an HDF5 file that is being written. :param **kw: see `silx.utils.retry` """ kw.setdefault("retry_on_error", _retry_h5py_error) return retry_mod.retry(**kw) def retry_contextmanager(**kw): """Decorator to make a context manager from a method that needs to be entered until it not longer fails on HDF5 IO. Mainly used for reading an HDF5 file that is being written. :param **kw: see `silx.utils.retry_contextmanager` """ kw.setdefault("retry_on_error", _retry_h5py_error) return retry_mod.retry_contextmanager(**kw) def retry_in_subprocess(**kw): """Same as `retry` but it also retries segmentation faults. On Window you cannot use this decorator with the "@" syntax: .. code-block:: python def _method(*args, **kw): ... 
method = retry_in_subprocess()(_method) :param **kw: see `silx.utils.retry_in_subprocess` """ kw.setdefault("retry_on_error", _retry_h5py_error) return retry_mod.retry_in_subprocess(**kw) def group_has_end_time(h5item): """Returns True when the HDF5 item is a Group with an "end_time" dataset. A reader can use this as an indication that the Group has been fully written (at least if the writer supports this). :param Group or Dataset h5item: :returns bool: """ if isinstance(h5item, h5py.Group): return "end_time" in h5item else: return False @retry_contextmanager() def open_item(filename, name, retry_invalid=False, validate=None): """Yield an HDF5 dataset or group (retry until it can be instantiated). :param str filename: :param bool retry_invalid: retry when item is missing or not valid :param callable or None validate: :yields Dataset, Group or None: """ with File(filename) as h5file: try: item = h5file[name] except KeyError as e: if "doesn't exist" in str(e): if retry_invalid: raise retry_mod.RetryError else: item = None else: raise if callable(validate) and item is not None: if not validate(item): if retry_invalid: raise retry_mod.RetryError else: item = None yield item def _top_level_names(filename, include_only=group_has_end_time): """Return all valid top-level HDF5 names. :param str filename: :param callable or None include_only: :returns list(str): """ with File(filename) as h5file: try: if callable(include_only): return [name for name in h5file["/"] if include_only(h5file[name])] else: return list(h5file["/"]) except KeyError: raise retry_mod.RetryError top_level_names = retry()(_top_level_names) safe_top_level_names = retry_in_subprocess()(_top_level_names) class File(h5py.File): """Takes care of HDF5 file locking and SWMR mode without the need to handle those explicitely. When using this class, you cannot open different files simultatiously with different modes because the locking flag is an environment variable. """ _HDF5_FILE_LOCKING = None _NOPEN = 0 _SWMR_LIBVER = "latest" def __init__( self, filename, mode=None, enable_file_locking=None, swmr=None, libver=None, **kwargs ): """The arguments `enable_file_locking` and `swmr` should not be specified explicitly for normal use cases. :param str filename: :param str or None mode: read-only by default :param bool or None enable_file_locking: by default it is disabled for `mode='r'` and `swmr=False` and enabled for all other modes. :param bool or None swmr: try both modes when `mode='r'` and `swmr=None` :param **kwargs: see `h5py.File.__init__` """ if mode is None: mode = "r" elif mode not in ("r", "w", "w-", "x", "a", "r+"): raise ValueError("invalid mode {}".format(mode)) if not HAS_SWMR: swmr = False if enable_file_locking is None: enable_file_locking = bool(mode != "r" or swmr) if self._NOPEN: self._check_locking_env(enable_file_locking) else: self._set_locking_env(enable_file_locking) if swmr and libver is None: libver = self._SWMR_LIBVER if HAS_TRACK_ORDER: kwargs.setdefault("track_order", True) try: super().__init__(filename, mode=mode, swmr=swmr, libver=libver, **kwargs) except OSError as e: # wlock wSWMR rlock rSWMR OSError: Unable to open file (...) 
# 1 TRUE FALSE FALSE FALSE - # 2 TRUE FALSE FALSE TRUE - # 3 TRUE FALSE TRUE FALSE unable to lock file, errno = 11, error message = 'Resource temporarily unavailable' # 4 TRUE FALSE TRUE TRUE unable to lock file, errno = 11, error message = 'Resource temporarily unavailable' # 5 TRUE TRUE FALSE FALSE file is already open for write (may use <h5clear file> to clear file consistency flags) # 6 TRUE TRUE FALSE TRUE - # 7 TRUE TRUE TRUE FALSE file is already open for write (may use <h5clear file> to clear file consistency flags) # 8 TRUE TRUE TRUE TRUE - if ( mode == "r" and swmr is None and "file is already open for write" in str(e) ): # Try reading in SWMR mode (situation 5 and 7) swmr = True if libver is None: libver = self._SWMR_LIBVER super().__init__( filename, mode=mode, swmr=swmr, libver=libver, **kwargs ) else: raise else: self._add_nopen(1) try: if mode != "r" and swmr: # Try setting writer in SWMR mode self.swmr_mode = True except Exception: self.close() raise @classmethod def _add_nopen(cls, v): cls._NOPEN = max(cls._NOPEN + v, 0) def close(self): super().close() self._add_nopen(-1) if not self._NOPEN: self._restore_locking_env() def _set_locking_env(self, enable): self._backup_locking_env() if enable: os.environ["HDF5_USE_FILE_LOCKING"] = "TRUE" elif enable is None: try: del os.environ["HDF5_USE_FILE_LOCKING"] except KeyError: pass else: os.environ["HDF5_USE_FILE_LOCKING"] = "FALSE" def _get_locking_env(self): v = os.environ.get("HDF5_USE_FILE_LOCKING") if v == "TRUE": return True elif v is None: return None else: return False def _check_locking_env(self, enable): if enable != self._get_locking_env(): if enable: raise RuntimeError( "Close all HDF5 files before enabling HDF5 file locking" ) else: raise RuntimeError( "Close all HDF5 files before disabling HDF5 file locking" ) def _backup_locking_env(self): v = os.environ.get("HDF5_USE_FILE_LOCKING") if v is None: self._HDF5_FILE_LOCKING = None else: self._HDF5_FILE_LOCKING = v == "TRUE" def _restore_locking_env(self): self._set_locking_env(self._HDF5_FILE_LOCKING) self._HDF5_FILE_LOCKING = None
34.040881
135
0.602402
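A short usage sketch for the module above, assuming it is importable as silx.io.h5py_utils; the file and dataset names are placeholders.

from silx.io import h5py_utils

@h5py_utils.retry()
def read_dataset(filename, name):
    # Re-executed until the HDF5 read stops failing, which is useful while
    # another process is still writing the file (e.g. in SWMR mode).
    with h5py_utils.File(filename) as h5file:
        return h5file[name][()]

# Top-level group names that already contain an "end_time" dataset.
names = h5py_utils.top_level_names("scan.h5")  # placeholder file name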
73ce3f7b51071705690f77fb74126eda39ac89a1
1,181
py
Python
codemod_unittest_to_pytest_asserts/tests/unittest_code.py
hanswilw/codemod-unittest-to-pytest-asserts
0e4ba1a221003b8c0f7ca105dcf7180bce5d3cbc
[ "MIT" ]
8
2020-01-09T08:20:20.000Z
2021-06-30T19:00:15.000Z
codemod_unittest_to_pytest_asserts/tests/unittest_code.py
hanswilw/codemod-unittest-to-pytest-asserts
0e4ba1a221003b8c0f7ca105dcf7180bce5d3cbc
[ "MIT" ]
4
2020-03-02T20:54:49.000Z
2021-02-01T16:20:31.000Z
codemod_unittest_to_pytest_asserts/tests/unittest_code.py
hanswilw/codemod-unittest-to-pytest-asserts
0e4ba1a221003b8c0f7ca105dcf7180bce5d3cbc
[ "MIT" ]
1
2021-01-27T12:29:25.000Z
2021-01-27T12:29:25.000Z
class ExampleTest:
    def test_something(self):
        self.assertEqual(1, 1)
        self.assertEqual(1, 1, msg="1 should always be 1")
        self.assertEqual(1, 1, "1 should always be 1")
        self.assertEqual(1, 1)  # 1 should always be one

        def inner_test_method():
            self.assertEqual(1, 1)

        with self.assertRaises(ValueError):
            # This error is always raised!
            raise ValueError("SomeError")

        innerTestMethod()
        self.assertEqual(1, 1)

    def test_lots_of_arguments(self):
        def inside_another_function():
            self.assertEqual(
                get_product_from_backend_product_with_supplier_product_and_cart_etc_etc_etc(
                    product__backend_product_id=self.backend_product.id,
                    product_id=self.product.id,
                    name="Julebrus",
                ),
                True
            )

        self.assertTrue(True)
        self.assertFalse(False)

    def test_assert_raises(self):
        with self.assertRaises(ZeroDivisionError) as exc:
            divide_by_zero = 3 / 0

        self.assertEqual(exc.exception.args[0], 'division by zero')
32.805556
92
0.595258
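The class above is fixture input for the unittest-to-pytest codemod. As a rough illustration (hand-written, not the tool's verbatim output), the first assertions would end up as plain pytest-style asserts along these lines.

import pytest

def test_something_converted():
    # Illustration of the target style only.
    assert 1 == 1
    assert 1 == 1, "1 should always be 1"
    with pytest.raises(ValueError):
        raise ValueError("SomeError")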
73ce4384048cfb48fb719783de737bfc26cf4cb9
3,170
py
Python
cloud_snitch/snitchers/pip.py
bunchc/FleetDeploymentReporting
a93af06573f2715e14b4d18b7a394c26fc19a32d
[ "Apache-2.0" ]
1
2018-07-11T20:20:21.000Z
2018-07-11T20:20:21.000Z
cloud_snitch/snitchers/pip.py
bunchc/FleetDeploymentReporting
a93af06573f2715e14b4d18b7a394c26fc19a32d
[ "Apache-2.0" ]
46
2018-08-08T19:46:15.000Z
2019-02-22T20:33:49.000Z
cloud_snitch/snitchers/pip.py
dani4571/FleetDeploymentReporting
8b132a20fa3e9c4e4215feade79f689be6e4b364
[ "Apache-2.0" ]
7
2018-06-26T15:27:55.000Z
2018-12-05T19:41:25.000Z
import logging

from .base import BaseSnitcher
from cloud_snitch.models import EnvironmentEntity
from cloud_snitch.models import HostEntity
from cloud_snitch.models import PythonPackageEntity
from cloud_snitch.models import VirtualenvEntity

logger = logging.getLogger(__name__)


class PipSnitcher(BaseSnitcher):
    """Models path host -> virtualenv -> python package path in graph."""

    file_pattern = '^pip_list_(?P<hostname>.*).json$'

    def _update_python_package(self, session, virtualenv, pkg):
        """Updates python package in graph.

        :param session: neo4j driver session
        :type session: neo4j.v1.session.BoltSession
        :param virtualenv: Parent Virtualenv object
        :type virtualenv: VirtualenvEntity
        :param pkg: Python package dict. should contain name and version.
        :type pkg: dict
        :returns: PythonPackage object
        :rtype: PythonPackageEntity
        """
        pythonpkg = PythonPackageEntity(
            name=pkg.get('name'),
            version=pkg.get('version')
        )
        pythonpkg.update(session, self.time_in_ms)
        return pythonpkg

    def _update_virtualenv(self, session, host, path, pkglist):
        """Update virtualenv and update child pythonpackages

        :param session: neo4j driver session
        :type session: neo4j.v1.session.BoltSession
        :param host: Parent host object
        :type host: HostEntity
        :param path: Path of virtualenv
        :type path: str
        :param pkglist: List of python package dicts
        :type pkglist: list
        :returns: Virtualenv object
        :rtype: VirtualenvEntity
        """
        virtualenv = VirtualenvEntity(host=host.identity, path=path)
        pkgs = []
        virtualenv.update(session, self.time_in_ms)
        for pkgdict in pkglist:
            pkgs.append(self._update_python_package(
                session,
                virtualenv,
                pkgdict)
            )
        virtualenv.pythonpackages.update(session, pkgs, self.time_in_ms)
        return virtualenv

    def _snitch(self, session):
        """Orchestrates the creation of the environment.

        :param session: neo4j driver session
        :type session: neo4j.v1.session.BoltSession
        """
        env = EnvironmentEntity(uuid=self.run.environment_uuid)

        for hostname, filename in self._find_host_tuples(self.file_pattern):
            virtualenvs = []
            host = HostEntity(hostname=hostname, environment=env.identity)
            host = HostEntity.find(session, host.identity)
            if host is None:
                logger.warning(
                    'Unable to locate host entity {}'.format(hostname)
                )
                continue

            pipdict = self.run.get_object(filename).get('data', {})
            for path, pkglist in pipdict.items():
                virtualenv = self._update_virtualenv(
                    session,
                    host,
                    path,
                    pkglist
                )
                virtualenvs.append(virtualenv)

            host.virtualenvs.update(session, virtualenvs, self.time_in_ms)
34.835165
76
0.619243
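For reference, the payload shape that _snitch() expects from run.get_object() can be sketched as follows; the virtualenv path and package entries are invented examples inferred from how the code iterates the data.

example_pip_list = {
    "data": {
        # virtualenv path -> list of {"name", "version"} package dicts
        "/openstack/venvs/nova": [
            {"name": "pip", "version": "20.3.4"},
            {"name": "requests", "version": "2.25.1"},
        ],
    }
}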
73ce47a04a9e17a6265af97873b32ae5dbcaa6c6
1,727
py
Python
datasets/DET/viewer/qt5_viewer.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
143
2021-12-03T02:33:36.000Z
2022-03-29T00:01:48.000Z
datasets/DET/viewer/qt5_viewer.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
33
2021-12-03T10:32:05.000Z
2022-03-31T02:13:55.000Z
datasets/DET/viewer/qt5_viewer.py
zhangzhengde0225/SwinTrack
526be17f8ef266cb924c6939bd8dda23e9b73249
[ "MIT" ]
24
2021-12-04T06:46:42.000Z
2022-03-30T07:57:47.000Z
from datasets.DET.dataset import DetectionDataset_MemoryMapped
from datasets.base.common.viewer.qt5_viewer import draw_object
from miscellanies.viewer.qt5_viewer import Qt5Viewer
from PyQt5.QtGui import QPixmap, QColor
import random


class DetectionDatasetQt5Viewer:
    def __init__(self, dataset: DetectionDataset_MemoryMapped):
        self.dataset = dataset
        self.viewer = Qt5Viewer()
        self.canvas = self.viewer.get_subplot().create_canvas()

        if self.dataset.has_category_id_name_map():
            self.category_id_color_map = {}
            for category_id in self.dataset.get_category_id_name_map().keys():
                color = [random.randint(0, 255) for _ in range(3)]
                self.category_id_color_map[category_id] = QColor(color[0], color[1], color[2], int(0.5 * 255))
        else:
            self.category_id_color_map = None

        image_names = []
        for index in range(len(self.dataset)):
            image_names.append(str(index))

        self.viewer.get_content_region().new_list(image_names, self._image_selected_callback)

    def _image_selected_callback(self, index: int):
        if index < 0:
            return
        image = self.dataset[index]
        pixmap = QPixmap()
        assert pixmap.load(image.get_image_path())

        canvas = self.canvas
        canvas.set_background(pixmap)

        if len(image) > 0:
            with canvas.get_painter() as painter:
                for object_ in image:
                    draw_object(painter, object_, object_, object_, None, self.category_id_color_map, self.dataset, self.dataset)
        canvas.update()

    def run(self):
        return self.viewer.run_event_loop()
37.543478
110
0.649682
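A minimal way to drive the viewer above, assuming a DetectionDataset_MemoryMapped instance has already been loaded by the surrounding framework.

def show_detection_dataset(dataset):
    # `dataset` is assumed to be a ready DetectionDataset_MemoryMapped.
    viewer = DetectionDatasetQt5Viewer(dataset)
    return viewer.run()  # blocks inside the Qt event loop until closed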
73ce6a98960e99525adda35711dd41113709ca8c
1,209
py
Python
dipy/utils/tripwire.py
martcous/dipy
6bff5655f03db19bde5aa951ffb91987983a889b
[ "MIT" ]
null
null
null
dipy/utils/tripwire.py
martcous/dipy
6bff5655f03db19bde5aa951ffb91987983a889b
[ "MIT" ]
null
null
null
dipy/utils/tripwire.py
martcous/dipy
6bff5655f03db19bde5aa951ffb91987983a889b
[ "MIT" ]
null
null
null
""" Class to raise error for missing modules or other misfortunes """ class TripWireError(AttributeError): """ Exception if trying to use TripWire object """ def is_tripwire(obj): """ Returns True if `obj` appears to be a TripWire object Examples -------- >>> is_tripwire(object()) False >>> is_tripwire(TripWire('some message')) True """ try: obj.any_attribute except TripWireError: return True except: pass return False class TripWire(object): """ Class raising error if used Standard use is to proxy modules that we could not import Examples -------- >>> try: ... import silly_module_name ... except ImportError: ... silly_module_name = TripWire('We do not have silly_module_name') >>> silly_module_name.do_silly_thing('with silly string') #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... TripWireError: We do not have silly_module_name """ def __init__(self, msg): self._msg = msg def __getattr__(self, attr_name): ''' Raise informative error accessing attributes ''' raise TripWireError(self._msg)
24.18
96
0.637717
73ce7286f3790c9aeb4169eb7b99bc3125364774
1,533
py
Python
src/pybricks/geometry.py
TheVinhLuong102/pybricks-api
1259d5d33acb41b383445a4b1776b38084efb481
[ "MIT" ]
51
2020-04-02T10:03:45.000Z
2022-03-27T23:49:39.000Z
src/pybricks/geometry.py
LEGO-Robotics/Pybricks-API
1259d5d33acb41b383445a4b1776b38084efb481
[ "MIT" ]
77
2020-03-22T17:32:14.000Z
2022-03-28T18:02:43.000Z
src/pybricks/geometry.py
LEGO-Robotics/Pybricks-API
1259d5d33acb41b383445a4b1776b38084efb481
[ "MIT" ]
25
2020-03-18T23:35:17.000Z
2022-01-01T12:52:01.000Z
# SPDX-License-Identifier: MIT
# Copyright (c) 2018-2021 The Pybricks Authors

"""Core linear algebra functionality for orientation sensors and robotics."""


class Matrix:
    """Mathematical representation of a matrix. It supports common operations
    such as matrix addition (``+``), subtraction (``-``), and
    multiplication (``*``). A :class:`.Matrix` object is immutable."""

    def __init__(self, rows):
        """
        Arguments:
            rows (list): List of rows. Each row is itself a list of numbers.
        """

    @property
    def T(self):
        """Returns a new :class:`.Matrix` that is the transpose of the
        original."""
        pass

    @property
    def shape(self):
        """Returns a tuple (``m``, ``n``), where ``m`` is the number of rows
        and ``n`` is the number of columns.
        """
        pass


def vector(x, y, z=None):
    """Convenience function to create a :class:`.Matrix` with the
    shape (``3``, ``1``) or (``2``, ``1``).

    Arguments:
        x (float): x-coordinate of the vector.
        y (float): y-coordinate of the vector.
        z (float): z-coordinate of the vector (optional).

    Returns:
        Matrix: A matrix with the shape of a column vector.
    """
    pass


class Axis:
    """Unit axes of a coordinate system.

    .. data:: X = vector(1, 0, 0)
    .. data:: Y = vector(0, 1, 0)
    .. data:: Z = vector(0, 0, 1)
    .. data:: ANY = None
    """

    X = vector(1, 0, 0)
    Y = vector(0, 1, 0)
    Z = vector(0, 0, 1)
    ANY = None
24.333333
77
0.558382
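An illustrative use of the stubs above on a Pybricks hub, where the real implementations back these signatures; the printed shapes follow from the documented behaviour of ``shape`` and ``T``.

from pybricks.geometry import Matrix, vector, Axis

m = Matrix([[1, 0], [0, 1]])   # 2 x 2 identity
v = vector(1, 2, 3)            # 3 x 1 column vector
print(m.shape)                 # (2, 2)
print(v.T.shape)               # (1, 3)
print(Axis.X.shape)            # (3, 1)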
73ce7cb7854fd3205489f1dbc2e9ce60ec84f5bf
558
py
Python
src/utils/tokenizer.py
etalab-ia/pseudo_conseil_etat
c2d8be0289049fe29c3cf5179415a8452605c22e
[ "MIT" ]
4
2020-03-28T14:44:23.000Z
2022-02-01T14:01:34.000Z
src/utils/tokenizer.py
etalab-ia/pseudo_conseil_etat
c2d8be0289049fe29c3cf5179415a8452605c22e
[ "MIT" ]
null
null
null
src/utils/tokenizer.py
etalab-ia/pseudo_conseil_etat
c2d8be0289049fe29c3cf5179415a8452605c22e
[ "MIT" ]
1
2020-02-05T17:57:39.000Z
2020-02-05T17:57:39.000Z
import re

from sacremoses import MosesPunctNormalizer, MosesTokenizer, MosesDetokenizer

mpn = MosesPunctNormalizer(lang="fr")
mt = MosesTokenizer(lang="fr")
detokenizer = MosesDetokenizer()


def moses_detokenize(list_strings):
    return detokenizer.detokenize(list_strings)


def moses_tokenize(phrase):
    phrase = mpn.normalize(phrase)
    tokens = mt.tokenize(phrase)
    return tokens


def tokenize(phrase):
    # TODO: Tokenize with proper tokenizer
    tokens = re.split("[\s,.]+", phrase)
    tokens = [t for t in tokens if t]
    return tokens
22.32
77
0.732975
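Example round trip with the helpers above; the exact tokens depend on the sacremoses French models, so the shown output is indicative only.

tokens = moses_tokenize("Bonjour, le monde !")
print(tokens)                           # e.g. ['Bonjour', ',', 'le', 'monde', '!']
print(moses_detokenize(tokens))         # back to a single string
print(tokenize("Bonjour, le monde !"))  # naive regex-based fallback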
73ce931516d3ff0c26ba5fca2ccbb4c0be5cd37c
22,456
py
Python
TA-zscaler-api/bin/ta_zscaler_api/aob_py3/splunktaucclib/modinput_wrapper/base_modinput.py
LetMeR00t/TA-zscaler-api
9a2cee3954bf75a814bb057cf36eb2b2b4c093f6
[ "MIT" ]
4
2022-03-04T11:11:30.000Z
2022-03-07T09:55:07.000Z
TA-zscaler-api/bin/ta_zscaler_api/aob_py3/splunktaucclib/modinput_wrapper/base_modinput.py
LetMeR00t/TA-zscaler-api
9a2cee3954bf75a814bb057cf36eb2b2b4c093f6
[ "MIT" ]
null
null
null
TA-zscaler-api/bin/ta_zscaler_api/aob_py3/splunktaucclib/modinput_wrapper/base_modinput.py
LetMeR00t/TA-zscaler-api
9a2cee3954bf75a814bb057cf36eb2b2b4c093f6
[ "MIT" ]
null
null
null
# # Copyright 2021 Splunk Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # encoding = utf-8 import copy import json import logging import os import sys import tempfile from solnlib import utils as sutils from solnlib.log import Logs from solnlib.modular_input import checkpointer from splunklib import modularinput as smi from splunktaucclib.global_config import GlobalConfig, GlobalConfigSchema from splunktaucclib.splunk_aoblib.rest_helper import TARestHelper from splunktaucclib.splunk_aoblib.setup_util import Setup_Util DATA_INPUTS_OPTIONS = "data_inputs_options" AOB_TEST_FLAG = "AOB_TEST" FIELD_TYPE = "type" FIELD_FORMAT = "format_type" CUSTOMIZED_VAR = "customized_var" TYPE_CHECKBOX = "checkbox" TYPE_ACCOUNT = "global_account" class BaseModInput(smi.Script): """ This is a modular input wrapper, which provides some helper functions to read the paramters from setup pages and the arguments from input definition """ LogLevelMapping = { "debug": logging.DEBUG, "info": logging.INFO, "warning": logging.WARNING, "error": logging.ERROR, "critical": logging.CRITICAL, } def __init__(self, app_namespace, input_name, use_single_instance=False): super().__init__() self.use_single_instance = use_single_instance self._canceled = False self.input_type = input_name self.input_stanzas = {} self.context_meta = {} self.namespace = app_namespace # redirect all the logging to one file Logs.set_context(namespace=app_namespace, root_logger_log_file=input_name) self.logger = logging.getLogger() self.logger.setLevel(logging.INFO) self.rest_helper = TARestHelper(self.logger) # check point self.ckpt = None self.setup_util = None @property def app(self): return self.get_app_name() @property def global_setup_util(self): """ This is a private API used in AoB code internally. It is not allowed to be used in user's code. :return: setup util instance to read global configurations """ return self.setup_util def get_app_name(self): """Get TA name. :return: the name of TA this modular input is in """ raise NotImplemented def get_scheme(self): """Get basic scheme, with use_single_instance field set. :return: a basic input scheme """ scheme = smi.Scheme(self.input_type) scheme.use_single_instance = self.use_single_instance return scheme def stream_events(self, inputs, ew): """The method called to stream events into Splunk. This method overrides method in splunklib modular input. It pre-processes the input args and call collect_events to stream events. :param inputs: An ``InputDefinition`` object. :param ew: An object with methods to write events and log messages to Splunk. 
""" # the input metadata is like # { # 'server_uri': 'https://127.0.0.1:8089', # 'server_host': 'localhost', # 'checkpoint_dir': '...', # 'session_key': 'ceAvf3z^hZHYxe7wjTyTNo6_0ZRpf5cvWPdtSg' # } self.context_meta = inputs.metadata # init setup util uri = inputs.metadata["server_uri"] session_key = inputs.metadata["session_key"] self.setup_util = Setup_Util(uri, session_key, self.logger) input_definition = smi.input_definition.InputDefinition() input_definition.metadata = copy.deepcopy(inputs.metadata) input_definition.inputs = copy.deepcopy(inputs.inputs) try: self.parse_input_args(input_definition) except Exception as e: import traceback self.log_error(traceback.format_exc()) print(traceback.format_exc(), file=sys.stderr) # print >> sys.stderr, traceback.format_exc() self.input_stanzas = {} if not self.input_stanzas: # if no stanza found. Just return return try: self.set_log_level(self.log_level) except: self.log_debug("set log level fails.") try: self.collect_events(ew) except Exception as e: import traceback self.log_error( "Get error when collecting events.\n" + traceback.format_exc() ) print(traceback.format_exc(), file=sys.stderr) # print >> sys.stderr, traceback.format_exc() raise RuntimeError(str(e)) def collect_events(self, event_writer): """Collect events and stream to Splunk using event writer provided. Note: This method is originally collect_events(self, inputs, event_writer). :param event_writer: An object with methods to write events and log messages to Splunk. """ raise NotImplemented() def parse_input_args(self, inputs): """Parse input arguments, either from os environment when testing or from global configuration. :param inputs: An ``InputDefinition`` object. :return: """ if os.environ.get(AOB_TEST_FLAG, "false") == "true": self._parse_input_args_from_env(inputs) else: self._parse_input_args_from_global_config(inputs) if not self.use_single_instance: assert len(self.input_stanzas) == 1 def _parse_input_args_from_global_config(self, inputs): """Parse input arguments from global configuration. :param inputs: """ # dirname at this point will be <splunk_home>/etc/apps/<ta-name>/lib/splunktaucclib/modinput_wrapper, go up 3 dirs from this file to find the root TA directory dirname = os.path.dirname config_path = os.path.join( dirname(dirname(dirname(dirname(__file__)))), "appserver", "static", "js", "build", "globalConfig.json", ) with open(config_path) as f: schema_json = "".join([l for l in f]) global_schema = GlobalConfigSchema(json.loads(schema_json)) uri = inputs.metadata["server_uri"] session_key = inputs.metadata["session_key"] global_config = GlobalConfig(uri, session_key, global_schema) ucc_inputs = global_config.inputs.load(input_type=self.input_type) all_stanzas = ucc_inputs.get(self.input_type, {}) if not all_stanzas: # for single instance input. There might be no input stanza. # Only the default stanza. In this case, modinput should exit. 
self.log_warning("No stanza found for input type: " + self.input_type) sys.exit(0) account_fields = self.get_account_fields() checkbox_fields = self.get_checkbox_fields() self.input_stanzas = {} for stanza in all_stanzas: full_stanza_name = "{}://{}".format(self.input_type, stanza.get("name")) if full_stanza_name in inputs.inputs: if stanza.get("disabled", False): raise RuntimeError("Running disabled data input!") stanza_params = {} for k, v in stanza.items(): if k in checkbox_fields: stanza_params[k] = sutils.is_true(v) elif k in account_fields: stanza_params[k] = copy.deepcopy(v) else: stanza_params[k] = v self.input_stanzas[stanza.get("name")] = stanza_params def _parse_input_args_from_env(self, inputs): """Parse input arguments from os environment. This is used for testing inputs. :param inputs: """ data_inputs_options = json.loads(os.environ.get(DATA_INPUTS_OPTIONS, "[]")) account_fields = self.get_account_fields() checkbox_fields = self.get_checkbox_fields() self.input_stanzas = {} while len(inputs.inputs) > 0: input_stanza, stanza_args = inputs.inputs.popitem() kind_and_name = input_stanza.split("://") if len(kind_and_name) == 2: stanza_params = {} for arg_name, arg_value in stanza_args.items(): try: arg_value_trans = json.loads(arg_value) except ValueError: arg_value_trans = arg_value stanza_params[arg_name] = arg_value_trans if arg_name in account_fields: stanza_params[arg_name] = self.get_user_credential_by_id( arg_value_trans ) elif arg_name in checkbox_fields: stanza_params[arg_name] = sutils.is_true(arg_value_trans) self.input_stanzas[kind_and_name[1]] = stanza_params def get_account_fields(self): """Get the names of account variables. Should be implemented in subclass. :return: a list of variable names """ raise NotImplemented def get_checkbox_fields(self): """Get the names of checkbox variables. Should be implemented in subclass. :return: a list of variable names """ raise NotImplemented def get_global_checkbox_fields(self): """Get the names of checkbox global parameters. :return: a list of global variable names """ raise NotImplemented # Global setting related functions. # Global settings consist of log setting, proxy, account(user_credential) and customized settings. @property def log_level(self): return self.get_log_level() def get_log_level(self): """Get the log level configured in global configuration. :return: log level set in global configuration or "INFO" by default. """ return self.setup_util.get_log_level() def set_log_level(self, level): """Set the log level this python process uses. :param level: log level in `string`. Accept "DEBUG", "INFO", "WARNING", "ERROR" and "CRITICAL". """ if isinstance(level, str): level = level.lower() if level in self.LogLevelMapping: level = self.LogLevelMapping[level] else: level = logging.INFO self.logger.setLevel(level) def log(self, msg): """Log msg using logging level in global configuration. :param msg: log `string` """ self.logger.log(level=self.log_level, msg=msg) def log_debug(self, msg): """Log msg using logging.DEBUG level. :param msg: log `string` """ self.logger.debug(msg) def log_info(self, msg): """Log msg using logging.INFO level. :param msg: log `string` """ self.logger.info(msg) def log_warning(self, msg): """Log msg using logging.WARNING level. :param msg: log `string` """ self.logger.warning(msg) def log_error(self, msg): """Log msg using logging.ERROR level. :param msg: log `string` """ self.logger.error(msg) def log_critical(self, msg): """Log msg using logging.CRITICAL level. 
:param msg: log `string` """ self.logger.critical(msg) @property def proxy(self): return self.get_proxy() def get_proxy(self): """Get proxy settings in global configuration. Proxy settings include fields "proxy_url", "proxy_port", "proxy_username", "proxy_password", "proxy_type" and "proxy_rdns". :return: a `dict` containing proxy parameters or empty `dict` if proxy is not set. """ return self.setup_util.get_proxy_settings() def get_user_credential_by_username(self, username): """Get global credential information based on username. Credential settings include fields "name"(account id), "username" and "password". :param username: `string` :return: if credential with username exists, return a `dict`, else None. """ return self.setup_util.get_credential_by_username(username) def get_user_credential_by_id(self, account_id): """Get global credential information based on account id. Credential settings include fields "name"(account id), "username" and "password". :param account_id: `string` :return: if credential with account_id exists, return a `dict`, else None. """ return self.setup_util.get_credential_by_id(account_id) def get_global_setting(self, var_name): """Get customized setting value configured in global configuration. :param var_name: `string` :return: customized global configuration value or None """ var_value = self.setup_util.get_customized_setting(var_name) if var_value is not None and var_name in self.get_global_checkbox_fields(): var_value = sutils.is_true(var_value) return var_value # Functions to help create events. def new_event( self, data, time=None, host=None, index=None, source=None, sourcetype=None, done=True, unbroken=True, ): """Create a Splunk event object. :param data: ``string``, the event's text. :param time: ``float``, time in seconds, including up to 3 decimal places to represent milliseconds. :param host: ``string``, the event's host, ex: localhost. :param index: ``string``, the index this event is specified to write to, or None if default index. :param source: ``string``, the source of this event, or None to have Splunk guess. :param sourcetype: ``string``, source type currently set on this event, or None to have Splunk guess. :param done: ``boolean``, is this a complete ``Event``? False if an ``Event`` fragment. :param unbroken: ``boolean``, Is this event completely encapsulated in this ``Event`` object? :return: ``Event`` object """ return smi.Event( data=data, time=time, host=host, index=index, source=source, sourcetype=sourcetype, done=done, unbroken=unbroken, ) # Basic get functions. To get params in input stanza. def get_input_type(self): """Get input type. :return: the modular input type """ return self.input_type def get_input_stanza(self, input_stanza_name=None): """Get input stanzas. If stanza name is None, return a dict with stanza name as key and params as values. Else return a dict with param name as key and param value as value. :param input_stanza_name: None or `string` :return: `dict` """ if input_stanza_name: return self.input_stanzas.get(input_stanza_name, None) return self.input_stanzas def get_input_stanza_names(self): """Get all stanza names this modular input instance is given. For multi instance mode, a single string value will be returned. For single instance mode, stanza names will be returned in a list. 
:return: `string` or `list` """ if self.input_stanzas: names = list(self.input_stanzas.keys()) if self.use_single_instance: return names else: assert len(names) == 1 return names[0] return None def get_arg(self, arg_name, input_stanza_name=None): """Get the input argument. If input_stanza_name is not provided: For single instance mode, return a dict <input_name, arg_value>. For multi instance mode, return a single value or None. If input_stanza_name is provided, return a single value or None. :param arg_name: `string`, argument name :param input_stanza_name: None or `string`, a stanza name :return: `dict` or `string` or None """ if input_stanza_name is None: args_dict = { k: args[arg_name] for k, args in self.input_stanzas.items() if arg_name in args } if self.use_single_instance: return args_dict else: if len(args_dict) == 1: return list(args_dict.values())[0] return None else: return self.input_stanzas.get(input_stanza_name, {}).get(arg_name, None) def get_output_index(self, input_stanza_name=None): """Get output Splunk index. :param input_stanza_name: `string` :return: `string` output index """ return self.get_arg("index", input_stanza_name) def get_sourcetype(self, input_stanza_name=None): """Get sourcetype to index. :param input_stanza_name: `string` :return: the sourcetype to index to """ return self.get_arg("sourcetype", input_stanza_name) # HTTP request helper def send_http_request( self, url, method, parameters=None, payload=None, headers=None, cookies=None, verify=True, cert=None, timeout=None, use_proxy=True, ): """Send http request and get response. :param url: URL for the new Request object. :param method: method for the new Request object. Can be "GET", "POST", "PUT", "DELETE" :param parameters: (optional) Dictionary or bytes to be sent in the query string for the Request. :param payload: (optional) Dictionary, bytes, or file-like object to send in the body of the Request. :param headers: (optional) Dictionary of HTTP Headers to send with the Request. :param cookies: (optional) Dict or CookieJar object to send with the Request. :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (connect timeout, read timeout) tuple. Default to (10.0, 5.0). :param use_proxy: (optional) whether to use proxy. If set to True, proxy in global setting will be used. 
:return: Response """ return self.rest_helper.send_http_request( url=url, method=method, parameters=parameters, payload=payload, headers=headers, cookies=cookies, verify=verify, cert=cert, timeout=timeout, proxy_uri=self._get_proxy_uri() if use_proxy else None, ) def _get_proxy_uri(self): uri = None proxy = self.get_proxy() if proxy and proxy.get("proxy_url") and proxy.get("proxy_type"): uri = proxy["proxy_url"] if proxy.get("proxy_port"): uri = "{}:{}".format(uri, proxy.get("proxy_port")) if proxy.get("proxy_username") and proxy.get("proxy_password"): uri = "{}://{}:{}@{}/".format( proxy["proxy_type"], proxy["proxy_username"], proxy["proxy_password"], uri, ) else: uri = "{}://{}".format(proxy["proxy_type"], uri) return uri # Checkpointing related functions def _init_ckpt(self): if self.ckpt is None: if "AOB_TEST" in os.environ: ckpt_dir = self.context_meta.get("checkpoint_dir", tempfile.mkdtemp()) if not os.path.exists(ckpt_dir): os.makedirs(ckpt_dir) self.ckpt = checkpointer.FileCheckpointer(ckpt_dir) else: if "server_uri" not in self.context_meta: raise ValueError("server_uri not found in input meta.") if "session_key" not in self.context_meta: raise ValueError("session_key not found in input meta.") dscheme, dhost, dport = sutils.extract_http_scheme_host_port( self.context_meta["server_uri"] ) self.ckpt = checkpointer.KVStoreCheckpointer( self.app + "_checkpointer", self.context_meta["session_key"], self.app, scheme=dscheme, host=dhost, port=dport, ) def get_check_point(self, key): """Get checkpoint. :param key: `string` :return: Checkpoint state if exists else None. """ if self.ckpt is None: self._init_ckpt() return self.ckpt.get(key) def save_check_point(self, key, state): """Update checkpoint. :param key: Checkpoint key. `string` :param state: Checkpoint state. """ if self.ckpt is None: self._init_ckpt() self.ckpt.update(key, state) def batch_save_check_point(self, states): """Batch update checkpoint. :param states: a `dict` states with checkpoint key as key and checkpoint state as value. """ if self.ckpt is None: self._init_ckpt() self.ckpt.batch_update(states) def delete_check_point(self, key): """Delete checkpoint. :param key: Checkpoint key. `string` """ if self.ckpt is None: self._init_ckpt() self.ckpt.delete(key)
38.452055
167
0.607009
73ce9fcc4ebbb916ead89f82e6ec96d7e3730701
3,230
py
Python
mountaintools/vdomr/devel/devel.py
tjd2002/spikeforest2
2e393564b858b2995aa2ccccd9bd73065681b5de
[ "Apache-2.0" ]
null
null
null
mountaintools/vdomr/devel/devel.py
tjd2002/spikeforest2
2e393564b858b2995aa2ccccd9bd73065681b5de
[ "Apache-2.0" ]
null
null
null
mountaintools/vdomr/devel/devel.py
tjd2002/spikeforest2
2e393564b858b2995aa2ccccd9bd73065681b5de
[ "Apache-2.0" ]
null
null
null
from IPython.display import HTML import os import vdomr as vd import base64 def loadBootstrap(): url = 'https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css' integrity = 'sha384-BVYiiSIFeK1dGmJRAkycuHAHRg32OmUcww7on3RYdg4Va+PmSTsz/K68vbdEjh4u' crossorigin = 'anonymous' loadCss(url=url, integrity=integrity, crossorigin=crossorigin) # use one of url, path, css def loadCss(*, url=None, path=None, css=None, integrity=None, crossorigin=None): if path: with open(path) as f: css = f.read() loadCss(css=css) return if css: js = """ let style = document.createElement( "style" ); style.appendChild(document.createTextNode(atob('{css_b64}'))); document.getElementsByTagName( "head" )[0].appendChild( style ); """ css_b64 = base64.b64encode(css.encode('utf-8')).decode() js = css_b64.join(js.split('{css_b64}')) loadJavascript(js=js) # display(HTML('<style>{}</style>'.format(css))) return if url: attrs = [] # if integrity: # attrs.append('integrity={}'.format(integrity)) # if crossorigin: # attrs.append('crossorigin={}'.format(crossorigin)) html = '<link rel="stylesheet" href="{}" {}'.format( url, ' '.join(attrs)) js = """ let link = document.createElement( "link" ); link.href = '{url}'; link.type = "text/css"; link.rel = "stylesheet"; document.getElementsByTagName( "head" )[0].appendChild( link ); """ js = url.join(js.split('{url}')) loadJavascript(js=js) # display(HTML(html)) loaded_javascript_files = {} # use one of url, path, js def loadJavascript(*, url=None, path=None, js=None, delay=None): if path: modified_timestamp = os.path.getmtime(path) colab_mode = (vd.mode() == 'colab') # if (not colab_mode) and (path in loaded_javascript_files): # if loaded_javascript_files[path]['mtime']==modified_timestamp: # already loaded # return with open(path) as f: js = f.read() loadJavascript(js=js, delay=delay) # loaded_javascript_files[path]=dict(mtime=modified_timestamp) return if js: if delay is not None: js2 = """ setTimeout(function() { {js} },{delay}) """ js2 = str(delay).join(js2.split('{delay}')) js2 = js.join(js2.split('{js}')) js = js2 js = '{\n'+js+'\n}' vd.exec_javascript(js) return if url: colab_mode = (vd.mode() == 'colab') # if (not colab_mode) and (url in loaded_javascript_files): # already loaded # return if delay is not None: raise Exception('Cannot use delay with url parameter') js0 = """ let script = document.createElement( "script" ); script.src = '{url}'; document.getElementsByTagName( "head" )[0].appendChild( script ); """ js0 = url.join(js0.split('{url}')) loadJavascript(js=js0) # display(HTML('<script src="{}"></script>'.format(url))) # loaded_javascript_files[path]=dict(mtime=True)
30.471698
89
0.581734
73cea7090458b9b120ff94fb70d5e50230f9a772
2,974
py
Python
test/test_gpy.py
gregorgebhardt/kb_learning
41e18c1238e0ea891d48aff63588366dae64e4c8
[ "BSD-3-Clause" ]
null
null
null
test/test_gpy.py
gregorgebhardt/kb_learning
41e18c1238e0ea891d48aff63588366dae64e4c8
[ "BSD-3-Clause" ]
null
null
null
test/test_gpy.py
gregorgebhardt/kb_learning
41e18c1238e0ea891d48aff63588366dae64e4c8
[ "BSD-3-Clause" ]
null
null
null
import matplotlib matplotlib.use('PS') import matplotlib.pyplot as plt import numpy as np import pandas as pd import scipy.stats import GPy from paramz.transformations import Logexp from GPy.core.parameterization import Param from kb_learning.kernel import KilobotEnvKernel # it_sars = pd.read_pickle('/home/gebhardt/Desktop/it_sars_15kbts.pkl') # # kernel = KilobotEnvKernel(kilobots_dim=30, light_dim=2) # # kernel.kilobots_bandwidth = [.04, .04] # kernel.light_bandwidth = [.1] kernel_function = GPy.kern.RBF(input_dim=1) # sinus curve with Gaussian noise sample_size = 100 samples_x = np.random.rand(sample_size) * 2 * np.pi - np.pi noise_std = .2 sigma_sqr = noise_std ** 2 samples_y = np.sin(samples_x) + np.random.normal(0, noise_std, (sample_size)) # weights loc = -2. scale = .5 weights = scipy.stats.norm.pdf(samples_x, loc=loc, scale=scale) weights /= weights.max() # sparse set sparse_size = 10 sparse_index = [np.random.randint(sample_size)] sparse_x = samples_x[sparse_index] for i in range(1, sparse_size): K = kernel_function.K(sparse_x[:, None], samples_x[:, None]) sparse_index += [K.max(axis=0).argmin()] sparse_x = samples_x[sparse_index] # Y_metadata = dict(output_index=np.arange(100)[:, None]) # llh = GPy.likelihoods.HeteroscedasticGaussian(variance=sigma_sqr, Y_metadata=Y_metadata) # llh.variance *= 1 / weights[:, None] # llh.variance.fix() # predict_llh = GPy.likelihoods.Gaussian(variance=sigma_sqr) # gp_sparse = GPy.core.SparseGP(X=samples_x[:, None], Y=samples_y[:, None], Z=sparse_x[:, None], # kernel=kernel_function, likelihood=llh, Y_metadata=dict(output_index=np.arange(100))) # # gp_sparse.inducing_inputs.fix() # gp_sparse.optimize('bfgs') # _ = gp_sparse.plot(lower=-2, upper=2, predict_kw=dict(likelihood=predict_llh)) # _ = plt.scatter(samples_x, weights, color='grey', marker='.') # print(gp_sparse) Y_metadata = dict(output_index=np.arange(100)) het_llh = GPy.likelihoods.HeteroscedasticGaussian(Y_metadata=Y_metadata) het_llh.variance = 1 / weights het_llh.variance.fix() noise_llh = GPy.likelihoods.Gaussian(variance=sigma_sqr) llh2 = GPy.likelihoods.MixedNoise(likelihoods_list=[het_llh, noise_llh]) gp_sparse = GPy.core.SparseGP(X=samples_x[:, None], Y=samples_y[:, None], Z=sparse_x[:, None], kernel=GPy.kern.RBF(input_dim=1), likelihood=llh2, inference_method=MyVarDTC(limit=3), Y_metadata=dict(output_index=np.arange(100))) class SemiHeteroscedasticGaussian(GPy.likelihoods.Gaussian): def __init__(self, het_variance, variance=1., name='semi_het_Gauss'): super().__init__(variance=variance, name=name) self.het_variance = Param('het_variance', het_variance, Logexp()) self.link_parameter(self.het_variance) self.het_variance.fix() def gaussian_variance(self, Y_metadata=None): return self.het_variance[Y_metadata['output_index'].flatten()] * self.variance
35.404762
117
0.725958
73cea868baa85352231e0f813ae999d86cbe57da
2,613
py
Python
mlinspect/backends/backend_utils.py
stefan-grafberger/mlinspect-cidr
4a15068a1652c043021d04795bd89da32ec20992
[ "Apache-2.0" ]
5
2020-08-26T13:32:13.000Z
2020-10-20T15:34:57.000Z
mlinspect/backends/backend_utils.py
stefan-grafberger/mlinspect-cidr
4a15068a1652c043021d04795bd89da32ec20992
[ "Apache-2.0" ]
1
2020-08-29T22:53:27.000Z
2020-08-29T22:53:27.000Z
mlinspect/backends/backend_utils.py
stefan-grafberger/mlinspect-cidr
4a15068a1652c043021d04795bd89da32ec20992
[ "Apache-2.0" ]
null
null
null
""" Some utility functions the different instrumentation backends """ import itertools from functools import partial import numpy from pandas import DataFrame from ..inspections.inspection_input import InspectionInputRow def build_annotation_df_from_iters(inspections, annotation_iterators): """ Build the annotations dataframe """ annotation_iterators = itertools.zip_longest(*annotation_iterators) inspection_names = [str(inspection) for inspection in inspections] annotations_df = DataFrame(annotation_iterators, columns=inspection_names) return annotations_df def get_df_row_iterator(dataframe): """ Create an efficient iterator for the data frame rows. The implementation is inspired by the implementation of the pandas DataFrame.itertuple method """ arrays = [] fields = list(dataframe.columns) arrays.extend(dataframe.iloc[:, k] for k in range(0, len(dataframe.columns))) partial_func_create_row = partial(InspectionInputRow, fields=fields) return map(partial_func_create_row, map(list, zip(*arrays))) def get_series_row_iterator(series): """ Create an efficient iterator for the data frame rows. The implementation is inspired by the implementation of the pandas DataFrame.itertuple method """ fields = list(["array"]) numpy_iterator = series.__iter__() partial_func_create_row = partial(InspectionInputRow, fields=fields) return map(partial_func_create_row, map(list, zip(numpy_iterator))) def get_numpy_array_row_iterator(nparray, nditer=True): """ Create an efficient iterator for the data frame rows. The implementation is inspired by the implementation of the pandas DataFrame.itertuple method """ fields = list(["array"]) if nditer is True: numpy_iterator = numpy.nditer(nparray, ["refs_ok"]) else: numpy_iterator = nparray.__iter__() partial_func_create_row = partial(InspectionInputRow, fields=fields) return map(partial_func_create_row, map(list, zip(numpy_iterator))) def get_csr_row_iterator(csr): """ Create an efficient iterator for csr rows. The implementation is inspired by the implementation of the pandas DataFrame.itertuple method """ # TODO: Maybe there is a way to use sparse rows that is faster # However, this is the fastest way I discovered so far np_array = csr.toarray() fields = list(["array"]) numpy_iterator = np_array.__iter__() partial_func_create_row = partial(InspectionInputRow, fields=fields) return map(partial_func_create_row, map(list, zip(numpy_iterator)))
34.381579
97
0.745886
73cee5308f0060794b159143fa7bd12a6921568c
13,246
py
Python
libcity/model/traffic_speed_prediction/STTN.py
LibCity/Bigscity-LibCity-Docs-zh_CN
2be639c3fe7d75727ade18f473d6f625900f73f2
[ "Apache-2.0" ]
5
2021-09-28T12:32:50.000Z
2022-02-03T09:04:35.000Z
libcity/model/traffic_speed_prediction/STTN.py
aptx1231/Bigscity-TrafficDL-Docs-zh_CN
2be639c3fe7d75727ade18f473d6f625900f73f2
[ "Apache-2.0" ]
null
null
null
libcity/model/traffic_speed_prediction/STTN.py
aptx1231/Bigscity-TrafficDL-Docs-zh_CN
2be639c3fe7d75727ade18f473d6f625900f73f2
[ "Apache-2.0" ]
1
2021-12-16T05:10:35.000Z
2021-12-16T05:10:35.000Z
from logging import getLogger import math import torch import torch.nn as nn import torch.nn.functional as F from libcity.model import loss from libcity.model.abstract_traffic_state_model import AbstractTrafficStateModel class SSelfAttention(nn.Module): def __init__(self, embed_dim, num_heads): super().__init__() self.embed_dim = embed_dim self.num_heads = num_heads self.head_dim = embed_dim // num_heads assert ( self.head_dim * num_heads == embed_dim ), "Embedding dim needs to be divisible by num_heads" self.values = nn.Linear(self.head_dim, self.head_dim, bias=False) self.keys = nn.Linear(self.head_dim, self.head_dim, bias=False) self.queries = nn.Linear(self.head_dim, self.head_dim, bias=False) self.fc_out = nn.Linear(num_heads * self.head_dim, embed_dim) def forward(self, values, keys, query): batch_size, num_nodes, input_window, embed_dim = query.shape values = values.reshape(batch_size, num_nodes, input_window, self.num_heads, self.head_dim) keys = keys.reshape(batch_size, num_nodes, input_window, self.num_heads, self.head_dim) query = query.reshape(batch_size, num_nodes, input_window, self.num_heads, self.head_dim) values = self.values(values) keys = self.keys(keys) queries = self.queries(query) energy = torch.einsum("bqthd,bkthd->bqkth", [queries, keys]) attention = torch.softmax(energy / (self.embed_dim ** (1 / 2)), dim=2) out = torch.einsum("bqkth,bkthd->bqthd", [attention, values]).reshape( batch_size, num_nodes, input_window, self.num_heads * self.head_dim ) out = self.fc_out(out) return out class TSelfAttention(nn.Module): def __init__(self, embed_dim, num_heads): super().__init__() self.embed_dim = embed_dim self.num_heads = num_heads self.head_dim = embed_dim // num_heads assert ( self.head_dim * num_heads == embed_dim ), "Embedding dim needs to be divisible by num_heads" self.values = nn.Linear(self.head_dim, self.head_dim, bias=False) self.keys = nn.Linear(self.head_dim, self.head_dim, bias=False) self.queries = nn.Linear(self.head_dim, self.head_dim, bias=False) self.fc_out = nn.Linear(num_heads * self.head_dim, embed_dim) def forward(self, values, keys, query): batch_size, num_nodes, input_window, embed_dim = query.shape values = values.reshape(batch_size, num_nodes, input_window, self.num_heads, self.head_dim) keys = keys.reshape(batch_size, num_nodes, input_window, self.num_heads, self.head_dim) query = query.reshape(batch_size, num_nodes, input_window, self.num_heads, self.head_dim) values = self.values(values) keys = self.keys(keys) queries = self.queries(query) energy = torch.einsum("bnqhd,bnkhd->bnqkh", [queries, keys]) attention = torch.softmax(energy / (self.embed_dim ** (1 / 2)), dim=3) out = torch.einsum("bnqkh,bnkhd->bnqhd", [attention, values]).reshape( batch_size, num_nodes, input_window, self.num_heads * self.head_dim ) out = self.fc_out(out) return out class GraphConvolution(nn.Module): def __init__(self, in_features, out_features, bias=True, device=torch.device('cpu')): super().__init__() self.weight = nn.Parameter(torch.FloatTensor(in_features, out_features).to(device)) if bias: self.bias = nn.Parameter(torch.FloatTensor(out_features).to(device)) else: self.register_parameter('bias', None) self.reset_parameters() def reset_parameters(self): stdv = 1. 
/ math.sqrt(self.weight.size(1)) self.weight.data.uniform_(-stdv, stdv) if self.bias is not None: self.bias.data.uniform_(-stdv, stdv) def forward(self, x, adj_mx): support = torch.einsum("bnd, dh->bnh", [x, self.weight]) output = torch.einsum("mn,bnh->bmh", [adj_mx, support]) if self.bias is not None: return output + self.bias else: return output def __repr__(self): return self.__class__.__name__ + ' (' \ + str(self.in_features) + ' -> ' \ + str(self.out_features) + ')' class GCN(nn.Module): def __init__(self, nfeat, nhid, nclass, dropout_rate=0, device=torch.device('cpu')): super().__init__() self.gc1 = GraphConvolution(nfeat, nhid, device=device) self.gc2 = GraphConvolution(nhid, nclass, device=device) self.dropout_rate = dropout_rate def forward(self, x, adj_mx): x = F.relu(self.gc1(x, adj_mx)) x = F.dropout(x, self.dropout_rate, training=self.training) x = self.gc2(x, adj_mx) return F.log_softmax(x, dim=2) class STransformer(nn.Module): def __init__(self, adj_mx, embed_dim=64, num_heads=2, forward_expansion=4, dropout_rate=0, device=torch.device('cpu')): super().__init__() self.device = device self.adj_mx = torch.FloatTensor(adj_mx).to(device) self.D_S = nn.Parameter(torch.FloatTensor(adj_mx).to(device)) self.embed_linear = nn.Linear(adj_mx.shape[0], embed_dim) self.attention = SSelfAttention(embed_dim, num_heads) self.norm1 = nn.LayerNorm(embed_dim) self.norm2 = nn.LayerNorm(embed_dim) self.feed_forward = nn.Sequential( nn.Linear(embed_dim, forward_expansion * embed_dim), nn.ReLU(), nn.Linear(forward_expansion * embed_dim, embed_dim), ) self.gcn = GCN(embed_dim, embed_dim * 2, embed_dim, dropout_rate, device=device) self.norm_adj = nn.InstanceNorm2d(1) self.dropout_layer = nn.Dropout(dropout_rate) self.fs = nn.Linear(embed_dim, embed_dim) self.fg = nn.Linear(embed_dim, embed_dim) def forward(self, value, key, query): batch_size, num_nodes, input_windows, embed_dim = query.shape D_S = self.embed_linear(self.D_S) D_S = D_S.expand(batch_size, input_windows, num_nodes, embed_dim) D_S = D_S.permute(0, 2, 1, 3) X_G = torch.Tensor(query.shape[0], query.shape[1], 0, query.shape[3]).to(self.device) self.adj_mx = self.adj_mx.unsqueeze(0).unsqueeze(0) self.adj_mx = self.norm_adj(self.adj_mx) self.adj_mx = self.adj_mx.squeeze(0).squeeze(0) for t in range(query.shape[2]): o = self.gcn(query[:, :, t, :], self.adj_mx) o = o.unsqueeze(2) X_G = torch.cat((X_G, o), dim=2) query = query + D_S attention = self.attention(value, key, query) x = self.dropout_layer(self.norm1(attention + query)) forward = self.feed_forward(x) U_S = self.dropout_layer(self.norm2(forward + x)) g = torch.sigmoid(self.fs(U_S) + self.fg(X_G)) out = g * U_S + (1 - g) * X_G return out class TTransformer(nn.Module): def __init__(self, TG_per_day=228, embed_dim=64, num_heads=2, forward_expansion=4, dropout_rate=0, device=torch.device('cpu')): super().__init__() self.device = device self.temporal_embedding = nn.Embedding(TG_per_day, embed_dim) self.attention = TSelfAttention(embed_dim, num_heads) self.norm1 = nn.LayerNorm(embed_dim) self.norm2 = nn.LayerNorm(embed_dim) self.feed_forward = nn.Sequential( nn.Linear(embed_dim, forward_expansion * embed_dim), nn.ReLU(), nn.Linear(forward_expansion * embed_dim, embed_dim), ) self.dropout_layer = nn.Dropout(dropout_rate) def forward(self, value, key, query): batch_size, num_nodes, input_windows, embed_dim = query.shape D_T = self.temporal_embedding(torch.arange(0, input_windows).to(self.device)) D_T = D_T.expand(batch_size, num_nodes, input_windows, embed_dim) query = query + D_T attention 
= self.attention(value, key, query) x = self.dropout_layer(self.norm1(attention + query)) forward = self.feed_forward(x) out = self.dropout_layer(self.norm2(forward + x)) return out class STTransformerBlock(nn.Module): def __init__(self, adj_mx, embed_dim=64, num_heads=2, TG_per_day=288, forward_expansion=4, dropout_rate=0, device=torch.device('cpu')): super().__init__() self.STransformer = STransformer( adj_mx, embed_dim=embed_dim, num_heads=num_heads, forward_expansion=forward_expansion, dropout_rate=dropout_rate, device=device) self.TTransformer = TTransformer( TG_per_day=TG_per_day, embed_dim=embed_dim, num_heads=num_heads, forward_expansion=forward_expansion, dropout_rate=dropout_rate, device=device) self.norm1 = nn.LayerNorm(embed_dim) self.norm2 = nn.LayerNorm(embed_dim) self.dropout_layer = nn.Dropout(dropout_rate) def forward(self, value, key, query): x1 = self.norm1(self.STransformer(value, key, query) + query) x2 = self.dropout_layer(self.norm2(self.TTransformer(x1, x1, x1) + x1)) return x2 class Encoder(nn.Module): def __init__(self, adj_mx, embed_dim=64, num_layers=3, num_heads=2, TG_per_day=288, forward_expansion=4, dropout_rate=0, device=torch.device('cpu')): super().__init__() self.layers = nn.ModuleList([ STTransformerBlock( adj_mx, embed_dim=embed_dim, num_heads=num_heads, TG_per_day=TG_per_day, forward_expansion=forward_expansion, dropout_rate=dropout_rate, device=device ) for _ in range(num_layers) ]) self.dropout_layer = nn.Dropout(dropout_rate) def forward(self, x): out = self.dropout_layer(x) for layer in self.layers: out = layer(out, out, out) return out class Transformer(nn.Module): def __init__(self, adj_mx, embed_dim=64, num_layers=3, num_heads=2, TG_per_day=288, forward_expansion=4, dropout_rate=0, device=torch.device('cpu')): super().__init__() self.encoder = Encoder(adj_mx, embed_dim=embed_dim, num_layers=num_layers, num_heads=num_heads, TG_per_day=TG_per_day, forward_expansion=forward_expansion, dropout_rate=dropout_rate, device=device) def forward(self, src): enc_src = self.encoder(src) return enc_src class STTN(AbstractTrafficStateModel): def __init__(self, config, data_feature): super().__init__(config, data_feature) self._scaler = self.data_feature.get('scaler') self.adj_mx = self.data_feature.get('adj_mx', 1) # self.num_nodes = self.data_feature.get('num_nodes', 1) self.feature_dim = self.data_feature.get('feature_dim', 1) self.output_dim = self.data_feature.get('output_dim', 1) # self.len_row = self.data_feature.get('len_row', 1) # self.len_column = self.data_feature.get('len_column', 1) self._logger = getLogger() self.device = config.get('device', torch.device('cpu')) self.embed_dim = config.get('embed_dim', 64) self.num_layers = config.get('num_layers', 3) self.num_heads = config.get('num_heads', 2) self.TG_per_day = config.get('TG_in_one_day', 288) # number of time intevals per day self.forward_expansion = config.get('forward_expansion', 4) self.dropout_rate = config.get('dropout_rate', 0) self.input_window = config.get('input_window', 1) self.output_window = config.get('output_window', 1) self.conv1 = nn.Conv2d(self.feature_dim, self.embed_dim, 1) self.transformer = Transformer( self.adj_mx, embed_dim=self.embed_dim, num_layers=self.num_layers, num_heads=self.num_heads, TG_per_day=self.TG_per_day, forward_expansion=self.forward_expansion, dropout_rate=self.dropout_rate, device=self.device) self.conv2 = nn.Conv2d(self.input_window, self.output_window, 1) self.conv3 = nn.Conv2d(self.embed_dim, self.output_dim, 1) self.act_layer = nn.ReLU() def 
forward(self, batch): inputs = batch['X'] inputs = inputs.permute(0, 3, 2, 1) input_transformer = self.conv1(inputs) input_transformer = input_transformer.permute(0, 2, 3, 1) output_transformer = self.transformer(input_transformer) output_transformer = output_transformer.permute(0, 2, 1, 3) out = self.act_layer(self.conv2(output_transformer)) out = out.permute(0, 3, 2, 1) out = self.conv3(out) out = out.permute(0, 3, 2, 1) return out def calculate_loss(self, batch): y_true = batch['y'] y_predicted = self.predict(batch) y_true = self._scaler.inverse_transform(y_true[..., :self.output_dim]) y_predicted = self._scaler.inverse_transform(y_predicted[..., :self.output_dim]) return loss.masked_mae_torch(y_predicted, y_true) def predict(self, batch): return self.forward(batch)
39.777778
132
0.646686
73cf094cf77e18c95fada7abbb805a0feed41fec
526
py
Python
auto_pilot/common/registrable.py
farrellsc/zAutoPilot
652d93690237dcb21c3cbdbdad95f917b7fec6e3
[ "MIT" ]
1
2018-03-05T08:27:58.000Z
2018-03-05T08:27:58.000Z
auto_pilot/common/registrable.py
farrellsc/zAutoPilot
652d93690237dcb21c3cbdbdad95f917b7fec6e3
[ "MIT" ]
null
null
null
auto_pilot/common/registrable.py
farrellsc/zAutoPilot
652d93690237dcb21c3cbdbdad95f917b7fec6e3
[ "MIT" ]
null
null
null
from typing import Callable, TypeVar, List

T = TypeVar('T')


class Registrable(object):
    reg_list = dict()

    @classmethod
    def register(cls, class_name: str) -> Callable:
        def register_inner(class_type: T) -> None:
            cls.reg_list[class_name] = class_type
        return register_inner

    @classmethod
    def list_available(cls) -> List[str]:
        return list(cls.reg_list.keys())

    @classmethod
    def by_name(cls, class_name: str) -> T:
        return cls.reg_list.get(class_name, None)
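# --- Illustrative usage sketch (not part of the original module; the names Planner and
# --- GreedyPlanner below are hypothetical and only show how the register() decorator,
# --- list_available() and by_name() fit together):
#
#   class Planner(Registrable):
#       pass
#
#   @Planner.register("greedy")
#   class GreedyPlanner(Planner):
#       pass
#
#   Planner.list_available()   # -> ["greedy"]
#   Planner.by_name("greedy")  # -> GreedyPlanner
#
# Note: register_inner returns None, so the decorated module-level name is rebound to
# None; the registered class stays retrievable through by_name().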
23.909091
51
0.65019
73cf1b339ffadfcb0c74ad1084fefba313a602cd
1,816
py
Python
aws_lambda_powertools/utilities/data_classes/common.py
jamesls/aws-lambda-powertools-python
52314e6d3b002d2d8bbacf3d1348e161b8e727b2
[ "MIT-0" ]
null
null
null
aws_lambda_powertools/utilities/data_classes/common.py
jamesls/aws-lambda-powertools-python
52314e6d3b002d2d8bbacf3d1348e161b8e727b2
[ "MIT-0" ]
null
null
null
aws_lambda_powertools/utilities/data_classes/common.py
jamesls/aws-lambda-powertools-python
52314e6d3b002d2d8bbacf3d1348e161b8e727b2
[ "MIT-0" ]
null
null
null
from typing import Any, Dict, Optional


class DictWrapper:
    """Provides a single read only access to a wrapper dict"""

    def __init__(self, data: Dict[str, Any]):
        self._data = data

    def __getitem__(self, key: str) -> Any:
        return self._data[key]

    def get(self, key: str) -> Optional[Any]:
        return self._data.get(key)


class BaseProxyEvent(DictWrapper):
    @property
    def headers(self) -> Dict[str, str]:
        return self["headers"]

    @property
    def query_string_parameters(self) -> Optional[Dict[str, str]]:
        return self.get("queryStringParameters")

    @property
    def is_base64_encoded(self) -> bool:
        return self.get("isBase64Encoded")

    @property
    def body(self) -> Optional[str]:
        return self.get("body")

    def get_query_string_value(self, name: str, default_value: Optional[str] = None) -> Optional[str]:
        """Get query string value by name

        Parameters
        ----------
        name: str
            Query string parameter name
        default_value: str, optional
            Default value if no value was found by name

        Returns
        -------
        str, optional
            Query string parameter value
        """
        params = self.query_string_parameters
        return default_value if params is None else params.get(name, default_value)

    def get_header_value(self, name: str, default_value: Optional[str] = None) -> Optional[str]:
        """Get header value by name

        Parameters
        ----------
        name: str
            Header name
        default_value: str, optional
            Default value if no value was found by name

        Returns
        -------
        str, optional
            Header value
        """
        return self.headers.get(name, default_value)
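# --- Illustrative usage sketch (not part of the original module; the event dict below is
# --- a made-up minimal proxy-style event, only to show how the accessors behave):
#
#   event = BaseProxyEvent({
#       "headers": {"X-Request-Id": "abc123"},
#       "queryStringParameters": {"name": "powertools"},
#       "isBase64Encoded": False,
#       "body": None,
#   })
#   event.get_query_string_value("name")            # -> "powertools"
#   event.get_query_string_value("missing", "n/a")  # -> "n/a"
#   event.get_header_value("X-Request-Id")          # -> "abc123"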
27.515152
102
0.597467
73cf1e8a271674679de16378ea561e7f3dcc44fd
2,883
py
Python
app/inbound_sms/rest.py
cds-snc/notifier-api
90b385ec49efbaee7e607516fc7d9f08991af813
[ "MIT" ]
41
2019-11-28T16:58:41.000Z
2022-01-28T21:11:16.000Z
app/inbound_sms/rest.py
cds-snc/notification-api
b1c1064f291eb860b494c3fa65ac256ad70bf47c
[ "MIT" ]
1,083
2019-07-08T12:57:24.000Z
2022-03-08T18:53:40.000Z
app/inbound_sms/rest.py
cds-snc/notifier-api
90b385ec49efbaee7e607516fc7d9f08991af813
[ "MIT" ]
9
2020-01-24T19:56:43.000Z
2022-01-27T21:36:53.000Z
from flask import Blueprint, jsonify, request
from notifications_utils.recipients import try_validate_and_format_phone_number

from app.dao.inbound_sms_dao import (
    dao_count_inbound_sms_for_service,
    dao_get_inbound_sms_by_id,
    dao_get_inbound_sms_for_service,
    dao_get_paginated_most_recent_inbound_sms_by_user_number_for_service,
)
from app.dao.service_data_retention_dao import (
    fetch_service_data_retention_by_notification_type,
)
from app.errors import register_errors
from app.inbound_sms.inbound_sms_schemas import get_inbound_sms_for_service_schema
from app.schema_validation import validate

inbound_sms = Blueprint("inbound_sms", __name__, url_prefix="/service/<uuid:service_id>/inbound-sms")

register_errors(inbound_sms)


@inbound_sms.route("", methods=["POST"])
def post_inbound_sms_for_service(service_id):
    form = validate(request.get_json(), get_inbound_sms_for_service_schema)
    user_number = form.get("phone_number")

    if user_number:
        # we use this to normalise to an international phone number - but this may fail if it's an alphanumeric
        user_number = try_validate_and_format_phone_number(user_number, international=True)

    inbound_data_retention = fetch_service_data_retention_by_notification_type(service_id, "sms")
    limit_days = inbound_data_retention.days_of_retention if inbound_data_retention else 7

    results = dao_get_inbound_sms_for_service(service_id, user_number=user_number, limit_days=limit_days)
    return jsonify(data=[row.serialize() for row in results])


@inbound_sms.route("/most-recent", methods=["GET"])
def get_most_recent_inbound_sms_for_service(service_id):
    # used on the service inbox page
    page = request.args.get("page", 1)

    inbound_data_retention = fetch_service_data_retention_by_notification_type(service_id, "sms")
    limit_days = inbound_data_retention.days_of_retention if inbound_data_retention else 7

    # get most recent message for each user for service
    results = dao_get_paginated_most_recent_inbound_sms_by_user_number_for_service(service_id, int(page), limit_days)

    return jsonify(data=[row.serialize() for row in results.items], has_next=results.has_next)


@inbound_sms.route("/summary")
def get_inbound_sms_summary_for_service(service_id):
    # this is for the dashboard, so always limit to 7 days, even if they have a longer data retention
    count = dao_count_inbound_sms_for_service(service_id, limit_days=7)
    most_recent = dao_get_inbound_sms_for_service(service_id, limit=1)

    return jsonify(
        count=count,
        most_recent=most_recent[0].created_at.isoformat() if most_recent else None,
    )


@inbound_sms.route("/<uuid:inbound_sms_id>", methods=["GET"])
def get_inbound_by_id(service_id, inbound_sms_id):
    message = dao_get_inbound_sms_by_id(service_id, inbound_sms_id)
    return jsonify(message.serialize()), 200
42.397059
117
0.800902
73cf4f939426e3f835aa455e8b86707fc64f78d9
108
py
Python
src/typeDefs/hvdcPole.py
nagasudhirpulla/wrldc_codebook
8fbc795074e16e2012b29ae875b99aa721a7f021
[ "MIT" ]
null
null
null
src/typeDefs/hvdcPole.py
nagasudhirpulla/wrldc_codebook
8fbc795074e16e2012b29ae875b99aa721a7f021
[ "MIT" ]
21
2021-01-08T18:03:32.000Z
2021-02-02T16:17:34.000Z
src/typeDefs/hvdcPole.py
nagasudhirpulla/wrldc_codebook
8fbc795074e16e2012b29ae875b99aa721a7f021
[ "MIT" ]
null
null
null
from src.typeDefs.element import IElement


class IHvdcPole(IElement):
    substation: str
    voltage: str
15.428571
41
0.75
73cf87956b80ea5e54f069d9067bd15df893251f
3,922
py
Python
SCRAPE/Lib/site-packages/twisted/internet/test/test_sigchld.py
Chinmoy-Prasad-Dutta/scrapy_scraper
09f6abfc3bcf10ee28f486d83b450c89a07e066e
[ "MIT" ]
4,612
2015-01-01T12:57:23.000Z
2022-03-30T01:08:23.000Z
SCRAPE/Lib/site-packages/twisted/internet/test/test_sigchld.py
Chinmoy-Prasad-Dutta/scrapy_scraper
09f6abfc3bcf10ee28f486d83b450c89a07e066e
[ "MIT" ]
1,243
2015-01-23T17:23:59.000Z
2022-03-28T13:46:17.000Z
SCRAPE/Lib/site-packages/twisted/internet/test/test_sigchld.py
Chinmoy-Prasad-Dutta/scrapy_scraper
09f6abfc3bcf10ee28f486d83b450c89a07e066e
[ "MIT" ]
1,236
2015-01-13T14:41:26.000Z
2022-03-17T07:12:36.000Z
# Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Tests for L{twisted.internet._sigchld}, an alternate, superior SIGCHLD monitoring API. """ import errno import os import signal from twisted.python.log import msg from twisted.python.runtime import platformType from twisted.trial.unittest import SynchronousTestCase if platformType == "posix": from twisted.internet._signals import installHandler, isDefaultHandler from twisted.internet.fdesc import setNonBlocking else: skip = "These tests can only run on POSIX platforms." class SetWakeupSIGCHLDTests(SynchronousTestCase): """ Tests for the L{signal.set_wakeup_fd} implementation of the L{installHandler} and L{isDefaultHandler} APIs. """ def pipe(self): """ Create a non-blocking pipe which will be closed after the currently running test. """ read, write = os.pipe() self.addCleanup(os.close, read) self.addCleanup(os.close, write) setNonBlocking(read) setNonBlocking(write) return read, write def setUp(self): """ Save the current SIGCHLD handler as reported by L{signal.signal} and the current file descriptor registered with L{installHandler}. """ handler = signal.getsignal(signal.SIGCHLD) if handler != signal.SIG_DFL: self.signalModuleHandler = handler signal.signal(signal.SIGCHLD, signal.SIG_DFL) else: self.signalModuleHandler = None self.oldFD = installHandler(-1) if self.signalModuleHandler is not None and self.oldFD != -1: msg( "Previous test didn't clean up after its SIGCHLD setup: %r %r" % (self.signalModuleHandler, self.oldFD) ) def tearDown(self): """ Restore whatever signal handler was present when setUp ran. """ # If tests set up any kind of handlers, clear them out. installHandler(-1) signal.signal(signal.SIGCHLD, signal.SIG_DFL) # Now restore whatever the setup was before the test ran. if self.signalModuleHandler is not None: signal.signal(signal.SIGCHLD, self.signalModuleHandler) elif self.oldFD != -1: installHandler(self.oldFD) def test_isDefaultHandler(self): """ L{isDefaultHandler} returns true if the SIGCHLD handler is SIG_DFL, false otherwise. """ self.assertTrue(isDefaultHandler()) signal.signal(signal.SIGCHLD, signal.SIG_IGN) self.assertFalse(isDefaultHandler()) signal.signal(signal.SIGCHLD, signal.SIG_DFL) self.assertTrue(isDefaultHandler()) signal.signal(signal.SIGCHLD, lambda *args: None) self.assertFalse(isDefaultHandler()) def test_returnOldFD(self): """ L{installHandler} returns the previously registered file descriptor. """ read, write = self.pipe() oldFD = installHandler(write) self.assertEqual(installHandler(oldFD), write) def test_uninstallHandler(self): """ C{installHandler(-1)} removes the SIGCHLD handler completely. """ read, write = self.pipe() self.assertTrue(isDefaultHandler()) installHandler(write) self.assertFalse(isDefaultHandler()) installHandler(-1) self.assertTrue(isDefaultHandler()) def test_installHandler(self): """ The file descriptor passed to L{installHandler} has a byte written to it when SIGCHLD is delivered to the process. """ read, write = self.pipe() installHandler(write) exc = self.assertRaises(OSError, os.read, read, 1) self.assertEqual(exc.errno, errno.EAGAIN) os.kill(os.getpid(), signal.SIGCHLD) self.assertEqual(len(os.read(read, 5)), 1)
31.886179
78
0.644824
73cf93ec2c2d922992ef9c7b5415799a5765c969
1,260
py
Python
netbox_prometheus_sd/tests/test_api.py
feuri/netbox-plugin-prometheus-sd
0a294748541662cf0ce1dd87e2bc2462476312a4
[ "MIT" ]
null
null
null
netbox_prometheus_sd/tests/test_api.py
feuri/netbox-plugin-prometheus-sd
0a294748541662cf0ce1dd87e2bc2462476312a4
[ "MIT" ]
null
null
null
netbox_prometheus_sd/tests/test_api.py
feuri/netbox-plugin-prometheus-sd
0a294748541662cf0ce1dd87e2bc2462476312a4
[ "MIT" ]
null
null
null
from django.test import TestCase
from rest_framework.test import APIClient
from rest_framework import status

from tenancy.models import Tenant
from virtualization.models import Cluster, ClusterType
from dcim.models import Site

from . import utils


class AppMetricEndpointTests(TestCase):
    """Test cases for ensuring API endpoint is working properly."""

    def setUp(self):
        # Base URL.
        self.url = "/api/plugins/prometheus-sd/targets/"
        self.client = APIClient()

        # Seed Data
        tenant = Tenant.objects.create(name="Starfleet", slug="starfleet")
        site = Site.objects.create(name="DS9", slug="ds9")
        cluster_type = ClusterType.objects.create(name="Datacenter", slug="datacenter")
        cluster = Cluster.objects.create(
            name="Default Cluster", type=cluster_type, site=site
        )

        utils.create_vm("Instance-01", "10.10.10.10/24", cluster=cluster, tenant=tenant)
        utils.create_vm("Instance-02", "10.10.10.11/24", cluster=cluster, tenant=tenant)

    def test_endpoint(self):
        """Ensure the endpoint is working properly and is not protected by authentication."""
        resp = self.client.get(self.url)
        self.assertEqual(resp.status_code, status.HTTP_200_OK)
35
93
0.692857
73cf975bbe0643bf6eddc3a4daf659c4a82450d8
4,850
py
Python
unit_tests/glhe/profiles/test_external_load.py
stianchris/GLHE
80c3eecca81ffd50d5077f87027c9441292452f5
[ "MIT" ]
2
2018-11-06T08:04:04.000Z
2020-10-09T14:52:36.000Z
unit_tests/glhe/profiles/test_external_load.py
stianchris/GLHE
80c3eecca81ffd50d5077f87027c9441292452f5
[ "MIT" ]
68
2018-03-27T01:43:22.000Z
2019-09-09T12:05:44.000Z
unit_tests/glhe/profiles/test_external_load.py
mitchute/GLHE
80c3eecca81ffd50d5077f87027c9441292452f5
[ "MIT" ]
4
2018-05-24T03:02:44.000Z
2021-08-16T13:54:09.000Z
import os import tempfile import unittest from glhe.input_processor.input_processor import InputProcessor from glhe.interface.response import SimulationResponse from glhe.output_processor.output_processor import OutputProcessor from glhe.profiles.external_load import ExternalLoad from glhe.utilities.functions import write_json class TestExternalLoad(unittest.TestCase): @staticmethod def add_instance(path): d = {'fluid': {'fluid-type': 'water'}, 'load-profile': [{'load-profile-type': 'external', 'name': 'my name', 'path': path}]} temp_dir = tempfile.mkdtemp() temp_file = os.path.join(temp_dir, 'temp.json') write_json(temp_file, d) ip = InputProcessor(temp_file) op = OutputProcessor(temp_dir, 'out.csv') return ExternalLoad(d['load-profile'][0], ip, op) def test_get_value(self): dir_name = os.path.dirname(__file__) relative_path = '../../../glhe/profiles/external_data/GSHP-GLHE_USA_IL_Chicago-OHare.Intl.AP.725300_TMY3.csv' path = os.path.normpath(os.path.join(dir_name, relative_path)) tst = self.add_instance(path) self.assertEqual(tst.get_value(0), 0) self.assertEqual(tst.get_value(10 * 3600), -4980.600013) self.assertEqual(tst.get_value(8759 * 3600), 0) def test_start_end_points(self): temp_dir = tempfile.mkdtemp() temp_data = os.path.join(temp_dir, 'temp_data.csv') with open(temp_data, 'w') as f: f.write('Date/Time, Meas. Total Power [W], mdot [kg/s]\n' '2018-01-01 00:00:00, 1, 1\n' '2018-01-01 01:00:00, 2, 2\n' '2018-01-01 02:00:00, 3, 3\n' '2018-01-01 03:00:00, 4, 4\n') tst = self.add_instance(temp_data) self.assertEqual(tst.get_value(0.0), 1.0) self.assertEqual(tst.get_value(1.0 * 3600), 2.0) self.assertEqual(tst.get_value(1.5 * 3600), 2.5) self.assertEqual(tst.get_value(2.0 * 3600), 3.0) self.assertEqual(tst.get_value(3.0 * 3600), 4.0) def test_repeated_points(self): temp_dir = tempfile.mkdtemp() temp_data = os.path.join(temp_dir, 'temp_data.csv') with open(temp_data, 'w') as f: f.write('Date/Time, Meas. Total Power [W], mdot [kg/s]\n' '2018-01-01 00:00:00, 1, 1\n' '2018-01-01 01:00:00, 2, 2\n' '2018-01-01 02:00:00, 3, 3\n' '2018-01-01 03:00:00, 4, 4\n') tst = self.add_instance(temp_data) self.assertEqual(tst.get_value(4.0 * 3600), 1.0) self.assertEqual(tst.get_value(4.5 * 3600), 1.5) self.assertEqual(tst.get_value(5.0 * 3600), 2.0) self.assertEqual(tst.get_value(6.0 * 3600), 3.0) self.assertEqual(tst.get_value(7.0 * 3600), 4.0) self.assertEqual(tst.get_value(8.0 * 3600), 1.0) self.assertEqual(tst.get_value(9.0 * 3600), 2.0) self.assertEqual(tst.get_value(10.0 * 3600), 3.0) self.assertEqual(tst.get_value(11.0 * 3600), 4.0) self.assertEqual(tst.get_value(12.0 * 3600), 1.0) def test_simulate_time_step(self): temp_dir = tempfile.mkdtemp() temp_data = os.path.join(temp_dir, 'temp_data.csv') with open(temp_data, 'w') as f: f.write('Date/Time, Meas. 
Total Power [W], mdot [kg/s]\n' '2018-01-01 00:00:00, 1, 1\n' '2018-01-01 01:00:00, 2, 2\n' '2018-01-01 02:00:00, 3, 3\n' '2018-01-01 03:00:00, 4, 4\n') tst = self.add_instance(temp_data) res = tst.simulate_time_step(SimulationResponse(0, 10, 0, 10)) self.assertEqual(res.time, 0) self.assertEqual(res.time_step, 10) self.assertEqual(res.flow_rate, 0) self.assertAlmostEqual(res.temperature, 10, delta=0.1) res = tst.simulate_time_step(SimulationResponse(0, 10, 0.00001, 10)) self.assertEqual(res.time, 0) self.assertEqual(res.time_step, 10) self.assertEqual(res.flow_rate, 0.00001) self.assertAlmostEqual(res.temperature, 33.9, delta=0.1) def test_report_outputs(self): temp_dir = tempfile.mkdtemp() temp_data = os.path.join(temp_dir, 'temp_data.csv') with open(temp_data, 'w') as f: f.write('Date/Time, Meas. Total Power [W], mdot [kg/s]\n' '2018-01-01 00:00:00, 1, 1\n' '2018-01-01 01:00:00, 2, 2\n' '2018-01-01 02:00:00, 3, 3\n' '2018-01-01 03:00:00, 4, 4\n') tst = self.add_instance(temp_data) d = tst.report_outputs() self.assertTrue('ExternalLoad:MY NAME:Outlet Temp. [C]' in d.keys()) self.assertTrue('ExternalLoad:MY NAME:Heat Rate [W]' in d.keys())
41.452991
117
0.595464
73cf98f8778484a97d543cc193e820ea1cbeb7ef
2,192
py
Python
setup.py
DedeKite/wxPlotLab
808d457aeb897ceb37535bcd11d15b65a0a14cd1
[ "MIT" ]
6
2016-03-21T18:44:23.000Z
2021-05-16T19:07:02.000Z
setup.py
DedeKite/wxPlotLab
808d457aeb897ceb37535bcd11d15b65a0a14cd1
[ "MIT" ]
1
2018-05-15T14:47:03.000Z
2018-05-15T14:47:03.000Z
setup.py
astyl/mplotlab
808d457aeb897ceb37535bcd11d15b65a0a14cd1
[ "MIT" ]
1
2016-01-29T12:38:20.000Z
2016-01-29T12:38:20.000Z
#!/usr/bin/env python

from setuptools import setup

version = "0.2"

long_desc = """
MPLOTLAB: Interactive Matplotlib Application using wxPython
==================================================================

.. _wxPython: http://www.wxpython.org/
.. _matplotlib: http://matplotlib.sourceforge.net/
.. _wxmplot: https://github.com/newville/wxmplot/

Mplotlab is an interactive plotting application using `matplotlib`_ and `wxPython`_.
It provides an API that intends to help users to build their own application by
leaving them to focus on mastering their data.

Mplotlab is particularly meant to be suitable for **streaming data, real-time
processing and interactive visualization**. It uses an enhanced graphic animation
of matplotlib spoken for interactive application.

It provides nice features such as tweakable data filters and source handlers
(*sockets, ...*).

Mplotlab engine is based on a figure factory that interprets a mplotlab model
parsable in an .xml file. Thus, it allows GUI users to edit, save and load figures
without typing any line of code.

Mplotlab is closely modelled on the excellent project `wxmplot`_ developped by Matt Newville

.. image:: https://raw.githubusercontent.com/astyl/mplotlab/master/doc/images/slide_dynamic_example.gif
"""

setup(name = 'mplotlab',
      version = version,
      author = 'Andre ASTYL',
      author_email = 'andreastyl@gmail.com',
      download_url = 'http://github.com/astyl/mplotlab/',
      requires = ('wx', 'numpy', 'matplotlib'),
      license = 'MIT License',
      description = 'Interactive Matplotlib Application using wxPython',
      long_description = long_desc,
      platforms = ('Windows', 'Linux', 'Mac OS X'),
      classifiers=['Intended Audience :: Science/Research',
                   'Operating System :: OS Independent',
                   'Programming Language :: Python',
                   'Topic :: Scientific/Engineering',
                   'Topic :: Scientific/Engineering :: Visualization'],
      packages = ['mplotlab', 'mplotlab.models', 'mplotlab.mpl_builders',
                  'mplotlab.graphics', 'mplotlab.utils'],
      )
42.980392
127
0.663777
73cfadcc1cb0870e6b97cd6d0e807fffe2be2e75
3,230
py
Python
test.py
bailingnan/PyTorch-Template
c81b424cdce2a0093425aed94c61a2679641310e
[ "MIT" ]
8
2020-08-05T13:26:15.000Z
2022-02-02T15:36:25.000Z
test.py
bailingnan/Pytorch-Template
c81b424cdce2a0093425aed94c61a2679641310e
[ "MIT" ]
null
null
null
test.py
bailingnan/Pytorch-Template
c81b424cdce2a0093425aed94c61a2679641310e
[ "MIT" ]
10
2020-08-05T13:23:03.000Z
2022-03-12T07:24:34.000Z
""" tricks: 1.torch-optimizer:实现了最新的一些优化器. 2.numba:import numba as nb,纯python或numpy加速,加@nb.njit或@nb.jit(nopython=True) 3.swifter:df.apply()→·df.swifter.apply(),加速pandas 4.cupy:1000万以上数据更快 5.modin:import modin.pandas as mdpd,用mdpd代替pd即可,加速pandas,加载数据和查询数据更快,统计方法pandas更快 """ import os import sys import argparse import time import random import wandb from tqdm import tqdm import numpy as np import numba as nb import pandas as pd import torch import hiddenlayer as hl import torch.nn as nn import torch.optim as optim from torch.utils import data from torch.utils.tensorboard import SummaryWriter from torchsummary import summary from models.module import Model from data.custom_dataset import MyDataset def test(): last = time.time() torch.cuda.empty_cache() test_losses = [] model.eval() with torch.no_grad(): for batch_idx, (inputs, targets) in enumerate(test_data_loader): inputs, targets = inputs.to(device), targets.to(device) outputs = model(inputs) loss = criterion(outputs, targets) test_losses.append(loss.item()) val_loss = np.mean(np.mean(test_losses)) if __name__ == "__main__": # #取每个 GPU 的剩余显存数,并存放到 tmp 文件中 # os.system("nvidia-smi -q -d Memory |grep -A4 GPU|grep Free >tmp") # memory_gpu = [int(x.split()[2]) for x in open("tmp", "r").readlines()] # torch.cuda.set_device(np.argmax(memory_gpu)) # os.system("rm tmp") # 删除临时生成的 tmp 文件 os.environ["CUDA_VISIBLE_DEVICES"] = "0,1" ##命令行执行 # CUDA_VISIBLE_DEVICES=0,1 python train.py # os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # os.environ["CUDA_VISIBLE_DEVICES"] = "0,1" torch.backends.cudnn.benchmark = True torch.backends.cudnn.deterministic = True # argparse for additional flags for experiment parser = argparse.ArgumentParser(description="Train a network for ...") parser.add_argument("--seed", type=int, default=0) parser.add_argument("--epochs", type=int, default=1000) parser.add_argument("--resume", type=bool, default=False) parser.add_argument("--path_to_checkpoint", type=str, default="../checkpoint") opt = parser.parse_args() np.random.seed(opt.seed) torch.manual_seed(opt.seed) torch.cuda.manual_seed_all(opt.seed) wandb.init(project="my-project") wandb.config.xxx = opt.xxx # 准备数据 test_dataset = MyDataset("test_dataset_path") # 定义的数据集 test_data_loader = data.DataLoader( test_dataset, batch_size=128, shuffle=True, drop_last=True ) device = torch.device("cuda" if torch.cuda.is_available() else "cpu") # device_ids = [0, 1] model = Model(opt) ckpt = torch.load( opt.path_to_checkpoint + "lowest_val_loss_model.pt" ) # custom method for loading last checkpoint model.load_state_dict(ckpt["model_state_dict"]) model.to(device) # 并行运算,如果需要的话 # model = nn.DataParallel(model, device_ids=device_ids).to(device) # summary(model, input_size=(channels, H, W)) # hl.build_graph(model, torch.zeros([1, 2, 3])) # loss function, 比如交叉熵 criterion = nn.CrossEntropyLoss() criterion.to(device) wandb.watch(model, criterion) writer = SummaryWriter("runs/models") test()
31.666667
82
0.695975
73cfb4eea5645450f9eb349f059cd00b81769144
7,340
py
Python
apps/iiif/annotations/tests/tests.py
ecds/readux
4eac8b48efef8126f4f2be28b5eb943c85a89c2e
[ "Apache-2.0" ]
18
2017-06-12T09:58:02.000Z
2021-10-01T11:14:34.000Z
apps/iiif/annotations/tests/tests.py
ecds/readux
4eac8b48efef8126f4f2be28b5eb943c85a89c2e
[ "Apache-2.0" ]
276
2019-04-26T20:13:01.000Z
2022-03-31T10:26:28.000Z
apps/iiif/annotations/tests/tests.py
ecds/readux
4eac8b48efef8126f4f2be28b5eb943c85a89c2e
[ "Apache-2.0" ]
7
2018-03-13T23:44:26.000Z
2021-09-15T17:54:55.000Z
# pylint: disable = missing-function-docstring, invalid-name, line-too-long """Test cases for :class:`apps.iiif.annotations`.""" from django.test import TestCase, Client from django.test import RequestFactory from django.conf import settings from django.core.exceptions import ValidationError from django.core.management import call_command from django.urls import reverse from django.core.serializers import serialize from django.contrib.auth import get_user_model from ..views import AnnotationsForPage from ..models import Annotation from ..apps import AnnotationsConfig from ...canvases.models import Canvas from ...manifests.models import Manifest from bs4 import BeautifulSoup from io import StringIO import warnings import json USER = get_user_model() class AnnotationTests(TestCase): """Annotation test cases.""" fixtures = ['kollections.json', 'manifests.json', 'canvases.json', 'annotations.json'] def setUp(self): self.factory = RequestFactory() self.client = Client() self.view = AnnotationsForPage.as_view() self.volume = Manifest.objects.get(pid='readux:st7r6') self.canvas = Canvas.objects.get(pid='fedora:emory:5622') self.annotations = Annotation.objects.filter(canvas=self.canvas) def test_app_config(self): # pylint: disable = no-self-use assert AnnotationsConfig.verbose_name == 'Annotations' assert AnnotationsConfig.name == 'apps.iiif.annotations' def test_get_annotations_for_page(self): kwargs = {'vol': self.volume.pid, 'page': self.canvas.pid, 'version': 'v2'} url = reverse('page_annotations', kwargs=kwargs) response = self.client.get(url) annotations = json.loads(response.content.decode('UTF-8-sig')) assert len(annotations) == self.annotations.count() assert response.status_code == 200 def test_order(self): a = [] for o in self.annotations.values('order'): a.append(o['order']) b = a.copy() a.sort() assert a == b def test_ocr_span(self): ocr = Annotation() ocr.oa_annotation = {"annotatedBy": {"name": "ocr"}} ocr.x = 100 ocr.y = 10 ocr.w = 100 ocr.h = 10 ocr.content = "Obviously you're not a golfer" ocr.save() assert ocr.content == "<span id='{pk}' class='anno-{pk}' data-letter-spacing='0.003232758620689655'>Obviously you're not a golfer</span>".format(pk=ocr.pk) assert ocr.owner == USER.objects.get(username='ocr') def test_default_content(self): ocr = Annotation() ocr.oa_annotation = {"annotatedBy": {"name": "ocr"}} ocr.x = 100 ocr.y = 10 ocr.w = 100 ocr.h = 10 ocr.format = Annotation.HTML ocr.save() assert '> </span>' in ocr.content def test_annotation_string(self): anno = Annotation.objects.all().first() assert anno.__str__() == str(anno.pk) def test_annotation_choices(self): anno = Annotation() anno.format = Annotation.HTML assert anno.format == 'text/html' anno.format = Annotation.PLAIN assert anno.format == 'text/plain' anno.format = Annotation.OCR assert anno.format == 'cnt:ContentAsText' anno.format = Annotation.TEXT assert anno.format == 'dctypes:Text' anno.format = Annotation.COMMENTING assert anno.format == 'oa:commenting' anno.format = Annotation.PAINTING assert anno.format == 'sc:painting' assert Annotation.FORMAT_CHOICES == (('text/plain', 'plain text'), ('text/html', 'html')) assert Annotation.TYPE_CHOICES == (('cnt:ContentAsText', 'ocr'), ('dctypes:Text', 'text')) assert Annotation.MOTIVATION_CHOICES == (('oa:commenting', 'commenting'), ('sc:painting', 'painting')) def test_ocr_for_page(self): kwargs = {'vol': self.volume.pid, 'page': self.canvas.pid, 'version': 'v2'} url = reverse('ocr', kwargs=kwargs) response = self.client.get(url) annotations = 
json.loads(response.content.decode('UTF-8-sig'))['resources'] assert len(annotations) == self.canvas.annotation_set.filter(resource_type='cnt:ContentAsText', canvas=self.canvas).count() assert response.status_code == 200 def test_annotation_style(self): anno = Annotation.objects.all().first() assert anno.style == ".anno-{c}: {{ height: {h}px; width: {w}px; font-size: {f}px; letter-spacing: 15.125px;}}".format(c=(anno.pk), h=str(anno.h), w=str(anno.w), f=str(anno.h / 1.6)) def test_annotation_style_serialization(self): kwargs = {'vol': self.volume.pid, 'page': self.canvas.pid, 'version': 'v2'} url = reverse('ocr', kwargs=kwargs) response = self.client.get(url) serialized_anno = json.loads(response.content.decode('UTF-8-sig'))['resources'][0] assert serialized_anno['stylesheet']['type'] == 'CssStylesheet' assert serialized_anno['stylesheet']['value'].startswith(".anno-{id}".format(id=serialized_anno['@id'])) def test_serialize_list_of_annotations(self): data = json.loads(serialize('annotation_list', [self.canvas], is_list=True, owners=USER.objects.all())) assert data[0]['@type'] == 'sc:AnnotationList' assert isinstance(data, list) def test_ocr_char_with_zero_width(self): ocr = Annotation() ocr.oa_annotation = {"annotatedBy": {"name": "ocr"}} ocr.x = 100 ocr.y = 10 ocr.w = 0 ocr.h = 10 ocr.content = 'nice marmot' ocr.format = Annotation.HTML ocr.save() assert ocr.content == "<span id='{a}' class='anno-{a}' data-letter-spacing='0'>nice marmot</span>".format(a=ocr.id) assert ocr.style == ".anno-{a}: {{ height: 10px; width: 0px; font-size: 6.25px; letter-spacing: 0px;}}".format(a=ocr.id) assert ocr.format == 'text/html' def test_ocr_char_with_zero_height(self): ocr = Annotation() ocr.oa_annotation = {"annotatedBy": {"name": "ocr"}} ocr.x = 100 ocr.y = 10 ocr.w = 10 ocr.h = 0 ocr.content = 'nice marmot' ocr.format = Annotation.HTML ocr.save() assert ocr.content == "<span id='{a}' class='anno-{a}' data-letter-spacing='0.09090909090909091'>nice marmot</span>".format(a=ocr.id) assert ocr.style == ".anno-{a}: {{ height: 0px; width: 10px; font-size: 0.0px; letter-spacing: 0.9090909090909091px;}}".format(a=ocr.id) assert ocr.format == 'text/html' def test_command_output_remove_empty_ocr(self): anno_count = self.annotations.count() # anno = self.annotations[1] # anno.content = ' ' # anno.save() out = StringIO() call_command('remove_empty_ocr', stdout=out) assert 'Empty OCR annotations have been removed' in out.getvalue() # assert anno_count > self.annotations.count() def test_resaving_ocr_annotation(self): # Span should not change anno = Annotation.objects.all().first() orig_span = anno.content anno.save() anno.refresh_from_db() assert orig_span == anno.content assert anno.content.startswith('<span') assert BeautifulSoup(anno.content, 'html.parser').span.span is None
42.674419
190
0.63842
73cfb92e43b95a077a59a87170759fe028fa18d8
2,926
py
Python
src/python/pants/core_tasks/login.py
mpopenko-exos/pants
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
[ "Apache-2.0" ]
null
null
null
src/python/pants/core_tasks/login.py
mpopenko-exos/pants
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
[ "Apache-2.0" ]
1
2018-09-04T17:37:34.000Z
2018-09-04T19:42:58.000Z
src/python/pants/core_tasks/login.py
mpopenko-exos/pants
47d27037c8b13291fc9023e56ddd1b1defdf1b8e
[ "Apache-2.0" ]
null
null
null
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

import getpass

from colors import cyan, green, red

from pants.auth.basic_auth import BasicAuth, BasicAuthCreds, Challenged
from pants.base.deprecated import deprecated_conditional
from pants.base.exceptions import TaskError
from pants.task.console_task import ConsoleTask


class Login(ConsoleTask):
    """Task to auth against some identity provider.

    :API: public
    """

    @classmethod
    def subsystem_dependencies(cls):
        return super().subsystem_dependencies() + (BasicAuth,)

    @classmethod
    def supports_passthru_args(cls):
        return True

    @classmethod
    def register_options(cls, register):
        super().register_options(register)
        register(
            '--to', type=str, fingerprint=True,
            help='Log in to the given provider from the `--basic-auth-providers` option. For '
                 'example, if you had defined in `--basic-auth-providers` that the provider `prod` '
                 'points to the URL `https://app.pantsbuild.org/auth`, then you '
                 'could here use the option `--login-to=prod` to login at '
                 '`https://app.pantsbuild.org/auth`.'
        )

    def console_output(self, targets):
        if targets:
            raise TaskError('The login task does not take any target arguments.')

        deprecated_conditional(
            lambda: self.get_passthru_args(),
            removal_version='1.26.0.dev1',
            entity_description='Using passthrough args with `./pants login`',
            hint_message="Instead of passing the provider through `--login-passthrough-args` or the "
                         "style `./pants login -- prod`, use the option `--login-to`, such as "
                         "`./pants login --to=prod`.",
        )

        # TODO: When we have other auth methods (e.g., OAuth2), select one by provider name.
        requested_providers = list(filter(None, [self.get_options().to] + self.get_passthru_args()))
        if len(requested_providers) != 1:
            raise TaskError('Must specify exactly one provider.')
        provider = requested_providers[0]
        try:
            BasicAuth.global_instance().authenticate(provider)
            return ['', 'Logged in successfully using .netrc credentials.']
        except Challenged as e:
            creds = self._ask_for_creds(provider, e.url, e.realm)
            BasicAuth.global_instance().authenticate(provider, creds=creds)
            return ['', 'Logged in successfully.']

    @staticmethod
    def _ask_for_creds(provider, url, realm):
        print(green('\nEnter credentials for:\n'))
        print('{} {}'.format(green('Provider:'), cyan(provider)))
        print('{} {}'.format(green('Realm: '), cyan(realm)))
        print('{} {}'.format(green('URL: '), cyan(url)))
        print(red('\nONLY ENTER YOUR CREDENTIALS IF YOU TRUST THIS SITE!\n'))
        username = input(green('Username: '))
        password = getpass.getpass(green('Password: '))
        return BasicAuthCreds(username, password)
38.5
96
0.681818
73cfc1daf45ca813bb0919832b119d48b3b4f6b8
6,924
py
Python
python_pb2/go/chromium/org/luci/tokenserver/api/oauth_token_grant_pb2.py
allaparthi/monorail
e18645fc1b952a5a6ff5f06e0c740d75f1904473
[ "BSD-3-Clause" ]
null
null
null
python_pb2/go/chromium/org/luci/tokenserver/api/oauth_token_grant_pb2.py
allaparthi/monorail
e18645fc1b952a5a6ff5f06e0c740d75f1904473
[ "BSD-3-Clause" ]
null
null
null
python_pb2/go/chromium/org/luci/tokenserver/api/oauth_token_grant_pb2.py
allaparthi/monorail
e18645fc1b952a5a6ff5f06e0c740d75f1904473
[ "BSD-3-Clause" ]
null
null
null
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: go.chromium.org/luci/tokenserver/api/oauth_token_grant.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='go.chromium.org/luci/tokenserver/api/oauth_token_grant.proto', package='tokenserver', syntax='proto3', serialized_options=None, serialized_pb=_b('\n<go.chromium.org/luci/tokenserver/api/oauth_token_grant.proto\x12\x0btokenserver\x1a\x1fgoogle/protobuf/timestamp.proto\"\xab\x01\n\x13OAuthTokenGrantBody\x12\x10\n\x08token_id\x18\x01 \x01(\x03\x12\x17\n\x0fservice_account\x18\x02 \x01(\t\x12\r\n\x05proxy\x18\x03 \x01(\t\x12\x10\n\x08\x65nd_user\x18\x04 \x01(\t\x12-\n\tissued_at\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x19\n\x11validity_duration\x18\x06 \x01(\x03\"W\n\x17OAuthTokenGrantEnvelope\x12\x12\n\ntoken_body\x18\x01 \x01(\x0c\x12\x0e\n\x06key_id\x18\x02 \x01(\t\x12\x18\n\x10pkcs1_sha256_sig\x18\x03 \x01(\x0c\x62\x06proto3') , dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) _OAUTHTOKENGRANTBODY = _descriptor.Descriptor( name='OAuthTokenGrantBody', full_name='tokenserver.OAuthTokenGrantBody', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='token_id', full_name='tokenserver.OAuthTokenGrantBody.token_id', index=0, number=1, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='service_account', full_name='tokenserver.OAuthTokenGrantBody.service_account', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='proxy', full_name='tokenserver.OAuthTokenGrantBody.proxy', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='end_user', full_name='tokenserver.OAuthTokenGrantBody.end_user', index=3, number=4, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='issued_at', full_name='tokenserver.OAuthTokenGrantBody.issued_at', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='validity_duration', full_name='tokenserver.OAuthTokenGrantBody.validity_duration', index=5, number=6, type=3, cpp_type=2, label=1, 
has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=111, serialized_end=282, ) _OAUTHTOKENGRANTENVELOPE = _descriptor.Descriptor( name='OAuthTokenGrantEnvelope', full_name='tokenserver.OAuthTokenGrantEnvelope', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='token_body', full_name='tokenserver.OAuthTokenGrantEnvelope.token_body', index=0, number=1, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='key_id', full_name='tokenserver.OAuthTokenGrantEnvelope.key_id', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='pkcs1_sha256_sig', full_name='tokenserver.OAuthTokenGrantEnvelope.pkcs1_sha256_sig', index=2, number=3, type=12, cpp_type=9, label=1, has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=284, serialized_end=371, ) _OAUTHTOKENGRANTBODY.fields_by_name['issued_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP DESCRIPTOR.message_types_by_name['OAuthTokenGrantBody'] = _OAUTHTOKENGRANTBODY DESCRIPTOR.message_types_by_name['OAuthTokenGrantEnvelope'] = _OAUTHTOKENGRANTENVELOPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) OAuthTokenGrantBody = _reflection.GeneratedProtocolMessageType('OAuthTokenGrantBody', (_message.Message,), dict( DESCRIPTOR = _OAUTHTOKENGRANTBODY, __module__ = 'go.chromium.org.luci.tokenserver.api.oauth_token_grant_pb2' # @@protoc_insertion_point(class_scope:tokenserver.OAuthTokenGrantBody) )) _sym_db.RegisterMessage(OAuthTokenGrantBody) OAuthTokenGrantEnvelope = _reflection.GeneratedProtocolMessageType('OAuthTokenGrantEnvelope', (_message.Message,), dict( DESCRIPTOR = _OAUTHTOKENGRANTENVELOPE, __module__ = 'go.chromium.org.luci.tokenserver.api.oauth_token_grant_pb2' # @@protoc_insertion_point(class_scope:tokenserver.OAuthTokenGrantEnvelope) )) _sym_db.RegisterMessage(OAuthTokenGrantEnvelope) # @@protoc_insertion_point(module_scope)
43.006211
626
0.765309
73cfe35701bc37bb417ef5ecf6bc444d39fe6642
15,346
py
Python
Lib/site-packages/numpy/core/tests/test_records.py
Aakash10399/simple-health-glucheck
1f7c4ff7778a44f09b1c8cb0089fef51dc26cea2
[ "bzip2-1.0.6" ]
5
2020-01-16T22:50:31.000Z
2021-07-19T19:16:48.000Z
Lib/site-packages/numpy/core/tests/test_records.py
Aakash10399/simple-health-glucheck
1f7c4ff7778a44f09b1c8cb0089fef51dc26cea2
[ "bzip2-1.0.6" ]
2
2018-01-22T23:21:36.000Z
2018-01-22T23:31:27.000Z
app/scripts/dxaruco/_naoqios/_numpy_UCS4/core/tests/test_records.py
softbankrobotics-labs/pepper-proactive-mobility
a9f6132ee5afd9bb6583741d9d4c481bd9597c65
[ "BSD-3-Clause" ]
5
2018-05-19T05:08:51.000Z
2021-04-29T16:03:45.000Z
from __future__ import division, absolute_import, print_function import sys import collections import pickle import warnings from os import path import numpy as np from numpy.testing import ( run_module_suite, assert_, assert_equal, assert_array_equal, assert_array_almost_equal, assert_raises, assert_warns ) class TestFromrecords(object): def test_fromrecords(self): r = np.rec.fromrecords([[456, 'dbe', 1.2], [2, 'de', 1.3]], names='col1,col2,col3') assert_equal(r[0].item(), (456, 'dbe', 1.2)) assert_equal(r['col1'].dtype.kind, 'i') if sys.version_info[0] >= 3: assert_equal(r['col2'].dtype.kind, 'U') assert_equal(r['col2'].dtype.itemsize, 12) else: assert_equal(r['col2'].dtype.kind, 'S') assert_equal(r['col2'].dtype.itemsize, 3) assert_equal(r['col3'].dtype.kind, 'f') def test_fromrecords_0len(self): """ Verify fromrecords works with a 0-length input """ dtype = [('a', float), ('b', float)] r = np.rec.fromrecords([], dtype=dtype) assert_equal(r.shape, (0,)) def test_fromrecords_2d(self): data = [ [(1, 2), (3, 4), (5, 6)], [(6, 5), (4, 3), (2, 1)] ] expected_a = [[1, 3, 5], [6, 4, 2]] expected_b = [[2, 4, 6], [5, 3, 1]] # try with dtype r1 = np.rec.fromrecords(data, dtype=[('a', int), ('b', int)]) assert_equal(r1['a'], expected_a) assert_equal(r1['b'], expected_b) # try with names r2 = np.rec.fromrecords(data, names=['a', 'b']) assert_equal(r2['a'], expected_a) assert_equal(r2['b'], expected_b) assert_equal(r1, r2) def test_method_array(self): r = np.rec.array(b'abcdefg' * 100, formats='i2,a3,i4', shape=3, byteorder='big') assert_equal(r[1].item(), (25444, b'efg', 1633837924)) def test_method_array2(self): r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'), (6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1') assert_equal(r[1].item(), (2, 22.0, b'b')) def test_recarray_slices(self): r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'), (6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1') assert_equal(r[1::2][1].item(), (4, 44.0, b'd')) def test_recarray_fromarrays(self): x1 = np.array([1, 2, 3, 4]) x2 = np.array(['a', 'dd', 'xyz', '12']) x3 = np.array([1.1, 2, 3, 4]) r = np.rec.fromarrays([x1, x2, x3], names='a,b,c') assert_equal(r[1].item(), (2, 'dd', 2.0)) x1[1] = 34 assert_equal(r.a, np.array([1, 2, 3, 4])) def test_recarray_fromfile(self): data_dir = path.join(path.dirname(__file__), 'data') filename = path.join(data_dir, 'recarray_from_file.fits') fd = open(filename, 'rb') fd.seek(2880 * 2) r1 = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3, byteorder='big') fd.seek(2880 * 2) r2 = np.rec.array(fd, formats='f8,i4,a5', shape=3, byteorder='big') fd.close() assert_equal(r1, r2) def test_recarray_from_obj(self): count = 10 a = np.zeros(count, dtype='O') b = np.zeros(count, dtype='f8') c = np.zeros(count, dtype='f8') for i in range(len(a)): a[i] = list(range(1, 10)) mine = np.rec.fromarrays([a, b, c], names='date,data1,data2') for i in range(len(a)): assert_((mine.date[i] == list(range(1, 10)))) assert_((mine.data1[i] == 0.0)) assert_((mine.data2[i] == 0.0)) def test_recarray_repr(self): a = np.array([(1, 0.1), (2, 0.2)], dtype=[('foo', int), ('bar', float)]) a = np.rec.array(a) assert_equal( repr(a), textwrap.dedent("""\ rec.array([(1, 0.1), (2, 0.2)], dtype=[('foo', '<i4'), ('bar', '<f8')])""") ) def test_recarray_from_repr(self): a = np.array([(1,'ABC'), (2, "DEF")], dtype=[('foo', int), ('bar', 'S4')]) recordarr = np.rec.array(a) recarr = a.view(np.recarray) recordview = a.view(np.dtype((np.record, a.dtype))) recordarr_r = 
eval("numpy." + repr(recordarr), {'numpy': np}) recarr_r = eval("numpy." + repr(recarr), {'numpy': np}) recordview_r = eval("numpy." + repr(recordview), {'numpy': np}) assert_equal(type(recordarr_r), np.recarray) assert_equal(recordarr_r.dtype.type, np.record) assert_equal(recordarr, recordarr_r) assert_equal(type(recarr_r), np.recarray) assert_equal(recarr_r.dtype.type, np.record) assert_equal(recarr, recarr_r) assert_equal(type(recordview_r), np.ndarray) assert_equal(recordview.dtype.type, np.record) assert_equal(recordview, recordview_r) def test_recarray_views(self): a = np.array([(1,'ABC'), (2, "DEF")], dtype=[('foo', int), ('bar', 'S4')]) b = np.array([1,2,3,4,5], dtype=np.int64) #check that np.rec.array gives right dtypes assert_equal(np.rec.array(a).dtype.type, np.record) assert_equal(type(np.rec.array(a)), np.recarray) assert_equal(np.rec.array(b).dtype.type, np.int64) assert_equal(type(np.rec.array(b)), np.recarray) #check that viewing as recarray does the same assert_equal(a.view(np.recarray).dtype.type, np.record) assert_equal(type(a.view(np.recarray)), np.recarray) assert_equal(b.view(np.recarray).dtype.type, np.int64) assert_equal(type(b.view(np.recarray)), np.recarray) #check that view to non-structured dtype preserves type=np.recarray r = np.rec.array(np.ones(4, dtype="f4,i4")) rv = r.view('f8').view('f4,i4') assert_equal(type(rv), np.recarray) assert_equal(rv.dtype.type, np.record) #check that getitem also preserves np.recarray and np.record r = np.rec.array(np.ones(4, dtype=[('a', 'i4'), ('b', 'i4'), ('c', 'i4,i4')])) assert_equal(r['c'].dtype.type, np.record) assert_equal(type(r['c']), np.recarray) #and that it preserves subclasses (gh-6949) class C(np.recarray): pass c = r.view(C) assert_equal(type(c['c']), C) # check that accessing nested structures keep record type, but # not for subarrays, non-void structures, non-structured voids test_dtype = [('a', 'f4,f4'), ('b', 'V8'), ('c', ('f4',2)), ('d', ('i8', 'i4,i4'))] r = np.rec.array([((1,1), b'11111111', [1,1], 1), ((1,1), b'11111111', [1,1], 1)], dtype=test_dtype) assert_equal(r.a.dtype.type, np.record) assert_equal(r.b.dtype.type, np.void) assert_equal(r.c.dtype.type, np.float32) assert_equal(r.d.dtype.type, np.int64) # check the same, but for views r = np.rec.array(np.ones(4, dtype='i4,i4')) assert_equal(r.view('f4,f4').dtype.type, np.record) assert_equal(r.view(('i4',2)).dtype.type, np.int32) assert_equal(r.view('V8').dtype.type, np.void) assert_equal(r.view(('i8', 'i4,i4')).dtype.type, np.int64) #check that we can undo the view arrs = [np.ones(4, dtype='f4,i4'), np.ones(4, dtype='f8')] for arr in arrs: rec = np.rec.array(arr) # recommended way to view as an ndarray: arr2 = rec.view(rec.dtype.fields or rec.dtype, np.ndarray) assert_equal(arr2.dtype.type, arr.dtype.type) assert_equal(type(arr2), type(arr)) def test_recarray_repr(self): # make sure non-structured dtypes also show up as rec.array a = np.array(np.ones(4, dtype='f8')) assert_(repr(np.rec.array(a)).startswith('rec.array')) # check that the 'np.record' part of the dtype isn't shown a = np.rec.array(np.ones(3, dtype='i4,i4')) assert_equal(repr(a).find('numpy.record'), -1) a = np.rec.array(np.ones(3, dtype='i4')) assert_(repr(a).find('dtype=int32') != -1) def test_recarray_from_names(self): ra = np.rec.array([ (1, 'abc', 3.7000002861022949, 0), (2, 'xy', 6.6999998092651367, 1), (0, ' ', 0.40000000596046448, 0)], names='c1, c2, c3, c4') pa = np.rec.fromrecords([ (1, 'abc', 3.7000002861022949, 0), (2, 'xy', 6.6999998092651367, 1), (0, ' ', 0.40000000596046448, 
0)], names='c1, c2, c3, c4') assert_(ra.dtype == pa.dtype) assert_(ra.shape == pa.shape) for k in range(len(ra)): assert_(ra[k].item() == pa[k].item()) def test_recarray_conflict_fields(self): ra = np.rec.array([(1, 'abc', 2.3), (2, 'xyz', 4.2), (3, 'wrs', 1.3)], names='field, shape, mean') ra.mean = [1.1, 2.2, 3.3] assert_array_almost_equal(ra['mean'], [1.1, 2.2, 3.3]) assert_(type(ra.mean) is type(ra.var)) ra.shape = (1, 3) assert_(ra.shape == (1, 3)) ra.shape = ['A', 'B', 'C'] assert_array_equal(ra['shape'], [['A', 'B', 'C']]) ra.field = 5 assert_array_equal(ra['field'], [[5, 5, 5]]) assert_(isinstance(ra.field, collections.Callable)) def test_fromrecords_with_explicit_dtype(self): a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=[('a', int), ('b', object)]) assert_equal(a.a, [1, 2]) assert_equal(a[0].a, 1) assert_equal(a.b, ['a', 'bbb']) assert_equal(a[-1].b, 'bbb') # ndtype = np.dtype([('a', int), ('b', object)]) a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=ndtype) assert_equal(a.a, [1, 2]) assert_equal(a[0].a, 1) assert_equal(a.b, ['a', 'bbb']) assert_equal(a[-1].b, 'bbb') def test_recarray_stringtypes(self): # Issue #3993 a = np.array([('abc ', 1), ('abc', 2)], dtype=[('foo', 'S4'), ('bar', int)]) a = a.view(np.recarray) assert_equal(a.foo[0] == a.foo[1], False) def test_recarray_returntypes(self): qux_fields = {'C': (np.dtype('S5'), 0), 'D': (np.dtype('S5'), 6)} a = np.rec.array([('abc ', (1,1), 1, ('abcde', 'fgehi')), ('abc', (2,3), 1, ('abcde', 'jklmn'))], dtype=[('foo', 'S4'), ('bar', [('A', int), ('B', int)]), ('baz', int), ('qux', qux_fields)]) assert_equal(type(a.foo), np.ndarray) assert_equal(type(a['foo']), np.ndarray) assert_equal(type(a.bar), np.recarray) assert_equal(type(a['bar']), np.recarray) assert_equal(a.bar.dtype.type, np.record) assert_equal(type(a['qux']), np.recarray) assert_equal(a.qux.dtype.type, np.record) assert_equal(dict(a.qux.dtype.fields), qux_fields) assert_equal(type(a.baz), np.ndarray) assert_equal(type(a['baz']), np.ndarray) assert_equal(type(a[0].bar), np.record) assert_equal(type(a[0]['bar']), np.record) assert_equal(a[0].bar.A, 1) assert_equal(a[0].bar['A'], 1) assert_equal(a[0]['bar'].A, 1) assert_equal(a[0]['bar']['A'], 1) assert_equal(a[0].qux.D, b'fgehi') assert_equal(a[0].qux['D'], b'fgehi') assert_equal(a[0]['qux'].D, b'fgehi') assert_equal(a[0]['qux']['D'], b'fgehi') def test_zero_width_strings(self): # Test for #6430, based on the test case from #1901 cols = [['test'] * 3, [''] * 3] rec = np.rec.fromarrays(cols) assert_equal(rec['f0'], ['test', 'test', 'test']) assert_equal(rec['f1'], ['', '', '']) dt = np.dtype([('f0', '|S4'), ('f1', '|S')]) rec = np.rec.fromarrays(cols, dtype=dt) assert_equal(rec.itemsize, 4) assert_equal(rec['f0'], [b'test', b'test', b'test']) assert_equal(rec['f1'], [b'', b'', b'']) class TestRecord(object): def setup(self): self.data = np.rec.fromrecords([(1, 2, 3), (4, 5, 6)], dtype=[("col1", "<i4"), ("col2", "<i4"), ("col3", "<i4")]) def test_assignment1(self): a = self.data assert_equal(a.col1[0], 1) a[0].col1 = 0 assert_equal(a.col1[0], 0) def test_assignment2(self): a = self.data assert_equal(a.col1[0], 1) a.col1[0] = 0 assert_equal(a.col1[0], 0) def test_invalid_assignment(self): a = self.data def assign_invalid_column(x): x[0].col5 = 1 assert_raises(AttributeError, assign_invalid_column, a) def test_nonwriteable_setfield(self): # gh-8171 r = np.rec.array([(0,), (1,)], dtype=[('f', 'i4')]) r.flags.writeable = False with assert_raises(ValueError): r.f = [2, 3] with assert_raises(ValueError): 
r.setfield([2,3], *r.dtype.fields['f']) def test_pickle_1(self): # Issue #1529 a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)]) assert_equal(a, pickle.loads(pickle.dumps(a))) assert_equal(a[0], pickle.loads(pickle.dumps(a[0]))) def test_pickle_2(self): a = self.data assert_equal(a, pickle.loads(pickle.dumps(a))) assert_equal(a[0], pickle.loads(pickle.dumps(a[0]))) def test_pickle_3(self): # Issue #7140 a = self.data pa = pickle.loads(pickle.dumps(a[0])) assert_(pa.flags.c_contiguous) assert_(pa.flags.f_contiguous) assert_(pa.flags.writeable) assert_(pa.flags.aligned) def test_objview_record(self): # https://github.com/numpy/numpy/issues/2599 dt = np.dtype([('foo', 'i8'), ('bar', 'O')]) r = np.zeros((1,3), dtype=dt).view(np.recarray) r.foo = np.array([1, 2, 3]) # TypeError? # https://github.com/numpy/numpy/issues/3256 ra = np.recarray((2,), dtype=[('x', object), ('y', float), ('z', int)]) ra[['x','y']] # TypeError? def test_record_scalar_setitem(self): # https://github.com/numpy/numpy/issues/3561 rec = np.recarray(1, dtype=[('x', float, 5)]) rec[0].x = 1 assert_equal(rec[0].x, np.ones(5)) def test_missing_field(self): # https://github.com/numpy/numpy/issues/4806 arr = np.zeros((3,), dtype=[('x', int), ('y', int)]) assert_raises(ValueError, lambda: arr[['nofield']]) def test_find_duplicate(): l1 = [1, 2, 3, 4, 5, 6] assert_(np.rec.find_duplicate(l1) == []) l2 = [1, 2, 1, 4, 5, 6] assert_(np.rec.find_duplicate(l2) == [1]) l3 = [1, 2, 1, 4, 1, 6, 2, 3] assert_(np.rec.find_duplicate(l3) == [1, 2]) l3 = [2, 2, 1, 4, 1, 6, 2, 3] assert_(np.rec.find_duplicate(l3) == [2, 1]) if __name__ == "__main__": run_module_suite()
38.461153
96
0.527304
73cffba5d864cfe12b31c93486313cdb22085789
663
py
Python
AlgoTalks/Linear DS/simple_text_editor.py
mishrakeshav/Competitive-Programming
b25dcfeec0fb9a9c71bf3a05644b619f4ca83dd2
[ "MIT" ]
2
2020-06-25T21:10:32.000Z
2020-12-10T06:53:45.000Z
AlgoTalks/Linear DS/simple_text_editor.py
mishrakeshav/Competitive-Programming
b25dcfeec0fb9a9c71bf3a05644b619f4ca83dd2
[ "MIT" ]
null
null
null
AlgoTalks/Linear DS/simple_text_editor.py
mishrakeshav/Competitive-Programming
b25dcfeec0fb9a9c71bf3a05644b619f4ca83dd2
[ "MIT" ]
3
2020-05-15T14:17:09.000Z
2021-07-25T13:18:20.000Z
""" Problem link: https://www.hackerrank.com/challenges/simple-text-editor/problem Solution By Keshav Mishra """ from sys import stdin,stdout def input(): return stdin.readline().strip() # def print(s): stdout.write(str(s)+'\n') def solve(): s = '' stack = [] q = int(input()) for i in range(q): k = input().split() if k[0] == '1': stack.append(s) s += k[1] elif k[0] == '2': stack.append(s) s = s[:len(s)-int(k[1])] elif k[0] == '3': print(s[int(k[1])-1]) elif k[0] == '4': s = stack.pop() if __name__ == '__main__': solve()
22.862069
78
0.485671
73d013de52295cbe543b511968ecb3967210e70a
9,768
py
Python
models/losses.py
mackenzie-warren/SO-Net
141e26c48dfdc0845b287c02402acbd3ab2d09f7
[ "MIT" ]
325
2018-02-28T06:14:05.000Z
2022-03-09T08:39:31.000Z
models/losses.py
mackenzie-warren/SO-Net
141e26c48dfdc0845b287c02402acbd3ab2d09f7
[ "MIT" ]
37
2018-05-11T06:04:41.000Z
2022-03-01T11:36:26.000Z
models/losses.py
mackenzie-warren/SO-Net
141e26c48dfdc0845b287c02402acbd3ab2d09f7
[ "MIT" ]
87
2018-03-13T13:06:34.000Z
2022-02-24T02:35:45.000Z
import torch import torch.nn as nn from torch.autograd import Variable import numpy as np import math import torch.utils.model_zoo as model_zoo import time import torch.nn.functional as F import faiss import json import os import os.path from collections import OrderedDict def robust_norm(var): ''' :param var: Variable of BxCxHxW :return: p-norm of BxCxW ''' result = ((var**2).sum(dim=2) + 1e-8).sqrt() # result = (var ** 2).sum(dim=2) # try to make the points less dense, caused by the backward loss # result = result.clamp(min=7e-3, max=None) return result class CrossEntropyLossSeg(nn.Module): def __init__(self, weight=None, size_average=True): super(CrossEntropyLossSeg, self).__init__() self.nll_loss = nn.NLLLoss(weight, size_average) def forward(self, inputs, targets): ''' :param inputs: BxclassxN :param targets: BxN :return: ''' inputs = inputs.unsqueeze(3) targets = targets.unsqueeze(2) return self.nll_loss(F.log_softmax(inputs, dim=1), targets) def visualize_pc_seg(score, seg, label, visualizer, opt, input_pc, batch_num): # display only one instance of pc/img input_pc_np = input_pc.cpu().numpy().transpose() # Nx3 pc_color_np = np.ones(input_pc_np.shape, dtype=int) # Nx3 gt_pc_color_np = np.ones(input_pc_np.shape, dtype=int) # Nx3 # construct color map _, predicted_seg = torch.max(score, dim=0, keepdim=False) # 50xN -> N predicted_seg_np = predicted_seg.cpu().numpy() # N gt_seg_np = seg.cpu().numpy() # N color_map_file = os.path.join(opt.dataroot, 'part_color_mapping.json') color_map = json.load(open(color_map_file, 'r')) color_map_np = np.fabs((np.asarray(color_map) * 255)).astype(int) # 50x3 for i in range(input_pc_np.shape[0]): pc_color_np[i] = color_map_np[predicted_seg_np[i]] gt_pc_color_np[i] = color_map_np[gt_seg_np[i]] if gt_seg_np[i] == 49: gt_pc_color_np[i] = np.asarray([1, 1, 1]).astype(int) dict = OrderedDict([('pc_colored_predicted', [input_pc_np, pc_color_np]), ('pc_colored_gt', [input_pc_np, gt_pc_color_np])]) visualizer.display_current_results(dict, 1, 1) def compute_iou_np_array(score, seg, label, visualizer, opt, input_pc): part_label = [ [0, 1, 2, 3], [4, 5], [6, 7], [8, 9, 10, 11], [12, 13, 14, 15], [16, 17, 18], [19, 20, 21], [22, 23], [24, 25, 26, 27], [28, 29], [30, 31, 32, 33, 34, 35], [36, 37], [38, 39, 40], [41, 42, 43], [44, 45, 46], [47, 48, 49] ] _, seg_predicted = torch.max(score, dim=1) # BxN iou_batch = [] for i in range(score.size()[0]): iou_pc = [] for part in part_label[label[i]]: gt = seg[i] == part predict = seg_predicted[i] == part intersection = (gt.int() + predict.int()) == 2 union = (gt.int() + predict.int()) >= 1 if union.sum() == 0: iou_part = 1.0 else: iou_part = intersection.int().sum().item() / (union.int().sum().item() + 0.0001) iou_pc.append(iou_part) iou_batch.append(np.asarray(iou_pc).mean()) iou_np = np.asarray(iou_batch) return iou_np def compute_iou(score, seg, label, visualizer, opt, input_pc): ''' :param score: BxCxN tensor :param seg: BxN tensor :return: ''' part_label = [ [0, 1, 2, 3], [4, 5], [6, 7], [8, 9, 10, 11], [12, 13, 14, 15], [16, 17, 18], [19, 20, 21], [22, 23], [24, 25, 26, 27], [28, 29], [30, 31, 32, 33, 34, 35], [36, 37], [38, 39, 40], [41, 42, 43], [44, 45, 46], [47, 48, 49] ] _, seg_predicted = torch.max(score, dim=1) # BxN iou_batch = [] vis_flag = False for i in range(score.size()[0]): iou_pc = [] for part in part_label[label[i]]: gt = seg[i] == part predict = seg_predicted[i] == part intersection = (gt.int() + predict.int()) == 2 union = (gt.int() + predict.int()) >= 1 # print(intersection) # print(union) # 
assert False if union.sum() == 0: iou_part = 1.0 else: iou_part = intersection.int().sum().item() / (union.int().sum().item() + 0.0001) # debug to see what happened # if iou_part < 0.1: # print(part) # print('predict:') # print(predict.nonzero()) # print('gt') # print(gt.nonzero()) # vis_flag = True iou_pc.append(iou_part) # debug to see what happened if vis_flag: print('============') print(iou_pc) print(label[i]) visualize_pc_seg(score[i], seg[i], label[i], visualizer, opt, input_pc[i], i) iou_batch.append(np.asarray(iou_pc).mean()) iou = np.asarray(iou_batch).mean() return iou class ChamferLoss(nn.Module): def __init__(self, opt): super(ChamferLoss, self).__init__() self.opt = opt self.dimension = 3 self.k = 1 # we need only a StandardGpuResources per GPU self.res = faiss.StandardGpuResources() self.res.setTempMemoryFraction(0.1) self.flat_config = faiss.GpuIndexFlatConfig() self.flat_config.device = opt.gpu_id # place holder self.forward_loss = torch.FloatTensor([0]) self.backward_loss = torch.FloatTensor([0]) def build_nn_index(self, database): ''' :param database: numpy array of Nx3 :return: Faiss index, in CPU ''' # index = faiss.GpuIndexFlatL2(self.res, self.dimension, self.flat_config) # dimension is 3 index_cpu = faiss.IndexFlatL2(self.dimension) index = faiss.index_cpu_to_gpu(self.res, self.opt.gpu_id, index_cpu) index.add(database) return index def search_nn(self, index, query, k): ''' :param index: Faiss index :param query: numpy array of Nx3 :return: D: Variable of Nxk, type FloatTensor, in GPU I: Variable of Nxk, type LongTensor, in GPU ''' D, I = index.search(query, k) D_var =torch.from_numpy(np.ascontiguousarray(D)) I_var = torch.from_numpy(np.ascontiguousarray(I).astype(np.int64)) if self.opt.gpu_id >= 0: D_var = D_var.to(self.opt.device) I_var = I_var.to(self.opt.device) return D_var, I_var def forward(self, predict_pc, gt_pc): ''' :param predict_pc: Bx3xM Variable in GPU :param gt_pc: Bx3xN Variable in GPU :return: ''' predict_pc_size = predict_pc.size() gt_pc_size = gt_pc.size() predict_pc_np = np.ascontiguousarray(torch.transpose(predict_pc.data.clone(), 1, 2).cpu().numpy()) # BxMx3 gt_pc_np = np.ascontiguousarray(torch.transpose(gt_pc.data.clone(), 1, 2).cpu().numpy()) # BxNx3 # selected_gt: Bxkx3xM selected_gt_by_predict = torch.FloatTensor(predict_pc_size[0], self.k, predict_pc_size[1], predict_pc_size[2]) # selected_predict: Bxkx3xN selected_predict_by_gt = torch.FloatTensor(gt_pc_size[0], self.k, gt_pc_size[1], gt_pc_size[2]) if self.opt.gpu_id >= 0: selected_gt_by_predict = selected_gt_by_predict.to(self.opt.device) selected_predict_by_gt = selected_predict_by_gt.to(self.opt.device) # process each batch independently. for i in range(predict_pc_np.shape[0]): index_predict = self.build_nn_index(predict_pc_np[i]) index_gt = self.build_nn_index(gt_pc_np[i]) # database is gt_pc, predict_pc -> gt_pc ----------------------------------------------------------- _, I_var = self.search_nn(index_gt, predict_pc_np[i], self.k) # process nearest k neighbors for k in range(self.k): selected_gt_by_predict[i,k,...] = gt_pc[i].index_select(1, I_var[:,k]) # database is predict_pc, gt_pc -> predict_pc ------------------------------------------------------- _, I_var = self.search_nn(index_predict, gt_pc_np[i], self.k) # process nearest k neighbors for k in range(self.k): selected_predict_by_gt[i,k,...] 
= predict_pc[i].index_select(1, I_var[:,k]) # compute loss =================================================== # selected_gt(Bxkx3xM) vs predict_pc(Bx3xM) forward_loss_element = robust_norm(selected_gt_by_predict-predict_pc.unsqueeze(1).expand_as(selected_gt_by_predict)) self.forward_loss = forward_loss_element.mean() self.forward_loss_array = forward_loss_element.mean(dim=1).mean(dim=1) # selected_predict(Bxkx3xN) vs gt_pc(Bx3xN) backward_loss_element = robust_norm(selected_predict_by_gt - gt_pc.unsqueeze(1).expand_as(selected_predict_by_gt)) # BxkxN self.backward_loss = backward_loss_element.mean() self.backward_loss_array = backward_loss_element.mean(dim=1).mean(dim=1) self.loss_array = self.forward_loss_array + self.backward_loss_array return self.forward_loss + self.backward_loss # + self.sparsity_loss def __call__(self, predict_pc, gt_pc): # start_time = time.time() loss = self.forward(predict_pc, gt_pc) # print(time.time()-start_time) return loss
33
131
0.573505
73d03bc60da3484c22e0fbcbd42ac29cb5d95b1e
1,857
py
Python
flayer/loader.py
techhat/grabbr
c188468168b35b8cd9cd0a42022fb603129faf9e
[ "Apache-2.0" ]
null
null
null
flayer/loader.py
techhat/grabbr
c188468168b35b8cd9cd0a42022fb603129faf9e
[ "Apache-2.0" ]
null
null
null
flayer/loader.py
techhat/grabbr
c188468168b35b8cd9cd0a42022fb603129faf9e
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
'''
Basic functions for Web Flayer
'''
# 3rd party
from salt.loader import LazyLoader
import salt.config


def parser(opts, context, urls, dbclient):
    '''
    Load spider modules
    '''
    master_opts = salt.config.master_config('/etc/salt/master')
    minion_opts = salt.config.minion_config('/etc/salt/minion')
    return LazyLoader(
        opts['parser_dir'],
        minion_opts,
        tag=u'flayer/parser',
        pack={
            u'__master_opts__': master_opts,
            u'__minion_opts__': minion_opts,
            u'__opts__': opts,
            u'__context__': context,
            u'__urls__': urls,
            u'__dbclient__': dbclient,
        },
    )


def search(opts, dbclient):
    '''
    Load search modules
    '''
    minion_opts = salt.config.minion_config('/etc/salt/minion')
    return LazyLoader(
        opts['search_dir'],
        minion_opts,
        tag=u'flayer/search',
        pack={
            u'__opts__': opts,
            u'__dbclient__': dbclient,
        },
    )


def organize(opts, dbclient, context):
    '''
    Load organizer modules
    '''
    minion_opts = salt.config.minion_config('/etc/salt/minion')
    return LazyLoader(
        opts['organize_dir'],
        minion_opts,
        tag=u'flayer/organize',
        pack={
            u'__opts__': opts,
            u'__dbclient__': dbclient,
            u'__context__': context,
        },
    )


def filter(opts, context, urls, dbclient):
    '''
    Load filterr modules
    '''
    minion_opts = salt.config.minion_config('/etc/salt/minion')
    return LazyLoader(
        opts['filter_dir'],
        minion_opts,
        tag=u'flayer/filter',
        pack={
            u'__opts__': opts,
            u'__context__': context,
            u'__urls__': urls,
            u'__dbclient__': dbclient,
        },
    )
23.2125
63
0.556274
73d08ab42def173bdc57c91eab3cae03e179a653
11,796
py
Python
src/dmf_chip/edit.py
sci-bots/dmf-chip
6fc192235f792046297fcf0250606c8838bb9257
[ "BSD-3-Clause" ]
null
null
null
src/dmf_chip/edit.py
sci-bots/dmf-chip
6fc192235f792046297fcf0250606c8838bb9257
[ "BSD-3-Clause" ]
1
2019-04-11T18:55:01.000Z
2019-04-11T18:55:01.000Z
src/dmf_chip/edit.py
sci-bots/dmf-chip
6fc192235f792046297fcf0250606c8838bb9257
[ "BSD-3-Clause" ]
null
null
null
from __future__ import absolute_import import functools as ft import warnings from logging_helpers import _L from lxml.etree import QName, Element import lxml.etree import networkx as nx import numpy as np import pandas as pd from .core import ureg from .load import draw, load from six.moves import zip __all__ = ['detect_neighbours', 'draw_with_segment_rays', 'write_connections_layer'] DEFAULT_DISTANCE_THRESHOLD = 0.175 * ureg.mm def detect_neighbours(chip_info, distance_threshold=DEFAULT_DISTANCE_THRESHOLD): segments = get_segment_rays(chip_info, magnitude=distance_threshold) return get_all_intersections(segments) def draw_with_segment_rays(chip_info, distance_threshold=DEFAULT_DISTANCE_THRESHOLD, axis=None): import matplotlib.pyplot as plt if axis is None: fig, axis = plt.subplots(figsize=(50, 50)) result = draw(chip_info, ax=axis) # result = draw(chip_info) axis = result['axis'] for p in result['patches'].values(): p.set_alpha(.3) light_green = '#90cd97' dark_green = '#059748' df_intersections = detect_neighbours(chip_info, distance_threshold=.175 * ureg.mm) for idx_i, segment_i in df_intersections.iterrows(): axis.arrow(segment_i['x_mid'], segment_i['y_mid'], segment_i['x_normal'], segment_i['y_normal'], width=.25, edgecolor=dark_green, facecolor=light_green) def get_all_intersections(df_rays): ''' Parameters ---------- segment_rays : pandas.DataFrame See return type of :func:`get_segment_rays()`. ''' intersections = [] for i, ((id_i, vertex_i), segment_i) in enumerate(df_rays.iterrows()): p = segment_i[['x_mid', 'y_mid']].values r = segment_i[['x_normal', 'y_normal']].values df_intersections_i = get_intersections(df_rays, p, r) # Do not include self electrode in consideration for neighbours. self_mask = df_intersections_i.index.get_level_values('id') == id_i df_intersections_i = df_intersections_i.loc[~self_mask] if df_intersections_i.shape[0]: intersections.append(((id_i, vertex_i), df_intersections_i)) if not intersections: return pd.DataFrame() index, values = list(zip(*intersections)) df_result = pd.concat(values, keys=index) df_result.index.names = ['id', 'vertex_i', 'id_neighbour', 'vertex_i_neighbour'] return df_result def get_intersections(df_rays, p, r): # See: https://stackoverflow.com/a/565282/345236 q = df_rays[['x1', 'y1']].values s = df_rays[['x2', 'y2']].values - q r_x_s = np.cross(r, s) r_x_s[r_x_s == 0] = np.NaN t = np.cross((q - p), s) / r_x_s u = np.cross((q - p), r) / r_x_s df_tu = pd.DataFrame(np.column_stack([t, u]), columns=list('tu'), index=df_rays.index) with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=RuntimeWarning) df_i = df_rays.join(df_tu).loc[(r_x_s != 0) & (t >= 0) & (t <= 1) & (u >= 0) & (u <= 1)] intersect_points = p + df_i.t.values[:, None] * r return df_i.join(pd.DataFrame(intersect_points, columns=['x_intersect', 'y_intersect'], index=df_i.index)).drop(['t', 'u'], axis=1) def _electrode_segment_rays(electrode, magnitude): '''Compute ray cast "outwards" for each line segment of electrode shape. Parameters ---------- electrode : dict See ``electrodes`` item in :func:`dmf_chip.load()`. magnitude : float Magnitude of ray vectors (in pixels). 
Returns ------- pandas.DataFrame Each row corresponds to a ray vector cast from the respective line segment in the electrode shape, with the following columns:: - ``x1``, ``y1``: start point of line segment - ``x2``, ``y2``: end point of line segment - ``x_mid``, ``y_mid``: mid point of line segment - ``length``: Cartesian length of line segment - ``x_normal``, ``y_normal``: end point of cast ray ''' points = np.array(electrode['points']) if electrode['direction'] == 'counter-clockwise': points = points[::-1] # Vector direction/magnitude for each segment (relative to origin). v = .5 * (points[1:] - points[:-1]) # Mid-point of segment. x_mid, y_mid = .5 * (points[1:] + points[:-1]).T length = np.sqrt((v ** 2)).sum(axis=1) v_scaled = magnitude * (v / length[:, None]) x_normal = -v_scaled[:, 1] y_normal = v_scaled[:, 0] x1, y1 = points[:-1].T x2, y2 = points[1:].T result = pd.DataFrame(np.column_stack((x1, y1, x2, y2, x_mid, y_mid, length, x_normal, y_normal)), columns=['x1', 'y1', 'x2', 'y2', 'x_mid', 'y_mid', 'length', 'x_normal', 'y_normal']) return result def get_segment_rays(chip_info, magnitude=DEFAULT_DISTANCE_THRESHOLD): magnitude_px = (magnitude * chip_info['__metadata__']['ppi'] * ureg.ppi).to('pixel').magnitude df_rays = pd.concat([_electrode_segment_rays(e_i, magnitude_px) for e_i in chip_info['electrodes']], keys=[e['id'] for e in chip_info['electrodes']]) df_rays.index.names = 'id', 'vertex_i' return df_rays def write_connections_layer(chip_file, distance_threshold=DEFAULT_DISTANCE_THRESHOLD): chip_info = load(chip_file) df_intersections = detect_neighbours(chip_info, distance_threshold=distance_threshold) doc = lxml.etree.parse(chip_file) root = doc.getroot() nsmap = {k: v for k, v in root.nsmap.items() if k} _xpath = ft.partial(root.xpath, namespaces=nsmap) device_layer = _xpath('//svg:g[@inkscape:label="Device"]')[0] connections_layers = _xpath('//svg:g[@inkscape:label="Connections"]') # Remove existing neighbouring electrode connections layer(s) (if any). for layer in connections_layers: root.remove(layer) # Determine and use first unused layer label number. layer_ids = set(_xpath('//svg:g[@inkscape:label and @inkscape:groupmode=' '"layer"]/@id')) i = 1 while True: layer_id = 'layer%d' % i if layer_id not in layer_ids: break i += 1 connections_layer = Element(QName(nsmap['svg'], 'g'), attrib={QName(nsmap['inkscape'], 'label'): 'Connections', QName(nsmap['inkscape'], 'groupmode'): 'layer', 'id': layer_id}) # Construct undirected graph from detected intersections. edges = df_intersections.reset_index()[['id', 'id_neighbour']].values.tolist() graph = nx.Graph(edges) # Create one `<svg:path>` per electrode. path_elements = [] centers = pd.Series((e['pole_of_accessibility'] for e in chip_info['electrodes']), index=[e['id'] for e in chip_info['electrodes']]) for a, b, in graph.edges: a_point, b_point = centers[[a, b]] path_d = 'M %.2f,%.2f L %.2f,%.2f' % (a_point['x'], a_point['y'], b_point['x'], b_point['y']) path_elem = Element(QName(nsmap['svg'], 'path'), attrib={'id': layer_id, 'style': 'stroke:#000000;stroke-width:0.1', 'd': path_d}) path_elements.append(path_elem) connections_layer.extend(path_elements) device_layer.addnext(connections_layer) return doc def _get_or_create(parent, name, attrib=None): '''Get element specified by qualified tag name or create it. Parameters ---------- parent : lxml.etree element Parent element. name : str Name in form ``"<namespace alias>:<tagname>"``, e.g., ``"dmf:ChipDesign"``. 
If :data:`parent` does not contain a child matching the specified tag name and corresponding attributes, create a new element. attrib : dict, optional Element attributes to match (or set, if creating new element). Returns ------- lxml.etree.Element Matching child element (if available) or created element. Examples -------- Get ``<dmf:ChipDesign>`` element or create it if it does not exist: >>>> from dmf_chip.edit import _get_or_create >>>> >>>> # Load xml document define `_xpath` alias... >>>> >>>> metadata = _xpath('/svg:svg/svg:metadata')[0] >>>> chip_design = _get_or_create(metadata, 'dmf:ChipDesign') ''' docroot = parent.getroottree().getroot() nsmap = {k: v for k, v in docroot.nsmap.items() if k} ns, tagname = name.split(':') qname = QName(nsmap[ns], tagname) # Short-hand to xpath using namespaces referenced in file. _xpath = ft.wraps(parent.xpath)(ft.partial(parent.xpath, namespaces=nsmap)) xquery = './%s:%s' % (ns, tagname) if attrib is not None: attrib_str = ''.join('[@%s="%s"]' % (k, v) for k, v in attrib.items()) else: attrib_str = '' xquery += attrib_str if not _xpath(xquery): element = Element(qname, attrib=attrib) parent.append(element) _L().info('Add new element: `%s:%s%s`', ns, tagname, attrib_str) else: element = _xpath(xquery)[0] _L().info('found element: `%s:%s%s`', ns, tagname, attrib_str) return element def write_test_route(chip_file, tour_ids, id_): '''Write test route to SVG metadata. Parameters ---------- chip_file : str Path to chip design file. tour_ids : list[str] Ordered list of electrode ids defining tour waypoints. id_ : str Test route id. Returns ------- lxml.etree document In-memory document with test route element added. ''' doc = lxml.etree.parse(chip_file) root = doc.getroot() if 'dmf' not in root.nsmap: root.nsmap['dmf'] = \ "https://github.com/sci-bots/dmf-chip-spec/releases/tag/v0.1" NSMAP = {k: v for k, v in root.nsmap.items() if k} # Short-hand to xpath using namespaces referenced in file. _xpath = ft.wraps(root.xpath)(ft.partial(root.xpath, namespaces=NSMAP)) metadata = _xpath('/svg:svg/svg:metadata')[0] chip_design = _get_or_create(metadata, 'dmf:ChipDesign') test_routes = _get_or_create(chip_design, 'dmf:TestRoutes') if test_routes.xpath('./dmf:TestRoute[@id="%s"]' % id_, namespaces=NSMAP): raise NameError('Test route already exists with id: `%s`', id_) test_route = _get_or_create(test_routes, 'dmf:TestRoute', attrib={'id': id_, 'version': '0.1.0'}) for id_i in tour_ids: element_i = Element(QName(NSMAP['dmf'], 'Waypoint')) element_i.text = str(id_i) test_route.append(element_i) _L().info('Added %d waypoints.', len(tour_ids)) return doc
36.747664
81
0.569939
73d0a7cbd2a172dfea06bbf97de646bcd089488d
11,501
py
Python
troveclient/tests/test_management.py
hrnciar/python-troveclient
e41d08d24325a61a213592934b508bf77ce67dac
[ "Apache-2.0" ]
46
2015-01-14T12:26:24.000Z
2021-01-05T12:54:03.000Z
troveclient/tests/test_management.py
hrnciar/python-troveclient
e41d08d24325a61a213592934b508bf77ce67dac
[ "Apache-2.0" ]
3
2015-05-22T19:58:11.000Z
2021-04-30T06:12:21.000Z
troveclient/tests/test_management.py
hrnciar/python-troveclient
e41d08d24325a61a213592934b508bf77ce67dac
[ "Apache-2.0" ]
34
2015-01-21T12:33:53.000Z
2021-10-13T00:29:35.000Z
# Copyright 2011 OpenStack Foundation # Copyright 2013 Rackspace Hosting # Copyright 2013 Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import testtools from unittest import mock from troveclient import base from troveclient.v1 import management """ Unit tests for management.py """ class RootHistoryTest(testtools.TestCase): def setUp(self): super(RootHistoryTest, self).setUp() self.orig__init = management.RootHistory.__init__ management.RootHistory.__init__ = mock.Mock(return_value=None) def tearDown(self): super(RootHistoryTest, self).tearDown() management.RootHistory.__init__ = self.orig__init def test___repr__(self): root_history = management.RootHistory() root_history.id = "1" root_history.created = "ct" root_history.user = "tu" self.assertEqual('<Root History: Instance 1 enabled at ct by tu>', root_history.__repr__()) class ManagementTest(testtools.TestCase): def setUp(self): super(ManagementTest, self).setUp() self.orig__init = management.Management.__init__ management.Management.__init__ = mock.Mock(return_value=None) self.management = management.Management() self.management.api = mock.Mock() self.management.api.client = mock.Mock() self.orig_hist__init = management.RootHistory.__init__ self.orig_base_getid = base.getid base.getid = mock.Mock(return_value="instance1") def tearDown(self): super(ManagementTest, self).tearDown() management.Management.__init__ = self.orig__init management.RootHistory.__init__ = self.orig_hist__init base.getid = self.orig_base_getid def test_show(self): def side_effect_func(path, instance): return path, instance self.management._get = mock.Mock(side_effect=side_effect_func) p, i = self.management.show(1) self.assertEqual(('/mgmt/instances/instance1', 'instance'), (p, i)) def test_list(self): page_mock = mock.Mock() self.management._paginated = page_mock self.management.list(deleted=True) page_mock.assert_called_with('/mgmt/instances', 'instances', None, None, query_strings={'deleted': True}) self.management.list(deleted=False, limit=10, marker="foo") page_mock.assert_called_with('/mgmt/instances', 'instances', 10, "foo", query_strings={"deleted": False}) def test_index(self): """index() is just wrapper for list()""" page_mock = mock.Mock() self.management._paginated = page_mock self.management.index(deleted=True) page_mock.assert_called_with('/mgmt/instances', 'instances', None, None, query_strings={'deleted': True}) self.management.index(deleted=False, limit=10, marker="foo") page_mock.assert_called_with('/mgmt/instances', 'instances', 10, "foo", query_strings={"deleted": False}) def test_root_enabled_history(self): self.management.api.client.get = mock.Mock(return_value=('resp', None)) self.assertRaises(Exception, self.management.root_enabled_history, "instance") body = {'root_history': 'rh'} self.management.api.client.get = mock.Mock(return_value=('resp', body)) management.RootHistory.__init__ = mock.Mock(return_value=None) rh = self.management.root_enabled_history("instance") 
self.assertIsInstance(rh, management.RootHistory) def test__action(self): resp = mock.Mock() self.management.api.client.post = mock.Mock( return_value=(resp, 'body') ) resp.status_code = 200 self.management._action(1, 'body') self.assertEqual(1, self.management.api.client.post.call_count) resp.status_code = 400 self.assertRaises(Exception, self.management._action, 1, 'body') self.assertEqual(2, self.management.api.client.post.call_count) def _mock_action(self): self.body_ = "" def side_effect_func(instance_id, body): self.body_ = body self.management._action = mock.Mock(side_effect=side_effect_func) def test_stop(self): self._mock_action() self.management.stop(1) self.assertEqual(1, self.management._action.call_count) self.assertEqual({'stop': {}}, self.body_) def test_reboot(self): self._mock_action() self.management.reboot(1) self.assertEqual(1, self.management._action.call_count) self.assertEqual({'reboot': {}}, self.body_) def test_migrate(self): self._mock_action() self.management.migrate(1) self.assertEqual(1, self.management._action.call_count) self.assertEqual({'migrate': {}}, self.body_) def test_migrate_to_host(self): hostname = 'hostname2' self._mock_action() self.management.migrate(1, host=hostname) self.assertEqual(1, self.management._action.call_count) self.assertEqual({'migrate': {'host': hostname}}, self.body_) def test_update(self): self._mock_action() self.management.update(1) self.assertEqual(1, self.management._action.call_count) self.assertEqual({'update': {}}, self.body_) def test_reset_task_status(self): self._mock_action() self.management.reset_task_status(1) self.assertEqual(1, self.management._action.call_count) self.assertEqual({'reset-task-status': {}}, self.body_) class MgmtFlavorsTest(testtools.TestCase): def setUp(self): super(MgmtFlavorsTest, self).setUp() self.orig__init = management.MgmtFlavors.__init__ management.MgmtFlavors.__init__ = mock.Mock(return_value=None) self.flavors = management.MgmtFlavors() self.flavors.api = mock.Mock() self.flavors.api.client = mock.Mock() self.flavors.resource_class = mock.Mock(return_value="flavor-1") self.orig_base_getid = base.getid base.getid = mock.Mock(return_value="flavor1") def tearDown(self): super(MgmtFlavorsTest, self).tearDown() management.MgmtFlavors.__init__ = self.orig__init base.getid = self.orig_base_getid def test_create(self): def side_effect_func(path, body, inst): return path, body, inst self.flavors._create = mock.Mock(side_effect=side_effect_func) p, b, i = self.flavors.create("test-name", 1024, 30, 2, 1) self.assertEqual("/mgmt/flavors", p) self.assertEqual("flavor", i) self.assertEqual("test-name", b["flavor"]["name"]) self.assertEqual(1024, b["flavor"]["ram"]) self.assertEqual(2, b["flavor"]["vcpu"]) self.assertEqual(1, b["flavor"]["flavor_id"]) class MgmtDatastoreVersionsTest(testtools.TestCase): def setUp(self): super(MgmtDatastoreVersionsTest, self).setUp() self.orig__init = management.MgmtDatastoreVersions.__init__ management.MgmtDatastoreVersions.__init__ = mock.Mock( return_value=None) self.ds_version = management.MgmtDatastoreVersions() self.ds_version.api = mock.Mock() self.ds_version.api.client = mock.Mock() self.ds_version.resource_class = mock.Mock(return_value="ds-version-1") self.orig_base_getid = base.getid base.getid = mock.Mock(return_value="ds-version1") def tearDown(self): super(MgmtDatastoreVersionsTest, self).tearDown() management.MgmtDatastoreVersions.__init__ = self.orig__init base.getid = self.orig_base_getid def _get_mock_method(self): self._resp = mock.Mock() self._body = 
None self._url = None def side_effect_func(url, body=None): self._body = body self._url = url return (self._resp, body) return mock.Mock(side_effect=side_effect_func) def test_create(self): def side_effect_func(path, body, *kw): return path, body self.ds_version._create = mock.Mock(side_effect=side_effect_func) p, b, = self.ds_version.create( "ds-version1", "mysql", "mysql", "image-id", ["mysql-server-5.5"], "true", "true") self.assertEqual("/mgmt/datastore-versions", p) self.assertEqual("ds-version1", b["version"]["name"]) self.assertEqual("mysql", b["version"]["datastore_name"]) self.assertEqual("mysql", b["version"]["datastore_manager"]) self.assertEqual("image-id", b["version"]["image"]) self.assertEqual(["mysql-server-5.5"], b["version"]["packages"]) self.assertTrue(b["version"]["active"]) self.assertTrue(b["version"]["default"]) def test_get(self): def side_effect_func(path, ins): return path, ins self.ds_version._get = mock.Mock(side_effect=side_effect_func) p, i = self.ds_version.get('ds-version-1') self.assertEqual(('/mgmt/datastore-versions/ds-version-1', 'version'), (p, i)) def test_list(self): page_mock = mock.Mock() self.ds_version._paginated = page_mock self.ds_version.list() page_mock.assert_called_with('/mgmt/datastore-versions', 'versions', None, None) self.ds_version.list(limit=10, marker="foo") page_mock.assert_called_with('/mgmt/datastore-versions', 'versions', 10, "foo") def test_delete(self): resp = mock.Mock() resp.status_code = 202 self.ds_version.api.client.delete = mock.Mock( return_value=(resp, None) ) self.ds_version.delete('ds-version-1') self.assertEqual(1, self.ds_version.api.client.delete.call_count) self.ds_version.api.client.delete.assert_called_with( '/mgmt/datastore-versions/ds-version-1') resp.status_code = 400 self.assertRaises(Exception, self.ds_version.delete, 'ds-version-2') self.assertEqual(2, self.ds_version.api.client.delete.call_count) self.ds_version.api.client.delete.assert_called_with( '/mgmt/datastore-versions/ds-version-2') def test_edit(self): self.ds_version.api.client.patch = self._get_mock_method() self._resp.status_code = 202 self.ds_version.edit('ds-version-1', image="new-image-id") self.assertEqual('/mgmt/datastore-versions/ds-version-1', self._url) self.assertEqual({"image": "new-image-id"}, self._body) self._resp.status_code = 400 self.assertRaises(Exception, self.ds_version.edit, 'ds-version-1', "new-mgr", "non-existent-image")
39.25256
79
0.644727
73d0f6d768ebbd6cd96a33e44c76bdf57213838e
1,105
py
Python
ote_sdk/ote_sdk/usecases/tasks/interfaces/evaluate_interface.py
ntyukaev/training_extensions
c897d42e50828fea853ceda0795e1f0e7d6e9909
[ "Apache-2.0" ]
775
2019-03-01T02:13:33.000Z
2020-09-07T22:49:15.000Z
ote_sdk/ote_sdk/usecases/tasks/interfaces/evaluate_interface.py
ntyukaev/training_extensions
c897d42e50828fea853ceda0795e1f0e7d6e9909
[ "Apache-2.0" ]
229
2019-02-28T21:37:08.000Z
2020-09-07T15:11:49.000Z
ote_sdk/ote_sdk/usecases/tasks/interfaces/evaluate_interface.py
ntyukaev/training_extensions
c897d42e50828fea853ceda0795e1f0e7d6e9909
[ "Apache-2.0" ]
290
2019-02-28T20:32:11.000Z
2020-09-07T05:51:41.000Z
"""This module contains the interface class for tasks that can compute performance. """ # Copyright (C) 2021-2022 Intel Corporation # SPDX-License-Identifier: Apache-2.0 # import abc from typing import Optional from ote_sdk.entities.resultset import ResultSetEntity class IEvaluationTask(metaclass=abc.ABCMeta): """ A base interface class for tasks which can compute performance on a resultset. """ @abc.abstractmethod def evaluate( self, output_resultset: ResultSetEntity, evaluation_metric: Optional[str] = None ): """ Compute performance metrics for a given set of results. The task may use at its discretion the most appropriate metrics for the evaluation (for instance, average precision for classification, DICE for segmentation, etc). The performance will be stored directly to output_resultset.performance :param output_resultset: The set of results which must be evaluated. :param evaluation_metric: the evaluation metric used to compute the performance """ raise NotImplementedError
33.484848
105
0.727602
73d10cc2aefea335d630959e1e064356ef8a0879
408
py
Python
gala-ragdoll/ragdoll/test/__init__.py
seandong37tt4qu/jeszhengq
32b3737ab45e89e8c5b71cdce871cefd2c938fa8
[ "MulanPSL-1.0" ]
null
null
null
gala-ragdoll/ragdoll/test/__init__.py
seandong37tt4qu/jeszhengq
32b3737ab45e89e8c5b71cdce871cefd2c938fa8
[ "MulanPSL-1.0" ]
null
null
null
gala-ragdoll/ragdoll/test/__init__.py
seandong37tt4qu/jeszhengq
32b3737ab45e89e8c5b71cdce871cefd2c938fa8
[ "MulanPSL-1.0" ]
null
null
null
import logging

import connexion
from flask_testing import TestCase

from ragdoll.encoder import JSONEncoder


class BaseTestCase(TestCase):

    def create_app(self):
        logging.getLogger('connexion.operation').setLevel('ERROR')
        app = connexion.App(__name__, specification_dir='../swagger/')
        app.app.json_encoder = JSONEncoder
        app.add_api('swagger.yaml')
        return app.app
24
70
0.715686
73d13e75a338c37571078e541d924d7618b35154
777
py
Python
Tests/ne.py
certik/pyquante
f5cae27f519b1c1b70afbebfe8b5c83cb4b3c2a6
[ "DOC" ]
8
2016-08-26T14:57:01.000Z
2019-12-23T07:39:37.000Z
Tests/ne.py
nicodgomez/pyquante
483571110b83cab406d3d4d8f2eba5cae0a2da58
[ "DOC" ]
1
2019-02-03T10:44:02.000Z
2019-02-03T10:44:02.000Z
Tests/ne.py
globulion/pyq-mod
ee32b6685e9c1e915c5c515ef7ab83adc200fa9b
[ "DOC" ]
8
2016-02-23T19:22:11.000Z
2021-08-28T12:12:59.000Z
#!/usr/bin/env python "Neon using Gaussians" import unittest, sciunittest from PyQuante.Ints import getbasis,getints from PyQuante.hartree_fock import rhf from PyQuante.Molecule import Molecule # GAMESS-UK HF Energy # Energy -128.4744065199 energy = -128.474406 # Changed 2003-04-07 to reflect diis name = "Ne" def main(): ne = Molecule('Ne',atomlist = [(10,(0,0,0))]) en,orbe,orbs = rhf(ne) return en class NeTest(sciunittest.TestCase): def runTest(self): """Energy of Ne (using Gaussians) close to -128.474406?""" result = main() self.assertInside(result, energy, 1e-4) def suite(): return unittest.TestLoader().loadTestsFromTestCase(NeTest) if __name__ == '__main__': unittest.TextTestRunner(verbosity=2).run(suite())
24.28125
66
0.698842
73d19abf135ba448e7926707cad5a5ce0189b5e4
612
py
Python
topi/python/topi/nn/__init__.py
mingwayzhang/tvm
3b287c4d4e6d83e6fd30db47ffa3d5481a332a63
[ "Apache-2.0" ]
2
2019-07-29T06:26:59.000Z
2019-07-29T06:27:02.000Z
topi/python/topi/nn/__init__.py
mingwayzhang/tvm
3b287c4d4e6d83e6fd30db47ffa3d5481a332a63
[ "Apache-2.0" ]
null
null
null
topi/python/topi/nn/__init__.py
mingwayzhang/tvm
3b287c4d4e6d83e6fd30db47ffa3d5481a332a63
[ "Apache-2.0" ]
1
2019-05-01T16:57:15.000Z
2019-05-01T16:57:15.000Z
# pylint: disable=wildcard-import
"""Neural network operators"""
from __future__ import absolute_import as _abs

from .conv2d import *
from .deformable_conv2d import *
from .depthwise_conv2d import *
from .elemwise import *
from .dilate import *
from .flatten import *
from .dense import *
from .mapping import *
from .pooling import *
from .softmax import *
from .conv2d_transpose import *
from .bnn import *
from .upsampling import *
from .local_response_norm import *
from .bitserial_conv2d import *
from .bitserial_dense import *
from .l2_normalize import *
from .batch_matmul import *
from .sparse import *
25.5
46
0.776144
73d1af7f49eef5b57dcca891444cbda446bcda50
1,320
py
Python
deepsim_envs/test/test_deepsim_envs_import.py
aws-deepracer/deepsim
cad2639f525c2f94ec5c03d8b855cc65b0b8ee55
[ "Apache-2.0" ]
1
2022-03-25T07:20:49.000Z
2022-03-25T07:20:49.000Z
deepsim_envs/test/test_deepsim_envs_import.py
aws-deepracer/deepsim
cad2639f525c2f94ec5c03d8b855cc65b0b8ee55
[ "Apache-2.0" ]
null
null
null
deepsim_envs/test/test_deepsim_envs_import.py
aws-deepracer/deepsim
cad2639f525c2f94ec5c03d8b855cc65b0b8ee55
[ "Apache-2.0" ]
null
null
null
#################################################################################
#   Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.         #
#                                                                               #
#   Licensed under the Apache License, Version 2.0 (the "License").            #
#   You may not use this file except in compliance with the License.           #
#   You may obtain a copy of the License at                                     #
#                                                                               #
#       http://www.apache.org/licenses/LICENSE-2.0                              #
#                                                                               #
#   Unless required by applicable law or agreed to in writing, software        #
#   distributed under the License is distributed on an "AS IS" BASIS,          #
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
#   See the License for the specific language governing permissions and        #
#   limitations under the License.                                             #
#################################################################################
import pytest


def test_deepsim_envs_importable():
    import deepsim_envs  # noqa: F401
62.857143
81
0.407576
73d1bd597f14cef91ebf9e4f4742afbc848d82f0
16,576
py
Python
augly/image/utils/bboxes.py
Adib234/AugLy
35a6a5de07e64f465b8979e3257218551929e57a
[ "MIT" ]
null
null
null
augly/image/utils/bboxes.py
Adib234/AugLy
35a6a5de07e64f465b8979e3257218551929e57a
[ "MIT" ]
null
null
null
augly/image/utils/bboxes.py
Adib234/AugLy
35a6a5de07e64f465b8979e3257218551929e57a
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. import math from typing import List, Optional, Tuple import augly.image.utils as imutils import numpy as np def crop_bboxes_helper( bbox: Tuple, x1: float, y1: float, x2: float, y2: float, **kwargs ) -> Tuple: """ If part of the bbox was cropped out in the x-axis, the left/right side will now be 0/1 respectively; otherwise the fraction x1 is cut off from the left & x2 from the right and we renormalize with the new width. Analogous for the y-axis """ left_factor, upper_factor, right_factor, lower_factor = bbox new_w, new_h = x2 - x1, y2 - y1 return ( max(0, (left_factor - x1) / new_w), max(0, (upper_factor - y1) / new_h), min(1, 1 - (x2 - right_factor) / new_w), min(1, 1 - (y2 - lower_factor) / new_h), ) def hflip_bboxes_helper(bbox: Tuple, **kwargs) -> Tuple: """ When the src image is horizontally flipped, the bounding box also gets horizontally flipped """ left_factor, upper_factor, right_factor, lower_factor = bbox return (1 - right_factor, upper_factor, 1 - left_factor, lower_factor) def meme_format_bboxes_helper( bbox: Tuple, src_w: int, src_h: int, caption_height: int, **kwargs ) -> Tuple: """ The src image is offset vertically by caption_height pixels, so we normalize that to get the y offset, add that to the upper & lower coordinates, & renormalize with the new height. The x dimension is unaffected """ left_f, upper_f, right_f, lower_f = bbox y_off = caption_height / src_h new_h = 1.0 + y_off return left_f, (upper_f + y_off) / new_h, right_f, (lower_f + y_off) / new_h def overlay_onto_background_image_bboxes_helper( bbox: Tuple, overlay_size: float, x_pos: float, y_pos: float, **kwargs ) -> Tuple: """ The src image is overlaid on the dst image offset by (`x_pos`, `y_pos`) & with a size of `overlay_size` (all relative to the dst image dimensions). So the bounding box is also offset by (`x_pos`, `y_pos`) & scaled by `overlay_size`. It is also possible that some of the src image will be cut off, so we take the max with 0/min with 1 in order to crop the bbox if needed """ left_factor, upper_factor, right_factor, lower_factor = bbox return ( max(0, left_factor * overlay_size + x_pos), max(0, upper_factor * overlay_size + y_pos), min(1, right_factor * overlay_size + x_pos), min(1, lower_factor * overlay_size + y_pos), ) def overlay_image_bboxes_helper( bbox: Tuple, opacity: float, overlay_size: float, x_pos: float, y_pos: float, max_visible_opacity: float, **kwargs, ) -> Tuple: """ We made a few decisions for this augmentation about how bboxes are defined: 1. If `opacity` < `max_visible_opacity` (default 0.75, can be specified by the user), the bbox stays the same because it is still considered "visible" behind the overlaid image 2. If the entire bbox is covered by the overlaid image, the bbox is no longer valid so we return it as (0, 0, 0, 0), which will be turned to None in `check_bboxes()` 3. If the entire bottom of the bbox is covered by the overlaid image (i.e. `x_pos < left_factor` & `x_pos + overlay_size > right_factor` & `y_pos + overlay_size > lower_factor`), we crop out the lower part of the bbox that is covered. The analogue is true for the top/left/right being occluded 4. If just the middle of the bbox is covered or a rectangle is sliced out of the bbox, we consider that the bbox is unchanged, even though part of it is occluded. 
This isn't ideal but otherwise it's very complicated; we could split the remaining area into smaller visible bboxes, but then we would have to return multiple dst bboxes corresponding to one src bbox """ left_factor, upper_factor, right_factor, lower_factor = bbox if opacity >= max_visible_opacity: occluded_left = x_pos < left_factor occluded_upper = y_pos < upper_factor occluded_right = x_pos + overlay_size > right_factor occluded_lower = y_pos + overlay_size > lower_factor if occluded_left and occluded_right: # If the bbox is completely covered, it's no longer valid so return zeros if occluded_upper and occluded_lower: return (0.0, 0.0, 0.0, 0.0) if occluded_lower: lower_factor = y_pos elif occluded_upper: upper_factor = y_pos + overlay_size elif occluded_upper and occluded_lower: if occluded_right: right_factor = x_pos elif occluded_left: left_factor = x_pos + overlay_size return left_factor, upper_factor, right_factor, lower_factor def overlay_onto_screenshot_bboxes_helper( bbox: Tuple, src_w: int, src_h: int, template_filepath: str, template_bboxes_filepath: str, resize_src_to_match_template: bool, max_image_size_pixels: int, crop_src_to_fit: bool, **kwargs, ) -> Tuple: """ We transform the bbox by applying all the same transformations as are applied in the `overlay_onto_screenshot` function, each of which is mentioned below in comments """ left_f, upper_f, right_f, lower_f = bbox template, tbbox = imutils.get_template_and_bbox( template_filepath, template_bboxes_filepath ) # Either src image or template image is scaled if resize_src_to_match_template: tbbox_w, tbbox_h = tbbox[2] - tbbox[0], tbbox[3] - tbbox[1] src_scale_factor = min(tbbox_w / src_w, tbbox_h / src_h) else: template, tbbox = imutils.scale_template_image( src_w, src_h, template, tbbox, max_image_size_pixels, crop_src_to_fit, ) tbbox_w, tbbox_h = tbbox[2] - tbbox[0], tbbox[3] - tbbox[1] src_scale_factor = 1 template_w, template_h = template.size x_off, y_off = tbbox[:2] # Src image is scaled (if resize_src_to_match_template) curr_w, curr_h = src_w * src_scale_factor, src_h * src_scale_factor left, upper, right, lower = ( left_f * curr_w, upper_f * curr_h, right_f * curr_w, lower_f * curr_h, ) # Src image is cropped to (tbbox_w, tbbox_h) if crop_src_to_fit: dx, dy = (curr_w - tbbox_w) // 2, (curr_h - tbbox_h) // 2 x1, y1, x2, y2 = dx, dy, dx + tbbox_w, dy + tbbox_h left_f, upper_f, right_f, lower_f = crop_bboxes_helper( bbox, x1 / curr_w, y1 / curr_h, x2 / curr_w, y2 / curr_h ) left, upper, right, lower = ( left_f * tbbox_w, upper_f * tbbox_h, right_f * tbbox_w, lower_f * tbbox_h, ) # Src image is resized to (tbbox_w, tbbox_h) else: resize_f = min(tbbox_w / curr_w, tbbox_h / curr_h) left, upper, right, lower = ( left * resize_f, upper * resize_f, right * resize_f, lower * resize_f, ) curr_w, curr_h = curr_w * resize_f, curr_h * resize_f # Padding with black padding_x = max(0, (tbbox_w - curr_w) // 2) padding_y = max(0, (tbbox_h - curr_h) // 2) left, upper, right, lower = ( left + padding_x, upper + padding_y, right + padding_x, lower + padding_y, ) # Src image is overlaid onto template image left, upper, right, lower = ( left + x_off, upper + y_off, right + x_off, lower + y_off, ) return left / template_w, upper / template_h, right / template_w, lower / template_h def pad_bboxes_helper(bbox: Tuple, w_factor: float, h_factor: float, **kwargs) -> Tuple: """ The src image is padded horizontally with w_factor * src_w, so the bbox gets shifted over by w_factor and then renormalized over the new width. 
Vertical padding is analogous """ left_factor, upper_factor, right_factor, lower_factor = bbox new_w = 1 + 2 * w_factor new_h = 1 + 2 * h_factor return ( (left_factor + w_factor) / new_w, (upper_factor + h_factor) / new_h, (right_factor + w_factor) / new_w, (lower_factor + h_factor) / new_h, ) def pad_square_bboxes_helper(bbox: Tuple, src_w: int, src_h: int, **kwargs) -> Tuple: """ In pad_square, pad is called with w_factor & h_factor computed as follows, so we can use the `pad_bboxes_helper` function to transform the bbox """ w_factor, h_factor = 0, 0 if src_w < src_h: w_factor = (src_h - src_w) / (2 * src_w) else: h_factor = (src_w - src_h) / (2 * src_h) return pad_bboxes_helper(bbox, w_factor=w_factor, h_factor=h_factor) def perspective_transform_bboxes_helper( bbox: Tuple, src_w: int, src_h: int, sigma: float, dx: float, dy: float, crop_out_black_border: bool, seed: Optional[int], **kwargs, ) -> Tuple: """ Computes the bbox that encloses the bbox in the perspective transformed image. Also uses the `crop_bboxes_helper` function since the image is cropped if `crop_out_black_border` is True. """ def transform(x: float, y: float, a: List[float]) -> Tuple: """ Transforms a point in the image given the perspective transform matrix; we will use this to transform the bounding box corners. Based on PIL source code: https://github.com/python-pillow/Pillow/blob/master/src/libImaging/Geometry.c#L399 """ return ( (a[0] * x + a[1] * y + a[2]) / (a[6] * x + a[7] * y + a[8]), (a[3] * x + a[4] * y + a[5]) / (a[6] * x + a[7] * y + a[8]), ) def get_perspective_transform( src_coords: List[Tuple[int, int]], dst_coords: List[Tuple[int, int]] ) -> List[float]: """ Computes the transformation matrix used for the perspective transform with the given src & dst corner coordinates. 
Based on OpenCV source code: https://github.com/opencv/opencv/blob/master/modules/imgproc/src/imgwarp.cpp#L3277-L3304 """ a = np.zeros((8, 8), dtype=np.float) dst_x, dst_y = zip(*dst_coords) b = np.asarray(list(dst_x) + list(dst_y)) for i, (sc, dc) in enumerate(zip(src_coords, dst_coords)): a[i][0] = a[i + 4][3] = sc[0] a[i][1] = a[i + 4][4] = sc[1] a[i][2] = a[i + 4][5] = 1 a[i][6] = -sc[0] * dc[0] a[i][7] = -sc[1] * dc[0] a[i + 4][6] = -sc[0] * dc[1] a[i + 4][7] = -sc[1] * dc[1] A = np.matrix(a, dtype=np.float) B = np.array(b).reshape(8) res = np.linalg.solve(A, B) return np.array(res).reshape(8).tolist() + [1.0] assert ( seed is not None ), "Cannot transform bbox for perspective_transform if seed is not provided" rng = np.random.RandomState(seed) src_coords = [(0, 0), (src_w, 0), (src_w, src_h), (0, src_h)] dst_coords = [ (rng.normal(point[0], sigma) + dx, rng.normal(point[1], sigma) + dy) for point in src_coords ] perspective_transform_coeffs = get_perspective_transform(src_coords, dst_coords) left_f, upper_f, right_f, lower_f = bbox left, upper, right, lower = ( left_f * src_w, upper_f * src_h, right_f * src_w, lower_f * src_h, ) bbox_coords = [(left, upper), (right, upper), (right, lower), (left, lower)] transformed_bbox_coords = [ transform(x + 0.5, y + 0.5, perspective_transform_coeffs) for x, y in bbox_coords ] transformed_xs, transformed_ys = zip(*transformed_bbox_coords) transformed_bbox = ( max(0, min(transformed_xs) / src_w), max(0, min(transformed_ys) / src_h), min(1, max(transformed_xs) / src_w), min(1, max(transformed_ys) / src_h), ) # This is copy-pasted from `functional.py`, exactly how the crop coords are computed if crop_out_black_border: top_left, top_right, bottom_right, bottom_left = dst_coords new_left = max(0, top_left[0], bottom_left[0]) new_right = min(src_w, top_right[0], bottom_right[0]) new_top = max(0, top_left[1], top_right[1]) new_bottom = min(src_h, bottom_left[1], bottom_right[1]) transformed_bbox = crop_bboxes_helper( transformed_bbox, x1=new_left / src_w, y1=new_top / src_h, x2=new_right / src_w, y2=new_bottom / src_h, ) return transformed_bbox def rotate_bboxes_helper( bbox: Tuple, src_w: int, src_h: int, degrees: float, **kwargs ) -> Tuple: """ Computes the bbox that encloses the rotated bbox in the rotated image. This code was informed by looking at the source code for PIL.Image.rotate (https://pillow.readthedocs.io/en/stable/_modules/PIL/Image.html#Image.rotate). Also uses the `crop_bboxes_helper` function since the image is cropped after being rotated. 
""" left_f, upper_f, right_f, lower_f = bbox left, upper, right, lower = ( left_f * src_w, upper_f * src_h, right_f * src_w, lower_f * src_h, ) # Top left, upper right, lower right, & lower left corner coefficients (in pixels) bbox_corners = [(left, upper), (right, upper), (right, lower), (left, lower)] def transform(x: int, y: int, matrix: List[float]) -> Tuple[float, float]: (a, b, c, d, e, f) = matrix return a * x + b * y + c, d * x + e * y + f def get_enclosing_bbox( corners: List[Tuple[int, int]], rotation_matrix: List[float] ) -> Tuple[int, int, int, int]: rotated_corners = [transform(x, y, rotation_matrix) for x, y in corners] xs, ys = zip(*rotated_corners) return ( math.floor(min(xs)), math.floor(min(ys)), math.ceil(max(xs)), math.ceil(max(ys)), ) # Get rotated bbox corner coefficients rotation_center = (src_w // 2, src_h // 2) angle_rad = -math.radians(degrees) rotation_matrix = [ round(math.cos(angle_rad), 15), round(math.sin(angle_rad), 15), 0.0, round(-math.sin(angle_rad), 15), round(math.cos(angle_rad), 15), 0.0, ] rotation_matrix[2], rotation_matrix[5] = transform( -rotation_center[0], -rotation_center[1], rotation_matrix ) rotation_matrix[2] += rotation_center[0] rotation_matrix[5] += rotation_center[1] # Get rotated image dimensions src_img_corners = [(0, 0), (src_w, 0), (src_w, src_h), (0, src_h)] ( rotated_img_min_x, rotated_img_min_y, rotated_img_max_x, rotated_img_max_y, ) = get_enclosing_bbox(src_img_corners, rotation_matrix) rotated_img_w = rotated_img_max_x - rotated_img_min_x rotated_img_h = rotated_img_max_y - rotated_img_min_y # Get enclosing box corners around rotated bbox (on rotated image) new_bbox_left, new_bbox_upper, new_bbox_right, new_bbox_lower = get_enclosing_bbox( bbox_corners, rotation_matrix ) bbox_enclosing_bbox = ( new_bbox_left / rotated_img_w, new_bbox_upper / rotated_img_h, new_bbox_right / rotated_img_w, new_bbox_lower / rotated_img_h, ) # Crop bbox as src image is cropped inside `rotate` cropped_w, cropped_h = imutils.rotated_rect_with_max_area(src_w, src_h, degrees) cropped_img_left, cropped_img_upper, cropped_img_right, cropped_img_lower = ( (rotated_img_w - cropped_w) // 2 + rotated_img_min_x, (rotated_img_h - cropped_h) // 2 + rotated_img_min_y, (rotated_img_w + cropped_w) // 2 + rotated_img_min_x, (rotated_img_h + cropped_h) // 2 + rotated_img_min_y, ) return crop_bboxes_helper( bbox_enclosing_bbox, x1=cropped_img_left / rotated_img_w, y1=cropped_img_upper / rotated_img_h, x2=cropped_img_right / rotated_img_w, y2=cropped_img_lower / rotated_img_h, ) def vflip_bboxes_helper(bbox: Tuple, **kwargs) -> Tuple: """ Analogous to hflip, when the src image is vertically flipped, the bounding box also gets vertically flipped """ left_factor, upper_factor, right_factor, lower_factor = bbox return (left_factor, 1 - lower_factor, right_factor, 1 - upper_factor)
36.511013
96
0.628499
73d1d30b7e96b725e0951d89ab317d417ee553ac
7,539
py
Python
xpython/byteop/byteop32.py
rocky/xpython
ce4ed4329cee2af0aab94254276f5a5687dd25f9
[ "MIT" ]
1
2020-04-28T13:18:13.000Z
2020-04-28T13:18:13.000Z
xpython/byteop/byteop32.py
rocky/xbyterun
fde8f8a31ffd3e3c4545d76b4b1edf4b7e0191d9
[ "MIT" ]
null
null
null
xpython/byteop/byteop32.py
rocky/xbyterun
fde8f8a31ffd3e3c4545d76b4b1edf4b7e0191d9
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """Byte Interpreter operations for Python 3.2 """ from xpython.byteop.byteop24 import ByteOp24, Version_info from xpython.byteop.byteop27 import ByteOp27 from xpython.pyobj import Function # FIXME: investigate does "del" remove an attribute here? # have an effect on what another module sees as ByteOp27's attributes? # Gone since 3.0 del ByteOp24.PRINT_EXPR del ByteOp24.PRINT_ITEM del ByteOp24.PRINT_ITEM_TO del ByteOp24.PRINT_NEWLINE del ByteOp24.PRINT_NEWLINE_TO del ByteOp24.BUILD_CLASS del ByteOp24.EXEC_STMT del ByteOp24.RAISE_VARARGS # Gone since 3.2 del ByteOp24.DUP_TOPX def fmt_make_function(vm, arg=None, repr=repr): """ returns the name of the function from the code object in the stack """ # Gotta love Python for stuff like this. fn_index = 1 if vm.version >= (3, 2) else 2 fn_item = vm.peek(fn_index) name = fn_item if isinstance(fn_item, str) else fn_item.co_name return " (%s)" % name class ByteOp32(ByteOp27): def __init__(self, vm): super(ByteOp32, self).__init__(vm) self.stack_fmt["MAKE_FUNCTION"] = fmt_make_function self.version = "3.2.6 (default, Oct 27 1955, 00:00:00)\n[x-python]" self.version_info = Version_info(3, 2, 6, "final", 0) # Changed from 2.7 # 3.2 has kwdefaults that aren't allowed in 2.7 def MAKE_FUNCTION(self, argc: int): """ Pushes a new function object on the stack. From bottom to top, the consumed stack must consist of: * argc & 0xFF default argument objects in positional order * (argc >> 8) & 0xFF pairs of name and default argument, with the name just below the object on the stack, for keyword-only parameters * (argc >> 16) & 0x7FFF parameter annotation objects * a tuple listing the parameter names for the annotations (only if there are ony annotation objects) * the code associated with the function (at TOS1 if 3.3+ else at TOS for 3.0..3.2) * the qualified name of the function (at TOS if 3.3+) """ rest, default_count = divmod(argc, 256) annotate_count, kw_default_count = divmod(rest, 256) if self.version_info[:2] >= (3, 3): name = self.vm.pop() code = self.vm.pop() else: code = self.vm.pop() name = code.co_name if annotate_count: annotate_names = self.vm.pop() # annotate count includes +1 for the above names annotate_objects = self.vm.popn(annotate_count - 1) n = len(annotate_names) assert n == len(annotate_objects) annotations = {annotate_names[i]: annotate_objects[i] for i in range(n)} else: annotations = {} if kw_default_count: kw_default_pairs = self.vm.popn(2 * kw_default_count) kwdefaults = dict( kw_default_pairs[i : i + 2] for i in range(0, len(kw_default_pairs), 2) ) else: kwdefaults = {} if default_count: defaults = self.vm.popn(default_count) else: defaults = tuple() # FIXME: DRY with code in byteop3{2,6}.py globs = self.vm.frame.f_globals fn = Function( name=name, code=code, globs=globs, argdefs=tuple(defaults), closure=None, vm=self.vm, kwdefaults=kwdefaults, annotations=annotations, ) self.vm.push(fn) # Order of function here is the same as in: # https://docs.python.org/3.2/library/dis.html#python-bytecode-instructions # Note these are only the functions that aren't in the parent class # here, Python 2.7 def DUP_TOP_TWO(self): """Duplicates the reference on top of the stack.""" a, b = self.vm.popn(2) self.vm.push(a, b, a, b) def POP_EXCEPT(self): """ Removes one block from the block stack. The popped block must be an exception handler block, as implicitly created when entering an except handler. 
In addition to popping extraneous values from the frame stack, the last three popped values are used to restore the exception state.""" block = self.vm.pop_block() if block.type != "except-handler": raise self.vm.PyVMError( "popped block is not an except handler; is %s" % block ) self.vm.unwind_block(block) def LOAD_BUILD_CLASS(self): """Pushes builtins.__build_class__() onto the stack. It is later called by CALL_FUNCTION to construct a class.""" self.vm.push(__build_class__) # This opcode disappears starting in 3.5 def WITH_CLEANUP(self): """Cleans up the stack when a `with` statement block exits. TOS is the context manager's `__exit__()` bound method. Below TOS are 1-3 values indicating how/why the finally clause was entered: * SECOND = None * (SECOND, THIRD) = (WHY_{RETURN,CONTINUE}), retval * SECOND = WHY_*; no retval below it * (SECOND, THIRD, FOURTH) = exc_info() In the last case, EXIT(SECOND, THIRD, FOURTH) is called, otherwise TOS(None, None, None). In addition, TOS is removed from the stack. If the stack represents an exception, and the function call returns a true value, this information is zapped and replaced with a single WHY_SILENCED to prevent END_FINALLY from re-raising the exception. (But non-local gotos will still be resumed.) """ # Note: the code is derived from byterun where it had to handle # both 2.7 and 3.3. v = w = None u = self.vm.top() if u is None: exit_func = self.vm.pop(1) elif isinstance(u, str): if u in ("return", "continue"): exit_func = self.vm.pop(2) else: exit_func = self.vm.pop(1) u = None elif issubclass(u, BaseException): w, v, u = self.vm.popn(3) tp, exc, tb = self.vm.popn(3) exit_func = self.vm.pop() self.vm.push(tp, exc, tb) self.vm.push(None) self.vm.push(w, v, u) block = self.vm.pop_block() assert block.type == "except-handler" self.vm.push_block(block.type, block.handler, block.level - 1) else: # pragma: no cover raise self.vm.PyVMError("Confused WITH_CLEANUP") exit_ret = exit_func(u, v, w) err = (u is not None) and bool(exit_ret) if err: # An error occurred, and was suppressed self.vm.push("silenced") # Note: this is gone in 3.4 def STORE_LOCALS(self): """Pops TOS from the stack and stores it as the current frames f_locals. This is used in class construction.""" self.vm.frame.f_locals = self.vm.pop() def RAISE_VARARGS(self, argc: int): """ Raises an exception. argc indicates the number of arguments to the raise statement, ranging from 0 to 3. The handler will find the traceback as TOS2, the parameter as TOS1, and the exception as TOS. """ cause = exc = None if argc == 2: cause = self.vm.pop() exc = self.vm.pop() elif argc == 1: exc = self.vm.pop() return self.do_raise(exc, cause) if __name__ == "__main__": x = ByteOp32(None)
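A standalone illustration of the argc packing that MAKE_FUNCTION above decodes with divmod; the argc value here is made up for the example.

# Low byte: positional default count; next byte: kw-only default pairs;
# remaining bits: annotation entries (which the method above treats as
# including the tuple of annotated parameter names).
argc = (3 << 16) | (1 << 8) | 2

rest, default_count = divmod(argc, 256)
annotate_count, kw_default_count = divmod(rest, 256)
print(default_count, kw_default_count, annotate_count)  # 2 1 3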
35.9
142
0.605253
73d1ef3fa74c9926e2fcdbb0ff708bbb12e37e53
1,507
py
Python
fairseq/scoring/__init__.py
MANGA-UOFA/NAUS
8c0c0815a280d0661adf588302848c7f1ecc56da
[ "MIT" ]
null
null
null
fairseq/scoring/__init__.py
MANGA-UOFA/NAUS
8c0c0815a280d0661adf588302848c7f1ecc56da
[ "MIT" ]
null
null
null
fairseq/scoring/__init__.py
MANGA-UOFA/NAUS
8c0c0815a280d0661adf588302848c7f1ecc56da
[ "MIT" ]
null
null
null
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

import importlib
import os
from abc import ABC, abstractmethod

from fairseq import registry
from omegaconf import DictConfig


class BaseScorer(ABC):
    def __init__(self, cfg):
        self.cfg = cfg
        self.ref = []
        self.pred = []

    def add_string(self, ref, pred):
        self.ref.append(ref)
        self.pred.append(pred)

    @abstractmethod
    def score(self) -> float:
        pass

    @abstractmethod
    def result_string(self) -> str:
        pass


_build_scorer, register_scorer, SCORER_REGISTRY, _ = registry.setup_registry(
    "--scoring", default="bleu"
)


def build_scorer(choice, tgt_dict):
    _choice = choice._name if isinstance(choice, DictConfig) else choice

    if _choice == "bleu":
        from fairseq.scoring import bleu

        return bleu.Scorer(
            bleu.BleuConfig(pad=tgt_dict.pad(), eos=tgt_dict.eos(), unk=tgt_dict.unk())
        )
    elif _choice == "rouge":
        from fairseq.scoring import rouge

        return rouge.Scorer(choice)
    return _build_scorer(choice)


# automatically import any Python files in the current directory
for file in sorted(os.listdir(os.path.dirname(__file__))):
    if file.endswith(".py") and not file.startswith("_"):
        module = file[: file.find(".py")]
        importlib.import_module("fairseq.scoring." + module)
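A hedged sketch of how a new metric could plug into the registry created above; the decorator usage follows the usual fairseq registry pattern and should be checked against the installed version, and the metric itself is a toy.

from fairseq.scoring import BaseScorer, register_scorer


@register_scorer("exact_match")
class ExactMatchScorer(BaseScorer):
    """Toy scorer: percentage of predictions that match the reference exactly."""

    def score(self) -> float:
        if not self.ref:
            return 0.0
        hits = sum(r == p for r, p in zip(self.ref, self.pred))
        return 100.0 * hits / len(self.ref)

    def result_string(self) -> str:
        return "Exact match: {:.2f}".format(self.score())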
25.116667
87
0.669542
73d22b3adcf2e1db2059fdb0355f71672b6861a1
1,742
py
Python
classifiers/documents/categories/classifier.py
topicaxis/trained-models
34682538fc4b4f8823df1bdbabd6c0bc086b2695
[ "MIT" ]
1
2020-01-07T00:03:16.000Z
2020-01-07T00:03:16.000Z
classifiers/documents/categories/classifier.py
topicaxis/trained-models
34682538fc4b4f8823df1bdbabd6c0bc086b2695
[ "MIT" ]
null
null
null
classifiers/documents/categories/classifier.py
topicaxis/trained-models
34682538fc4b4f8823df1bdbabd6c0bc086b2695
[ "MIT" ]
null
null
null
from pkg_resources import resource_filename

import joblib


def create_category_classifier():
    """Create a category classifier using the default model

    :rtype CategoryClassifier
    :return: the category classifier
    """
    binarizer_file = resource_filename(
        "classifiers", "data/document_classifier/binarizer.joblib")
    classifier_file = resource_filename(
        "classifiers", "data/document_classifier/classifier.joblib")

    return CategoryClassifier(
        binarizer=joblib.load(binarizer_file),
        classifier=joblib.load(classifier_file)
    )


class CategoryClassifier(object):
    """Document category classifier object"""

    def __init__(self, binarizer, classifier):
        """Create a new CategoryClassifier object

        :param sklearn.preprocessing.MultiLabelBinarizer binarizer: the
            binarizer to be used
        :param sklearn.base.BaseEstimator classifier: the classifier to
            be used
        """
        self._binarizer = binarizer
        self._classifier = classifier

    @property
    def classes_(self):
        return self._binarizer.classes_

    def predict(self, data):
        """Predict the document categories

        :param list[str] data: the document contents
        :rtype: list[str]
        :return: the predicted document categories
        """
        result = self._classifier.predict(data)

        return self._binarizer.inverse_transform(result)

    def predict_proba(self, data):
        """Predict the category probabilities for the given documents

        :param list[str] data: the document contents
        :rtype: list[list[float]]
        :return: the probability for each category
        """
        return self._classifier.predict_proba(data)
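A usage sketch for the classifier above; it assumes the packaged joblib model files referenced in create_category_classifier() are installed alongside the classifiers package, and the two documents are placeholders.

from classifiers.documents.categories.classifier import create_category_classifier

clf = create_category_classifier()
docs = ["how to tune a neural network", "latest transfer rumours in football"]

print(clf.classes_)             # label set known to the binarizer
print(clf.predict(docs))        # one tuple of predicted categories per document
print(clf.predict_proba(docs))  # per-category probabilities, one row per document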
30.561404
79
0.686567
73d22f899d27f33c69947c954e88608e2aa0a57d
1,055
py
Python
299_Bulls-and-Cows.py
Coalin/Daily-LeetCode-Exercise
a064dcdc3a82314be4571d342c4807291a24f69f
[ "MIT" ]
3
2018-07-05T05:51:10.000Z
2019-05-04T08:35:44.000Z
299_Bulls-and-Cows.py
Coalin/Daily-LeetCode-Exercise
a064dcdc3a82314be4571d342c4807291a24f69f
[ "MIT" ]
null
null
null
299_Bulls-and-Cows.py
Coalin/Daily-LeetCode-Exercise
a064dcdc3a82314be4571d342c4807291a24f69f
[ "MIT" ]
null
null
null
class Solution:
    def getHint(self, secret, guess):
        """
        :type secret: str
        :type guess: str
        :rtype: str
        """
        secret_dic = {}
        guess_dic = {}
        A = 0
        B = 0

        for i in range(len(secret)):
            if secret[i] in secret_dic:
                secret_dic[secret[i]].append(i)
            else:
                secret_dic[secret[i]] = [i]

        for j in range(len(guess)):
            if guess[j] in guess_dic:
                guess_dic[guess[j]].append(j)
            else:
                guess_dic[guess[j]] = [j]

        print(secret_dic)
        print(guess_dic)

        for key in guess_dic:
            cur_A = 0
            if key in secret_dic:
                for x in guess_dic[key]:
                    if x in secret_dic[key]:
                        cur_A += 1
                A += cur_A
                B += min(len(secret_dic[key]), len(guess_dic[key])) - cur_A

        return str(A) + "A" + str(B) + "B"
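A worked example with the classic LeetCode 299 input; note the method also prints its two internal dictionaries before returning.

# secret "1807" vs. guess "7810": the '8' matches in place (1 bull);
# '7', '1' and '0' are present but misplaced (3 cows).
print(Solution().getHint("1807", "7810"))  # "1A3B"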
27.051282
72
0.411374
73d22fb84ac41c1e3530224d514ddb9e8930dbed
6,048
py
Python
tf_agents/drivers/dynamic_step_driver.py
wookayin/tensorflow-agents
ae3751dfeed52422a350227047648dd82297960b
[ "Apache-2.0" ]
null
null
null
tf_agents/drivers/dynamic_step_driver.py
wookayin/tensorflow-agents
ae3751dfeed52422a350227047648dd82297960b
[ "Apache-2.0" ]
null
null
null
tf_agents/drivers/dynamic_step_driver.py
wookayin/tensorflow-agents
ae3751dfeed52422a350227047648dd82297960b
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # Copyright 2018 The TF-Agents Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A Driver that takes N steps in the environment using a tf.while_loop.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tensorflow as tf from tf_agents.drivers import driver from tf_agents.environments import trajectory from tf_agents.utils import nest_utils import gin.tf nest = tf.contrib.framework.nest @gin.configurable class DynamicStepDriver(driver.Driver): """A driver that takes N steps in an environment using a tf.while_loop. The while loop will run num_steps in the environment, only counting steps that result in an environment transition, i.e. (time_step, action, next_time_step). If a step results in environment resetting, i.e. time_step.is_last() and next_time_step.fist() (traj.is_boundary()), this is not counted toward the num_steps. As environments run batched time_steps, the counters for all batch elements are summed, and execution stops when the total exceeds num_steps. When batch_size > 1, there is no guarantee that exactly num_steps are taken -- it may be more but never less. This termination condition can be overridden in subclasses by implementing the self._loop_condition_fn() method. """ def __init__(self, env, policy, observers=None, num_steps=1, ): """Creates a DynamicStepDriver. Args: env: A tf_environment.Base environment. policy: A tf_policy.Base policy. observers: A list of observers that are updated after every step in the environment. Each observer is a callable(time_step.Trajectory). num_steps: The number of steps to take in the environment. Raises: ValueError: If env is not a tf_environment.Base or policy is not an instance of tf_policy.Base. """ super(DynamicStepDriver, self).__init__(env, policy, observers) self._num_steps = num_steps def _loop_condition_fn(self): """Returns a function with the condition needed for tf.while_loop.""" def loop_cond(counter, *_): """Determines when to stop the loop, based on step counter. Args: counter: Step counters per batch index. Shape [batch_size] when batch_size > 1, else shape []. Returns: tf.bool tensor, shape (), indicating whether while loop should continue. """ return tf.less(tf.reduce_sum(counter), self._num_steps) return loop_cond def _loop_body_fn(self): """Returns a function with the driver's loop body ops.""" def loop_body(counter, time_step, policy_state): """Runs a step in environment. While loop will call multiple times. Args: counter: Step counters per batch index. Shape [batch_size]. time_step: TimeStep tuple with elements shape [batch_size, ...]. policy_state: Policy state tensor shape [batch_size, policy_state_dim]. Pass empty tuple for non-recurrent policies. Returns: loop_vars for next iteration of tf.while_loop. 
""" action_step = self.policy.action(time_step, policy_state) policy_state = action_step.state next_time_step = self.env.step(action_step.action) traj = trajectory.from_transition(time_step, action_step, next_time_step) observer_ops = [observer(traj) for observer in self._observers] with tf.control_dependencies([tf.group(observer_ops)]): time_step, next_time_step, policy_state = nest.map_structure( tf.identity, (time_step, next_time_step, policy_state)) # While loop counter should not be incremented for episode reset steps. counter += tf.to_int32(~traj.is_boundary()) return [counter, next_time_step, policy_state] return loop_body # TODO(b/113529538): Add tests for policy_state. def run(self, time_step=None, policy_state=None, maximum_iterations=None): """Takes steps in the environment using the policy while updating observers. Args: time_step: optional initial time_step. If None, it will use the current_time_step of the environment. Elements should be shape [batch_size, ...]. policy_state: optional initial state for the policy. maximum_iterations: Optional maximum number of iterations of the while loop to run. If provided, the cond output is AND-ed with an additional condition ensuring the number of iterations executed is no greater than maximum_iterations. Returns: time_step: TimeStep named tuple with final observation, reward, etc. policy_state: Tensor with final step policy state. """ if time_step is None: time_step = self.env.current_time_step() if policy_state is None: policy_state = self.policy.get_initial_state(self.env.batch_size) # Batch dim should be first index of tensors during data collection. batch_dims = nest_utils.get_outer_shape( time_step, self.env.time_step_spec()) counter = tf.zeros(batch_dims, tf.int32) [_, time_step, policy_state] = tf.while_loop( cond=self._loop_condition_fn(), body=self._loop_body_fn(), loop_vars=[ counter, time_step, policy_state], back_prop=False, parallel_iterations=1, maximum_iterations=maximum_iterations, name='driver_loop' ) return time_step, policy_state
37.333333
80
0.705026
73d23652f93f819a26ab2feeb96d6c98857c0208
2,539
py
Python
setup.py
thombashi/elasticsearch-faker
a6b33bd82750181abf1671fc7046ceea21ea287c
[ "MIT" ]
1
2020-12-15T18:11:46.000Z
2020-12-15T18:11:46.000Z
setup.py
thombashi/elasticsearch-faker
a6b33bd82750181abf1671fc7046ceea21ea287c
[ "MIT" ]
1
2022-01-27T18:04:04.000Z
2022-03-20T12:19:46.000Z
setup.py
thombashi/elasticsearch-faker
a6b33bd82750181abf1671fc7046ceea21ea287c
[ "MIT" ]
null
null
null
import os.path from typing import Dict import setuptools MODULE_NAME = "elasticsearch-faker" REPOSITORY_URL = f"https://github.com/thombashi/{MODULE_NAME:s}" REQUIREMENT_DIR = "requirements" ENCODING = "utf8" pkg_info: Dict[str, str] = {} def get_release_command_class() -> Dict[str, setuptools.Command]: try: from releasecmd import ReleaseCommand except ImportError: return {} return {"release": ReleaseCommand} with open(os.path.join(MODULE_NAME.replace("-", "_"), "__version__.py")) as f: exec(f.read(), pkg_info) with open("README.rst", encoding=ENCODING) as f: LONG_DESCRIPTION = f.read() with open(os.path.join(REQUIREMENT_DIR, "requirements.txt")) as f: INSTALL_REQUIRES = [line.strip() for line in f if line.strip()] with open(os.path.join(REQUIREMENT_DIR, "test_requirements.txt")) as f: TESTS_REQUIRES = [line.strip() for line in f if line.strip()] build_exe_requires = ["pyinstaller>=4.5"] setuptools.setup( name=MODULE_NAME, version=pkg_info["__version__"], url=REPOSITORY_URL, author=pkg_info["__author__"], author_email=pkg_info["__email__"], description="elasticsearch-faker is a CLI tool to generate fake data for Elasticsearch.", include_package_data=True, keywords=["elasticsearch", "faker"], license=pkg_info["__license__"], long_description=LONG_DESCRIPTION, long_description_content_type="text/x-rst", packages=setuptools.find_packages(exclude=["tests*"]), project_urls={ "Source": REPOSITORY_URL, "Tracker": f"{REPOSITORY_URL:s}/issues", }, python_requires=">=3.6", install_requires=INSTALL_REQUIRES, extras_require={ "buildexe": build_exe_requires, "test": TESTS_REQUIRES, }, classifiers=[ "Development Status :: 3 - Alpha", "Environment :: Console", "Intended Audience :: Information Technology", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3 :: Only", "Topic :: Terminals", ], cmdclass=get_release_command_class(), zip_safe=False, entry_points={ "console_scripts": [ "elasticsearch-faker=elasticsearch_faker.__main__:cmd", ] }, )
30.590361
93
0.660496
73d24011dadbffdc56287e227009660cecbbbc1b
933
py
Python
graph/urls.py
StephenGrey/covid_data
8651460734a9aa21b5106896ab98c47491eadb3c
[ "MIT" ]
null
null
null
graph/urls.py
StephenGrey/covid_data
8651460734a9aa21b5106896ab98c47491eadb3c
[ "MIT" ]
null
null
null
graph/urls.py
StephenGrey/covid_data
8651460734a9aa21b5106896ab98c47491eadb3c
[ "MIT" ]
null
null
null
from django.urls import path
from django.conf.urls import url

from . import views

urlpatterns = [
    url('^aklhakjfhljhxliuyasfxjh/place=(?P<place>.*)$', views.index_m, name='index_base_m_o'),
    url('^aklhakjfhljhxliuyasfxjh', views.index_m, name='index_base_m'),
    url('^aklhlkjlkfzkerxsfxjh', views.sparks, name='sparks'),
    url('^ons_api/(?P<place>.*)$', views.fetch_ons, name='fetch_ons'),
    url('^api_rates$',views.api_rates,name='js_api_rates'),
    url('^api_places$',views.api_places,name='js_api_places'),
    url('^api_shapes$',views.api_shapes,name='js_api_shapes'),
    url('^api_all_daily$',views.api_all_daily,name='js_all_daily'),
    url('^api_slim/(?P<place>.*)$',views.api_slim_data,name='js_api_slim'),
    url('^api/(?P<place>.*)$',views.api,name='js_api'),
    url('^place=(?P<place>.*)$',views.index,name='index_base_o'),
    url('^$', views.index, name='index_base'),
]
42.409091
96
0.658092
73d24caf81a95487826a1a40d7edb679c2ba7a33
878
py
Python
backend.py
vulcan-coalition/pyjs
cfafea13269ac04988478e107941b8c9f3147af4
[ "Apache-2.0" ]
null
null
null
backend.py
vulcan-coalition/pyjs
cfafea13269ac04988478e107941b8c9f3147af4
[ "Apache-2.0" ]
null
null
null
backend.py
vulcan-coalition/pyjs
cfafea13269ac04988478e107941b8c9f3147af4
[ "Apache-2.0" ]
null
null
null
import pyjs
import client

all_clients = set()


@pyjs.Expose
def broadcast(client_obj, message: str, p1: int = 0, p2=None):
    'This is the broadcast function.'
    print("receiving..", message, p1, p2)
    all_clients.add(client_obj)
    for c in all_clients:
        res = c.incoming_message(str(client_obj.client_id) + " : " + message)
        print(res)


@pyjs.Expose
def another(client_obj, p0: int, p1: str = "a", p2=None):
    'This is another function.'
    print("receiving..", p0, p1, p2)
    client_obj.another(100)


if __name__ == '__main__':
    # foo(1, 1)
    pyjs.mock_incoming("__main__.broadcast", "aa")
    pyjs.mock_incoming("__main__.another", 2, p1="bb")

    print(pyjs.get_all_exposed_interfaces())
    print(pyjs.get_active_client_info())

    md_doc = pyjs.generate_md_api_doc()
    with open("README.md", "w") as file:
        file.write(md_doc)
24.388889
77
0.656036
73d2596b4245dbf7c3cbf09074eec28721998a03
6,033
py
Python
softlearning/policies/gaussian_policy_test.py
limash/softlearning
aaa9d8c5cc3ba4bf459eb5a1a7469b3ae3b4f94f
[ "MIT" ]
920
2018-12-11T16:22:23.000Z
2022-03-28T08:17:07.000Z
softlearning/policies/gaussian_policy_test.py
limash/softlearning
aaa9d8c5cc3ba4bf459eb5a1a7469b3ae3b4f94f
[ "MIT" ]
133
2018-12-09T08:08:33.000Z
2022-02-28T12:43:14.000Z
softlearning/policies/gaussian_policy_test.py
limash/softlearning
aaa9d8c5cc3ba4bf459eb5a1a7469b3ae3b4f94f
[ "MIT" ]
241
2018-12-03T21:35:24.000Z
2022-03-20T06:24:45.000Z
import pickle from collections import OrderedDict import numpy as np import pytest import tensorflow as tf import tree from softlearning import policies from softlearning.environments.utils import get_environment from softlearning.policies.gaussian_policy import FeedforwardGaussianPolicy from softlearning.samplers import utils as sampler_utils class GaussianPolicyTest(tf.test.TestCase): def setUp(self): self.env = get_environment('gym', 'Swimmer', 'v3', {}) self.hidden_layer_sizes = (16, 16) self.policy = FeedforwardGaussianPolicy( input_shapes=self.env.observation_shape, output_shape=self.env.action_space.shape, action_range=( self.env.action_space.low, self.env.action_space.high, ), hidden_layer_sizes=self.hidden_layer_sizes, observation_keys=self.env.observation_keys) def test_actions_and_log_probs(self): observation1_np = self.env.reset() observation2_np = self.env.step(self.env.action_space.sample())[0] observations_np = type(observation1_np)(( (key, np.stack(( observation1_np[key], observation2_np[key] ), axis=0).astype(np.float32)) for key in observation1_np.keys() )) observations_tf = tree.map_structure( lambda x: tf.constant(x, dtype=x.dtype), observations_np) for observations in (observations_np, observations_tf): actions = self.policy.actions(observations) log_pis = self.policy.log_probs(observations, actions) self.assertEqual(actions.shape, (2, *self.env.action_space.shape)) self.assertEqual(log_pis.shape, (2, 1)) def test_env_step_with_actions(self): observation_np = self.env.reset() action = self.policy.action(observation_np).numpy() self.env.step(action) def test_trainable_variables(self): self.assertEqual( tuple(self.policy.trainable_variables), tuple(self.policy.shift_and_scale_model.trainable_variables)) self.assertEqual( len(self.policy.trainable_variables), 2 * (len(self.hidden_layer_sizes) + 1)) def test_get_diagnostics(self): observation1_np = self.env.reset() observation2_np = self.env.step(self.env.action_space.sample())[0] observations_np = type(observation1_np)(( (key, np.stack(( observation1_np[key], observation2_np[key] ), axis=0).astype(np.float32)) for key in observation1_np.keys() )) diagnostics = self.policy.get_diagnostics_np(observations_np) self.assertTrue(isinstance(diagnostics, OrderedDict)) self.assertEqual( tuple(diagnostics.keys()), ('shifts-mean', 'shifts-std', 'scales-mean', 'scales-std', 'entropy-mean', 'entropy-std', 'actions-mean', 'actions-std', 'actions-min', 'actions-max')) for value in diagnostics.values(): self.assertTrue(np.isscalar(value)) def test_serialize_deserialize(self): policy_1 = FeedforwardGaussianPolicy( input_shapes=self.env.observation_shape, output_shape=self.env.action_space.shape, action_range=( self.env.action_space.low, self.env.action_space.high, ), hidden_layer_sizes=self.hidden_layer_sizes, observation_keys=self.env.observation_keys) path = sampler_utils.rollout( self.env, policy_1, path_length=10, break_on_terminal=False) observations = path['observations'] weights_1 = policy_1.get_weights() actions_1 = policy_1.actions(observations) log_pis_1 = policy_1.log_probs(observations, actions_1) config = policies.serialize(policy_1) policy_2 = policies.deserialize(config) policy_2.set_weights(policy_1.get_weights()) weights_2 = policy_2.get_weights() log_pis_2 = policy_2.log_probs(observations, actions_1) for weight_1, weight_2 in zip(weights_1, weights_2): np.testing.assert_array_equal(weight_1, weight_2) np.testing.assert_array_equal(log_pis_1, log_pis_2) np.testing.assert_equal( actions_1.shape, 
policy_2.actions(observations).shape) @pytest.mark.skip("Latent smoothing is temporarily disabled.") def test_latent_smoothing(self): observation_np = self.env.reset() smoothed_policy = FeedforwardGaussianPolicy( input_shapes=self.env.observation_shape, output_shape=self.env.action_space.shape, action_range=( self.env.action_space.low, self.env.action_space.high, ), hidden_layer_sizes=self.hidden_layer_sizes, smoothing_coefficient=0.5, observation_keys=self.env.observation_keys) np.testing.assert_equal(smoothed_policy._smoothing_x, 0.0) self.assertEqual(smoothed_policy._smoothing_alpha, 0.5) self.assertEqual( smoothed_policy._smoothing_beta, np.sqrt((1.0 - np.power(smoothed_policy._smoothing_alpha, 2.0))) / (1.0 - smoothed_policy._smoothing_alpha)) smoothing_x_previous = smoothed_policy._smoothing_x for i in range(5): action_np = smoothed_policy.action(observation_np).numpy() observation_np = self.env.step(action_np)[0] self.assertFalse(np.all(np.equal( smoothing_x_previous, smoothed_policy._smoothing_x))) smoothing_x_previous = smoothed_policy._smoothing_x smoothed_policy.reset() np.testing.assert_equal(smoothed_policy._smoothing_x, 0.0) if __name__ == '__main__': tf.test.main()
35.698225
78
0.638654
73d27a004f4d1690508f48d45fdebf9629b34ee0
5,589
py
Python
s3cortxutils/s3cipher/s3cipher/cortx_s3_cipher.py
SeagateChaDeepak/cortx-s3server
2af49128d467187a7e566fa939f90d2e727d0c8f
[ "Apache-2.0" ]
null
null
null
s3cortxutils/s3cipher/s3cipher/cortx_s3_cipher.py
SeagateChaDeepak/cortx-s3server
2af49128d467187a7e566fa939f90d2e727d0c8f
[ "Apache-2.0" ]
null
null
null
s3cortxutils/s3cipher/s3cipher/cortx_s3_cipher.py
SeagateChaDeepak/cortx-s3server
2af49128d467187a7e566fa939f90d2e727d0c8f
[ "Apache-2.0" ]
null
null
null
# # Copyright (c) 2020 Seagate Technology LLC and/or its Affiliates # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # For any questions about this software or licensing, # please email opensource@seagate.com or cortx-questions@seagate.com. # #!/usr/bin/env python3 import argparse import base64 import sys from s3backgrounddelete.cortx_cluster_config import CORTXClusterConfig, CipherInvalidToken from cortx.utils.security.cipher import Cipher class CortxS3Cipher: def __init__(self, config = None, use_base64 = False, key_len = 20, const_key = "default"): """Load and initialise s3cipher.""" self.use_base64 = use_base64 self.key_len = key_len self.const_key = const_key self.config = config if (self.config is None): self.config = CORTXClusterConfig() try: self.cluster_id = self.config.get_cluster_id() except KeyError as err: print("Fail to parse cluster_id from config file") sys.exit(1) @staticmethod def encrypt(key: str, data: str): edata = Cipher.encrypt(bytes(key, 'utf-8'), bytes(data, 'utf-8')) return edata.decode("utf-8") @staticmethod def decrypt(key: str, data: str): ddata = Cipher.decrypt(bytes(key, 'utf-8'), bytes(data, 'utf-8')) return ddata.decode("utf-8") def generate_key(self): try: key = Cipher.generate_key(self.cluster_id, self.const_key) except Exception as err: raise CipherInvalidToken("Cipher generate key failed with error : {0}".format(err)) if(self.use_base64): key = base64.b64encode(key, str.encode("AZ")) if(self.key_len): if(len(key) < self.key_len): while(len(key) < self.key_len): key = key * 2 key = key[:self.key_len] elif(len(key) > self.key_len): key = key[:self.key_len] return key.decode("utf-8") def run(self): parser = argparse.ArgumentParser(description='cortx-py-utils::Cipher wrapper') subparsers = parser.add_subparsers(dest='command', title='commands') generatekey = subparsers.add_parser('generate_key', help="generate key to encrypt or decrypt data with it, use '--const_key' option with this.") generatekey.add_argument("--const_key", help="Constant key name to be used during encryption", type=str, required=True) generatekey.add_argument("--key_len", help="Key length to be obtained", type=int) generatekey.add_argument("--use_base64", help="Used to obtain alphanumeric base64 keys", action="store_true") encryptkey = subparsers.add_parser("encrypt", help="encrypt provided bytes of data, with provided key") encryptkey.add_argument("--data", help="bytes which needs to be encrypted or decrypted using provided key", type=str, required=True) encryptkey.add_argument("--key", help="key (in bytes) to be used in encrypting or decrypting bytes of data", type=str, required=True) decryptkey = subparsers.add_parser("decrypt", help="decrypt provided bytes of data, with provided key") decryptkey.add_argument("--data", help="bytes which needs to be encrypted or decrypted using provided key", type=str, required=True) decryptkey.add_argument("--key", help="key (in bytes) to be used in encrypting or decrypting bytes of data", type=str, required=True) args = parser.parse_args() try: if args.use_base64: 
use_base64_flag = True else: use_base64_flag = False except AttributeError: use_base64_flag = False try: if args.key_len: key_len_flag = args.key_len else: key_len_flag = 0 except AttributeError: key_len_flag = 0 try: if args.const_key: const_key_flag = args.const_key else: const_key_flag = "default_key" except AttributeError: const_key_flag = "default_key" try: if args.key: key = args.key else: key = "" except AttributeError: key = "" try: if args.data: data = args.data else: data = "" except AttributeError: data = "" s3_cipher = CortxS3Cipher(None, use_base64_flag, key_len_flag, const_key_flag) try: if args.command == 'encrypt': print(s3_cipher.encrypt(key, data)) elif args.command == 'decrypt': print(s3_cipher.decrypt(key, data)) elif args.command == 'generate_key': print(s3_cipher.generate_key()) else: sys.exit("Invalid command option passed, see help.") except CipherInvalidToken as err: print("Cipher generate key failed with error : {0}".format(err)) sys.exit(1)
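A standalone illustration of the key-length normalization used by generate_key() above (short keys are repeated until long enough, long keys are truncated); this is a simplified mirror of that logic for illustration, not a call into the cortx utilities.

def normalize(key: bytes, key_len: int) -> bytes:
    # Repeat the key until it reaches key_len, then cut it to exactly key_len.
    if key_len:
        while len(key) < key_len:
            key = key * 2
        key = key[:key_len]
    return key

print(normalize(b"abc123", 20))                      # b'abc123abc123abc123ab'
print(normalize(b"abcdefghijklmnopqrstuvwxyz", 20))  # b'abcdefghijklmnopqrst'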
37.26
152
0.620147
73d28a412b43efc41d2a9f393384a189366cb2da
3,350
py
Python
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/_configuration.py
vincenttran-msft/azure-sdk-for-python
348b56f9f03eeb3f7b502eed51daf494ffff874d
[ "MIT" ]
1
2022-03-09T08:59:13.000Z
2022-03-09T08:59:13.000Z
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/_configuration.py
vincenttran-msft/azure-sdk-for-python
348b56f9f03eeb3f7b502eed51daf494ffff874d
[ "MIT" ]
null
null
null
sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2016_01_01/_configuration.py
vincenttran-msft/azure-sdk-for-python
348b56f9f03eeb3f7b502eed51daf494ffff874d
[ "MIT" ]
1
2022-03-04T06:21:56.000Z
2022-03-04T06:21:56.000Z
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, TYPE_CHECKING from azure.core.configuration import Configuration from azure.core.pipeline import policies from azure.mgmt.core.policies import ARMChallengeAuthenticationPolicy, ARMHttpLoggingPolicy from ._version import VERSION if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential class StorageManagementClientConfiguration(Configuration): """Configuration for StorageManagementClient. Note that all parameters used to create this instance are saved as instance attributes. :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: Gets subscription credentials which uniquely identify the Microsoft Azure subscription. The subscription ID forms part of the URI for every service call. :type subscription_id: str """ def __init__( self, credential: "TokenCredential", subscription_id: str, **kwargs: Any ) -> None: super(StorageManagementClientConfiguration, self).__init__(**kwargs) if credential is None: raise ValueError("Parameter 'credential' must not be None.") if subscription_id is None: raise ValueError("Parameter 'subscription_id' must not be None.") self.credential = credential self.subscription_id = subscription_id self.api_version = "2016-01-01" self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) kwargs.setdefault('sdk_moniker', 'mgmt-storage/{}'.format(VERSION)) self._configure(**kwargs) def _configure( self, **kwargs # type: Any ): # type: (...) -> None self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) self.authentication_policy = kwargs.get('authentication_policy') if self.credential and not self.authentication_policy: self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
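A hedged construction sketch for the configuration class above; it assumes azure-identity is installed and uses a placeholder subscription id. In typical use the generated StorageManagementClient builds this configuration itself.

from azure.identity import DefaultAzureCredential
from azure.mgmt.storage.v2016_01_01._configuration import (
    StorageManagementClientConfiguration,
)

config = StorageManagementClientConfiguration(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)
print(config.api_version)  # "2016-01-01"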
48.550725
181
0.698806
73d2999ad34ce4574e2169026259c50903a84ca6
828
py
Python
var/spack/repos/builtin/packages/py-sphinx-limestone-theme/package.py
lguyot/spack
e910c227a7bac3adf2c18fc86cf994811b7d14f7
[ "ECL-2.0", "Apache-2.0", "MIT" ]
2
2020-10-15T01:08:42.000Z
2021-10-18T01:28:18.000Z
var/spack/repos/builtin/packages/py-sphinx-limestone-theme/package.py
lguyot/spack
e910c227a7bac3adf2c18fc86cf994811b7d14f7
[ "ECL-2.0", "Apache-2.0", "MIT" ]
null
null
null
var/spack/repos/builtin/packages/py-sphinx-limestone-theme/package.py
lguyot/spack
e910c227a7bac3adf2c18fc86cf994811b7d14f7
[ "ECL-2.0", "Apache-2.0", "MIT" ]
null
null
null
# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PySphinxLimestoneTheme(PythonPackage):
    """Sphinx Limestone Theme"""

    homepage = "https://bbpteam.epfl.ch/project/spaces/display/BBPSTD/Documentation+Standards"
    git = "ssh://bbpcode.epfl.ch/nse/sphinx-limestone-theme"

    version('0.0.3', tag='sphinx-limestone-theme-v0.0.3', submodules=True, get_full_repo=True)

    depends_on('python@3:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-sphinx@2:', type='run')

    @run_before('build')
    def generate(self):
        python = self.spec['python'].command
        python('translate_templates.py')
33.12
94
0.704106
73d29ff52e8240e1025ea04a57df92d946e1fbea
3,706
py
Python
blender_scripts/renderer_option.py
gizatt/blender_server
fc20b7ee53b3c8d8d141653128dada965a15a127
[ "MIT" ]
26
2019-03-11T21:09:56.000Z
2022-02-28T10:16:07.000Z
blender_scripts/renderer_option.py
gizatt/blender_server
fc20b7ee53b3c8d8d141653128dada965a15a127
[ "MIT" ]
8
2019-03-11T21:53:59.000Z
2021-09-28T20:49:04.000Z
blender_scripts/renderer_option.py
gizatt/blender_server
fc20b7ee53b3c8d8d141653128dada965a15a127
[ "MIT" ]
3
2019-10-09T23:31:34.000Z
2020-08-05T02:27:01.000Z
import attr import bpy from blender_scripts.utils import is_old_api def get_bpyobj_by_name(name_key: str): """ Using the name key to retrieve the object :param name_key: The key assigned using import_obj_model :return: bpy.data.objects[name_key] """ obj_collect = bpy.context.scene.objects if name_key in obj_collect: return obj_collect[name_key] else: sys.exit(-1) @attr.s class RendererOption: resolution_x = 640 resolution_y = 480 focal_x_mm = 1.0 focal_y_mm = 1.0 principal_x = 640 principal_y = 480 def rows(self): return self.resolution_y def cols(self): return self.resolution_x def sensor_width_bpy(self): return (self.focal_y_mm * self.principal_x) / (self.focal_x_mm * self.principal_y) def focal_bpy(self): s_u = self.resolution_x / self.sensor_width_bpy() return self.focal_x_mm / s_u def setup_renderer_resolution(option: RendererOption, camera_name='Camera'): bpy.data.scenes['Scene'].render.resolution_x = option.resolution_x bpy.data.scenes['Scene'].render.resolution_y = option.resolution_y bpy.data.scenes['Scene'].render.resolution_percentage = 100 # Set up the camera intrsinsic # camera_obj = bpy.context.scene.objects[camera_name] # camera_obj.data.type = 'PERSP' #camera_obj.data.lens_unit = 'MILLIMETERS' # Already set up in the camera itself #camera_obj.data.lens = option.focal_bpy() # camera_obj.data.sensor_width = option.sensor_width_bpy() @attr.s class CyclesRendererOption(RendererOption): # The number of samples per pixel num_samples = 32 use_denoising = True # Parameter related to performance but not output renderer_tile_x = 128 renderer_tile_y = 128 renderer_device = 'GPU' def setup_and_use_cycles(option: CyclesRendererOption, camera_name='Camera'): # The general setup code setup_renderer_resolution(option, camera_name=camera_name) bpy.context.scene.render.engine = 'CYCLES' if is_old_api(): bpy.context.scene.render.use_raytrace = True bpy.context.scene.render.use_shadows = True # The number of samples and denosing bpy.data.scenes['Scene'].cycles.samples = option.num_samples if is_old_api(): bpy.context.scene.render.layers[0].cycles.use_denoising = True else: bpy.context.view_layer.cycles.use_denoising = True # Performance parameters bpy.data.scenes['Scene'].render.tile_x = option.renderer_tile_x bpy.data.scenes['Scene'].render.tile_y = option.renderer_tile_y bpy.data.scenes['Scene'].cycles.device = option.renderer_device @attr.s class EeveeRendererOption(RendererOption): # The member related to ambient occlusion use_ambient_occlusion = True # The member related to shadow use_soft_shadow = True shadow_cube_size = '1024' use_ssr = True taa_render_samples = 20 ssr_max_roughness = 0.1 def setup_and_use_eevee(option: EeveeRendererOption, camera_name='Camera'): # The general setup code assert not is_old_api() setup_renderer_resolution(option, camera_name=camera_name) bpy.context.scene.render.engine = 'BLENDER_EEVEE' # The setup code for ambient occulusion bpy.context.scene.eevee.use_gtao = option.use_ambient_occlusion # the setup code for shadow bpy.context.scene.eevee.use_soft_shadows = option.use_soft_shadow bpy.context.scene.eevee.shadow_cube_size = option.shadow_cube_size bpy.context.scene.eevee.taa_render_samples = option.taa_render_samples bpy.context.scene.eevee.use_ssr = option.use_ssr bpy.context.scene.eevee.ssr_max_roughness = option.ssr_max_roughness
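A hedged sketch of using the Cycles path above; it must run under Blender's bundled Python (bpy available and a default 'Camera' object in the scene), and the values are illustrative.

import bpy

from blender_scripts.renderer_option import CyclesRendererOption, setup_and_use_cycles

option = CyclesRendererOption()
option.resolution_x, option.resolution_y = 640, 480
option.num_samples = 64
option.renderer_device = 'CPU'  # or 'GPU' if one is configured

setup_and_use_cycles(option, camera_name='Camera')
bpy.ops.render.render(write_still=True)  # renders to the scene's configured output path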
31.675214
90
0.726659
73d2a983708817324d0b56ccedaced47959a0a35
784
py
Python
python/tree/0108_convert_sorted_array_to_binary_search_tree.py
linshaoyong/leetcode
ea052fad68a2fe0cbfa5469398508ec2b776654f
[ "MIT" ]
6
2019-07-15T13:23:57.000Z
2020-01-22T03:12:01.000Z
python/tree/0108_convert_sorted_array_to_binary_search_tree.py
linshaoyong/leetcode
ea052fad68a2fe0cbfa5469398508ec2b776654f
[ "MIT" ]
null
null
null
python/tree/0108_convert_sorted_array_to_binary_search_tree.py
linshaoyong/leetcode
ea052fad68a2fe0cbfa5469398508ec2b776654f
[ "MIT" ]
1
2019-07-24T02:15:31.000Z
2019-07-24T02:15:31.000Z
# Definition for a binary tree node.
class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution:
    def sortedArrayToBST(self, nums):
        """
        :type nums: List[int]
        :rtype: TreeNode
        """
        if not nums:
            return None
        mid = len(nums) // 2
        root = TreeNode(nums[mid])
        root.left = self.sortedArrayToBST(nums[:mid])
        root.right = self.sortedArrayToBST(nums[mid + 1:])
        return root


def test_sorted_array_to_BST():
    assert Solution().sortedArrayToBST(None) is None
    assert Solution().sortedArrayToBST([]) is None

    a = Solution().sortedArrayToBST([1])
    assert a.val == 1
    assert a.left is None
    assert a.right is None
24.5
58
0.591837