Dataset columns:
repository_name: string (lengths 7 to 107)
function_path: string (lengths 4 to 190)
function_identifier: string (lengths 1 to 236)
language: string (1 class)
function: string (lengths 9 to 647k)
docstring: string (lengths 5 to 488k)
function_url: string (lengths 71 to 285)
context: string (lengths 0 to 2.51M)
license: string (5 classes)
microsoft/agogosml
agogosml/agogosml/writer/output_writer.py
OutputWriter.stop_incoming_messages
python
def stop_incoming_messages(self):
    self.listener.stop()
    self.logger.event('output.lifecycle.stop')
Stop accepting messages.
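A minimal lifecycle sketch, assuming the OutputWriter class from the context column is importable and that its Logger can be instantiated as-is; the stub listener and streaming client below are hypothetical stand-ins, not agogosml classes:

class StubListener:                          # hypothetical ListenerClient stand-in
    def start(self, callback):
        self.callback = callback
    def stop(self):
        print("listener stopped")

class StubStreamingClient:                   # hypothetical AbstractStreamingClient stand-in
    def send(self, message):
        print("sent:", message)
        return True

writer = OutputWriter(streaming_client=StubStreamingClient(), listener=StubListener())
writer.start_incoming_messages()             # listener now forwards messages to on_message_received
writer.stop_incoming_messages()              # stops the listener and logs 'output.lifecycle.stop'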
https://github.com/microsoft/agogosml/blob/5e603394f037640b2fb7ddee60be47c569ab48c9/agogosml/agogosml/writer/output_writer.py#L31-L34
from typing import Callable
from typing import Optional

from agogosml.common.abstract_streaming_client import AbstractStreamingClient
from agogosml.common.listener_client import ListenerClient
from agogosml.utils.logger import Logger


class OutputWriter:
    def __init__(self, streaming_client: AbstractStreamingClient, listener: ListenerClient):
        self.messaging_client = streaming_client
        self.listener = listener
        self.logger = Logger()

    def on_message_received(self, message: str) -> bool:
        success = self.messaging_client.send(message)
        self.logger.event('output.message.received', {'success': str(success)})
        return success

    def start_incoming_messages(self, callback: Optional[Callable[[str], bool]] = None):
        self.logger.event('output.lifecycle.start')
        self.listener.start(callback or self.on_message_received)
MIT License
google/init2winit
init2winit/init_lib/sparse_init.py
sparse_init
python
def sparse_init(loss_fn, model, hps, input_shape, output_shape, rng_key,
                metrics_logger=None, log_every=10):
  del loss_fn, input_shape, output_shape, rng_key, metrics_logger, log_every
  activation_functions = hps.activation_function
  num_hidden_layers = len(hps.hid_sizes)
  if isinstance(hps.activation_function, str):
    activation_functions = [hps.activation_function] * num_hidden_layers
  for i, key in enumerate(model.params):
    num_units, num_weights = model.params[key]['kernel'].shape
    mask = np.zeros((num_units, num_weights), dtype=bool)
    for k in range(num_units):
      if num_weights >= hps.non_zero_connection_weights:
        sample = np.random.choice(
            num_weights, hps.non_zero_connection_weights, replace=False)
      else:
        sample = np.random.choice(num_weights, hps.non_zero_connection_weights)
      mask[k, sample] = True
    model.params[key]['kernel'] = model.params[key]['kernel'].at[~mask].set(0.0)
    if i < num_hidden_layers and activation_functions[i] == 'tanh':
      model.params[key]['bias'] = model.params[key]['bias'].at[:].set(0.5)
    else:
      model.params[key]['bias'] = model.params[key]['bias'].at[:].set(0.0)
  return model
Implements SparseInit initializer. Args: loss_fn: Loss function. model: Flax Model class. hps: HParam object. Required hparams are meta_learning_rate, meta_batch_size, meta_steps, and epsilon. input_shape: Must agree with batch[0].shape[1:]. output_shape: Must agree with batch[1].shape[1:]. rng_key: jax.PRNGKey, used to seed all randomness. metrics_logger: Instance of utils.MetricsLogger log_every: Print meta loss every k steps. Returns: A Flax model with sparse initialization.
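The heart of the initializer is the per-unit sparse mask; a self-contained NumPy sketch of just that masking step, using plain arrays instead of Flax params and hypothetical sizes:

import numpy as np

num_units, num_weights, non_zero = 4, 10, 3        # hypothetical layer shape and hparam
kernel = np.random.randn(num_units, num_weights)

mask = np.zeros((num_units, num_weights), dtype=bool)
for k in range(num_units):
    # keep `non_zero` randomly chosen incoming weights per unit, zero out the rest
    sample = np.random.choice(num_weights, non_zero, replace=False)
    mask[k, sample] = True
kernel[~mask] = 0.0

assert (mask.sum(axis=1) == non_zero).all()
assert ((kernel != 0).sum(axis=1) <= non_zero).all()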
https://github.com/google/init2winit/blob/d54661d82576204bfcc306fae8606b8e7c3838b6/init2winit/init_lib/sparse_init.py#L29-L75
from ml_collections.config_dict import config_dict
import numpy as np

DEFAULT_HPARAMS = config_dict.ConfigDict(dict(non_zero_connection_weights=15,))
Apache License 2.0
thomasgermain/pymultimatic
pymultimatic/systemmanager.py
SystemManager.set_ventilation_operating_mode
python
async def set_ventilation_operating_mode(
    self, ventilation_id: str, mode: OperatingMode
) -> None:
    await self._call_api(
        urls.set_ventilation_operating_mode,
        params={"id": ventilation_id},
        payload=payloads.ventilation_operating_mode(mode.name),
    )
Set ventilation at night level. Compatible modes are listed here :class:`~pymultimatic.model.Ventilation.MODES` Args: ventilation_id (str): id of the ventilation mode (OperatingMode): Mode to set
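A hedged usage sketch: the SystemManager constructor and login() come from the context column, but the credentials, ventilation id, and OperatingModes.AUTO constant are placeholders (pick a mode listed in Ventilation.MODES, as the docstring says):

import asyncio
from aiohttp import ClientSession
from pymultimatic.systemmanager import SystemManager
from pymultimatic.model import OperatingModes

async def main():
    async with ClientSession() as session:
        manager = SystemManager("user@example.com", "password", session)
        await manager.login()
        # placeholder ventilation id and mode
        await manager.set_ventilation_operating_mode("ventilation_id", OperatingModes.AUTO)

asyncio.run(main())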
https://github.com/thomasgermain/pymultimatic/blob/9a05d0f1e341bb59f72cd6294aa1e22651803b3f/pymultimatic/systemmanager.py#L749-L764
import asyncio import logging from datetime import date, timedelta from typing import Any, Callable, List, Optional, Tuple, Type from aiohttp import ClientSession from schema import Schema, SchemaError from .api import ApiError, Connector, WrongResponseError, defaults, payloads, schemas, urls from .model import ( Circulation, Dhw, FacilityDetail, HolidayMode, HotWater, HvacStatus, OperatingMode, OperatingModes, QuickMode, QuickVeto, Report, Room, System, Ventilation, Zone, ZoneCooling, ZoneHeating, constants, mapper, EmfReport, ) _LOGGER = logging.getLogger("SystemManager") def ignore_http_409(return_value: Any = None) -> Callable[..., Any]: def decorator(func: Callable[..., Any]) -> Any: async def wrapper(*args: Any, **kwargs: Any) -> Any: try: return await func(*args, **kwargs) except ApiError as ex: if ex.status != 409: raise return return_value return wrapper return decorator def retry_async( num_tries: int = 5, on_exceptions: Tuple[Type[BaseException], ...] = (Exception,), on_status_codes: Tuple[int, ...] = (), backoff_base: float = 0.5, ) -> Callable[..., Any]: on_exceptions = on_exceptions + (ApiError,) def decorator(func: Callable[..., Any]) -> Any: async def wrapper(*args: Any, **kwargs: Any) -> Any: _num_tries = num_tries last_response: Optional[str] = None while _num_tries > 0: _num_tries -= 1 try: return await func(*args, **kwargs) except on_exceptions as ex: if not _num_tries: if isinstance(ex, ApiError): raise ex raise ApiError( "Cannot get correct response", response=last_response, status=200, ) from ex if isinstance(ex, ApiError): last_response = ex.response if ex.status not in on_status_codes: raise retry_in = backoff_base * (num_tries - _num_tries) _LOGGER.debug("Error occurred, retrying in %s", retry_in, exc_info=True) await asyncio.sleep(retry_in) return wrapper return decorator class SystemManager: def __init__( self, user: str, password: str, session: ClientSession, smartphone_id: str = defaults.SMARTPHONE_ID, serial: Optional[str] = None, ): self._connector: Connector = Connector(user, password, session, smartphone_id) self._serial = serial self._fixed_serial = self._serial is not None self._ensure_ready_lock = asyncio.Lock() async def login(self, force_login: bool = False) -> bool: return await self._connector.login(force_login) async def logout(self) -> None: if not self._fixed_serial: self._serial = None await self._connector.logout() async def get_system(self) -> System: (facilities, full_system, live_report, hvac_state, gateway_json,) = await asyncio.gather( self._call_api(urls.facilities_list, schema=schemas.FACILITIES), self._call_api(urls.system, schema=schemas.SYSTEM), self._call_api(urls.live_report, schema=schemas.LIVE_REPORTS), self._call_api(urls.hvac, schema=schemas.HVAC), self._call_api(urls.gateway_type, schema=schemas.GATEWAY), ) hvac_status = mapper.map_hvac_status(hvac_state) holiday = mapper.map_holiday_mode_from_system(full_system) zones = mapper.map_zones_from_system(full_system) outdoor_temp = mapper.map_outdoor_temp(full_system) quick_mode = mapper.map_quick_mode_from_system(full_system) ventilation = mapper.map_ventilation_from_system(full_system) dhw = mapper.map_dhw_from_system(full_system, live_report) reports = mapper.map_reports(live_report) facility_detail = mapper.map_facility_detail(facilities, self._serial) gateway = mapper.map_gateway(gateway_json) rooms: List[Room] = [] if [z for z in zones if z.rbr]: rooms_raw = await self._call_api(urls.rooms, schema=schemas.ROOM_LIST) rooms = mapper.map_rooms(rooms_raw) return System( 
holiday=holiday, quick_mode=quick_mode, zones=zones, rooms=rooms, dhw=dhw, reports=reports, outdoor_temperature=outdoor_temp, hvac_status=hvac_status, facility_detail=facility_detail, ventilation=ventilation, gateway=gateway, ) @ignore_http_409(return_value=[]) async def get_emf_devices(self) -> List[EmfReport]: return mapper.map_emf_reports(await self._call_api(urls.emf_devices)) async def get_gateway(self) -> str: return mapper.map_gateway(await self._call_api(urls.gateway_type)) @ignore_http_409() async def get_outdoor_temperature(self) -> Optional[float]: return mapper.map_outdoor_temp(await self._call_api(urls.system_status)) @ignore_http_409() async def get_hvac_status(self) -> HvacStatus: return mapper.map_hvac_status(await self._call_api(urls.hvac)) async def get_facility_detail(self, serial: Optional[str] = None) -> FacilityDetail: serial = serial if serial is not None else self._serial return mapper.map_facility_detail( await self._call_api(urls.facilities_list, schema=schemas.FACILITIES), serial ) @ignore_http_409() async def get_live_reports(self) -> List[Report]: return mapper.map_reports( await self._call_api(urls.live_report, schema=schemas.LIVE_REPORTS) ) @ignore_http_409() async def get_live_report(self, report_id: str, device_id: str) -> Optional[Report]: json = await self._call_api( urls.live_report_device, params={"device_id": device_id, "report_id": report_id}, schema=schemas.LIVE_REPORT, ) return mapper.map_report(json) @ignore_http_409() async def get_ventilation(self) -> Optional[Ventilation]: return mapper.map_ventilation( await self._call_api(urls.system_ventilation, schema=schemas.VENTILATION_LIST) ) @ignore_http_409() async def get_holiday_mode(self) -> HolidayMode: raw = await self._call_api(urls.system_holiday_mode) return mapper.map_holiday_mode(raw) @ignore_http_409() async def get_quick_mode(self) -> Optional[QuickMode]: return mapper.map_quick_mode(await self._call_api(urls.system_quickmode)) @ignore_http_409() async def get_hot_water(self, dhw_id: str) -> Optional[HotWater]: dhw = await self._call_api(urls.hot_water, params={"id": dhw_id}, schema=schemas.FUNCTION) return mapper.map_hot_water(dhw, dhw_id) @ignore_http_409() async def get_dhw(self) -> Optional[Dhw]: dhw = await self._call_api(urls.dhws, schema=schemas.DHWS) return mapper.map_dhw(dhw) @ignore_http_409(return_value=[]) async def get_rooms(self) -> Optional[List[Room]]: rooms = await self._call_api(urls.rooms, schema=schemas.ROOM_LIST) return mapper.map_rooms(rooms) @ignore_http_409() async def get_room(self, room_id: str) -> Optional[Room]: new_room = await self._call_api(urls.room, params={"id": room_id}, schema=schemas.ROOM) return mapper.map_room(new_room) @ignore_http_409(return_value=[]) async def get_zones(self) -> Optional[List[Zone]]: rooms = await self._call_api(urls.zones, schema=schemas.ZONE_LIST) return mapper.map_zones(rooms) @ignore_http_409() async def get_zone(self, zone_id: str) -> Optional[Zone]: new_zone = await self._call_api(urls.zone, params={"id": zone_id}, schema=schemas.ZONE) return mapper.map_zone(new_zone) @ignore_http_409() async def get_circulation(self, dhw_id: str) -> Optional[Circulation]: new_circulation = await self._call_api( urls.circulation, params={"id": dhw_id}, schema=schemas.FUNCTION ) return mapper.map_circulation_alone(new_circulation, dhw_id) async def set_quick_mode(self, quick_mode: QuickMode) -> None: await self._call_api( urls.system_quickmode, payload=payloads.quickmode(quick_mode.name, quick_mode.duration) ) 
@ignore_http_409(return_value=False) async def remove_quick_mode(self) -> bool: await self._call_api(urls.system_quickmode, "delete") return True async def set_holiday_mode(self, start_date: date, end_date: date, temperature: float) -> None: payload = payloads.holiday_mode(True, start_date, end_date, self._round(temperature)) await self._call_api(urls.system_holiday_mode, payload=payload) async def remove_holiday_mode(self) -> None: payload = payloads.holiday_mode( False, date.today() - timedelta(days=2), date.today() - timedelta(days=1), constants.FROST_PROTECTION_TEMP, ) await self._call_api(urls.system_holiday_mode, payload=payload) async def set_hot_water_setpoint_temperature(self, dhw_id: str, temperature: float) -> None: _LOGGER.debug("Will set dhw target temperature to %s", temperature) payload = payloads.hotwater_temperature_setpoint(self._round(temperature)) await self._call_api( urls.hot_water_temperature_setpoint, params={"id": dhw_id}, payload=payload ) async def set_hot_water_operating_mode(self, dhw_id: str, new_mode: OperatingMode) -> None: _LOGGER.debug("Will try to set hot water mode to %s", new_mode) if new_mode in HotWater.MODES: _LOGGER.debug("New mode is %s", new_mode) await self._call_api( urls.hot_water_operating_mode, params={"id": dhw_id}, payload=payloads.hot_water_operating_mode(new_mode.name), ) else: _LOGGER.debug("New mode is not available for hot water %s", new_mode) async def set_room_operating_mode(self, room_id: str, new_mode: OperatingMode) -> None: if new_mode in Room.MODES and new_mode != OperatingModes.QUICK_VETO: _LOGGER.debug("New mode is %s", new_mode) await self._call_api( urls.room_operating_mode, params={"id": room_id}, payload=payloads.room_operating_mode(new_mode.name), ) else: _LOGGER.debug("mode is not available for room %s", new_mode) async def set_room_quick_veto(self, room_id: str, quick_veto: QuickVeto) -> None: payload = payloads.room_quick_veto(self._round(quick_veto.target), quick_veto.duration) await self._call_api(urls.room_quick_veto, params={"id": room_id}, payload=payload) async def remove_room_quick_veto(self, room_id: str) -> None: await self._call_api(urls.room_quick_veto, "delete", params={"id": room_id}) async def set_room_setpoint_temperature(self, room_id: str, temperature: float) -> None: _LOGGER.debug("Will try to set room target temperature to %s", temperature) await self._call_api( urls.room_temperature_setpoint, params={"id": room_id}, payload=payloads.room_temperature_setpoint(self._round(temperature)), ) async def set_zone_quick_veto(self, zone_id: str, quick_veto: QuickVeto) -> None: payload = payloads.zone_quick_veto(self._round(quick_veto.target)) await self._call_api(urls.zone_quick_veto, params={"id": zone_id}, payload=payload) async def set_zone_heating_operating_mode(self, zone_id: str, new_mode: OperatingMode) -> None: if new_mode in ZoneHeating.MODES and new_mode != OperatingModes.QUICK_VETO: _LOGGER.debug("New mode is %s", new_mode) await self._call_api( urls.zone_heating_mode, params={"id": zone_id}, payload=payloads.zone_operating_mode(new_mode.name), ) else: _LOGGER.debug("mode is not available for zone %s", new_mode) async def set_zone_cooling_operating_mode(self, zone_id: str, new_mode: OperatingMode) -> None: if new_mode in ZoneCooling.MODES and new_mode != OperatingModes.QUICK_VETO: _LOGGER.debug("New mode is %s", new_mode) await self._call_api( urls.zone_cooling_mode, params={"id": zone_id}, payload=payloads.zone_operating_mode(new_mode.name), ) else: _LOGGER.debug("mode is not available for zone 
%s", new_mode) async def remove_zone_quick_veto(self, zone_id: str) -> None: await self._call_api(urls.zone_quick_veto, "delete", params={"id": zone_id}) async def set_zone_heating_setpoint_temperature(self, zone_id: str, temperature: float) -> None: _LOGGER.debug("Will try to set zone target temperature to %s", temperature) payload = payloads.zone_temperature_setpoint(self._round(temperature)) await self._call_api( urls.zone_heating_setpoint_temperature, params={"id": zone_id}, payload=payload, ) async def set_zone_cooling_setpoint_temperature(self, zone_id: str, temperature: float) -> None: _LOGGER.debug("Will try to set zone target temperature to %s", temperature) payload = payloads.zone_temperature_setpoint(self._round(temperature)) await self._call_api( urls.zone_cooling_setpoint_temperature, params={"id": zone_id}, payload=payload, ) async def set_zone_heating_setback_temperature(self, zone_id: str, temperature: float) -> None: _LOGGER.debug("Will try to set zone setback temperature to %s", temperature) await self._call_api( urls.zone_heating_setback_temperature, params={"id": zone_id}, payload=payloads.zone_temperature_setback(self._round(temperature)), )
MIT License
truckersmp-cli/truckersmp-cli
truckersmp_cli/gamestarter.py
StarterProton.setup_game_env
python
def setup_game_env(env, steamdir):
    if not Args.disable_proton_overlay:
        overlayrenderer = os.path.join(steamdir, File.overlayrenderer_inner)
        if "LD_PRELOAD" in env:
            env["LD_PRELOAD"] += ":" + overlayrenderer
        else:
            env["LD_PRELOAD"] = overlayrenderer
    env.update(
        SteamAppId=Args.steamid,
        SteamGameId=Args.steamid,
        PROTON_USE_WINED3D="1" if Args.use_wined3d else "0",
    )
Set up environment variables for running the game with Proton. env: A dict of environment variables steamdir: Path to Steam installation
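The LD_PRELOAD handling is the only subtle part: the overlay library is appended to whatever is already preloaded rather than replacing it. A standalone sketch of that pattern with a placeholder path:

import os

env = os.environ.copy()
overlay = "/path/to/gameoverlayrenderer.so"    # placeholder for the Steam overlay library
if "LD_PRELOAD" in env:
    env["LD_PRELOAD"] += ":" + overlay         # keep anything already preloaded
else:
    env["LD_PRELOAD"] = overlay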
https://github.com/truckersmp-cli/truckersmp-cli/blob/98b0828ca2edca4b5d6cd7788bfb621e00a53882/truckersmp_cli/gamestarter.py#L280-L298
import logging import os import shutil import subprocess as subproc import sys import tempfile import time from .utils import ( activate_native_d3dcompiler_47, find_discord_ipc_sockets, get_proton_version, get_steam_library_dirs, is_d3dcompiler_setup_skippable, log_info_formatted_envars_and_args, print_child_output, set_wine_desktop_registry, setup_wine_discord_ipc_bridge, wait_for_steam, ) from .variables import Args, Dir, File class GameStarterInterface: def run(self): raise NotImplementedError @property def runner_name(self): raise NotImplementedError class StarterProton(GameStarterInterface): def __init__(self, cfg): self._cfg = cfg self._steamruntime_usr_tempdir = None self._discord_sockets = find_discord_ipc_sockets() major, minor = get_proton_version(Args.protondir) logging.info("Proton version is (major=%d, minor=%d)", major, minor) self._use_steam_runtime = ( not Args.without_steam_runtime and (major >= 6 or (major == 5 and minor >= 13))) logging.info( "Using Steam Runtime container" if self._use_steam_runtime else "Not using Steam Runtime container") def _cleanup(self): if self._steamruntime_usr_tempdir is not None: with self._steamruntime_usr_tempdir: pass def _determine_steamruntime_shared_paths(self, steamdir): if not self._use_steam_runtime: return [] shared_paths = [Args.gamedir, Args.protondir, Args.prefixdir] if Args.singleplayer: shared_paths += get_steam_library_dirs(steamdir) else: shared_paths += Args.moddir, Dir.truckersmp_cli_data if Dir.scriptdir.startswith("/usr/"): logging.info("System-wide installation detected: %s", Dir.scriptdir) self._steamruntime_usr_tempdir = tempfile.TemporaryDirectory( prefix="truckersmp-cli-steamruntime-sharing-workaround-") logging.debug( "Copying Steam Runtime helper to %s", self._steamruntime_usr_tempdir.name) shutil.copy( File.steamruntime_helper, self._steamruntime_usr_tempdir.name) if not Args.singleplayer: logging.debug( "Copying inject program to %s", self._steamruntime_usr_tempdir.name) shutil.copy(File.inject_exe, self._steamruntime_usr_tempdir.name) shared_paths.append(self._steamruntime_usr_tempdir.name) else: shared_paths.append(Dir.scriptdir) if len(self._discord_sockets) > 0: shared_paths += self._discord_sockets logging.debug("Shared paths: %s", shared_paths) return shared_paths def _init_args(self, args, steamdir): if self._use_steam_runtime: python = "python3" args["steamrt"].append(os.path.join(Args.steamruntimedir, "run")) shared_paths = self._determine_steamruntime_shared_paths(steamdir) for shared_path in shared_paths: args["steamrt"] += "--filesystem", shared_path args["steamrt"].append("--") else: python = sys.executable args["wine"] = args["steamrt"].copy() args["steamrt"].append(python) args["proton"].append(python) args["proton"] += os.path.join(Args.protondir, "proton"), "run" def _setup_helper_args(self, args): args.append( File.steamruntime_helper if self._steamruntime_usr_tempdir is None else os.path.join( self._steamruntime_usr_tempdir.name, os.path.basename(File.steamruntime_helper))) if (not Args.without_wine_discord_ipc_bridge and len(self._discord_sockets) > 0): args += "--early-executable", File.ipcbridge, "--early-wait-before-start", "5" for executable in self._cfg.thirdparty_executables: args += "--executable", executable args += "--wait-before-start", str(self._cfg.thirdparty_wait) if Args.verbose: args.append("-v" if Args.verbose == 1 else "-vv") return args def _setup_proton_args(self, proton_args): if Args.singleplayer: exename = "eurotrucks2.exe" if Args.ets2 else "amtrucks.exe" gamepath = 
os.path.join(Args.gamedir, "bin/win_x64", exename) proton_args.append(gamepath) else: proton_args.append( File.inject_exe if self._steamruntime_usr_tempdir is None else os.path.join( self._steamruntime_usr_tempdir.name, os.path.basename(File.inject_exe)) ) proton_args += Args.gamedir, Args.moddir for opt in f"-rdevice {Args.rendering_backend} {Args.game_options}".split(" "): if opt != "": proton_args.append(opt) @staticmethod def determine_env_print(env): env_print = ["SteamAppId", "SteamGameId"] if "LD_PRELOAD" in env: env_print.append("LD_PRELOAD") env_print += [ "PROTON_USE_WINED3D", "STEAM_COMPAT_CLIENT_INSTALL_PATH", "STEAM_COMPAT_DATA_PATH", ] return env_print def run(self): args = dict(wine=[], proton=[], steamrt=[]) prefix = os.path.join(Args.prefixdir, "pfx") env = os.environ.copy() steamdir = wait_for_steam(use_proton=True, loginvdf_paths=File.loginusers_paths) logging.info("Steam installation directory: %s", steamdir) logging.debug("Creating directory %s if it doesn't exist", Args.prefixdir) os.makedirs(Args.prefixdir, exist_ok=True) env.update( STEAM_COMPAT_DATA_PATH=Args.prefixdir, STEAM_COMPAT_CLIENT_INSTALL_PATH=steamdir, ) self._init_args(args, steamdir) do_d3dcompiler_setup = (Args.activate_native_d3dcompiler_47 or (not Args.singleplayer and Args.rendering_backend == "dx11" and not is_d3dcompiler_setup_skippable())) logging.debug("Whether to setup native d3dcompiler_47: %s", do_d3dcompiler_setup) wine_command = os.path.join( Args.protondir, "files" if os.access( os.path.join(Args.protondir, "files/bin/wine"), os.R_OK | os.X_OK, ) else "dist", "bin/wine") args["wine"].append(wine_command) if (not os.access(wine_command, os.R_OK) or do_d3dcompiler_setup): try: subproc.check_output( args["proton"] + ["wineboot", ], env=env, stderr=subproc.STDOUT) except OSError as ex: sys.exit(f"Failed to run wineboot: {ex}") except subproc.CalledProcessError as ex: sys.exit(f"wineboot failed:\n{ex.output.decode('utf-8')}") if do_d3dcompiler_setup: activate_native_d3dcompiler_47(prefix, args["wine"]) if Args.wine_desktop: set_wine_desktop_registry(prefix, args["wine"], True) StarterProton.setup_game_env(env, steamdir) self._setup_proton_args(args["proton"]) log_info_formatted_envars_and_args( runner="Steam Runtime helper", env_print=StarterProton.determine_env_print(env), env=env, args=args["proton"]) try: with subproc.Popen( self._setup_helper_args(args["steamrt"]) + ["--", ] + args["proton"], env=env, stdout=subproc.PIPE, stderr=subproc.STDOUT) as proc: if Args.verbose: print_child_output(proc) except subproc.CalledProcessError as ex: logging.error( "Steam Runtime helper exited abnormally:\n%s", ex.output.decode("utf-8")) if Args.wine_desktop: set_wine_desktop_registry(prefix, args["wine"], False) self._cleanup() @staticmethod
MIT License
databand-ai/dbnd
modules/dbnd/src/dbnd/_vendor/cloudpickle/cloudpickle_fast.py
CloudPickler._dynamic_function_reduce
python
def _dynamic_function_reduce(self, func):
    newargs = self._function_getnewargs(func)
    state = _function_getstate(func)
    return (types.FunctionType, newargs, state, None, None,
            _function_setstate)
Reduce a function that is not pickleable via attribute lookup.
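This reduce hook is what lets cloudpickle serialize functions defined at runtime (closures, lambdas, notebook code) that plain pickle rejects. A small round-trip sketch using the standalone cloudpickle package rather than the vendored copy above, assuming it is installed:

import cloudpickle

def make_adder(n):
    def add(x):                 # defined at runtime, not importable by attribute lookup
        return x + n
    return add

payload = cloudpickle.dumps(make_adder(5))    # dynamic functions take the reduce path
restored = cloudpickle.loads(payload)
assert restored(2) == 7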
https://github.com/databand-ai/dbnd/blob/ec0076f9a142b20e2f7afd886ed1a18683c553ec/modules/dbnd/src/dbnd/_vendor/cloudpickle/cloudpickle_fast.py#L504-L509
import _collections_abc import abc import copyreg import io import itertools import logging import sys import struct import types import weakref import typing from enum import Enum from collections import ChainMap from .compat import pickle, Pickler from .cloudpickle import ( _extract_code_globals, _BUILTIN_TYPE_NAMES, DEFAULT_PROTOCOL, _find_imported_submodules, _get_cell_contents, _is_importable, _builtin_type, _get_or_create_tracker_id, _make_skeleton_class, _make_skeleton_enum, _extract_class_dict, dynamic_subimport, subimport, _typevar_reduce, _get_bases, _make_cell, _make_empty_cell, CellType, _is_parametrized_type_hint, PYPY, cell_set, parametrized_type_hint_getinitargs, _create_parametrized_type_hint, builtin_code_type, _make_dict_keys, _make_dict_values, _make_dict_items, ) if pickle.HIGHEST_PROTOCOL >= 5 and not PYPY: def dump(obj, file, protocol=None, buffer_callback=None): CloudPickler( file, protocol=protocol, buffer_callback=buffer_callback ).dump(obj) def dumps(obj, protocol=None, buffer_callback=None): with io.BytesIO() as file: cp = CloudPickler( file, protocol=protocol, buffer_callback=buffer_callback ) cp.dump(obj) return file.getvalue() else: def dump(obj, file, protocol=None): CloudPickler(file, protocol=protocol).dump(obj) def dumps(obj, protocol=None): with io.BytesIO() as file: cp = CloudPickler(file, protocol=protocol) cp.dump(obj) return file.getvalue() load, loads = pickle.load, pickle.loads def _class_getnewargs(obj): type_kwargs = {} if "__slots__" in obj.__dict__: type_kwargs["__slots__"] = obj.__slots__ __dict__ = obj.__dict__.get('__dict__', None) if isinstance(__dict__, property): type_kwargs['__dict__'] = __dict__ return (type(obj), obj.__name__, _get_bases(obj), type_kwargs, _get_or_create_tracker_id(obj), None) def _enum_getnewargs(obj): members = dict((e.name, e.value) for e in obj) return (obj.__bases__, obj.__name__, obj.__qualname__, members, obj.__module__, _get_or_create_tracker_id(obj), None) def _file_reconstructor(retval): return retval def _function_getstate(func): slotstate = { "__name__": func.__name__, "__qualname__": func.__qualname__, "__annotations__": func.__annotations__, "__kwdefaults__": func.__kwdefaults__, "__defaults__": func.__defaults__, "__module__": func.__module__, "__doc__": func.__doc__, "__closure__": func.__closure__, } f_globals_ref = _extract_code_globals(func.__code__) f_globals = {k: func.__globals__[k] for k in f_globals_ref if k in func.__globals__} closure_values = ( list(map(_get_cell_contents, func.__closure__)) if func.__closure__ is not None else () ) slotstate["_cloudpickle_submodules"] = _find_imported_submodules( func.__code__, itertools.chain(f_globals.values(), closure_values)) slotstate["__globals__"] = f_globals state = func.__dict__ return state, slotstate def _class_getstate(obj): clsdict = _extract_class_dict(obj) clsdict.pop('__weakref__', None) if issubclass(type(obj), abc.ABCMeta): clsdict.pop('_abc_cache', None) clsdict.pop('_abc_negative_cache', None) clsdict.pop('_abc_negative_cache_version', None) registry = clsdict.pop('_abc_registry', None) if registry is None: clsdict.pop('_abc_impl', None) (registry, _, _, _) = abc._get_dump(obj) clsdict["_abc_impl"] = [subclass_weakref() for subclass_weakref in registry] else: clsdict["_abc_impl"] = [type_ for type_ in registry] if "__slots__" in clsdict: if isinstance(obj.__slots__, str): clsdict.pop(obj.__slots__) else: for k in obj.__slots__: clsdict.pop(k, None) clsdict.pop('__dict__', None) return (clsdict, {}) def _enum_getstate(obj): clsdict, 
slotstate = _class_getstate(obj) members = dict((e.name, e.value) for e in obj) for attrname in ["_generate_next_value_", "_member_names_", "_member_map_", "_member_type_", "_value2member_map_"]: clsdict.pop(attrname, None) for member in members: clsdict.pop(member) return clsdict, slotstate def _code_reduce(obj): if hasattr(obj, "co_posonlyargcount"): args = ( obj.co_argcount, obj.co_posonlyargcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars ) else: args = ( obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name, obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars ) return types.CodeType, args def _cell_reduce(obj): try: obj.cell_contents except ValueError: return _make_empty_cell, () else: return _make_cell, (obj.cell_contents, ) def _classmethod_reduce(obj): orig_func = obj.__func__ return type(obj), (orig_func,) def _file_reduce(obj): import io if not hasattr(obj, "name") or not hasattr(obj, "mode"): raise pickle.PicklingError( "Cannot pickle files that do not map to an actual file" ) if obj is sys.stdout: return getattr, (sys, "stdout") if obj is sys.stderr: return getattr, (sys, "stderr") if obj is sys.stdin: raise pickle.PicklingError("Cannot pickle standard input") if obj.closed: raise pickle.PicklingError("Cannot pickle closed files") if hasattr(obj, "isatty") and obj.isatty(): raise pickle.PicklingError( "Cannot pickle files that map to tty objects" ) if "r" not in obj.mode and "+" not in obj.mode: raise pickle.PicklingError( "Cannot pickle files that are not opened for reading: %s" % obj.mode ) name = obj.name retval = io.StringIO() try: curloc = obj.tell() obj.seek(0) contents = obj.read() obj.seek(curloc) except IOError as e: raise pickle.PicklingError( "Cannot pickle file %s as it cannot be read" % name ) from e retval.write(contents) retval.seek(curloc) retval.name = name return _file_reconstructor, (retval,) def _getset_descriptor_reduce(obj): return getattr, (obj.__objclass__, obj.__name__) def _mappingproxy_reduce(obj): return types.MappingProxyType, (dict(obj),) def _memoryview_reduce(obj): return bytes, (obj.tobytes(),) def _module_reduce(obj): if _is_importable(obj): return subimport, (obj.__name__,) else: obj.__dict__.pop('__builtins__', None) return dynamic_subimport, (obj.__name__, vars(obj)) def _method_reduce(obj): return (types.MethodType, (obj.__func__, obj.__self__)) def _logger_reduce(obj): return logging.getLogger, (obj.name,) def _root_logger_reduce(obj): return logging.getLogger, () def _property_reduce(obj): return property, (obj.fget, obj.fset, obj.fdel, obj.__doc__) def _weakset_reduce(obj): return weakref.WeakSet, (list(obj),) def _dynamic_class_reduce(obj): if Enum is not None and issubclass(obj, Enum): return ( _make_skeleton_enum, _enum_getnewargs(obj), _enum_getstate(obj), None, None, _class_setstate ) else: return ( _make_skeleton_class, _class_getnewargs(obj), _class_getstate(obj), None, None, _class_setstate ) def _class_reduce(obj): if obj is type(None): return type, (None,) elif obj is type(Ellipsis): return type, (Ellipsis,) elif obj is type(NotImplemented): return type, (NotImplemented,) elif obj in _BUILTIN_TYPE_NAMES: return _builtin_type, (_BUILTIN_TYPE_NAMES[obj],) elif not _is_importable(obj): return 
_dynamic_class_reduce(obj) return NotImplemented def _dict_keys_reduce(obj): return _make_dict_keys, (list(obj), ) def _dict_values_reduce(obj): return _make_dict_values, (list(obj), ) def _dict_items_reduce(obj): return _make_dict_items, (dict(obj), ) def _function_setstate(obj, state): state, slotstate = state obj.__dict__.update(state) obj_globals = slotstate.pop("__globals__") obj_closure = slotstate.pop("__closure__") slotstate.pop("_cloudpickle_submodules") obj.__globals__.update(obj_globals) obj.__globals__["__builtins__"] = __builtins__ if obj_closure is not None: for i, cell in enumerate(obj_closure): try: value = cell.cell_contents except ValueError: continue cell_set(obj.__closure__[i], value) for k, v in slotstate.items(): setattr(obj, k, v) def _class_setstate(obj, state): state, slotstate = state registry = None for attrname, attr in state.items(): if attrname == "_abc_impl": registry = attr else: setattr(obj, attrname, attr) if registry is not None: for subclass in registry: obj.register(subclass) return obj class CloudPickler(Pickler): _dispatch_table = {} _dispatch_table[classmethod] = _classmethod_reduce _dispatch_table[io.TextIOWrapper] = _file_reduce _dispatch_table[logging.Logger] = _logger_reduce _dispatch_table[logging.RootLogger] = _root_logger_reduce _dispatch_table[memoryview] = _memoryview_reduce _dispatch_table[property] = _property_reduce _dispatch_table[staticmethod] = _classmethod_reduce _dispatch_table[CellType] = _cell_reduce _dispatch_table[types.CodeType] = _code_reduce _dispatch_table[types.GetSetDescriptorType] = _getset_descriptor_reduce _dispatch_table[types.ModuleType] = _module_reduce _dispatch_table[types.MethodType] = _method_reduce _dispatch_table[types.MappingProxyType] = _mappingproxy_reduce _dispatch_table[weakref.WeakSet] = _weakset_reduce _dispatch_table[typing.TypeVar] = _typevar_reduce _dispatch_table[_collections_abc.dict_keys] = _dict_keys_reduce _dispatch_table[_collections_abc.dict_values] = _dict_values_reduce _dispatch_table[_collections_abc.dict_items] = _dict_items_reduce dispatch_table = ChainMap(_dispatch_table, copyreg.dispatch_table)
Apache License 2.0
bfreskura/kindle_note_parser
export.py
choose_export
python
def choose_export(export_index, template_dir):
    author = input(
        "Enter your name (this will appear on the top of the document): ")
    if export_index == 0:
        return exporter.ExportTex(author_name=author,
                                  template_path=choose_template(
                                      template_dir=template_dir,
                                      extension=EXPORT_EXTENSIONS['tex']))
    elif export_index == 1:
        return exporter.ExportMarkdown(author_name=author,
                                       template_path=choose_template(
                                           template_dir=template_dir,
                                           extension=EXPORT_EXTENSIONS['markdown']))
    else:
        return exporter.ExportPlain(author_name=author)
Choose export object based on the user input :param template_dir: Templates directory path :param export_index: Index which was input by user :return: Export object
https://github.com/bfreskura/kindle_note_parser/blob/f560a3146a9199a39a21f1b9261b5e8ab07dd9a7/export.py#L37-L61
import argparse import collections import sys from constants import * from export import exporter from raw_parser import raw_parser def choose_template(template_dir, extension): print("Available templates for the specified format: ") available = {id: name for id, name in enumerate(os.listdir(template_dir)) if name.endswith(extension)} if not available: print("No supported template files in this folder. Try again with" "different templates folder :)") sys.exit() [print("{}) {}".format(id, name)) for id, name in available.items() if name.endswith(extension)] selection = "" while selection not in list(available.keys()): try: selection = int(input("Choose format index from the list: ")) except ValueError: print("Please enter valid integer format from the list.") return os.path.join(template_dir, available[selection])
MIT License
skamithi/ansible-tower-ldap-settings
library/tower_ldap_settings.py
transform_ldap_group_type
python
def transform_ldap_group_type(group_type):
    transformed_group_type = ''
    if group_type == 'NestedActiveDirectoryGroupType':
        transformed_group_type = 'active_directory'
    elif group_type == 'NestedGroupOfNamesType':
        transformed_group_type = 'open_ldap'
    elif group_type == 'active_directory':
        transformed_group_type = 'NestedActiveDirectoryGroupType'
    elif group_type == 'open_ldap':
        transformed_group_type = "NestedGroupOfNamesType"
    else:
        transformed_group_type = 'MemberDNGroupType'
    return transformed_group_type
This transformation function takes a group type name. If the group type name matches this module group_type options then it outputs the Tower API equivalent output. And the reverse is true.
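Assuming transform_ldap_group_type from this row is in scope, the mapping is symmetric between module option names and Tower API group types, with MemberDNGroupType as the fallback for anything unrecognized:

assert transform_ldap_group_type('active_directory') == 'NestedActiveDirectoryGroupType'
assert transform_ldap_group_type('NestedActiveDirectoryGroupType') == 'active_directory'
assert transform_ldap_group_type('open_ldap') == 'NestedGroupOfNamesType'
assert transform_ldap_group_type('something_else') == 'MemberDNGroupType'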
https://github.com/skamithi/ansible-tower-ldap-settings/blob/24c59dbcb935177803a3720c5c00d91fe54fa4b9/library/tower_ldap_settings.py#L402-L422
from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: tower_ldap_settings author: "Stanley Karunditu (@linuxsimba)" short_description: Set Ansible Tower LDAP Credentials description: - Set Ansible Tower LDAP Credentials. These actions can only be performed by a Tower superuser. See U(https://www.ansible.com/tower) for details. options: ldap_server_protocol: description: - LDAP connection protocol required: False default: ldaps ldap_server_name: description: - LDAP Server FQDN required: False ldap_server_port: description: - LDAP Server Connection Port default: 636 required: False ldap_bind_dn: description: - LDAP Bind User DN required: False ldap_bind_password: description: - LDAP Bind User password. If set, this module will never be idempotent - because bind password is encrypted and difficult to confirm if the password - has changed. required: False ldap_start_tls: description: - Set LDAP Start TLS default: false required: False ldap_user_search: description: - List of DNs to search through to find users. Performs recursive LDAP search required: False ldap_group_search: description: - single group DN to find User Groups. Performs recursive LDAP search. - Multiple DNs cannot be specified required: False ldap_group_type: description: - Specify the type of LDAP database to be used. - The C(active_directory) option will set the I(AUTH_LDAP_GROUP_TYPE) Tower setting attribute to I(NestedActiveDirectoryGroupType) - The C(open_ldap) option will set the I(AUTH_LDAP_GROUP_TYPE) Tower setting attribute to I(NestedGroupOfNamesType) - The default setting is C(active_directory) choices: ['"active_directory"', "open_ldap"] default: "active_directory" required: False ldap_superuser: description: - Specify DN that can have superuser privileges on Tower. Could be a Group or User. - Multiple DNs cannot be specified required: False ldap_organization_map: description: - Provide a list of Tower organization maps dictionaries. - Each dictionary contains the following - C(organization) - Tower Organization name. This value is case sensitive. - C(users) - List of DNs associated with the organization - C(admins) list - List of DNs that have admin organization privileges required: False ldap_team_map: description: - Provide a list of Tower teams map directories - Each directory contains the following - C(organization) - Tower organization the team belongs to. This value is case sensitive. - C(team) - Tower team name - C(users) - List of User LDAP DNs that should belong to the team. required: False state: description: - When set to absent all LDAP configuration is removed. required: True default: "present" choices: ["present", "absent"] extends_documentation_fragment: tower ''' EXAMPLES = ''' - name: Remove all LDAP configuration tower_ldap_settings: state: absent tower_host: tower.example.com tower_username: "{{ vault_tower_user }}" tower_password: "{{ vault_tower_pass }}" - name: | Update Ldap org and team map with a dbapp org and admin team. 
Previously only defined a webapp organization and webapp-admin team tower_ldap_settings: state: present ldap_organization_map: - organization: webapp users: "cn=webapp,ou=groups,dc=example,dc=local" admins: "cn=webadmins,ou=groups,dc=example,dc=local" - organization: dbapp users: "cn=dbapp,ou=groups,dc=example,dc=local" admins: "cn=dbadmins,ou=groups,dc=example,dc=local" ldap_team_map: - team: webapp_admins organization: webapp users: "cn=webadmins,ou=groups,dc=example,dc=local" - team: dbapp_admins organization: dbapp users: "cn=dbadmins, ou=groups,dc=example,dc=local" tower_host: tower.example.com tower_username: "{{ vault_tower_user }}" tower_password: "{{ vault_tower_pass }}" - name: | use multiple user search DNs to find users. Modify existing settings. tower_ldap_settings: state: present ldap_user_search: - "ou=hrdept,ou=users,dc=example,dc=local" - "ou=engdept,ou=users,dc=example,dc=local" tower_host: tower.example.com tower_username: "{{ vault_tower_user }}" tower_password: "{{ vault_tower_pass }}" - name: | set all available tower ldap module settings. NOTE - it is not idempotent because ldap_bind_password is defined tower_ldap_settings: state: present ldap_server_name: adserver.example.local ldap_bind_dn: "cn=towerbinduser,ou=users,dc=example,dc=local" ldap_bind_password: "{{ vault_bind_pass }}" ldap_user_search: - "ou=users,dc=example,dc=local" ldap_group_search: "ou=groups,dc=example,dc=local" ldap_superuser: - cn=toweradmins, ou=groups,dc=example,dc=local ldap_organization_map: - organization: webapp users: "cn=webapp,ou=groups,dc=example,dc=local" admins: "cn=webadmins,ou=groups,dc=example,dc=local" ldap_team_map: - team: webapp_admins organization: webapp users: "cn=webadmins,ou=groups,dc=example,dc=local" tower_host: tower.example.com tower_username: "{{ vault_tower_user }}" tower_password: "{{ vault_tower_pass }}" ''' from ansible.module_utils.ansible_tower import tower_argument_spec, tower_auth_config, tower_check_mode,HAS_TOWER_CLI try: import tower_cli import tower_cli.utils.exceptions as exc import json from tower_cli.conf import settings import re except ImportError: pass class FixBoolValuesEncoder(json.JSONEncoder): def change_bool_to_str(self, obj): if isinstance(obj, bool): return str(obj) elif isinstance(obj, dict): for _key, _value in obj.items(): obj[_key] = self.change_bool_to_str(_value) return obj def iterencode(self, obj, _one_shot=True): obj = self.change_bool_to_str(obj) return json.JSONEncoder.iterencode(self, obj) def empty_module_params(module): module.params = { 'ldap_server_name': '', 'ldap_bind_dn': '', 'ldap_bind_password': '', 'ldap_start_tls': False, 'ldap_user_search': [], 'ldap_group_search': [], 'ldap_organization_map': [], 'ldap_team_map': [], 'ldap_group_type': '', 'ldap_user_attr_map': {}, 'ldap_superuser': '', 'state': 'absent' } def clear_all_ldap_config(module, check_mode=False): empty_module_params(module) modify_ldap_config(module, check_mode) def modify_ldap_config(module, check_mode=False): module.changed_values = [] modified_server_uri = False for _ldap_attr, _value in module.current_settings.items(): if _ldap_attr == 'ldap_server_uri' or _ldap_attr == 'ldap_user_attr_map': continue elif _ldap_attr == 'ldap_bind_password': module.changed_values.append(_ldap_attr) module.tower_settings.modify('AUTH_LDAP_BIND_PASSWORD', module.params.get('ldap_bind_password')) elif _value != module.params.get(_ldap_attr): if _ldap_attr == 'ldap_bind_dn': if not check_mode: module.tower_settings.modify('AUTH_LDAP_BIND_DN', module.params.get(_ldap_attr)) 
module.changed_values.append(_ldap_attr) elif _ldap_attr == 'ldap_start_tls': if not check_mode: module.tower_settings.modify('AUTH_LDAP_START_TLS', str(module.params.get(_ldap_attr))) elif _ldap_attr == 'ldap_server_name' or _ldap_attr == 'ldap_server_port' or _ldap_attr == 'ldap_server_protocol': if modified_server_uri == False: if not check_mode: module.tower_settings.modify('AUTH_LDAP_SERVER_URI', transform_ldap_server_uri( ldap_server_name=module.params.get('ldap_server_name'), ldap_server_port=module.params.get('ldap_server_port'), ldap_server_protocol=module.params.get('ldap_server_protocol')).get('ldap_server_uri')) modified_server_uri = True module.changed_values = module.changed_values + [ 'ldap_server_name', 'ldap_server_protocol', 'ldap_server_port', 'ldap_server_uri'] del module.current_settings['ldap_server_name'] del module.current_settings['ldap_server_protocol'] del module.current_settings['ldap_server_uri'] del module.current_settings['ldap_server_port'] elif _ldap_attr == 'ldap_superuser': if not check_mode: module.tower_settings.modify('AUTH_LDAP_USER_FLAGS_BY_GROUP', json.dumps(transform_ldap_user_flags_by_group(module.params.get(_ldap_attr)), cls=FixBoolValuesEncoder)) module.changed_values.append(_ldap_attr) elif _ldap_attr == 'ldap_user_search': if not check_mode: module.tower_settings.modify('AUTH_LDAP_USER_SEARCH', json.dumps(transform_ldap_user_search( module.params.get(_ldap_attr), module.params.get('ldap_group_type') ), cls=FixBoolValuesEncoder)) module.changed_values.append(_ldap_attr) else: real_ldap_attr = "AUTH_%s" % _ldap_attr.upper() if not check_mode: _value = globals()["transform_%s" % (_ldap_attr)](module.params.get(_ldap_attr)) if isinstance(_value, str) or isinstance(_value, bool): _value = str(_value) else: _value = json.dumps(_value, cls=FixBoolValuesEncoder) try: module.tower_settings.modify(real_ldap_attr, _value) except: module.fail_json(msg="Real ldap attr %s with value %s" % (real_ldap_attr, _value)) module.changed_values.append(_ldap_attr) def get_ldap_values(module): tower_settings = tower_cli.get_resource('setting') current_settings = { 'ldap_server_protocol': None, 'ldap_server_name': None, 'ldap_server_port': None } current_ldap_server_uri = tower_settings.get('AUTH_LDAP_SERVER_URI').get('value') ldap_server_uri_settings = transform_ldap_server_uri(ldap_server_uri=current_ldap_server_uri) for _key, _value in ldap_server_uri_settings.items(): current_settings[_key] = _value if module.params.get('ldap_bind_password'): current_settings['ldap_bind_password'] = tower_settings.get('AUTH_LDAP_BIND_PASSWORD').get('value') current_settings['ldap_bind_dn'] = tower_settings.get('AUTH_LDAP_BIND_DN').get('value') current_settings['ldap_start_tls'] = tower_settings.get('AUTH_LDAP_START_TLS').get('value') current_settings['ldap_user_attr_map'] = tower_settings.get('AUTH_LDAP_USER_ATTR_MAP').get('value') group_search = tower_settings.get('AUTH_LDAP_GROUP_SEARCH').get('value') current_settings['ldap_group_search'] = transform_ldap_group_search(group_search) group_type_from_tower = tower_settings.get('AUTH_LDAP_GROUP_TYPE').get('value') group_type = transform_ldap_group_type(group_type_from_tower) current_settings['ldap_group_type'] = group_type user_search = tower_settings.get('AUTH_LDAP_USER_SEARCH').get('value') current_settings['ldap_user_search'] = transform_ldap_user_search(user_search, group_type) ldap_user_flags = tower_settings.get('AUTH_LDAP_USER_FLAGS_BY_GROUP').get('value') current_settings['ldap_superuser']= 
transform_ldap_user_flags_by_group(ldap_user_flags) org_map = tower_settings.get('AUTH_LDAP_ORGANIZATION_MAP').get('value') current_settings['ldap_organization_map'] = transform_ldap_organization_map(org_map) team_map = tower_settings.get('AUTH_LDAP_TEAM_MAP').get('value') current_settings['ldap_team_map'] = transform_ldap_team_map(team_map) current_settings['ldap_user_attr_map'] = tower_settings.get('AUTH_LDAP_USER_ATTR_MAP').get('value') module.tower_settings = tower_settings module.current_settings = current_settings def transform_ldap_user_attr_map(_value=None): attr_map_assignment = { "first_name": "givenName", "last_name": "sn", "email": "mail" } return attr_map_assignment def transform_ldap_server_uri(**kwargs): server_protocol = kwargs.get('ldap_server_protocol') server_name = kwargs.get('ldap_server_name') server_port = kwargs.get('ldap_server_port') server_uri = kwargs.get('ldap_server_uri') if server_uri: uri_match = re.match('(\w+)://(.+):(\d+)', server_uri) if uri_match: server_protocol = uri_match.group(1) server_name = uri_match.group(2) server_port = int(uri_match.group(3)) else: server_uri = '' if server_protocol and server_name and server_port: server_uri = "%s://%s:%s" % (server_protocol, server_name, server_port) result = { 'ldap_server_protocol': server_protocol, 'ldap_server_name': server_name, 'ldap_server_port': server_port, 'ldap_server_uri': server_uri } return result
MIT License
muyeby/amr-dialogue
DialogRG/nn_utils.py
PositionalEncoding.forward
python
def forward(self, x):
    x = x + self.pe[: x.size(0), :]
    return self.dropout(x)
r"""Inputs of forward function Args: x: the sequence fed to the positional encoder model (required). Shape: x: [sequence length, batch size, embed dim] output: [sequence length, batch size, embed dim] Examples: >>> output = pos_encoder(x)
https://github.com/muyeby/amr-dialogue/blob/261535c407be6c166016e4759bc81176b1c99957/DialogRG/nn_utils.py#L79-L91
import numpy as np import copy import math import torch import torch.nn as nn from torch.nn import ModuleList import torch.nn.functional as F def has_nan(tensor): return torch.isnan(tensor).any().item() def _get_activation_fn(activation): if activation == "relu": return F.relu elif activation == "gelu": return F.gelu raise RuntimeError("activation should be relu/gelu, not {}".format(activation)) def _get_clones(module, N): return ModuleList([copy.deepcopy(module) for i in range(N)]) def universal_sentence_embedding(sentences, mask, length, sqrt=True): sentence_sum = torch.bmm(sentences.permute(0, 2, 1), mask.unsqueeze(2).float()).squeeze( -1 ) divisor = length.float().unsqueeze(dim=1) assert (divisor == 0.0).any() is False if sqrt: divisor = divisor.sqrt() sentence_sum /= divisor return sentence_sum def clip_and_normalize(word_probs, epsilon): word_probs = torch.clamp(word_probs, epsilon, 1.0 - epsilon) return word_probs / word_probs.sum(dim=-1, keepdim=True) class PositionalEncoding(nn.Module): def __init__(self, d_model, dropout=0.1, max_len=5000): super(PositionalEncoding, self).__init__() self.dropout = nn.Dropout(p=dropout) pe = torch.zeros(max_len, d_model) position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1) div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model)) pe[:, 0::2] = torch.sin(position * div_term) pe[:, 1::2] = torch.cos(position * div_term) pe = pe.unsqueeze(0).transpose(0, 1) self.register_buffer("pe", pe)
MIT License
sorsnce/red-team
1. Information Gathering/recon-ng/recon/mixins/threads.py
ThreadingMixin._thread_wrapper
python
def _thread_wrapper(self, *args):
    thread_name = threading.current_thread().name
    self.debug(f"THREAD => {thread_name} started.")
    while not self.stopped.is_set():
        try:
            obj = self.q.get_nowait()
        except Empty:
            continue
        try:
            self.module_thread(obj, *args)
        except:
            self.print_exception(f"(thread={thread_name}, object={repr(obj)})")
        finally:
            self.q.task_done()
    self.debug(f"THREAD => {thread_name} exited.")
Wrapper for the worker method defined in the module. Handles calling the actual worker, cleanly exiting upon interrupt, and passing exceptions back to the main process.
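A self-contained sketch of the same worker-loop pattern (non-blocking queue reads so the stop event is honoured, with task_done bookkeeping), minus the mixin's logging helpers:

import threading
from queue import Queue, Empty

q, stopped = Queue(), threading.Event()

def worker():
    while not stopped.is_set():
        try:
            item = q.get_nowait()          # never block, so the stop event can interrupt us
        except Empty:
            continue
        try:
            print("processing", item)      # stand-in for module_thread(obj, *args)
        finally:
            q.task_done()

for i in range(3):
    q.put(i)
threading.Thread(target=worker, daemon=True).start()
q.join()         # wait until every queued item is marked done
stopped.set()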
https://github.com/sorsnce/red-team/blob/5cd1932ccafcd2c1b92b8642e9a64fa0d2e99324/1. Information Gathering/recon-ng/recon/mixins/threads.py#L7-L27
from queue import Queue, Empty
import threading
import time


class ThreadingMixin(object):
MIT License
ngsutils/ngsutils
ngsutils/bam/count/count.py
_calc_read_regions
python
def _calc_read_regions(read):
    regions = []
    start = read.pos
    end = read.pos
    for op, length in read.cigar:
        if op == 0:
            end += length
        elif op == 1:
            pass
        elif op == 2:
            end += length
        elif op == 3:
            regions.append((start, end))
            end += length
            start = end
    regions.append((start, end))
    return regions
Find regions of reference the read covers - breaking on long gaps (N)
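A worked example with a hypothetical read object (pos and cigar are the only attributes used): a 50M 1000N 50M alignment starting at position 100 splits into two covered regions. Assumes _calc_read_regions from this row is in scope:

class FakeRead:                               # minimal stand-in for a pysam aligned read
    pos = 100
    cigar = [(0, 50), (3, 1000), (0, 50)]     # 50M 1000N 50M

print(_calc_read_regions(FakeRead()))         # [(100, 150), (1150, 1200)]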
https://github.com/ngsutils/ngsutils/blob/417e90dc1918fb553dd84990f2c54bd8cea8f44d/ngsutils/bam/count/count.py#L196-L215
import ngsutils.support.stats import sys import tempfile import ngsutils from ngsutils.bam.t import MockBam assert(MockBam) class TmpCountFile(object): def __init__(self): self.tmpfile = tempfile.TemporaryFile() def write(self, count, coding_len, cols): self.tmpfile.write('%s\t%s\t%s\n' % (count, coding_len, '\t'.join([str(x) for x in cols]))) def fetch(self): self.tmpfile.flush() self.tmpfile.seek(0) for line in self.tmpfile: cols = line.strip('\n').split('\t') yield (int(cols[0]), int(cols[1]), cols[2:]) def close(self): self.tmpfile.close() class Model(object): def __init__(self): pass def get_source(self): raise NotImplemented def get_name(self): raise NotImplemented def get_headers(self): raise NotImplemented def get_regions(self): raise NotImplemented def get_postheaders(self): return None def count(self, bam, library_type='FR', coverage=False, uniq_only=False, fpkm=False, norm='', multiple='complete', whitelist=None, blacklist=None, out=sys.stdout, quiet=False, start_only=False): tmpcounts = TmpCountFile() counts_tally = {} total_count = 0.0 if library_type in ['FR', 'RF']: stranded = True else: stranded = False for chrom, starts, ends, strand, cols, callback in self.get_regions(): outcols = cols[:] coding_len = 0 for s, e in zip(starts, ends): coding_len += e - s outcols.append(coding_len) count, reads = _fetch_reads(bam, chrom, strand if stranded else None, starts, ends, multiple, False, whitelist, blacklist, uniq_only, library_type, start_only) outcols.append('') total_count += count if coverage: mean, stdev, median = calc_coverage(bam, chrom, strand if stranded else None, starts, ends, whitelist, blacklist, library_type=library_type) outcols.append(mean) outcols.append(stdev) outcols.append(median) if count > 0: if not count in counts_tally: counts_tally[count] = 1 else: counts_tally[count] += 1 if callback: for callback_cols in callback(bam, count, reads, outcols): tmpcounts.write(count, coding_len, callback_cols) else: tmpcounts.write(count, coding_len, outcols) if not quiet: sys.stderr.write('Calculating normalization...') norm_val = None norm_val_orig = None if norm == 'all': norm_val_orig = _find_mapped_count(bam, whitelist, blacklist, quiet) elif norm == 'mapped': norm_val_orig = total_count elif norm == 'median': norm_val_orig = ngsutils.support.stats.count_median(counts_tally) if norm_val_orig: norm_val = float(norm_val_orig) / 1000000 if not quiet: sys.stderr.write('\n') out.write('## %s\n' % (ngsutils.version())) out.write('## input%s%s\n' % (' ' if bam.filename else '', bam.filename)) out.write('## model %s %s\n' % (self.get_name(), self.get_source())) out.write('## library_type %s\n' % library_type) out.write('## multiple %s\n' % multiple) if start_only: out.write('## start_only\n') if norm_val: out.write('## norm %s %s\n' % (norm, float(norm_val_orig))) out.write('## CPM-factor %s\n' % norm_val) out.write('\t'.join(self.get_headers())) out.write('\tlength\tcount') if norm_val: out.write('\tcount (CPM)') if fpkm: out.write('\tRPKM') if coverage: out.write('\tcoverage mean\tcoverage stdev\tcoverage median') if self.get_postheaders(): out.write('\t') out.write('\t'.join(self.get_postheaders())) out.write('\n') for count, coding_len, outcols in tmpcounts.fetch(): first = True for col in outcols: if not first: out.write('\t') first = False if col == '' or col is None: out.write('%s' % count) if norm_val: out.write('\t') out.write(str(count / norm_val)) if fpkm: out.write('\t') out.write(str(count / (coding_len / 1000.0) / norm_val)) else: out.write(str(col)) 
out.write('\n') tmpcounts.close()
BSD 3-Clause New or Revised License
sfanous/pyecobee
pyecobee/objects/report_job.py
ReportJob.status
python
def status(self):
    return self._status
Gets the status attribute of this ReportJob instance. :return: The value of the status attribute of this ReportJob instance. :rtype: six.text_type
https://github.com/sfanous/pyecobee/blob/3d6b4aec3c6bc9b796aa3d3fd6626909ffdbac13/pyecobee/objects/report_job.py#L64-L73
from pyecobee.ecobee_object import EcobeeObject


class ReportJob(EcobeeObject):
    __slots__ = ['_job_id', '_status', '_message', '_files']

    attribute_name_map = {
        'job_id': 'jobId',
        'jobId': 'job_id',
        'status': 'status',
        'message': 'message',
        'files': 'files',
    }

    attribute_type_map = {
        'job_id': 'six.text_type',
        'status': 'six.text_type',
        'message': 'six.text_type',
        'files': 'List[six.text_type]',
    }

    def __init__(self, job_id=None, status=None, message=None, files=None):
        self._job_id = job_id
        self._status = status
        self._message = message
        self._files = files

    @property
    def job_id(self):
        return self._job_id

    @property
MIT License
jamescooke/flake8-aaa
tests/command_line/test_do_command_line.py
example_file
python
def example_file(tmpdir):
    f = tmpdir.join('example_file.py')
    f.write("""
def test():
    do_stuff()

def test_other():
    do_other_stuff()
""")
    f.name = 'example_file.py'
    return f
Returns: file: Test file like argparse returns which has a 'name' property. This is deliberately named to not look like a test file - which means that the command line functionality of running files regardless of if they're a test file or not can be tested.
https://github.com/jamescooke/flake8-aaa/blob/bc7970d925e43c1fb558dd22533edaabf283f39e/tests/command_line/test_do_command_line.py#L10-L27
import ast

import pytest

from flake8_aaa.command_line import do_command_line
from flake8_aaa.helpers import find_test_functions, is_test_file


@pytest.fixture
MIT License
dmlc/gluon-nlp
src/gluonnlp/models/transformer.py
transformer_base
python
def transformer_base():
    cfg = CN()
    cfg.MODEL = CN()
    cfg.MODEL.src_vocab_size = -1
    cfg.MODEL.tgt_vocab_size = -1
    cfg.MODEL.max_src_length = -1
    cfg.MODEL.max_tgt_length = -1
    cfg.MODEL.scale_embed = True
    cfg.MODEL.pos_embed_type = "sinusoidal"
    cfg.MODEL.shared_embed = True
    cfg.MODEL.tie_weights = True
    cfg.MODEL.attention_dropout = 0.0
    cfg.MODEL.activation_dropout = 0.0
    cfg.MODEL.dropout = 0.1
    cfg.MODEL.layout = 'NT'
    cfg.MODEL.dtype = 'float32'
    cfg.MODEL.ENCODER = CN()
    cfg.MODEL.ENCODER.num_layers = 6
    cfg.MODEL.ENCODER.units = 512
    cfg.MODEL.ENCODER.num_heads = 8
    cfg.MODEL.ENCODER.hidden_size = 2048
    cfg.MODEL.ENCODER.recurrent = False
    cfg.MODEL.ENCODER.activation = 'relu'
    cfg.MODEL.ENCODER.pre_norm = False
    cfg.MODEL.ENCODER.use_qkv_bias = True
    cfg.MODEL.DECODER = CN()
    cfg.MODEL.DECODER.num_layers = 6
    cfg.MODEL.DECODER.units = 512
    cfg.MODEL.DECODER.num_heads = 8
    cfg.MODEL.DECODER.hidden_size = 2048
    cfg.MODEL.DECODER.recurrent = False
    cfg.MODEL.DECODER.activation = 'relu'
    cfg.MODEL.DECODER.pre_norm = False
    cfg.MODEL.DECODER.use_qkv_bias = False
    cfg.INITIALIZER = CN()
    cfg.INITIALIZER.embed = ['xavier', 'gaussian', 'in', 1.0]
    cfg.INITIALIZER.weight = ['xavier', 'uniform', 'avg', 3.0]
    cfg.INITIALIZER.bias = ['zeros']
    cfg.VERSION = 1
    cfg.freeze()
    return cfg
Configuration of Transformer WMT EN-DE Base
https://github.com/dmlc/gluon-nlp/blob/5d4bc9eba7226ea9f9aabbbd39e3b1e886547e48/src/gluonnlp/models/transformer.py#L27-L74
__all__ = ['transformer_cfg_reg', 'transformer_base', 'transformer_base_prenorm', 'transformer_iwslt_de_en', 'transformer_wmt_en_de_big', 'transformer_wmt_en_de_big_t2t', 'TransformerEncoderLayer', 'TransformerDecoderLayer', 'TransformerEncoder', 'TransformerDecoder', 'TransformerModel', 'TransformerInference', 'TransformerNMTInference'] import numpy as _np import mxnet as mx from mxnet import np, npx from mxnet import use_np from mxnet.gluon import nn, HybridBlock from typing import Optional, Tuple, List from ..utils.registry import Registry from ..attention_cell import MultiHeadAttentionCell, gen_self_attn_mask, gen_mem_attn_mask from ..layers import PositionalEmbedding, PositionwiseFFN, InitializerType from ..utils.config import CfgNode as CN from ..sequence_sampler import BaseStepDecoder transformer_cfg_reg = Registry('transformer_cfg') @transformer_cfg_reg.register()
Apache License 2.0
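A quick usage sketch for the transformer_base() preset above; the clone()/defrost()/freeze() calls assume the yacs-style CfgNode API this module wraps, and the vocabulary sizes are placeholder values.
# Hypothetical: derive a concrete config from the frozen base preset.
cfg = transformer_base()
my_cfg = cfg.clone()
my_cfg.defrost()
my_cfg.MODEL.src_vocab_size = 32000   # placeholder vocabulary sizes
my_cfg.MODEL.tgt_vocab_size = 32000
my_cfg.freeze()
print(my_cfg.MODEL.ENCODER.units)     # 512 in the base preset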
quantaxis/quantaxis
QUANTAXIS/QASU/save_binance.py
QA_SU_save_binance
python
def QA_SU_save_binance(frequency):
    if (frequency not in ["1d", "1day", "day"]):
        return QA_SU_save_binance_min(frequency)
    else:
        return QA_SU_save_binance_day(frequency)
Save binance kline "smart"
https://github.com/quantaxis/quantaxis/blob/910cecae70ede6825f5ff58bb1d2186b6fb3dd1d/QUANTAXIS/QASU/save_binance.py#L66-L73
import datetime import time from dateutil.tz import tzutc from dateutil.relativedelta import relativedelta import pandas as pd from QUANTAXIS.QAUtil import ( DATABASE, QASETTING, QA_util_log_info, QA_util_log_expection, QA_util_to_json_from_pandas ) from QUANTAXIS.QAUtil.QADate_Adv import ( QA_util_timestamp_to_str, QA_util_datetime_to_Unix_timestamp, QA_util_print_timestamp ) from QUANTAXIS.QAFetch.QAbinance import ( QA_fetch_binance_symbols, QA_fetch_binance_kline, QA_fetch_binance_kline_min, Binance2QA_FREQUENCY_DICT ) from QUANTAXIS.QAUtil.QAcrypto import ( QA_util_save_raw_symbols, QA_util_find_missing_kline ) from QUANTAXIS.QAFetch.QAQuery import (QA_fetch_cryptocurrency_list) import pymongo BINANCE_MIN_DATE = datetime.datetime(2017, 7, 1, tzinfo=tzutc()) Binance_EXCHANGE = 'BINANCE' Binance_SYMBOL = 'BINANCE.{}'
MIT License
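A minimal sketch of calling the dispatcher above: "1d" routes to the daily saver, while any other string is forwarded to the minute saver, so the "1min" label below is only an assumed frequency value.
QA_SU_save_binance("1d")     # falls into the daily branch
QA_SU_save_binance("1min")   # anything else goes to QA_SU_save_binance_min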
neccam/slt
signjoey/helpers.py
log_data_info
python
def log_data_info(
    train_data: Dataset,
    valid_data: Dataset,
    test_data: Dataset,
    gls_vocab: GlossVocabulary,
    txt_vocab: TextVocabulary,
    logging_function: Callable[[str], None],
):
    logging_function(
        "Data set sizes: \n\ttrain {:d},\n\tvalid {:d},\n\ttest {:d}".format(
            len(train_data),
            len(valid_data),
            len(test_data) if test_data is not None else 0,
        )
    )

    logging_function(
        "First training example:\n\t[GLS] {}\n\t[TXT] {}".format(
            " ".join(vars(train_data[0])["gls"]), " ".join(vars(train_data[0])["txt"])
        )
    )

    logging_function(
        "First 10 words (gls): {}".format(
            " ".join("(%d) %s" % (i, t) for i, t in enumerate(gls_vocab.itos[:10]))
        )
    )
    logging_function(
        "First 10 words (txt): {}".format(
            " ".join("(%d) %s" % (i, t) for i, t in enumerate(txt_vocab.itos[:10]))
        )
    )

    logging_function("Number of unique glosses (types): {}".format(len(gls_vocab)))
    logging_function("Number of unique words (types): {}".format(len(txt_vocab)))
Log statistics of data and vocabulary. :param train_data: :param valid_data: :param test_data: :param gls_vocab: :param txt_vocab: :param logging_function:
https://github.com/neccam/slt/blob/90588825f6229474bc19ac7a6b30ea3116635ba3/signjoey/helpers.py#L118-L162
import copy import glob import os import os.path import errno import shutil import random import logging from sys import platform from logging import Logger from typing import Callable, Optional import numpy as np import torch from torch import nn, Tensor from torchtext.data import Dataset import yaml from signjoey.vocabulary import GlossVocabulary, TextVocabulary def make_model_dir(model_dir: str, overwrite: bool = False) -> str: if os.path.isdir(model_dir): if not overwrite: raise FileExistsError("Model directory exists and overwriting is disabled.") shutil.rmtree(model_dir) os.makedirs(model_dir) return model_dir def make_logger(model_dir: str, log_file: str = "train.log") -> Logger: logger = logging.getLogger(__name__) if not logger.handlers: logger.setLevel(level=logging.DEBUG) fh = logging.FileHandler("{}/{}".format(model_dir, log_file)) fh.setLevel(level=logging.DEBUG) logger.addHandler(fh) formatter = logging.Formatter("%(asctime)s %(message)s") fh.setFormatter(formatter) if platform == "linux": sh = logging.StreamHandler() sh.setLevel(logging.INFO) sh.setFormatter(formatter) logging.getLogger("").addHandler(sh) logger.info("Hello! This is Joey-NMT.") return logger def log_cfg(cfg: dict, logger: Logger, prefix: str = "cfg"): for k, v in cfg.items(): if isinstance(v, dict): p = ".".join([prefix, k]) log_cfg(v, logger, prefix=p) else: p = ".".join([prefix, k]) logger.info("{:34s} : {}".format(p, v)) def clones(module: nn.Module, n: int) -> nn.ModuleList: return nn.ModuleList([copy.deepcopy(module) for _ in range(n)]) def subsequent_mask(size: int) -> Tensor: mask = np.triu(np.ones((1, size, size)), k=1).astype("uint8") return torch.from_numpy(mask) == 0 def set_seed(seed: int): torch.manual_seed(seed) np.random.seed(seed) random.seed(seed)
Apache License 2.0
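A usage sketch wiring log_data_info to the make_logger helper shown in the same module; the dataset and vocabulary objects are placeholders for whatever the training script has already built.
logger = make_logger("models/sign_model")   # directory is illustrative
log_data_info(
    train_data=train_data,
    valid_data=valid_data,
    test_data=test_data,
    gls_vocab=gls_vocab,
    txt_vocab=txt_vocab,
    logging_function=logger.info,
)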
hpac/elaps
elaps/backends/lsf.py
Backend.__init__
python
def __init__(self, header="#!/bin/bash -l\n#BSUB -o /dev/null\n"):
    self.jobs = []
    self.header = header
Initialize the backend.
https://github.com/hpac/elaps/blob/390bbe8cbeb056ef57adbc91cdf5bcd1f7cbe187/elaps/backends/lsf.py#L13-L16
import subprocess
import re


class Backend(object):

    name = "lsf"
BSD 3-Clause New or Revised License
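A small sketch of constructing the LSF backend above with an extended bsub header; only __init__ is shown in this record, so anything beyond inspecting name and jobs is out of scope here.
backend = Backend(header="#!/bin/bash -l\n#BSUB -o /dev/null\n#BSUB -W 00:30\n")
print(backend.name)   # "lsf"
print(backend.jobs)   # [] until jobs are submitted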
webkom/lego
lego/apps/ical/viewsets.py
ICalViewset.list
python
def list(self, request):
    token = ICalToken.objects.get_or_create(user=request.user)[0]
    path = request.get_full_path()
    data = {
        "result": {
            "calendars": [
                {
                    "name": "events",
                    "description": "Calendar with all events on Abakus.no.",
                    "path": f"{path}events/",
                },
                {
                    "name": "personal",
                    "description": "Calendar with your favorite events & meetings.",
                    "path": f"{path}personal/",
                },
                {
                    "name": "registration",
                    "description": "Calendar with all event registration times.",
                    "path": f"{path}registrations/",
                },
            ],
            "token": ICalTokenSerializer(token).data,
        }
    }
    return Response(data=data)
List all the different icals.
https://github.com/webkom/lego/blob/90204aca73fe1f22df4e356e35baf12e943f9fc7/lego/apps/ical/viewsets.py#L55-L81
from datetime import timedelta from django.utils import timezone from rest_framework import decorators, permissions, viewsets from rest_framework.permissions import IsAuthenticated from rest_framework.response import Response from rest_framework.settings import api_settings from lego.apps.events.models import Event from lego.apps.ical import constants, utils from lego.apps.ical.authentication import ICalTokenAuthentication from lego.apps.ical.models import ICalToken from lego.apps.ical.serializers import ICalTokenSerializer from lego.apps.meetings.models import Meeting from lego.apps.permissions.utils import get_permission_handler class ICalTokenViewset(viewsets.ViewSet): permission_classes = (IsAuthenticated,) @decorators.action(detail=False, methods=["PATCH"]) def regenerate(self, request, *args, **kwargs): token, created = ICalToken.objects.get_or_create(user=request.user) if not created: token.regenerate() serializer = ICalTokenSerializer(token) return Response(serializer.data) def list(self, request): token = ICalToken.objects.get_or_create(user=request.user)[0] serializer = ICalTokenSerializer(token) return Response(serializer.data) class ICalViewset(viewsets.ViewSet): permission_classes = (permissions.IsAuthenticated,) authentication_classes = api_settings.DEFAULT_AUTHENTICATION_CLASSES + [ ICalTokenAuthentication ]
MIT License
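For orientation, this is the payload shape the list() endpoint above assembles; the path prefix and token value are illustrative, since they depend on the incoming request and the user's ICalToken.
example_payload = {
    "result": {
        "calendars": [
            {"name": "events",
             "description": "Calendar with all events on Abakus.no.",
             "path": "/api/v1/calendar-ical/events/"},        # prefix is illustrative
            {"name": "personal",
             "description": "Calendar with your favorite events & meetings.",
             "path": "/api/v1/calendar-ical/personal/"},
            {"name": "registration",
             "description": "Calendar with all event registration times.",
             "path": "/api/v1/calendar-ical/registrations/"},
        ],
        "token": {"token": "0123456789abcdef"},               # serialized ICalToken, value made up
    }
}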
bluemirrors/cvu
cvu/detector/yolov5/backends/yolov5_tensorflow.py
Yolov5.__init__
python
def __init__(self, weight: str = "yolov5s", device='auto') -> None:
    self._model = None
    self._device = None
    self._loaded = None

    logging.disable(logging.WARNING)
    os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

    self._set_device(device)
    self._load_model(weight)
Initiate Model Args: weight (str, optional): path to SavedModel weight files. Alternatively, it also accepts identifiers (such as yolov5s, yolov5m, etc.) to load pretrained models. Defaults to "yolov5s". device (str, optional): name of the device to be used. Valid devices can be "cpu", "gpu", "cuda", "auto", "tpu". Defaults to "auto" which tries to use the device best suited for selected backend and the hardware availability.
https://github.com/bluemirrors/cvu/blob/2eb10c5844d7cde2a54d2334d9fb8642bccf2b66/cvu/detector/yolov5/backends/yolov5_tensorflow.py#L32-L57
import logging import os from typing import List import numpy as np import tensorflow as tf from tensorflow.keras import mixed_precision from cvu.interface.model import IModel from cvu.utils.general import get_path from cvu.detector.yolov5.backends.common import download_weights from cvu.postprocess.bbox import denormalize from cvu.postprocess.backend_tf.nms.yolov5 import non_max_suppression_tf class Yolov5(IModel):
Apache License 2.0
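A usage sketch for the TensorFlow backend above; reading a frame with OpenCV and calling the detector are assumptions about the surrounding cvu API (only __init__ is shown here), so treat the model(frame) call as illustrative.
import cv2

model = Yolov5(weight="yolov5s", device="auto")   # loads the SavedModel weights
frame = cv2.imread("image.jpg")                   # BGR ndarray, path is a placeholder
predictions = model(frame)                        # assumed __call__ inference entry point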
saevon/webdnd
player/modifier_obj.py
ModField.get
python
def get(self):
    return self._value['value']
Returns the value of this field
https://github.com/saevon/webdnd/blob/4dd5d30ae105ede51bbd92bf5281a6965b7d55f4/player/modifier_obj.py#L58-L62
from collections import defaultdict from itertools import chain from webdnd.shared.utils.decorators import cascade, dirty_cache class StatVal(dict): def __init__(self, value, stats=None): super(StatVal, self).__init__(stats or {}) self['value'] = value class ModField(object): def __init__(self, value): super(ModField, self).__init__() self._label = '' self._kind = '' obj = self.StatVal(value, {'change_type': 'start'}) self._hist = [obj] self._value = obj def __repr__(self): return '%s' % (unicode(self)) def __unicode__(self): return u'"%s"' % self.get() def StatVal(self, value='', stats=None): defaults = { 'label': self._label, 'kind': self._kind, } defaults.update(stats or {}) return StatVal(value, defaults) @cascade def set(self, value): self._get_dirty = True val = self.StatVal(value) val['change_type'] = 'set' self._value = val self._hist.append(val)
MIT License
ndrplz/computer_vision_utils
io_helper.py
write_image
python
def write_image(img_path, img, channels_first=False, color_mode='BGR', resize_dim=None, to_normalize=False):
    color = True if img.ndim == 3 else False
    if color and channels_first:
        img = img.transpose(1, 2, 0)
    if color and color_mode == 'RGB':
        img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
    if resize_dim is not None:
        img = cv2.resize(img, resize_dim[::-1])
    if to_normalize:
        normalize(img)
    cv2.imwrite(img_path, img)
Writes an image (numpy array) on file Parameters ---------- img_path : string Path where to save image img : ndarray Image that has to be saved channels_first: bool Set this True if shape is (c, h, w) color_mode: "RGB", "BGR", optional Whether the image is in RGB or BGR format resize_dim: tuple, optional Resize size following convention (new_h, new_w) - interpolation is linear to_normalize: bool Whether or not to normalize the image between 0 and 255. Returns ----------
https://github.com/ndrplz/computer_vision_utils/blob/869ca8d5dcd6a95392d67127aa2a43042b33993c/io_helper.py#L49-L85
import cv2 import numpy as np import os.path as path def read_image(img_path, channels_first, color=True, color_mode='BGR', dtype=np.float32, resize_dim=None): if not path.exists(img_path): raise ValueError('Provided path "{}" does NOT exist.'.format(img_path)) image = cv2.imread(img_path, cv2.IMREAD_COLOR if color else cv2.IMREAD_GRAYSCALE) if color and color_mode == 'RGB': image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) if resize_dim is not None: image = cv2.resize(image, dsize=resize_dim[::-1], interpolation=cv2.INTER_LINEAR) if color and channels_first: image = np.transpose(image, (2, 0, 1)) return image.astype(dtype)
MIT License
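A short sketch of saving a channels-first RGB float image with the helper above; shapes and the output path are illustrative.
import numpy as np

img = np.random.rand(3, 240, 320).astype(np.float32)   # (c, h, w), RGB
write_image('out.png', img,
            channels_first=True,
            color_mode='RGB',
            resize_dim=(480, 640),    # (new_h, new_w)
            to_normalize=True)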
quay/quay
data/logs_model/document_logs_model.py
_date_range_in_single_index
python
def _date_range_in_single_index(dt1, dt2):
    assert isinstance(dt1, date) and isinstance(dt2, date)

    dt = dt2 - dt1

    if not isinstance(dt1, datetime) and not isinstance(dt2, datetime):
        return dt == timedelta(days=1)

    if dt < timedelta(days=1) and dt >= timedelta(days=0):
        return dt2.day == dt1.day

    if dt == timedelta(days=1):
        return dt1.hour == 0 and dt1.minute == 0 and dt1.second == 0 and dt1.microsecond == 0

    return False
Determine whether a single index can be searched given a range of dates or datetimes. If date instances are given, difference should be 1 day. NOTE: dt2 is exclusive to the search result set. i.e. The date range is larger or equal to dt1 and strictly smaller than dt2
https://github.com/quay/quay/blob/f50f37a393fa2273234f8ac0aa9f34a03a77a731/data/logs_model/document_logs_model.py#L72-L95
import json import logging import uuid from time import time from datetime import timedelta, datetime, date from dateutil.parser import parse as parse_datetime from abc import ABCMeta, abstractmethod from six import add_metaclass from elasticsearch.exceptions import ConnectionTimeout, NotFoundError from data import model from data.database import CloseForLongOperation from data.model import config from data.model.log import ( _json_serialize, ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING, DataModelException, ) from data.logs_model.elastic_logs import LogEntry, configure_es from data.logs_model.datatypes import Log, AggregatedLogCount, LogEntriesPage from data.logs_model.interface import ( ActionLogsDataInterface, LogRotationContextInterface, LogsIterationTimeout, ) from data.logs_model.shared import SharedModel, epoch_ms from data.logs_model.logs_producer import LogProducerProxy, LogSendException from data.logs_model.logs_producer.kafka_logs_producer import KafkaLogsProducer from data.logs_model.logs_producer.elasticsearch_logs_producer import ElasticsearchLogsProducer from data.logs_model.logs_producer.kinesis_stream_logs_producer import KinesisStreamLogsProducer logger = logging.getLogger(__name__) PAGE_SIZE = 20 DEFAULT_RESULT_WINDOW = 5000 MAX_RESULT_WINDOW = 10000 DATE_RANGE_LIMIT = 32 COUNT_REPOSITORY_ACTION_TIMEOUT = 30 def _date_range_descending(start_datetime, end_datetime, includes_end_datetime=False): assert end_datetime >= start_datetime start_date = start_datetime.date() if includes_end_datetime: current_date = end_datetime.date() else: current_date = (end_datetime - timedelta(seconds=1)).date() while current_date >= start_date: yield current_date current_date = current_date - timedelta(days=1)
Apache License 2.0
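A few illustrative evaluations of the predicate above: dates must span exactly one day; datetimes must stay inside one calendar day, or cover exactly one day starting at midnight.
from datetime import date, datetime

_date_range_in_single_index(date(2024, 1, 1), date(2024, 1, 2))                  # True: exactly one day
_date_range_in_single_index(date(2024, 1, 1), date(2024, 1, 3))                  # False: spans two days
_date_range_in_single_index(datetime(2024, 1, 1, 3), datetime(2024, 1, 1, 20))   # True: same calendar day
_date_range_in_single_index(datetime(2024, 1, 1, 0), datetime(2024, 1, 2, 0))    # True: full day from midnight
_date_range_in_single_index(datetime(2024, 1, 1, 3), datetime(2024, 1, 2, 1))    # False: crosses midnight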
2ndwatch/cloudendure-python
cloudendure/cloudendure_api/models/cloud_endure_account_request.py
CloudEndureAccountRequest.email
python
def email(self):
    return self._email
Gets the email of this CloudEndureAccountRequest. # noqa: E501 :return: The email of this CloudEndureAccountRequest. # noqa: E501 :rtype: str
https://github.com/2ndwatch/cloudendure-python/blob/f81d1be1422b7c19adedb06c584803eaaa811919/cloudendure/cloudendure_api/models/cloud_endure_account_request.py#L124-L131
import pprint import re import six class CloudEndureAccountRequest: """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { "id": "str", "request_type": "str", "email": "str", "request_link": "str", "registration_token": "str", } attribute_map = { "id": "id", "request_type": "request_type", "email": "email", "request_link": "request_link", "registration_token": "registration_token", } def __init__( self, id=None, request_type=None, email=None, request_link=None, registration_token=None, ): self._id = None self._request_type = None self._email = None self._request_link = None self._registration_token = None self.discriminator = None if id is not None: self.id = id if request_type is not None: self.request_type = request_type if email is not None: self.email = email if request_link is not None: self.request_link = request_link if registration_token is not None: self.registration_token = registration_token @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def request_type(self): return self._request_type @request_type.setter def request_type(self, request_type): allowed_values = ["REGISTER", "RESET_PASSWORD"] if request_type not in allowed_values: raise ValueError( "Invalid value for `request_type` ({0}), must be one of {1}".format( request_type, allowed_values ) ) self._request_type = request_type @property
MIT License
restran/fomalhaut
fomalhaut/tests/api_client.py
HMACHandler.response_headers_to_sign
python
def response_headers_to_sign(self, headers):
    headers_to_sign = {}
    for name, value in iteritems(headers):
        if name.startswith(HEADER_X_PREFIX):
            headers_to_sign[name] = value

    return headers_to_sign
Select the headers from the request that need to be included in the StringToSign.
https://github.com/restran/fomalhaut/blob/df6762f3aa64c0c0ca50dd8bfd6f2a70b0bced7b/fomalhaut/tests/api_client.py#L80-L90
from __future__ import unicode_literals, absolute_import import hmac import json as json_util import logging import random import time import traceback from base64 import urlsafe_b64encode from hashlib import sha1 import requests from future.moves.urllib.parse import urlparse, urlunparse, urlencode from future.utils import iteritems from fomalhaut.settings import SIGNATURE_EXPIRE_SECONDS, GATEWAY_ERROR_STATUS_CODE, HEADER_X_SIGNATURE, HEADER_X_TIMESTAMP, HEADER_X_APP_ID, HEADER_X_ENCRYPT_TYPE, HEADER_X_NONCE, HEADER_X_SIGN_RESPONSE, HEADER_X_ACCESS_TOKEN, HEADER_X_ENCRYPTED_HEADERS, HEADER_X_ENCRYPTED_URI, HEADER_X_PREFIX from fomalhaut.utils import utf8, utf8_encoded_dict, text_type, AESCipher, unicode_encoded_dict, to_unicode logger = logging.getLogger(__name__) class RequestObject(object): def __init__(self, method=None, uri=None, headers=None, body=None): self.method = method self.uri = uri self.headers = headers self.body = body class APIClient(object): def __init__(self, access_key, secret_key, api_server, *args, **kwargs): self.access_key = access_key self.secret_key = secret_key self.api_server = api_server self.gateway_error_status_code = kwargs.get( 'gateway_error_status_code', GATEWAY_ERROR_STATUS_CODE) self.signature_expire_seconds = kwargs.get( 'signature_expire_seconds', SIGNATURE_EXPIRE_SECONDS) class HMACHandler(object): def __init__(self, client, algorithm=sha1): self.client = client self.algorithm = algorithm def sign_string(self, string_to_sign): logger.debug(string_to_sign) new_hmac = hmac.new( utf8(self.client.secret_key), utf8(string_to_sign), digestmod=self.algorithm) return to_unicode(urlsafe_b64encode(new_hmac.digest()).rstrip(b'=')) def string_to_sign(self, request): headers_to_sign = self.headers_to_sign(request.headers) canonical_headers = self.canonical_headers(headers_to_sign) string_to_sign = b'\n'.join([utf8(request.method.upper()), utf8(request.uri), utf8(canonical_headers), utf8(request.body)]) return string_to_sign
MIT License
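An illustrative run of the filter above. HEADER_X_PREFIX comes from fomalhaut.settings and is not shown in this record, so the 'X-Api-' prefix used below is an assumption.
handler = HMACHandler(APIClient('access-key', 'secret-key', 'http://api.example.com'))
headers = {
    'Content-Type': 'application/json',
    'X-Api-Timestamp': '1500000000',
    'X-Api-Nonce': 'abc123',
}
handler.response_headers_to_sign(headers)
# -> {'X-Api-Timestamp': '1500000000', 'X-Api-Nonce': 'abc123'} if HEADER_X_PREFIX == 'X-Api-'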
olitheolix/aiokubernetes
aiokubernetes/models/v1_ip_block.py
V1IPBlock.__init__
python
def __init__(self, cidr=None, _except=None):
    self._cidr = None
    self.__except = None
    self.discriminator = None

    self.cidr = cidr
    if _except is not None:
        self._except = _except
V1IPBlock - a model defined in Swagger
https://github.com/olitheolix/aiokubernetes/blob/266718b210dff2a9b2212183261ea89adf89115e/aiokubernetes/models/v1_ip_block.py#L42-L51
import pprint import re class V1IPBlock(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'cidr': 'str', '_except': 'list[str]' } attribute_map = { 'cidr': 'cidr', '_except': 'except' }
Apache License 2.0
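A small sketch of building the model above: a /16 block with one excluded subnet. Reading the value back through cidr assumes the usual generated property accessors that accompany this constructor, which are not shown in this record.
block = V1IPBlock(cidr="10.0.0.0/16", _except=["10.0.1.0/24"])
print(block.cidr)   # "10.0.0.0/16"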
hibou57/postiats-utilities
postiats/lexemes.py
get_e
python
def get_e(source):
    return get_char_of_category(source, d.E)
Try to read `E`?.
https://github.com/hibou57/postiats-utilities/blob/2148016083490ba1aeac04fe0f4a3983cd18c707/postiats/lexemes.py#L133-L135
from . import lexemes_defs as d from .lexemes_defs import (Fin, NonFin, Start) class Input: __slots__ = ["source", "length", "pos"] def __init__(self, source): self.source = source self.length = len(source) self.pos = 0 def char(self, offset=0): i = self.pos + offset if i < self.length: return self.source[i] if i == self.length: return d.EOF raise ValueError def consume(self, count=1): assert self.pos + count <= self.length self.pos += count def string(self, start, end=None): if end is None: end = self.pos return self.source[start:self.pos] def at(self, text): i = self.pos j = i + len(text) return self.source[i:j] == text def file_input(path): source_file = open(path, encoding="iso-8859-15") source_text = source_file.read() source_file.close() return Input(source_text) def get_ident(source): result = None c = source.char() if c in d.IDENTFST: result = c while True: c = source.char(len(result)) if c not in d.IDENTRST: break result += c if result is not None: source.consume(len(result)) return result def get_chars_of_category(source, category, count=-1): result = "" count_down = count c = source.char() while c in category and count_down != 0: count_down -= 1 result += c c = source.char(len(result)) source.consume(len(result)) return result if result else None def get_char_of_category(source, category): result = "" c = source.char() if c in category: result = c source.consume(len(result)) return result if result else None def get_char(source, c): result = "" if source.char() == c: result = c source.consume(len(result)) return result if result else None def get_symbol(source): return get_chars_of_category(source, d.SYMBOLIC) def get_oct(source): return get_chars_of_category(source, d.OCTAL) def get_dec(source): return get_chars_of_category(source, d.DIGIT) def get_hex(source): return get_chars_of_category(source, d.XDIGIT) def get_p(source): return get_char_of_category(source, d.P)
BSD 2-Clause Simplified License
ieeerobotics/bot
bot/driver/mec_driver.py
MecDriver.rough_rotate_90
python
def rough_rotate_90(self, direction, r_speed=50, r_time=1):
    if direction == "right":
        r_speed = -r_speed
    self.rotate(r_speed)
    sleep(r_time)
    self.rotate(0)
rotates 90 degrees by blindly turning.
https://github.com/ieeerobotics/bot/blob/9228b00f55ec949f3c39a0020a1e0f61dc64d601/bot/driver/mec_driver.py#L314-L324
from math import sin, cos, pi, fabs, hypot, atan2, degrees from time import sleep import bot.lib.lib as lib import bot.driver.driver as driver from bot.hardware.dmcc_motor import DMCCMotorSet class MecDriver(driver.Driver): min_speed = 0 max_speed = 100 min_angle = -360 max_angle = 360 min_angular_rate = -100 max_angular_rate = 100 def __init__(self, mode='power'): super(MecDriver, self).__init__() motor_config = self.config['dmcc_drive_motors'] self.motors = DMCCMotorSet(motor_config) self.mode = mode def __str__(self): return "fr: {}, fl: {} br: {}, bl: {}".format( self.motors["front_right"], self.motors["front_left"], self.motors["back_right"], self.motors["back_left"]) @lib.api_call def get_motor(self, name): if self.mode == 'power': return self.motors[name].power else: return self.motors[name].velocity @lib.api_call def set_motor(self, name, value): if self.mode == 'power': self.motors[name].power = value else: self.motors[name].velocity = value @property def speed(self): v_forward_right = self.get_motor("front_left") + self.get_motor("back_right") v_forward_left = self.get_motor("front_right") + self.get_motor("back_left") return int(round(hypot(v_forward_right / 2, v_forward_left / 2))) @property def angle(self): v_forward_right = self.get_motor("front_left") + self.get_motor("back_right") v_forward_left = self.get_motor("front_right") + self.get_motor("back_left") return int(round( degrees(atan2(v_forward_right, v_forward_left) - pi / 4))) % 360 @lib.api_call def get_rotation(self): v_left = self.get_motor("front_left") + self.get_motor("back_left") v_right = self.get_motor("front_right") + self.get_motor("back_right") rotation = int(round((v_right - v_left) / 4)) self.logger.debug( "Rotation: (left {}: right: {})".format( rotation, v_left, v_right)) return rotation rotation_rate = property(get_rotation) @lib.api_call def rotate(self, rate): self.logger.debug("Rotating with angular rate: {}".format(rate)) try: assert MecDriver.min_angular_rate <= rate <= MecDriver.max_angular_rate except AssertionError: raise AssertionError("Angular rate is out of bounds") self.set_motor("front_left", -rate) self.set_motor("front_right", rate) self.set_motor("back_left", -rate) self.set_motor("back_right", rate) @lib.api_call def move(self, speed, angle=0): self.logger.debug("speed: {}, angle: {}".format(speed, angle)) try: assert MecDriver.min_speed <= speed <= MecDriver.max_speed except AssertionError: raise AssertionError("Speed is out of bounds") try: assert MecDriver.min_angle <= angle <= MecDriver.max_angle except AssertionError: raise AssertionError("Angle is out of bounds") if speed == 0: self.logger.debug("Special case for speed == 0") self.set_motor("front_left", 0) self.set_motor("front_right", 0) self.set_motor("back_left", 0) self.set_motor("back_right", 0) return front_left = speed * sin(angle * pi / 180 + pi / 4) front_right = speed * cos(angle * pi / 180 + pi / 4) back_left = speed * cos(angle * pi / 180 + pi / 4) back_right = speed * sin(angle * pi / 180 + pi / 4) self.logger.debug(( "pre-scale : front_left: {:6.2f}, front_right: {:6.2f}," " back_left: {:6.2f}, back_right: {:6.2f}").format( front_left, front_right, back_left, back_right)) max_wheel_speed = max( [fabs(front_left), fabs(front_right), fabs(back_left), fabs(back_right)] ) front_left = front_left * speed / max_wheel_speed front_right = front_right * speed / max_wheel_speed back_left = back_left * speed / max_wheel_speed back_right = back_right * speed / max_wheel_speed self.logger.debug( ("post-scale: front_left: 
{:6.2f}, front_right: {:6.2f}," " back_left: {:6.2f}, back_right: {:6.2f}").format( front_left, front_right, back_left, back_right)) self.set_motor("front_left", front_left) self.set_motor("front_right", front_right) self.set_motor("back_left", back_left) self.set_motor("back_right", back_right) @lib.api_call def hard_stop(self, current_speed, current_angle=0): self.move(100, (current_angle + 180) % 180) sleep(0.1) self.move(0, 0) @lib.api_call def move_forward_strafe(self, forward, strafe): speed = hypot(forward, strafe) if speed < MecDriver.min_speed: speed = MecDriver.min_speed elif speed > MecDriver.max_speed: speed = MecDriver.max_speed angle = degrees(atan2(strafe, forward)) % 360 self.move(speed, angle) @lib.api_call def compound_move(self, translate_speed, translate_angle, angular_rate): total_speed = translate_speed + angular_rate if total_speed > MecDriver.max_speed: self.logger.warn("Total speed of move exceeds max: {}/{}".format( total_speed, MecDriver.max_speed)) self.logger.debug("translate_speed: {}, " + "translate_angle: {}, " + "angular_rate: {}".format(translate_speed, translate_angle, angular_rate)) front_left = translate_speed * sin(translate_angle * pi / 180 + pi / 4) + angular_rate front_right = translate_speed * cos(translate_angle * pi / 180 + pi / 4) - angular_rate back_left = translate_speed * cos(translate_angle * pi / 180 + pi / 4) + angular_rate back_right = translate_speed * sin(translate_angle * pi / 180 + pi / 4) - angular_rate max_wheel_speed = max( [fabs(front_left), fabs(front_right), fabs(back_left), fabs(back_right)] ) total_speed = translate_speed + angular_rate front_left = front_left * total_speed / max_wheel_speed front_right = front_right * total_speed / max_wheel_speed back_left = back_left * total_speed / max_wheel_speed back_right = back_right * total_speed / max_wheel_speed self.logger.debug( ("post-scale: front_left: {:6.2f}, front_right: {:6.2f}," " back_left: {:6.2f}, back_right: {:6.2f}").format( front_left, front_right, back_left, back_right)) self.set_motor("front_left", front_left) self.set_motor("front_right", front_right) self.set_motor("back_left", back_left) self.set_motor("back_right", back_right) @lib.api_call def drive(self, speed=50, angle=0, duration=1): self.move(speed, angle) sleep(duration) self.move(0, 0) @lib.api_call def rotate_t(self, r_speed, r_time=999): self.rotate(r_speed) sleep(r_time) self.move(0, 0) @lib.api_call
BSD 2-Clause Simplified License
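A usage sketch for the blind turn above; constructing MecDriver needs the bot's dmcc_drive_motors config, so this only runs on the robot itself, and the tuned speed/time values are illustrative.
driver = MecDriver()               # requires the DMCC motor configuration
driver.rough_rotate_90("left")     # default speed and duration
driver.rough_rotate_90("right", r_speed=60, r_time=0.8)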
contextlab/hypertools
hypertools/_externals/srm.py
SRM._srm
python
def _srm(self, data): samples = data[0].shape[1] subjects = len(data) np.random.seed(self.rand_seed) w, voxels = _init_w_transforms(data, self.features) x, mu, rho2, trace_xtx = self._init_structures(data, subjects) shared_response = np.zeros((self.features, samples)) sigma_s = np.identity(self.features) for iteration in range(self.n_iter): logger.info('Iteration %d' % (iteration + 1)) rho0 = (1 / rho2).sum() (chol_sigma_s, lower_sigma_s) = scipy.linalg.cho_factor( sigma_s, check_finite=False) inv_sigma_s = scipy.linalg.cho_solve( (chol_sigma_s, lower_sigma_s), np.identity(self.features), check_finite=False) sigma_s_rhos = inv_sigma_s + np.identity(self.features) * rho0 (chol_sigma_s_rhos, lower_sigma_s_rhos) = scipy.linalg.cho_factor( sigma_s_rhos, check_finite=False) inv_sigma_s_rhos = scipy.linalg.cho_solve( (chol_sigma_s_rhos, lower_sigma_s_rhos), np.identity(self.features), check_finite=False) wt_invpsi_x = np.zeros((self.features, samples)) trace_xt_invsigma2_x = 0.0 for subject in range(subjects): wt_invpsi_x += (w[subject].T.dot(x[subject])) / rho2[subject] trace_xt_invsigma2_x += trace_xtx[subject] / rho2[subject] log_det_psi = np.sum(np.log(rho2) * voxels) shared_response = sigma_s.dot( np.identity(self.features) - rho0 * inv_sigma_s_rhos).dot( wt_invpsi_x) sigma_s = (inv_sigma_s_rhos + shared_response.dot(shared_response.T) / samples) trace_sigma_s = samples * np.trace(sigma_s) for subject in range(subjects): a_subject = x[subject].dot(shared_response.T) perturbation = np.zeros(a_subject.shape) np.fill_diagonal(perturbation, 0.001) u_subject, s_subject, v_subject = np.linalg.svd( a_subject + perturbation, full_matrices=False) w[subject] = u_subject.dot(v_subject) rho2[subject] = trace_xtx[subject] rho2[subject] += -2 * np.sum(w[subject] * a_subject).sum() rho2[subject] += trace_sigma_s rho2[subject] /= samples * voxels[subject] if logger.isEnabledFor(logging.INFO): loglike = self._likelihood( chol_sigma_s_rhos, log_det_psi, chol_sigma_s, trace_xt_invsigma2_x, inv_sigma_s_rhos, wt_invpsi_x, samples) logger.info('Objective function %f' % loglike) return sigma_s, w, mu, rho2, shared_response
Expectation-Maximization algorithm for fitting the probabilistic SRM. Parameters ---------- data : list of 2D arrays, element i has shape=[voxels_i, samples] Each element in the list contains the fMRI data of one subject. Returns ------- sigma_s : array, shape=[features, features] The covariance :math:`\\Sigma_s` of the shared response Normal distribution. w : list of array, element i has shape=[voxels_i, features] The orthogonal transforms (mappings) :math:`W_i` for each subject. mu : list of array, element i has shape=[voxels_i] The voxel means :math:`\\mu_i` over the samples for each subject. rho2 : array, shape=[subjects] The estimated noise variance :math:`\\rho_i^2` for each subject s : array, shape=[features, samples] The shared response.
https://github.com/contextlab/hypertools/blob/948050a22b345c7dcccf729672c76f49609b1ac8/hypertools/_externals/srm.py#L319-L431
from __future__ import division import logging import numpy as np import scipy from sklearn.base import BaseEstimator, TransformerMixin from sklearn.utils import assert_all_finite from sklearn.utils.validation import NotFittedError __all__ = [ "SRM", "DetSRM" ] logger = logging.getLogger(__name__) def _init_w_transforms(data, features): w = [] subjects = len(data) voxels = np.empty(subjects, dtype=int) for subject in range(subjects): voxels[subject] = data[subject].shape[0] rnd_matrix = np.random.random((voxels[subject], features)) q, r = np.linalg.qr(rnd_matrix) w.append(q) return w, voxels class SRM(BaseEstimator, TransformerMixin): def __init__(self, n_iter=10, features=50, rand_seed=0): self.n_iter = n_iter self.features = features self.rand_seed = rand_seed return def fit(self, X, y=None): logger.info('Starting Probabilistic SRM') if len(X) <= 1: raise ValueError("There are not enough subjects " "({0:d}) to train the model.".format(len(X))) if X[0].shape[1] < self.features: raise ValueError( "There are not enough samples to train the model with " "{0:d} features.".format(self.features)) number_trs = X[0].shape[1] number_subjects = len(X) for subject in range(number_subjects): assert_all_finite(X[subject]) if X[subject].shape[1] != number_trs: raise ValueError("Different number of samples between subjects" ".") self.sigma_s_, self.w_, self.mu_, self.rho2_, self.s_ = self._srm(X) return self def transform(self, X, y=None): if hasattr(self, 'w_') is False: raise NotFittedError("The model fit has not been run yet.") if len(X) != len(self.w_): raise ValueError("The number of subjects does not match the one" " in the model.") s = [None] * len(X) for subject in range(len(X)): s[subject] = self.w_[subject].T.dot(X[subject]) return s def _init_structures(self, data, subjects): x = [] mu = [] rho2 = np.zeros(subjects) trace_xtx = np.zeros(subjects) for subject in range(subjects): mu.append(np.mean(data[subject], 1)) rho2[subject] = 1 trace_xtx[subject] = np.sum(data[subject] ** 2) x.append(data[subject] - mu[subject][:, np.newaxis]) return x, mu, rho2, trace_xtx def _likelihood(self, chol_sigma_s_rhos, log_det_psi, chol_sigma_s, trace_xt_invsigma2_x, inv_sigma_s_rhos, wt_invpsi_x, samples): log_det = (np.log(np.diag(chol_sigma_s_rhos) ** 2).sum() + log_det_psi + np.log(np.diag(chol_sigma_s) ** 2).sum()) loglikehood = -0.5 * samples * log_det - 0.5 * trace_xt_invsigma2_x loglikehood += 0.5 * np.trace( wt_invpsi_x.T.dot(inv_sigma_s_rhos).dot(wt_invpsi_x)) return loglikehood
MIT License
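The EM routine above is driven through the SRM.fit/transform methods shown in the same file; a sketch with synthetic data, where only the shapes are meaningful.
import numpy as np

data = [np.random.randn(v, 200) for v in (1000, 1200, 900)]   # [voxels_i, samples] per subject
srm = SRM(n_iter=10, features=50)
srm.fit(data)                    # runs _srm internally
shared = srm.transform(data)     # list of [features, samples] projections
print(srm.s_.shape)              # (50, 200) shared response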
mavensdc/cdflib
cdflib/cdfwrite.py
CDF.write_variableattrs
python
def write_variableattrs(self, variableAttrs): if not (isinstance(variableAttrs, dict)): raise ValueError('Variable attribute(s) not in dictionary form') dataType = None numElems = None with self.path.open('rb+') as f: f.seek(0, 2) for attr, attrs in variableAttrs.items(): if not (isinstance(attr, str)): raise ValueError('Attribute name must be a string') return if (attr in self.gattrs): raise ValueError(f'Variable attribute: {attr}' + ' is already a global variable') return if (attr in self.vattrs): attrNum = self.vattrs.index(attr) offsetA = self.attrsinfo[attrNum][2] else: attrNum, offsetA = self._write_adr(f, False, attr) entries = 0 if (attrs is None): continue if not (isinstance(attrs, dict)): raise ValueError('An attribute''s attribute(s) not in dictionary form') entryNumX = -1 poffset = -1 for entryID, value in attrs.items(): if (isinstance(entryID, str) and (not (entryID in self.zvars) and not (entryID in self.rvars))): raise KeyError(f'{entryID} not found in the CDF') if (isinstance(entryID, numbers.Number) and (len(self.zvars) > 0 and len(self.rvars) > 0)): raise ValueError(f'{entryID} can not be used as the CDF has both zVariables and rVariables') if (isinstance(entryID, str)): try: entryNum = self.zvars.index(entryID) zVar = True except Exception: try: entryNum = self.rvars.index(entryID) zVar = False except Exception: raise KeyError(f'{entryID} not found') else: entryNum = int(entryID) if (len(self.zvars) > 0 and len(self.rvars) > 0): raise ValueError('Can not use integer form for variable id as there ', 'are both zVariables and rVaribales') if (len(self.zvars) > 0): if (entryNum >= len(self.zvars)): raise ValueError('Variable id: ', entryID, ' not found') else: zVar = True else: if (entryNum >= len(self.rvars)): raise ValueError('Variable id: ', entryID, ' not found') else: zVar = False if (entryNum > entryNumX): entryNumX = entryNum if (isinstance(value, list) or isinstance(value, tuple)): if (len(value) == 2): value2 = value[1] dataType = self._datatype_token(value2) if (dataType > 0): data = value[0] if (dataType == self.CDF_CHAR or dataType == self.CDF_UCHAR): if (isinstance(data, list) or isinstance(data, tuple)): raise ValueError('Invalid variable attribute value') numElems = len(data) elif (dataType == self.CDF_EPOCH or dataType == self.CDF_EPOCH16 or dataType == self.CDF_TIME_TT2000): cvalue = [] if (isinstance(data, list) or isinstance(data, tuple)): numElems = len(data) for x in range(0, numElems): if (isinstance(data[x], str)): avalue = cdfepoch.CDFepoch.parse(data[x]) else: avalue = data[x] if (dataType == self.CDF_EPOCH16): cvalue.append(avalue.real) cvalue.append(avalue.imag) else: cvalue.append(avalue) data = cvalue else: if (isinstance(data, str)): data = cdfepoch.CDFepoch.parse(data) numElems = 1 else: if (isinstance(data, list) or isinstance(data, tuple)): numElems = len(data) else: numElems = 1 else: data = value numElems, dataType = self._datatype_define(value[0]) numElems = len(value) else: data = value numElems, dataType = self._datatype_define(value[0]) numElems = len(value) else: data = value numElems, dataType = self._datatype_define(value) if (numElems is None): warnings.warn('Unknown data') return offset = self._write_aedr(f, False, attrNum, entryNum, data, dataType, numElems, zVar) if (entries == 0): if zVar: self._update_offset_value(f, offsetA+48, 8, offset) else: self._update_offset_value(f, offsetA+20, 8, offset) else: self._update_offset_value(f, poffset+12, 8, offset) poffset = offset entries = entries + 1 if zVar: 
self._update_offset_value(f, offsetA+56, 4, entries) self._update_offset_value(f, offsetA+60, 4, entryNumX) else: self._update_offset_value(f, offsetA+36, 4, entries) self._update_offset_value(f, offsetA+40, 4, entryNumX)
Writes a variable's attributes, provided the variable already exists. Parameters ---------- variableAttrs : dict Variable attribute name and its entry value pair(s). The entry value is also a dictionary of variable id and value pair(s). Variable id can be the variable name or its id number in the file. Use write_var function if the variable does not exist. For example:: variableAttrs={} entries_1={} entries_1['var_name_1'] = 'abcd' entries_1['var_name_2'] = [12, 'cdf_int4'] .... variableAttrs['attr_name_1']=entries_1 entries_2={} entries_2['var_name_1'] = 'xyz' entries_2['var_name_2'] = [[12, 34], 'cdf_int4'] .... variableAttrs['attr_name_2']=entries_2 .... .... f.write_variableattrs(variableAttrs)
https://github.com/mavensdc/cdflib/blob/e0b57ed32ab74197d2c9aa4ff948bb92593da5fd/cdflib/cdfwrite.py#L456-L622
from typing import Tuple import logging import numpy as np import sys import struct import gzip import hashlib import platform as pf import binascii import cdflib.epochs as cdfepoch import numbers import math import pathlib import warnings def is_open(func): def ensure_open(self, *args, **kwargs): if self.is_closed: raise OSError("This file is already closed, and can no longer be modified.") else: return func(self, *args, **kwargs) return ensure_open class CDF: version = 3 release = 7 increment = 0 CDF_VAR_NAME_LEN256 = 256 CDF_ATTR_NAME_LEN256 = 256 CDF_COPYRIGHT_LEN = 256 CDF_PATHNAME_LEN = 512 CDF_INT1 = 1 CDF_INT2 = 2 CDF_INT4 = 4 CDF_INT8 = 8 CDF_UINT1 = 11 CDF_UINT2 = 12 CDF_UINT4 = 14 CDF_REAL4 = 21 CDF_REAL8 = 22 CDF_EPOCH = 31 CDF_EPOCH16 = 32 CDF_TIME_TT2000 = 33 CDF_BYTE = 41 CDF_FLOAT = 44 CDF_DOUBLE = 45 CDF_CHAR = 51 CDF_UCHAR = 52 NETWORK_ENCODING = 1 SUN_ENCODING = 2 VAX_ENCODING = 3 DECSTATION_ENCODING = 4 SGi_ENCODING = 5 IBMPC_ENCODING = 6 IBMRS_ENCODING = 7 HOST_ENCODING = 8 PPC_ENCODING = 9 HP_ENCODING = 11 NeXT_ENCODING = 12 ALPHAOSF1_ENCODING = 13 ALPHAVMSd_ENCODING = 14 ALPHAVMSg_ENCODING = 15 ALPHAVMSi_ENCODING = 16 ARM_LITTLE_ENCODING = 17 ARM_BIG_ENCODING = 18 VARY = -1 NOVARY = 0 ROW_MAJOR = 1 COLUMN_MAJOR = 2 NO_CHECKSUM = 0 MD5_CHECKSUM = 1 OTHER_CHECKSUM = 2 GLOBAL_SCOPE = 1 VARIABLE_SCOPE = 2 GZIP_COMPRESSION = 5 NO_SPARSERECORDS = 0 PAD_SPARSERECORDS = 1 PREV_SPARSERECORDS = 2 V3magicNUMBER_1 = 'cdf30001' V3magicNUMBER_2 = '0000ffff' V3magicNUMBER_2c = 'cccc0001' CDR_ = 1 GDR_ = 2 rVDR_ = 3 ADR_ = 4 AgrEDR_ = 5 VXR_ = 6 VVR_ = 7 zVDR_ = 8 AzEDR_ = 9 CCR_ = 10 CPR_ = 11 SPR_ = 12 CVVR_ = 13 NUM_VXR_ENTRIES = 7 NUM_VXRlvl_ENTRIES = 3 UUIR_BASE_SIZE64 = 12 UIR_BASE_SIZE64 = 28 CDR_BASE_SIZE64 = 56 GDR_BASE_SIZE64 = 84 zVDR_BASE_SIZE64 = 88 + CDF_VAR_NAME_LEN256 rVDR_BASE_SIZE64 = 84 + CDF_VAR_NAME_LEN256 VXR_BASE_SIZE64 = 28 VVR_BASE_SIZE64 = 12 ADR_BASE_SIZE64 = 68 + CDF_ATTR_NAME_LEN256 AEDR_BASE_SIZE64 = 56 CCR_BASE_SIZE64 = 32 CPR_BASE_SIZE64 = 24 SPR_BASE_SIZE64 = 24 CVVR_BASE_SIZE64 = 24 BLOCKING_BYTES = 65536 level = 0 def __init__(self, path, cdf_spec=None, delete=False): path = pathlib.Path(path).expanduser() major = 1 if cdf_spec is not None: if (isinstance(major, str)): major = self._majority_token(major) encoding = cdf_spec.get('Encoding', 8) if (isinstance(encoding, str)): encoding = self._encoding_token(encoding) checksum = cdf_spec.get('Checksum', False) cdf_compression = cdf_spec.get('Compressed', 0) if (isinstance(cdf_compression, int)): if not 0 <= cdf_compression <= 9: cdf_compression = 0 else: cdf_compression = 6 if cdf_compression else 0 rdim_sizes = cdf_spec.get('rDim_sizes', None) num_rdim = len(rdim_sizes) if rdim_sizes is not None else 0 else: encoding = 8 checksum = False cdf_compression = 0 num_rdim = 0 rdim_sizes = None if (major < 1 or major > 2): raise OSError('Bad major.') osSystem = pf.system() osMachine = pf.uname()[5] if (encoding == 8): if osSystem != 'SunOS' or osMachine != 'sparc': self._encoding = self.IBMPC_ENCODING else: self._encoding = self.SUN_ENCODING else: self._encoding = encoding if (self._encoding == -1): raise OSError('Bad encoding.') if not isinstance(checksum, bool): raise ValueError('Bad checksum.') if path.suffix != '.cdf': path = path.with_suffix('.cdf') if len(str(path)) > self.CDF_PATHNAME_LEN: raise OSError('CDF:', path, ' longer than allowed length.') if path.is_file(): if not delete: raise OSError('file: ', path, ' already exists....\n', 'Delete it or specify the \'delete=False\' option.') else: 
path.unlink() self.path = path self.compressed_file = path.with_suffix('.tmp') if cdf_compression > 0 else None self.zvarsinfo = {} self.rvarsinfo = {} self.attrsinfo = {} self.gattrs = [] self.vattrs = [] self.attrs = [] self.zvars = [] self.rvars = [] self.checksum = checksum self.compression = cdf_compression self.num_rdim = num_rdim self.rdim_sizes = rdim_sizes self.majority = major with path.open('wb') as f: f.write(binascii.unhexlify(self.V3magicNUMBER_1)) f.write(binascii.unhexlify(self.V3magicNUMBER_2)) self.cdr_head = self._write_cdr(f, major, self._encoding, checksum) self.gdr_head = self._write_gdr(f) self.offset = f.tell() self.is_closed = False def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() return def close(self): if self.is_closed: return if self.compressed_file is None: with self.path.open('rb+') as f: f.seek(0, 2) eof = f.tell() self._update_offset_value(f, self.gdr_head+36, 8, eof) if self.checksum: f.write(self._md5_compute(f)) self.is_closed = True return with self.path.open('rb+') as f: f.seek(0, 2) eof = f.tell() self._update_offset_value(f, self.gdr_head+36, 8, eof) with self.compressed_file.open('wb+') as g: g.write(bytearray.fromhex(self.V3magicNUMBER_1)) g.write(bytearray.fromhex(self.V3magicNUMBER_2c)) self._write_ccr(f, g, self.compression) if self.checksum: g.seek(0, 2) g.write(self._md5_compute(g)) self.path.unlink() self.compressed_file.rename(self.path) self.is_closed = True @is_open def write_globalattrs(self, globalAttrs): if not (isinstance(globalAttrs, dict)): raise ValueError('Global attribute(s) not in dictionary form') dataType = None numElems = None with self.path.open('rb+') as f: f.seek(0, 2) for attr, entry in globalAttrs.items(): if (attr in self.gattrs): raise ValueError(f'Global attribute: {attr} already exists.') if (attr in self.vattrs): logging.warning(f'Attribute: {attr} already defined as a variable attribute.') continue attrNum, offsetADR = self._write_adr(f, True, attr) entries = 0 if entry is None: continue entryNumMaX = -1 poffset = -1 for entryNum, value in entry.items(): if (entryNumMaX < entryNum): entryNumMaX = entryNum if (isinstance(value, list) or isinstance(value, tuple)): if (len(value) == 2): value2 = value[1] dataType = self._datatype_token(value2) if (dataType > 0): data = value[0] if (dataType == self.CDF_CHAR or dataType == self.CDF_UCHAR): if (isinstance(data, list) or isinstance(data, tuple)): warnings.warn('Invalid global attribute value') return numElems = len(data) elif (dataType == self.CDF_EPOCH or dataType == self.CDF_EPOCH16 or dataType == self.CDF_TIME_TT2000): cvalue = [] if (isinstance(data, list) or isinstance(data, tuple)): numElems = len(data) for x in range(0, numElems): if (isinstance(data[x], str)): cvalue.append(cdfepoch.CDFepoch.parse(data[x])) else: cvalue.append(data[x]) data = cvalue else: if (isinstance(data, str)): data = cdfepoch.CDFepoch.parse(data) numElems = 1 else: if (isinstance(data, list) or isinstance(data, tuple)): numElems = len(data) else: numElems = 1 else: data = value numElems, dataType = self._datatype_define(value[0]) numElems = len(value) else: data = value numElems, dataType = self._datatype_define(value[0]) numElems = len(value) else: data = value numElems, dataType = self._datatype_define(value) if (numElems is None): warnings.warn('Unknown data') return offset = self._write_aedr(f, True, attrNum, entryNum, data, dataType, numElems, None) if (entries == 0): self._update_offset_value(f, offsetADR+20, 8, offset) else: 
self._update_offset_value(f, poffset+12, 8, offset) poffset = offset entries = entries + 1 self._update_offset_value(f, offsetADR+36, 4, entries) self._update_offset_value(f, offsetADR+40, 4, entryNumMaX) @is_open
MIT License
fusionauth/fusionauth-python-client
src/main/python/fusionauth/fusionauth_client.py
FusionAuthClient.action_user
python
def action_user(self, request):
    return self.start().uri('/api/user/action') \
        .body_handler(JSONBodyHandler(request)) \
        .post() \
        .go()
Takes an action on a user. The user being actioned is called the "actionee" and the user taking the action is called the "actioner". Both user ids are required in the request object. Attributes: request: The action request that includes all of the information about the action being taken including the id of the action, any options and the duration (if applicable).
https://github.com/fusionauth/fusionauth-python-client/blob/20bf313710eb0af6bfb9c07b7864b52fe5853eb0/src/main/python/fusionauth/fusionauth_client.py#L39-L51
from deprecated import deprecated from fusionauth.rest_client import RESTClient, JSONBodyHandler, FormDataBodyHandler class FusionAuthClient: def __init__(self, api_key, base_url): self.api_key = api_key self.base_url = base_url self.tenant_id = None def set_tenant_id(self, tenant_id): self.tenant_id = tenant_id
Apache License 2.0
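A hedged sketch of calling action_user above. The request body follows the docstring's description (actioner/actionee ids and the action id), but the exact FusionAuth schema is not shown in this record, so the keys below are assumptions; was_successful() is the helper on the returned client response.
client = FusionAuthClient('api-key-here', 'http://localhost:9011')
response = client.action_user({
    'action': {
        'actioneeUserId': '00000000-0000-0000-0000-000000000001',   # user being actioned (assumed key)
        'actionerUserId': '00000000-0000-0000-0000-000000000002',   # user taking the action (assumed key)
        'userActionId': '00000000-0000-0000-0000-0000000000aa',     # id of the configured action (assumed key)
    },
})
if response.was_successful():
    print(response.success_response)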
sberbank-ai-lab/lightautoml
lightautoml/reader/base.py
Reader.fit_read
python
def fit_read(
    self,
    train_data: Any,
    features_names: Optional[List[str]] = None,
    roles: UserRolesDefinition = None,
    **kwargs: Any
):
    raise NotImplementedError
Abstract function to get dataset with initial feature selection.
https://github.com/sberbank-ai-lab/lightautoml/blob/51a4e2bd0ebffbe0817fb50434280f8e7c40fa4c/lightautoml/reader/base.py#L100-L108
import logging from copy import deepcopy from typing import Any from typing import Dict from typing import List from typing import Optional from typing import Sequence from typing import TypeVar from typing import Union from typing import cast import numpy as np import pandas as pd from pandas import DataFrame from pandas import Series from ..dataset.base import array_attr_roles from ..dataset.base import valid_array_attributes from ..dataset.np_pd_dataset import PandasDataset from ..dataset.roles import CategoryRole from ..dataset.roles import ColumnRole from ..dataset.roles import DatetimeRole from ..dataset.roles import DropRole from ..dataset.roles import NumericRole from ..dataset.utils import roles_parser from ..tasks import Task from .guess_roles import calc_category_rules from .guess_roles import calc_encoding_rules from .guess_roles import get_category_roles_stat from .guess_roles import get_null_scores from .guess_roles import get_numeric_roles_stat from .guess_roles import rule_based_cat_handler_guess from .guess_roles import rule_based_roles_guess from .utils import set_sklearn_folds logger = logging.getLogger(__name__) RoleType = TypeVar("RoleType", bound=ColumnRole) RolesDict = Dict[str, RoleType] UserDefinedRole = Optional[Union[str, RoleType]] UserDefinedRolesDict = Dict[UserDefinedRole, Sequence[str]] UserDefinedRolesSequence = Sequence[UserDefinedRole] UserRolesDefinition = Optional[Union[UserDefinedRole, UserDefinedRolesDict, UserDefinedRolesSequence]] class Reader: def __init__(self, task: Task, *args: Any, **kwargs: Any): self.task = task self._roles = {} self._dropped_features = [] self._used_array_attrs = {} self._used_features = [] @property def roles(self) -> RolesDict: return self._roles @property def dropped_features(self) -> List[str]: return self._dropped_features @property def used_features(self) -> List[str]: return self._used_features @property def used_array_attrs(self) -> Dict[str, str]: return self._used_array_attrs
Apache License 2.0
tcalmant/ipopo
pelix/utilities.py
EventData.data
python
def data(self):
    return self.__data
Returns the associated value
https://github.com/tcalmant/ipopo/blob/1d4b81207e67890dfccc8f562336c7104f194c17/pelix/utilities.py#L596-L601
import collections import contextlib import functools import inspect import logging import sys import threading import traceback try: from typing import Any, Optional, Union except ImportError: pass import pelix.constants __version_info__ = (1, 0, 1) __version__ = ".".join(str(x) for x in __version_info__) __docformat__ = "restructuredtext en" PYTHON_3 = sys.version_info[0] == 3 @contextlib.contextmanager def use_service(bundle_context, svc_reference): if svc_reference is None: raise TypeError("Invalid ServiceReference") try: yield bundle_context.get_service(svc_reference) finally: try: bundle_context.unget_service(svc_reference) except pelix.constants.BundleException: pass ArgSpec = collections.namedtuple("ArgSpec", "args varargs keywords defaults") if hasattr(inspect, "signature"): def get_method_arguments(method): signature = inspect.signature(method) args = [] varargs = None keywords = None defaults = [] for param in signature.parameters.values(): kind = param.kind if kind == inspect.Parameter.VAR_POSITIONAL: varargs = param.name elif kind == inspect.Parameter.VAR_KEYWORD: keywords = param.name else: args.append(param.name) if param.default is not param.empty: defaults.append(param.default) return ArgSpec(args, varargs, keywords, defaults or None) else: import types def get_method_arguments(method): arg_spec = inspect.getargspec(method) if not isinstance(method, types.FunctionType): args = arg_spec.args[1:] else: args = arg_spec.args return ArgSpec( args, arg_spec.varargs, arg_spec.keywords, arg_spec.defaults ) class Deprecated(object): def __init__(self, message=None, logger=None): self.__message = message or "Deprecated method" self.__logger = logger or None self.__already_logged = False def __log(self, method_name): if not self.__already_logged: stack = "\n\t".join(traceback.format_stack()) logging.getLogger(self.__logger).warning( "%s: %s\n%s", method_name, self.__message, stack ) self.__already_logged = True def __call__(self, method): @functools.wraps(method) def wrapped(*args, **kwargs): self.__log(method.__name__) return method(*args, **kwargs) return wrapped class Synchronized(object): def __init__(self, lock=None): if not is_lock(lock): self.__lock = threading.RLock() else: self.__lock = lock def __call__(self, method): @functools.wraps(method) def wrapped(*args, **kwargs): with self.__lock: return method(*args, **kwargs) return wrapped def SynchronizedClassMethod(*locks_attr_names, **kwargs): locks_attr_names = [ lock_name for lock_name in locks_attr_names if lock_name ] if not locks_attr_names: raise ValueError("The lock names list can't be empty") if "sorted" not in kwargs or kwargs["sorted"]: locks_attr_names = list(locks_attr_names) locks_attr_names.sort() def wrapped(method): @functools.wraps(method) def synchronized(self, *args, **kwargs): locks = [getattr(self, attr_name) for attr_name in locks_attr_names] locked = collections.deque() i = 0 try: for lock in locks: if lock is None: raise AttributeError( "Lock '{0}' can't be None in class {1}".format( locks_attr_names[i], type(self).__name__ ) ) i += 1 lock.acquire() locked.appendleft(lock) return method(self, *args, **kwargs) finally: for lock in locked: lock.release() locked.clear() del locks[:] return synchronized return wrapped def is_lock(lock): if lock is None: return False for attr in "acquire", "release", "__enter__", "__exit__": if not hasattr(lock, attr): return False return True def read_only_property(value): return property(lambda cls: value) def remove_all_occurrences(sequence, item): if sequence is None: 
return while item in sequence: sequence.remove(item) def remove_duplicates(items): if items is None: return items new_list = [] for item in items: if item not in new_list: new_list.append(item) return new_list def add_listener(registry, listener): if listener is None or listener in registry: return False registry.append(listener) return True def remove_listener(registry, listener): if listener is not None and listener in registry: registry.remove(listener) return True return False if PYTHON_3: def is_bytes(string): return isinstance(string, bytes) def is_string(string): return isinstance(string, str) def to_bytes(data, encoding="UTF-8"): if isinstance(data, bytes): return data return data.encode(encoding) def to_str(data, encoding="UTF-8"): if isinstance(data, str): return data return str(data, encoding) to_unicode = to_str else: def is_bytes(string): return isinstance(string, str) def is_string(string): return isinstance(string, (str, unicode)) def to_str(data, encoding="UTF-8"): if type(data) is str: return data return data.encode(encoding) to_bytes = to_str def to_unicode(data, encoding="UTF-8"): if type(data) is unicode: return data return data.decode(encoding) def to_iterable(value, allow_none=True): if value is None: if allow_none: return None return [] elif isinstance(value, (list, tuple, set, frozenset)): return value return [value] class EventData(object): __slots__ = ("__event", "__data", "__exception") def __init__(self): self.__event = threading.Event() self.__data = None self.__exception = None @property
Apache License 2.0
readthedocs/readthedocs.org
readthedocs/oauth/migrations/0006_move_oauth_source.py
forwards_move_org_source
python
def forwards_move_org_source(apps, schema_editor):
    RemoteOrganization = apps.get_model('oauth', 'RemoteOrganization')
    SocialAccount = apps.get_model('socialaccount', 'SocialAccount')
    for account in SocialAccount.objects.all():
        rows = (RemoteOrganization.objects
                .filter(users=account.user, source=account.provider)
                .update(account=account))
Use source field to set organization account.
https://github.com/readthedocs/readthedocs.org/blob/2cff8376f0ef8f25ae6d8763bdbec86f47e33ab9/readthedocs/oauth/migrations/0006_move_oauth_source.py#L23-L30
from django.db import migrations def forwards_move_repo_source(apps, schema_editor): RemoteRepository = apps.get_model('oauth', 'RemoteRepository') SocialAccount = apps.get_model('socialaccount', 'SocialAccount') for account in SocialAccount.objects.all(): rows = (RemoteRepository.objects .filter(users=account.user, source=account.provider) .update(account=account)) def backwards_move_repo_source(apps, schema_editor): apps.get_model('oauth', 'RemoteRepository') SocialAccount = apps.get_model('socialaccount', 'SocialAccount') for account in SocialAccount.objects.all(): rows = (account.remote_repositories .update(account=None, source=account.provider))
MIT License
dnandha/mopac
softlearning/scripts/console_scripts.py
launch_example_ec2_cmd
python
def launch_example_ec2_cmd(*args, **kwargs):
    return launch_example_ec2(*args, **kwargs)
Forwards call to `launch_example_cluster` after adding ec2 defaults. This optionally sets the ray autoscaler configuration file to the default ec2 configuration file, and then calls `launch_example_cluster` to execute the original command on autoscaled ec2 cluster by parsing the args. See `launch_example_cluster` for further details.
https://github.com/dnandha/mopac/blob/058128183d16b7f8dcdaf2758a38b10f348566aa/softlearning/scripts/console_scripts.py#L174-L183
from __future__ import absolute_import from __future__ import division from __future__ import print_function import logging import click from mopac.examples.instrument import ( run_example_dry, run_example_local, run_example_debug, run_example_cluster, launch_example_cluster, launch_example_gce, launch_example_ec2) logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) def add_options(options): def decorator(f): for option in options[::-1]: click.decorators._param_memo(f, option) return f return decorator @click.group() def cli(): pass @cli.command( name='run_example_dry', context_settings={'ignore_unknown_options': True}) @click.argument("example_module_name", required=True, type=str) @click.argument('example_argv', nargs=-1, type=click.UNPROCESSED) def run_example_dry_cmd(example_module_name, example_argv): return run_example_dry(example_module_name, example_argv) @cli.command( name='run_local', context_settings={'ignore_unknown_options': True}) @click.argument("example_module_name", required=True, type=str) @click.argument('example_argv', nargs=-1, type=click.UNPROCESSED) def run_example_local_cmd(example_module_name, example_argv): return run_example_local(example_module_name, example_argv) @cli.command( name='run_example_debug', context_settings={'ignore_unknown_options': True}) @click.argument("example_module_name", required=True, type=str) @click.argument('example_argv', nargs=-1, type=click.UNPROCESSED) def run_example_debug_cmd(example_module_name, example_argv): return run_example_debug(example_module_name, example_argv) @cli.command( name='run_example_cluster', context_settings={'ignore_unknown_options': True}) @click.argument("example_module_name", required=True, type=str) @click.argument('example_argv', nargs=-1, type=click.UNPROCESSED) def run_example_cluster_cmd(example_module_name, example_argv): run_example_cluster(example_module_name, example_argv) @cli.command( name='launch_example_cluster', context_settings={ 'allow_extra_args': True, 'ignore_unknown_options': True }) @click.argument("example_module_name", required=True, type=str) @click.argument('example_argv', nargs=-1, type=click.UNPROCESSED) @click.option( "--config_file", required=False, type=str) @click.option( "--stop/--no-stop", is_flag=True, default=True, help="Stop the cluster after the command finishes running.") @click.option( "--start/--no-start", is_flag=True, default=True, help="Start the cluster if needed.") @click.option( "--screen/--no-screen", is_flag=True, default=False, help="Run the command in a screen.") @click.option( "--tmux/--no-tmux", is_flag=True, default=True, help="Run the command in tmux.") @click.option( "--override-cluster-name", required=False, type=str, help="Override the configured cluster name.") @click.option( "--port-forward", required=False, type=int, help="Port to forward.") def launch_example_cluster_cmd(*args, **kwargs): return launch_example_cluster(*args, **kwargs) @cli.command( name='launch_example_gce', context_settings={ 'allow_extra_args': True, 'ignore_unknown_options': True }) @add_options(launch_example_cluster_cmd.params) def launch_example_gce_cmd(*args, **kwargs): return launch_example_gce(*args, **kwargs) @cli.command( name='launch_example_ec2', context_settings={ 'allow_extra_args': True, 'ignore_unknown_options': True }) @add_options(launch_example_cluster_cmd.params)
MIT License
coarse-graining/cgnet
cgnet/feature/utils.py
ShiftedSoftplus.forward
python
def forward(self, input_tensor): return nn.functional.softplus(input_tensor) - np.log(2.0)
Applies the shifted softplus function element-wise. Parameters ---------- input_tensor: torch.Tensor Input tensor of size (n_examples, *) where `*` means any number of additional dimensions. Returns ------- Output: torch.Tensor Same size (n_examples, *) as the input.
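A minimal usage sketch (shapes and values are illustrative), assuming ShiftedSoftplus has been imported from the module shown in the context:

import torch

activation = ShiftedSoftplus()
x = torch.randn(4, 8)              # (n_examples, n_features)
y = activation(x)                  # same shape as the input
# The shift by log(2) makes the activation pass through the origin.
assert torch.allclose(activation(torch.zeros(1)), torch.zeros(1), atol=1e-6)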
https://github.com/coarse-graining/cgnet/blob/ce7dadb1f8e66771032275ef87b8193ad234d495/cgnet/feature/utils.py#L39-L53
import numpy as np import torch import torch.nn as nn class ShiftedSoftplus(nn.Module): def __init__(self): super(ShiftedSoftplus, self).__init__()
BSD 3-Clause New or Revised License
labd/commercetools-python-sdk
src/commercetools/services/reviews.py
ReviewService.create
python
def create(self, draft: ReviewDraft, *, expand: OptionalListStr = None) -> Review: params = self._serialize_params({"expand": expand}, traits.ExpandableSchema) return self._client._post( endpoint="reviews", params=params, data_object=draft, response_class=Review )
Reviews are used to evaluate products and channels.
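A hedged usage sketch; `client` stands for an already-configured commercetools client exposing this service, and the draft field names shown are assumptions to be checked against the ReviewDraft model:

from commercetools.platform.models.review import ReviewDraft

draft = ReviewDraft(text="Solid build quality", rating=4)   # field names assumed
review = client.reviews.create(draft, expand=["target"])    # returns a Review
print(review.id, review.version)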
https://github.com/labd/commercetools-python-sdk/blob/d8ec285f08d56ede2e4cad45c74833f5b609ab5c/src/commercetools/services/reviews.py#L79-L84
import typing from commercetools.helpers import RemoveEmptyValuesMixin from commercetools.platform.models.review import ( Review, ReviewDraft, ReviewPagedQueryResponse, ReviewUpdate, ReviewUpdateAction, ) from commercetools.typing import OptionalListStr from . import abstract, traits class _ReviewQuerySchema( traits.ExpandableSchema, traits.SortableSchema, traits.PagingSchema, traits.QuerySchema, ): pass class _ReviewUpdateSchema(traits.ExpandableSchema, traits.VersionedSchema): pass class _ReviewDeleteSchema( traits.VersionedSchema, traits.ExpandableSchema, traits.DataErasureSchema ): pass class ReviewService(abstract.AbstractService): def get_by_id(self, id: str, *, expand: OptionalListStr = None) -> Review: params = self._serialize_params({"expand": expand}, traits.ExpandableSchema) return self._client._get( endpoint=f"reviews/{id}", params=params, response_class=Review ) def get_by_key(self, key: str, *, expand: OptionalListStr = None) -> Review: params = self._serialize_params({"expand": expand}, traits.ExpandableSchema) return self._client._get( endpoint=f"reviews/key={key}", params=params, response_class=Review ) def query( self, *, expand: OptionalListStr = None, sort: OptionalListStr = None, limit: int = None, offset: int = None, with_total: bool = None, where: OptionalListStr = None, predicate_var: typing.Dict[str, str] = None, ) -> ReviewPagedQueryResponse: params = self._serialize_params( { "expand": expand, "sort": sort, "limit": limit, "offset": offset, "with_total": with_total, "where": where, "predicate_var": predicate_var, }, _ReviewQuerySchema, ) return self._client._get( endpoint="reviews", params=params, response_class=ReviewPagedQueryResponse )
MIT License
nrel/rdtools
rdtools/analysis_chains.py
TrendAnalysis.set_clearsky
python
def set_clearsky(self, pvlib_location=None, pv_azimuth=None, pv_tilt=None, poa_global_clearsky=None, temperature_cell_clearsky=None, temperature_ambient_clearsky=None, albedo=0.25, solar_position_method='nrel_numpy'): max_timedelta = self.max_timedelta if poa_global_clearsky is not None: poa_global_clearsky = normalization.interpolate( poa_global_clearsky, self.pv_energy.index, max_timedelta) if temperature_cell_clearsky is not None: temperature_cell_clearsky = normalization.interpolate( temperature_cell_clearsky, self.pv_energy.index, max_timedelta) if temperature_ambient_clearsky is not None: temperature_ambient_clearsky = normalization.interpolate( temperature_ambient_clearsky, self.pv_energy.index, max_timedelta) if isinstance(pv_azimuth, (pd.Series, pd.DataFrame)): pv_azimuth = normalization.interpolate( pv_azimuth, self.pv_energy.index, max_timedelta) if isinstance(pv_tilt, (pd.Series, pd.DataFrame)): pv_tilt = normalization.interpolate( pv_tilt, self.pv_energy.index, max_timedelta) self.pvlib_location = pvlib_location self.pv_azimuth = pv_azimuth self.pv_tilt = pv_tilt self.poa_global_clearsky = poa_global_clearsky self.temperature_cell_clearsky = temperature_cell_clearsky self.temperature_ambient_clearsky = temperature_ambient_clearsky self.albedo = albedo self.solar_position_method = solar_position_method
Initialize values for a clearsky analysis which requires configuration of location and orientation details. If the optional parameters `poa_global_clearsky`, `temperature_ambient_clearsky` are not passed, they will be modeled based on location and orientation. Parameters ---------- pvlib_location : pvlib.location.Location Used for calculating clearsky temperature and irradiance pv_azimuth : numeric Azimuth of PV array in degrees from north. Can be right-labeled Pandas Time Series or single numeric value. pv_tilt : numeric Tilt of PV array in degrees from horizontal. Can be right-labeled Pandas Time Series or single numeric value. poa_global_clearsky : pandas.Series Right-labeled time series of clear-sky plane of array irradiance temperature_cell_clearsky : pandas.Series Right-labeled time series of cell temperature in clear-sky conditions in Celsius. In practice, back of module temperature works as a good approximation. temperature_ambient_clearsky : pandas.Series Right-labeled time series of ambient temperature in clear-sky conditions in Celsius albedo : numeric Albedo to be used in irradiance transposition calculations. Can be right-labeled Pandas Time Series or single numeric value. solar_position_method : str, default 'nrel_numpy' Optional method name to pass to :py:func:`pvlib.solarposition.get_solarposition`. Switching methods may improve calculation time.
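A hedged configuration sketch; `analysis` stands for a TrendAnalysis built with the constructor shown in the context, and the coordinates, tilt and azimuth below are illustrative values only:

import pvlib

loc = pvlib.location.Location(39.74, -105.18, altitude=1829)   # placeholder site
analysis.set_clearsky(pvlib_location=loc, pv_azimuth=180, pv_tilt=20)
# poa_global_clearsky and temperature_ambient_clearsky are omitted here,
# so they will be modeled from the location and orientation.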
https://github.com/nrel/rdtools/blob/4ca70e3e2cec85fead10cb8e6ef5e098eeb6f686/rdtools/analysis_chains.py#L140-L201
import pvlib import pandas as pd import numpy as np import matplotlib.pyplot as plt from rdtools import normalization, filtering, aggregation, degradation from rdtools import clearsky_temperature, plotting import warnings class TrendAnalysis(): def __init__(self, pv, poa_global=None, temperature_cell=None, temperature_ambient=None, gamma_pdc=None, aggregation_freq='D', pv_input='power', windspeed=0, power_expected=None, temperature_model=None, power_dc_rated=None, interp_freq=None, max_timedelta=None): if interp_freq is not None: pv = normalization.interpolate(pv, interp_freq, max_timedelta) if poa_global is not None: poa_global = normalization.interpolate( poa_global, pv.index, max_timedelta) if temperature_cell is not None: temperature_cell = normalization.interpolate( temperature_cell, pv.index, max_timedelta) if temperature_ambient is not None: temperature_ambient = normalization.interpolate( temperature_ambient, pv.index, max_timedelta) if power_expected is not None: power_expected = normalization.interpolate( power_expected, pv.index, max_timedelta) if isinstance(windspeed, pd.Series): windspeed = normalization.interpolate( windspeed, pv.index, max_timedelta) if pv_input == 'power': self.pv_power = pv self.pv_energy = normalization.energy_from_power( pv, max_timedelta=max_timedelta) elif pv_input == 'energy': self.pv_power = None self.pv_energy = pv self.temperature_cell = temperature_cell self.temperature_ambient = temperature_ambient self.poa_global = poa_global self.gamma_pdc = gamma_pdc self.aggregation_freq = aggregation_freq self.windspeed = windspeed self.power_expected = power_expected self.temperature_model = temperature_model self.power_dc_rated = power_dc_rated self.interp_freq = interp_freq self.max_timedelta = max_timedelta self.results = {} self.filter_params = { 'normalized_filter': {}, 'poa_filter': {}, 'tcell_filter': {}, 'clip_filter': {}, 'csi_filter': {}, 'ad_hoc_filter': None } if power_expected is not None and temperature_cell is None: del self.filter_params['tcell_filter']
MIT License
mrknow/filmkodi
plugin.video.mrknow/mylib/pydevd_attach_to_process/winappdbg/breakpoint.py
Breakpoint.set_action
python
def set_action(self, action = None): self.__action = action
Sets a new action callback for the breakpoint. @type action: function @param action: (Optional) Action callback function.
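A hedged usage sketch; `bp` stands for an existing Breakpoint (or subclass) instance managed by the debugger:

def log_hit(event):
    # The callback receives the debug event that triggered the breakpoint.
    print("breakpoint hit in process %d" % event.get_pid())

bp.set_action(log_hit)   # breakpoint becomes automatic: the callback runs on each hit
bp.set_action(None)      # back to an interactive (no-action) breakpoint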
https://github.com/mrknow/filmkodi/blob/0162cde9ae25ddbf4a69330948714833ff2f78c9/plugin.video.mrknow/mylib/pydevd_attach_to_process/winappdbg/breakpoint.py#L382-L389
__revision__ = "$Id$" __all__ = [ 'Breakpoint', 'CodeBreakpoint', 'PageBreakpoint', 'HardwareBreakpoint', 'Hook', 'ApiHook', 'BufferWatch', 'BreakpointWarning', 'BreakpointCallbackWarning', ] from winappdbg import win32 from winappdbg import compat import sys from winappdbg.process import Process, Thread from winappdbg.util import DebugRegister, MemoryAddresses from winappdbg.textio import HexDump import ctypes import warnings import traceback class BreakpointWarning (UserWarning): class BreakpointCallbackWarning (RuntimeWarning): class Breakpoint (object): DISABLED = 0 ENABLED = 1 ONESHOT = 2 RUNNING = 3 typeName = 'breakpoint' stateNames = { DISABLED : 'disabled', ENABLED : 'enabled', ONESHOT : 'one shot', RUNNING : 'running', } def __init__(self, address, size = 1, condition = True, action = None): self.__address = address self.__size = size self.__state = self.DISABLED self.set_condition(condition) self.set_action(action) def __repr__(self): if self.is_disabled(): state = 'Disabled' else: state = 'Active (%s)' % self.get_state_name() if self.is_conditional(): condition = 'conditional' else: condition = 'unconditional' name = self.typeName size = self.get_size() if size == 1: address = HexDump.address( self.get_address() ) else: begin = self.get_address() end = begin + size begin = HexDump.address(begin) end = HexDump.address(end) address = "range %s-%s" % (begin, end) msg = "<%s %s %s at remote address %s>" msg = msg % (state, condition, name, address) return msg def is_disabled(self): return self.get_state() == self.DISABLED def is_enabled(self): return self.get_state() == self.ENABLED def is_one_shot(self): return self.get_state() == self.ONESHOT def is_running(self): return self.get_state() == self.RUNNING def is_here(self, address): begin = self.get_address() end = begin + self.get_size() return begin <= address < end def get_address(self): return self.__address def get_size(self): return self.__size def get_span(self): address = self.get_address() size = self.get_size() return ( address, address + size ) def get_state(self): return self.__state def get_state_name(self): return self.stateNames[ self.get_state() ] def is_conditional(self): return self.__condition is not True def is_unconditional(self): return self.__condition is True def get_condition(self): return self.__condition def set_condition(self, condition = True): if condition is None: self.__condition = True else: self.__condition = condition def eval_condition(self, event): condition = self.get_condition() if condition is True: return True if callable(condition): try: return bool( condition(event) ) except Exception: e = sys.exc_info()[1] msg = ("Breakpoint condition callback %r" " raised an exception: %s") msg = msg % (condition, traceback.format_exc(e)) warnings.warn(msg, BreakpointCallbackWarning) return False return bool( condition ) def is_automatic(self): return self.__action is not None def is_interactive(self): return self.__action is None def get_action(self): return self.__action
Apache License 2.0
rustychris/stompy
stompy/model/fish_ptm/ptm_tools.py
PtmBin.dt_seconds
python
def dt_seconds(self): dnum1,data = self.read_timestep(0) dnum2,data = self.read_timestep(1) return (dnum2-dnum1).total_seconds()
Return the bin file output interval in decimal seconds.
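A hedged usage sketch; the file name is a placeholder for a FISH-PTM binary output file:

pb = PtmBin("MY_RELEASE_bin.out")          # placeholder path
interval = pb.dt_seconds()                 # e.g. 900.0 for 15-minute output
print("output interval: %.0f s" % interval)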
https://github.com/rustychris/stompy/blob/ef04d8b3ee9c9af827c87c72c7b50d365e5e567d/stompy/model/fish_ptm/ptm_tools.py#L131-L137
import os import time import numpy as np import xarray as xr from datetime import datetime import matplotlib.pyplot as plt from ...spatial import wkb2shp from ... import memoize, utils import pandas as pd class PtmBin(object): use_memmap=True fp=None def __init__(self,fn,release_name=None,idx_fn='auto'): self.fn = fn if release_name is None: release_name = os.path.basename(fn) release_name = release_name.replace("_bin.out","") self.release = release_name self.fp = open(self.fn,'rb') self.fn_bytes = os.stat(self.fn).st_size self.read_bin_header() self.offsets = {} self.offsets[0] = self.fp.tell() if idx_fn=='auto': idx_fn=fn.replace('_bin.out','_bin.idx') if os.path.exists(idx_fn): self.idx_fn=idx_fn self.read_index() else: self.getTime() def __del__(self): if self.fp is not None: self.fp.close() self.fp=None def read_index(self): df=pd.read_csv(self.idx_fn,sep='\s+', names=['year','month','day','hour','minute','offset','count']) df['time']=pd.to_datetime(df[['year','month','day','hour','minute']]) self.time=df.time.dt.to_pydatetime() self.offsets=dict(zip(df.index.values,df.offset.values)) def read_bin_header(self): self.Nattr = int( np.fromstring(self.fp.read(4),np.int32) ) atts = [] for i in range(self.Nattr): idx = int( np.fromstring( self.fp.read(4), np.int32) ) type_str = self.fp.read(80).strip() name_str = self.fp.read(80).strip() atts.append( (idx,type_str,name_str) ) self.atts=atts def scan_to_timestep(self,ts): if ts<0: nsteps=self.count_timesteps() ts=nsteps+ts assert ts>=0 if ts not in self.offsets: for ts_scan in range(1,ts+1): if ts_scan not in self.offsets: self.fp.seek( self.offsets[ts_scan-1]) tstep_header = np.fromstring( self.fp.read( 6*4 ), np.int32 ) Npart = tstep_header[5] frame = 6*4 + Npart * (2*4 + 3*8) self.offsets[ts_scan] = self.offsets[ts_scan-1] + frame if self.offsets[ts_scan] >= self.fn_bytes: return False if self.offsets[ts] >= self.fn_bytes: return False self.fp.seek(self.offsets[ts]) return True def count_timesteps(self): saved_pos = self.fp.tell() valid_ts = -1 while 1: if self.scan_to_timestep(valid_ts+1): valid_ts += 1 else: break self.fp.seek(saved_pos) return valid_ts + 1
MIT License
digital-concrete/light-sync
phue_lib.py
Sensor.state
python
def state(self): data = self._get('state') self._state.clear() self._state.update(data) return self._state
A dictionary of sensor state. Some values can be updated, some are read-only. [dict]
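A hedged usage sketch; the bridge IP address and sensor id are placeholders:

b = Bridge('192.168.1.10')                 # placeholder bridge address
sensor = Sensor(b, 2)                      # placeholder sensor id
print(sensor.state)                        # e.g. {'flag': False, 'lastupdated': '...'}
sensor.state['flag'] = True                # writable keys are pushed back to the bridge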
https://github.com/digital-concrete/light-sync/blob/b2f8405971b6204f4d43f5a63ae91381462913f2/phue_lib.py#L412-L417
import json import logging import os import platform import sys import socket if sys.version_info[0] > 2: PY3K = True else: PY3K = False if PY3K: import http.client as httplib else: import httplib logger = logging.getLogger('phue') if platform.system() == 'Windows': USER_HOME = 'USERPROFILE' else: USER_HOME = 'HOME' __version__ = '1.1' def is_string(data): if PY3K: return isinstance(data, str) else: return isinstance(data, str) or isinstance(data, unicode) class PhueException(Exception): def __init__(self, id, message): self.id = id self.message = message class PhueRegistrationException(PhueException): pass class PhueRequestTimeout(PhueException): pass class Light(object): def __init__(self, bridge, light_id): self.bridge = bridge self.light_id = light_id self._name = None self._on = None self._brightness = None self._colormode = None self._hue = None self._saturation = None self._xy = None self._colortemp = None self._effect = None self._alert = None self.transitiontime = None self._reset_bri_after_on = None self._reachable = None self._type = None def __repr__(self): return '<{0}.{1} object "{2}" at {3}>'.format( self.__class__.__module__, self.__class__.__name__, self.name, hex(id(self))) def _get(self, *args, **kwargs): return self.bridge.get_light(self.light_id, *args, **kwargs) def _set(self, *args, **kwargs): if self.transitiontime is not None: kwargs['transitiontime'] = self.transitiontime logger.debug("Setting with transitiontime = {0} ds = {1} s".format( self.transitiontime, float(self.transitiontime) / 10)) if (args[0] == 'on' and args[1] is False) or ( kwargs.get('on', True) is False): self._reset_bri_after_on = True return self.bridge.set_light(self.light_id, *args, **kwargs) @property def name(self): if PY3K: self._name = self._get('name') else: self._name = self._get('name').encode('utf-8') return self._name @name.setter def name(self, value): old_name = self.name self._name = value self._set('name', self._name) logger.debug("Renaming light from '{0}' to '{1}'".format( old_name, value)) self.bridge.lights_by_name[self.name] = self del self.bridge.lights_by_name[old_name] @property def on(self): self._on = self._get('on') return self._on @on.setter def on(self, value): if self._on and value is False: self._reset_bri_after_on = self.transitiontime is not None if self._reset_bri_after_on: logger.warning( 'Turned off light with transitiontime specified, brightness will be reset on power on') self._set('on', value) if self._on is False and value is True: if self._reset_bri_after_on: logger.warning( 'Light was turned off with transitiontime specified, brightness needs to be reset now.') self.brightness = self._brightness self._reset_bri_after_on = False self._on = value @property def colormode(self): self._colormode = self._get('colormode') return self._colormode @property def brightness(self): self._brightness = self._get('bri') return self._brightness @brightness.setter def brightness(self, value): self._brightness = value self._set('bri', self._brightness) @property def hue(self): self._hue = self._get('hue') return self._hue @hue.setter def hue(self, value): self._hue = int(value) self._set('hue', self._hue) @property def saturation(self): self._saturation = self._get('sat') return self._saturation @saturation.setter def saturation(self, value): self._saturation = value self._set('sat', self._saturation) @property def xy(self): self._xy = self._get('xy') return self._xy @xy.setter def xy(self, value): self._xy = value self._set('xy', self._xy) @property def colortemp(self): 
self._colortemp = self._get('ct') return self._colortemp @colortemp.setter def colortemp(self, value): if value < 154: logger.warn('154 mireds is coolest allowed color temp') elif value > 500: logger.warn('500 mireds is warmest allowed color temp') self._colortemp = value self._set('ct', self._colortemp) @property def colortemp_k(self): self._colortemp = self._get('ct') return int(round(1e6 / self._colortemp)) @colortemp_k.setter def colortemp_k(self, value): if value > 6500: logger.warn('6500 K is max allowed color temp') value = 6500 elif value < 2000: logger.warn('2000 K is min allowed color temp') value = 2000 colortemp_mireds = int(round(1e6 / value)) logger.debug("{0:d} K is {1} mireds".format(value, colortemp_mireds)) self.colortemp = colortemp_mireds @property def effect(self): self._effect = self._get('effect') return self._effect @effect.setter def effect(self, value): self._effect = value self._set('effect', self._effect) @property def alert(self): self._alert = self._get('alert') return self._alert @alert.setter def alert(self, value): if value is None: value = 'none' self._alert = value self._set('alert', self._alert) @property def reachable(self): self._reachable = self._get('reachable') return self._reachable @property def type(self): self._type = self._get('type') return self._type class SensorState(dict): def __init__(self, bridge, sensor_id): self._bridge = bridge self._sensor_id = sensor_id def __setitem__(self, key, value): dict.__setitem__(self, key, value) self._bridge.set_sensor_state(self._sensor_id, self) class SensorConfig(dict): def __init__(self, bridge, sensor_id): self._bridge = bridge self._sensor_id = sensor_id def __setitem__(self, key, value): dict.__setitem__(self, key, value) self._bridge.set_sensor_config(self._sensor_id, self) class Sensor(object): def __init__(self, bridge, sensor_id): self.bridge = bridge self.sensor_id = sensor_id self._name = None self._model = None self._swversion = None self._type = None self._uniqueid = None self._manufacturername = None self._state = SensorState(bridge, sensor_id) self._config = {} self._recycle = None def __repr__(self): return '<{0}.{1} object "{2}" at {3}>'.format( self.__class__.__module__, self.__class__.__name__, self.name, hex(id(self))) def _get(self, *args, **kwargs): return self.bridge.get_sensor(self.sensor_id, *args, **kwargs) def _set(self, *args, **kwargs): return self.bridge.set_sensor(self.sensor_id, *args, **kwargs) @property def name(self): if PY3K: self._name = self._get('name') else: self._name = self._get('name').encode('utf-8') return self._name @name.setter def name(self, value): old_name = self.name self._name = value self._set('name', self._name) logger.debug("Renaming sensor from '{0}' to '{1}'".format( old_name, value)) self.bridge.sensors_by_name[self.name] = self del self.bridge.sensors_by_name[old_name] @property def modelid(self): self._modelid = self._get('modelid') return self._modelid @property def swversion(self): self._swversion = self._get('swversion') return self._swversion @property def type(self): self._type = self._get('type') return self._type @property def uniqueid(self): self._uniqueid = self._get('uniqueid') return self._uniqueid @property def manufacturername(self): self._manufacturername = self._get('manufacturername') return self._manufacturername @property
MIT License
berkeley-reclab/reclab
reclab/environments/latent_factors.py
LatentFactorBehavior._get_rating
python
def _get_rating(self, user_id, item_id): raw_rating = (self._user_factors[user_id] @ self._item_factors[item_id] + self._user_biases[user_id] + self._item_biases[item_id] + self._offset) boredom_penalty = 0 for item_id_hist in self._user_histories[user_id]: item_factor = self._item_factors[item_id_hist] if item_factor is not None: similarity = ((self._item_factors[item_id] @ item_factor) / np.linalg.norm(item_factor) / np.linalg.norm(self._item_factors[item_id])) if similarity > self._boredom_threshold: boredom_penalty += (similarity - self._boredom_threshold) boredom_penalty *= self._boredom_penalty rating = np.clip(raw_rating - boredom_penalty + self._dynamics_random.randn() * self._noise, 1, 5) return rating
Compute user's rating of item based on model. Parameters ---------- user_id : int The id of the user making the rating. item_id : int The id of the item being rated. Returns ------- rating : int The rating the item was given by the user.
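A small numeric sketch of the underlying rating model, ignoring the boredom penalty and the noise term (all values are invented for illustration):

import numpy as np

u = np.array([0.3, -0.1])                  # user factors
v = np.array([0.5, 0.2])                   # item factors
b_u, b_i, offset = 0.1, -0.2, 3.5
raw = u @ v + b_u + b_i + offset           # 0.13 + 3.4 = 3.53
print(np.clip(raw, 1, 5))                  # ratings are clipped to the 1-5 range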
https://github.com/berkeley-reclab/reclab/blob/09d5b1639e9b7f6cbd230f181130b681e31cf4f0/reclab/environments/latent_factors.py#L100-L133
import collections import json import os import numpy as np from . import environment from .. import data_utils class LatentFactorBehavior(environment.DictEnvironment): def __init__(self, latent_dim, num_users, num_items, rating_frequency=0.02, num_init_ratings=0, noise=0.0, memory_length=0, affinity_change=0.0, boredom_threshold=0, boredom_penalty=0.0, user_dist_choice='uniform'): super().__init__(rating_frequency, num_init_ratings, memory_length, user_dist_choice) self._latent_dim = latent_dim self._num_users = num_users self._num_items = num_items self._noise = noise self._affinity_change = affinity_change self._boredom_threshold = boredom_threshold self._boredom_penalty = boredom_penalty if self._memory_length > 0: self._boredom_penalty /= self._memory_length self._user_factors = None self._user_biases = None self._item_factors = None self._item_biases = None self._offset = None @property def name(self): return 'latent' def _get_dense_ratings(self): ratings = (self._user_factors @ self._item_factors.T + self._user_biases[:, np.newaxis] + self._item_biases[np.newaxis, :] + self._offset) item_norms = np.linalg.norm(self._item_factors, axis=1) normalized_items = self._item_factors / item_norms[:, np.newaxis] similarities = normalized_items @ normalized_items.T similarities -= self._boredom_threshold similarities[similarities < 0] = 0 penalties = self._boredom_penalty * similarities for user_id in range(self._num_users): for item_id in self._user_histories[user_id]: if item_id is not None: ratings[user_id] -= penalties[item_id] return ratings
MIT License
thunlp-mt/pr4nmt
thumt/nmt.py
RNNsearch.get_attention
python
def get_attention(self, x, xmask, y, ymask): if not hasattr(self, "get_attentioner"): self.get_attentioner = theano.function(inputs = [self.x, self.xmask, self.y, self.ymask], outputs = [self.attention]) return self.get_attentioner(x, xmask, y, ymask)
Get the attention weight of parallel sentences.
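The method compiles its Theano function lazily on first use and caches it on the instance; a minimal standalone sketch of that compile-once pattern (independent of the NMT graph itself):

import theano
import theano.tensor as tensor

class LazyCompiled(object):
    def get_doubler(self, x):
        if not hasattr(self, "_doubler"):          # compile only on the first call
            v = tensor.vector('v')
            self._doubler = theano.function([v], v * 2)
        return self._doubler(x)                    # later calls reuse the cached function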
https://github.com/thunlp-mt/pr4nmt/blob/104766db729f2babe1db69c5b10a1aa45f578bf3/thumt/nmt.py#L508-L515
import numpy import theano import theano.tensor as tensor from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams import tools from layer import LayerFactory import json import traceback import cPickle import logging class model(object): def __init__(self): pass def sample(self, x, length, n_samples = 1): sample, probs = self.get_sample(x.reshape((x.shape[0],1)), length, n_samples) return numpy.asarray(sample, dtype = 'int64').transpose(), probs def translate(self, x, beam_size = 10, return_array = False, rerank = False): result = [[]] loss = [0.] result_eos = [] loss_eos = [] beam = beam_size c, state = self.get_context_and_init(x) emb_y = numpy.zeros((1, self.config['dim_emb_trg']), dtype = 'float32') for l in range(x.shape[0] * 3): energy, ctx = self.get_probs(numpy.repeat(c, len(result), axis = 1), state, emb_y) probs = tools.softmax(energy) losses = -numpy.log(probs) if l < x.shape[0] / 2: losses[:, self.config['index_eos_trg']] = numpy.inf for i in range(len(loss)): losses[i] += loss[i] best_index_flatten = numpy.argpartition(losses.flatten(), beam)[:beam] best_index = [(index / self.config['num_vocab_trg'], index % self.config['num_vocab_trg']) for index in best_index_flatten] new_ctx = numpy.zeros((beam, 2 * self.config['dim_rec_enc']), dtype = 'float32') new_y = [] new_state = numpy.zeros((beam, self.config['dim_rec_dec']), dtype = 'float32') new_result = [] new_loss = [] for i in range(beam): index = best_index[i] new_result.append(result[index[0]] + [index[1]]) new_loss.append(losses[index[0], index[1]]) new_ctx[i] = ctx[index[0]] new_y.append(index[1]) new_state[i] = state[index[0]] new_emby = self.get_trg_embedding(numpy.asarray(new_y, dtype = 'int64'))[0] new_state = self.get_next(new_ctx, new_state, new_emby) state = [] emb_y = [] result = [] loss = [] for i in range(beam): if new_result[i][-1] == self.config['index_eos_trg']: result_eos.append(new_result[i]) loss_eos.append(new_loss[i]) beam -= 1 else: result.append(new_result[i]) loss.append(new_loss[i]) state.append(new_state[i]) emb_y.append(new_emby[i]) if beam <= 0: break state = numpy.asarray(state, dtype = 'float32') emb_y = numpy.asarray(emb_y, dtype = 'float32') if return_array: if len(result_eos) > 0: return result_eos else: return [result[-1][:1]] if len(result_eos) > 0: if rerank: for i in range(len(result_eos)): feature_value = numpy.asarray([],dtype = 'float32') for j in range(len(self.fls)): fl = self.fls[j] if isinstance(fl,featureListAttn): fe =fl.getFeatures(xf, result_eos[i], [attentions_eos[i]]) else: fe =fl.getFeatures(xf, result_eos[i]) feature_value = numpy.concatenate((feature_value, fe)) loss_eos[i] -= (feature_value * self.feature_weight.get_value()).sum() return result_eos[numpy.argmin(loss_eos)] elif beam_size > 100: logging.warning('cannot find translation in beam size %d' % beam_size) return [] else: logging.info('cannot find translation in beam size %d, try %d' % (beam_size, beam_size * 2)) return self.translate(x, beam_size = beam_size * 2) def translate_rerank(self, x, beam_size = 10, return_array = False): return self.translate(x, beam_size, return_array, rerank = True) def save(self, path, data = None, mapping = None): values = {} for p in self.creater.params: values[p.name] = p.get_value() values['config'] = json.dumps(self.config) if data: values['vocab_src'] = json.dumps(data.vocab_src) values['ivocab_src'] = json.dumps(data.ivocab_src) values['vocab_trg'] = json.dumps(data.vocab_trg) values['ivocab_trg'] = json.dumps(data.ivocab_trg) if mapping: values['mapping'] = 
json.dumps(mapping) numpy.savez(path, **values) def load(self, path, decode = False): try: values = numpy.load(path) for p in self.creater.params: if p.name in values: if values[p.name].shape != p.get_value().shape: logging.warning(p.name + ' needs ' + str(p.get_value().shape) + ', given ' + str(values[p.name].shape) + ' , initializing') else: p.set_value(values[p.name]) logging.debug(p.name + ' loaded ' + str(values[p.name].shape)) else: logging.warning('No parameter ' + p.name + ', initializing') if decode: return values except: if self.config['MRT'] or self.config['semi_learning'] or self.config['PR']: logging.info('Initializing the model from ' + str(self.config['init_model'])) self.load(self.config['init_model']) else: logging.info('No model file. Starting from scratch.') class RNNsearch(model): def __init__(self, config, name = '', fls = None): self.config = config self.name = name self.creater = LayerFactory() self.fls = fls self.trng = RandomStreams(numpy.random.randint(int(10e6))) def sampling_step(self, state, prev, context): emb = self.emb_trg.forward(prev) energy, c = self.decoderGRU.decode_probs(context, state, emb) probs = tensor.nnet.softmax(energy) sample = self.trng.multinomial(pvals = probs, dtype = 'int64').argmax(axis = -1) newemb = self.emb_trg.forward(sample) newstate = self.decoderGRU.decode_next(c, state, newemb) return newstate, sample, probs def decode_sample(self, state_init, c, length, n_samples): state = tensor.repeat(state_init, n_samples, axis = 0) sample = tensor.zeros((n_samples,), dtype = 'int64') c = tensor.repeat(c, n_samples, axis = 1) result, updates = theano.scan(self.sampling_step, outputs_info = [state, sample, None], non_sequences = [c], n_steps = length) samples = result[1] probs = result[2] y_idx = tensor.arange(samples.flatten().shape[0]) * self.config['num_vocab_trg'] + samples.flatten() return samples, probs, updates def build(self, verbose = False): config = self.config logging.info('Initializing layers') self.emb_src = self.creater.createLookupTable(self.name + 'emb_src', config['num_vocab_src'], config['dim_emb_src'], offset = True) self.emb_trg = self.creater.createLookupTable(self.name + 'emb_trg', config['num_vocab_trg'], config['dim_emb_trg'], offset = True) self.encoderGRU = self.creater.createGRU(self.name + 'GRU_enc', config['dim_emb_src'], config['dim_rec_enc'], verbose = verbose) self.encoderGRU_back = self.creater.createGRU(self.name + 'GRU_enc_back', config['dim_emb_src'], config['dim_rec_enc'], verbose = verbose) self.decoderGRU = self.creater.createGRU_attention(self.name + 'GRU_dec', config['dim_emb_trg'], 2 * config['dim_rec_enc'], config['dim_rec_dec'], config['num_vocab_trg'], verbose = verbose) self.initer = self.creater.createFeedForwardLayer(self.name + 'initer', config['dim_rec_enc'], config['dim_rec_dec'], offset = True) if self.fls: fl_weight = [] for fl in self.fls: fl_weight.append(fl.feature_weight) print fl.feature_weight fl_weight = numpy.concatenate(fl_weight) self.feature_weight = theano.shared(fl_weight.astype('float32'), name = "feature_weight") self.creater.params += [self.feature_weight] self.feature_weight_dim = self.feature_weight.dimshuffle('x', 0) self.x = tensor.matrix('x', dtype = 'int64') self.xmask = tensor.matrix('x_mask', dtype = 'float32') self.y = tensor.matrix('y', dtype = 'int64') self.ymask = tensor.matrix('y_mask', dtype = 'float32') if 'MRT' in config and config['MRT'] is True: self.MRTLoss = tensor.vector('MRTLoss') self.inputs = [self.x, self.xmask, self.y, self.ymask, self.MRTLoss] else: 
self.MRTLoss = None self.inputs = [self.x, self.xmask, self.y, self.ymask] if config['PR']: self.ans = tensor.scalar('ans', dtype = 'int64') self.features = tensor.matrix('features', dtype = 'float32') self.inputs += [self.features, self.ans] logging.info('Building computational graph') emb = self.emb_src.forward(self.x.flatten()) back_emb = self.emb_src.forward(self.x[::-1].flatten()) self.encode_forward = self.encoderGRU.forward(emb, self.x.shape[0], batch_size = self.x.shape[1], mask = self.xmask) self.encode_backward = self.encoderGRU_back.forward(back_emb, self.x.shape[0], batch_size = self.x.shape[1], mask = self.xmask[::-1]) context_forward = self.encode_forward[0] context_backward = self.encode_backward[0][::-1] self.context = tensor.concatenate((context_forward, context_backward), axis=2) self.init_c = context_backward[0] self.state_init = self.initer.forward(context_backward[0]) emb = self.emb_trg.forward(self.y.flatten()) self.decode = self.decoderGRU.forward(emb, self.y.shape[0], self.context, state_init = self.state_init, batch_size = self.y.shape[1], mask = self.ymask, cmask = self.xmask) energy = self.decode[1] self.attention = self.decode[2] self.softmax = tensor.nnet.softmax(energy) y_idx = tensor.arange(self.y.flatten().shape[0]) * self.config['num_vocab_trg'] + self.y.flatten() cost = self.softmax.flatten()[y_idx] cost = -tensor.log(cost) self.cost = cost.reshape((self.y.shape[0], self.y.shape[1])) * self.ymask self.cost_per_sample = self.cost.sum(axis = 0) if 'MRT' in config and config['MRT'] is True: self.cost_per_sample = self.cost.sum(axis = 0) tmp = self.cost_per_sample tmp *= config['MRT_alpha'] tmp -= tmp.min() tmp = tensor.exp(-tmp) tmp /= tmp.sum() tmp *= self.MRTLoss tmp = -tmp.sum() self.cost = tmp elif config['PR'] and self.fls: self.cost_per_sample = self.cost.sum(axis = 0) self.cost_per_sample *= config['alpha_PR'] cost_min = self.cost_per_sample - self.cost_per_sample.min() probs = tensor.exp(-cost_min) log_probs = -cost_min - tensor.log(probs.sum()) probs /= probs.sum() self.probs = log_probs energy_q = self.features * self.feature_weight_dim energy_q = energy_q.sum(axis = 1) self.energy_q = energy_q energy_q_min = energy_q - energy_q.max() probs_q = tensor.exp(energy_q_min) log_probs_q = energy_q_min - tensor.log(probs_q.sum()) probs_q /= probs_q.sum() self.probs_q = log_probs_q cost_KL = tensor.exp(log_probs_q) * (log_probs_q - log_probs) self.cost_KLs = cost_KL self.cost_KL = cost_KL.sum() self.cost_NMT = self.cost_per_sample[self.ans] self.cost = config['lambda_PR'] * self.cost_KL + config['lambda_MLE'] * self.cost_NMT else: self.cost = self.cost.sum() self.x_sample = tensor.matrix('x_sample', dtype = 'int64') self.n_samples = tensor.scalar('n_samples', dtype = 'int64') self.length_sample = tensor.scalar('length', dtype = 'int64') emb_sample = self.emb_src.forward(self.x_sample.flatten()) back_emb_sample = self.emb_src.forward(self.x_sample[::-1].flatten()) encode_forward_sample = self.encoderGRU.forward(emb_sample, self.x_sample.shape[0], batch_size = self.x_sample.shape[1]) encode_backward_sample = self.encoderGRU_back.forward(back_emb_sample, self.x_sample.shape[0], batch_size = self.x_sample.shape[1]) context_sample = tensor.concatenate((encode_forward_sample[0], encode_backward_sample[0][::-1]), axis = 2) state_init_sample =self.initer.forward(encode_backward_sample[0][::-1][0]) self.state_init_sample = state_init_sample self.context_sample = context_sample self.samples, self.probs_sample, self.updates_sample = 
self.decode_sample(state_init_sample, context_sample, self.length_sample, self.n_samples) self.y_decode = tensor.vector('y_decode', dtype = 'int64') self.context_decode = tensor.tensor3('context_decode', dtype = 'float32') self.c_decode = tensor.matrix('c_decode', dtype = 'float32') self.state_decode = tensor.matrix('state_decode', dtype = 'float32') self.emb_decode = tensor.matrix('emb_decode', dtype = 'float32') def encode(self, x): if not hasattr(self, "encoder"): self.encoder = theano.function(inputs = [self.x,self.xmask], outputs = [self.context]) x = numpy.reshape(x, (x.shape[0], 1)) xmask = numpy.ones(x.shape, dtype = 'float32') return self.encoder(x, xmask) def get_trg_embedding(self, y): if not hasattr(self, "get_trg_embeddinger"): self.get_trg_embeddinger = theano.function(inputs = [self.y_decode], outputs = [self.emb_trg.forward(self.y_decode)]) return self.get_trg_embeddinger(y) def get_init(self, c): if not hasattr(self, "get_initer"): self.get_initer = theano.function(inputs = [self.context], outputs = [self.initer.forward(context_backward[0])]) return self.get_initer(c) def get_context_and_init(self, x): if not hasattr(self, "get_context_and_initer"): self.get_context_and_initer = theano.function(inputs = [self.x,self.xmask], outputs = [self.context, self.state_init]) x = numpy.reshape(x, (x.shape[0], 1)) xmask = numpy.ones(x.shape, dtype = 'float32') return self.get_context_and_initer(x, xmask) def get_probs(self, c, state, emb): if not hasattr(self, "get_probser"): self.get_probser = theano.function(inputs = [self.context_decode, self.state_decode, self.emb_decode], outputs = self.decoderGRU.decode_probs(self.context_decode, self.state_decode, self.emb_decode)) return self.get_probser(c, state, emb) def get_next(self, c, state, emb): if not hasattr(self, "get_nexter"): self.get_nexter = theano.function(inputs = [self.c_decode, self.state_decode, self.emb_decode], outputs = self.decoderGRU.decode_next(self.c_decode, self.state_decode, self.emb_decode)) return self.get_nexter(c, state, emb) def get_cost(self, x, xmask, y, ymask): if not hasattr(self, "get_coster"): self.get_coster = theano.function(inputs = [self.x, self.xmask, self.y, self.ymask], outputs = [self.cost]) return self.get_coster(x, xmask, y, ymask) def get_sample(self, x, length, n_samples): if not hasattr(self, "get_sampler"): self.get_sampler = theano.function(inputs = [self.x_sample, self.length_sample, self.n_samples], outputs = [self.samples, self.probs_sample], updates = self.updates_sample) return self.get_sampler(x, length, n_samples)
BSD 3-Clause New or Revised License
rapid7/vm-console-client-python
rapid7vmconsole/models/scan_template_vulnerability_checks.py
ScanTemplateVulnerabilityChecks.unsafe
python
def unsafe(self, unsafe): self._unsafe = unsafe
Sets the unsafe of this ScanTemplateVulnerabilityChecks. Whether checks considered \"unsafe\" are assessed during a scan. # noqa: E501 :param unsafe: The unsafe of this ScanTemplateVulnerabilityChecks. # noqa: E501 :type: bool
https://github.com/rapid7/vm-console-client-python/blob/55e1f573967bce27cc9a2d10c12a949b1142c2b3/rapid7vmconsole/models/scan_template_vulnerability_checks.py#L228-L237
import pprint import re import six class ScanTemplateVulnerabilityChecks(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'categories': 'ScanTemplateVulnerabilityCheckCategories', 'correlate': 'bool', 'individual': 'ScanTemplateVulnerabilityCheckIndividual', 'links': 'list[Link]', 'potential': 'bool', 'types': 'VulnerabilityCheckType', 'unsafe': 'bool' } attribute_map = { 'categories': 'categories', 'correlate': 'correlate', 'individual': 'individual', 'links': 'links', 'potential': 'potential', 'types': 'types', 'unsafe': 'unsafe' } def __init__(self, categories=None, correlate=None, individual=None, links=None, potential=None, types=None, unsafe=None): self._categories = None self._correlate = None self._individual = None self._links = None self._potential = None self._types = None self._unsafe = None self.discriminator = None if categories is not None: self.categories = categories if correlate is not None: self.correlate = correlate if individual is not None: self.individual = individual if links is not None: self.links = links if potential is not None: self.potential = potential if types is not None: self.types = types if unsafe is not None: self.unsafe = unsafe @property def categories(self): return self._categories @categories.setter def categories(self, categories): self._categories = categories @property def correlate(self): return self._correlate @correlate.setter def correlate(self, correlate): self._correlate = correlate @property def individual(self): return self._individual @individual.setter def individual(self, individual): self._individual = individual @property def links(self): return self._links @links.setter def links(self, links): self._links = links @property def potential(self): return self._potential @potential.setter def potential(self, potential): self._potential = potential @property def types(self): return self._types @types.setter def types(self, types): self._types = types @property def unsafe(self): return self._unsafe @unsafe.setter
MIT License
missionpinball/mpf
mpf/core/platform.py
DriverPlatform.__init__
python
def __init__(self, machine): super().__init__(machine) self.features['has_drivers'] = True self.features['max_pulse'] = 255
Add driver feature and default max_pulse length.
https://github.com/missionpinball/mpf/blob/1eda6ba6892b8f7cc6dedf6cb6472ff92293b8ef/mpf/core/platform.py#L513-L520
import abc import asyncio from collections import namedtuple from enum import Enum from typing import Optional, Dict, List from mpf.core.logging import LogMixin from mpf.core.utility_functions import Util MYPY = False if MYPY: from mpf.devices.switch import Switch from mpf.devices.stepper import Stepper from mpf.platforms.interfaces.driver_platform_interface import DriverPlatformInterface from mpf.platforms.interfaces.switch_platform_interface import SwitchPlatformInterface from mpf.platforms.interfaces.light_platform_interface import LightPlatformInterface from mpf.platforms.interfaces.servo_platform_interface import ServoPlatformInterface from mpf.platforms.interfaces.segment_display_platform_interface import SegmentDisplayPlatformInterface from mpf.platforms.interfaces.hardware_sound_platform_interface import HardwareSoundPlatformInterface from mpf.platforms.interfaces.stepper_platform_interface import StepperPlatformInterface from mpf.platforms.interfaces.accelerometer_platform_interface import AccelerometerPlatformInterface from mpf.platforms.interfaces.i2c_platform_interface import I2cPlatformInterface from mpf.platforms.interfaces.dmd_platform import DmdPlatformInterface from mpf.core.machine import MachineController class BasePlatform(LogMixin, metaclass=abc.ABCMeta): __slots__ = ["machine", "features", "debug"] def __init__(self, machine): self.machine = machine self.features = {} super().__init__() self.debug = False self.features['has_dmds'] = False self.features['has_rgb_dmds'] = False self.features['has_accelerometers'] = False self.features['has_i2c'] = False self.features['has_servos'] = False self.features['has_lights'] = False self.features['has_switches'] = False self.features['has_drivers'] = False self.features['tickless'] = False self.features['has_segment_displays'] = False self.features['has_hardware_sound_systems'] = False self.features['has_steppers'] = False self.features['allow_empty_numbers'] = False self.features['hardware_eos_repulse'] = False def assert_has_feature(self, feature_name): if not self.features.get("has_{}".format(feature_name), False): self.raise_config_error("Platform {} does not support to configure {feature_name}. 
" "Please make sure the platform " "you configured for {feature_name} actually supports that type " "of devices.".format(self.__class__, feature_name=feature_name), 99) def _configure_device_logging_and_debug(self, logger_name, config): if config['debug']: self.debug = True config['console_log'] = 'full' config['file_log'] = 'full' self.configure_logging(logger_name, config['console_log'], config['file_log']) @classmethod def get_config_spec(cls): return False def get_info_string(self) -> str: return "Not implemented" def update_firmware(self) -> str: async def initialize(self): async def start(self): def tick(self): def stop(self): class DmdPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_dmds'] = True @abc.abstractmethod def configure_dmd(self) -> "DmdPlatformInterface": raise NotImplementedError class HardwareSoundPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_hardware_sound_systems'] = True @abc.abstractmethod def configure_hardware_sound_system(self) -> "HardwareSoundPlatformInterface": raise NotImplementedError class RgbDmdPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_rgb_dmds'] = True @abc.abstractmethod def configure_rgb_dmd(self, name: str) -> "DmdPlatformInterface": raise NotImplementedError class SegmentDisplayPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_segment_displays'] = True @classmethod def get_segment_display_config_section(cls) -> Optional[str]: return None def validate_segment_display_section(self, segment_display, config) -> dict: if self.get_segment_display_config_section(): spec = self.get_segment_display_config_section() config = segment_display.machine.config_validator.validate_config(spec, config, segment_display.name) elif config: raise AssertionError("No platform_config supported but not empty {} for segment display {}". 
format(config, segment_display.name)) return config @abc.abstractmethod async def configure_segment_display(self, number: str, display_size: int, platform_settings) -> "SegmentDisplayPlatformInterface": raise NotImplementedError class SegmentDisplaySoftwareFlashPlatform(SegmentDisplayPlatform, metaclass=abc.ABCMeta): def __init__(self, machine): super().__init__(machine) self._displays = set() self._display_flash_task = None async def initialize(self): await super().initialize() self._display_flash_task = self.machine.clock.loop.create_task(self._display_flash()) self._display_flash_task.add_done_callback(Util.raise_exceptions) async def _display_flash(self): wait_time = 1 / (self.config['display_flash_frequency'] * 2) while True: await asyncio.sleep(wait_time) for display in self._displays: display.set_software_flash(True) await asyncio.sleep(wait_time) for display in self._displays: display.set_software_flash(False) def stop(self): super().stop() if self._display_flash_task: self._display_flash_task.cancel() def _handle_software_flash(self, display): self._displays.add(display) class AccelerometerPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_accelerometers'] = True @abc.abstractmethod def configure_accelerometer(self, number: str, config: dict, callback) -> "AccelerometerPlatformInterface": raise NotImplementedError class I2cPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_i2c'] = True async def configure_i2c(self, number: str) -> "I2cPlatformInterface": raise NotImplementedError class ServoPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_servos'] = True @abc.abstractmethod async def configure_servo(self, number: str) -> "ServoPlatformInterface": raise NotImplementedError class StepperPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_steppers'] = True @classmethod def get_stepper_config_section(cls) -> Optional[str]: return None def validate_stepper_section(self, stepper: "Stepper", config: dict) -> dict: if self.get_stepper_config_section(): spec = self.get_stepper_config_section() config = stepper.machine.config_validator.validate_config(spec, config, stepper.name) elif config: raise AssertionError("No platform_config supported but not empty {} for stepper {}". 
format(config, stepper.name)) return config @abc.abstractmethod async def configure_stepper(self, number: str, config: dict) -> "StepperPlatformInterface": raise NotImplementedError class LightConfigColors(Enum): RED = 1 GREEN = 2 BLUE = 3 WHITE = 4 NONE = 5 LightConfig = namedtuple("LightConfig", ["name", "color"]) class LightsPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_lights'] = True @abc.abstractmethod def parse_light_number_to_channels(self, number: str, subtype: str): raise NotImplementedError def light_sync(self): @abc.abstractmethod def configure_light(self, number: str, subtype: str, config: LightConfig, platform_settings: dict) -> "LightPlatformInterface": raise NotImplementedError SwitchConfig = namedtuple("SwitchConfig", ["name", "invert", "debounce"]) class SwitchPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = [] def __init__(self, machine): super().__init__(machine) self.features['has_switches'] = True @abc.abstractmethod def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict) -> "SwitchPlatformInterface": raise NotImplementedError @classmethod def get_switch_config_section(cls) -> Optional[str]: return None def validate_switch_section(self, switch: "Switch", config: dict) -> dict: if self.get_switch_config_section(): spec = self.get_switch_config_section() config = switch.machine.config_validator.validate_config(spec, config, switch.name) elif config: raise AssertionError("No platform_config supported but not empty {} for switch {}". format(config, switch.name)) return config @abc.abstractmethod async def get_hw_switch_states(self) -> Dict[str, bool]: raise NotImplementedError SwitchSettings = namedtuple("SwitchSettings", ["hw_switch", "invert", "debounce"]) DriverSettings = namedtuple("DriverSettings", ["hw_driver", "pulse_settings", "hold_settings", "recycle"]) DriverConfig = namedtuple("DriverConfig", ["name", "default_pulse_ms", "default_pulse_power", "default_hold_power", "default_recycle", "max_pulse_ms", "max_pulse_power", "max_hold_power"]) RepulseSettings = namedtuple("RepulseSettings", ["enable_repulse", "debounce_ms"]) class DriverPlatform(BasePlatform, metaclass=abc.ABCMeta): __slots__ = []
MIT License
opennetworkingfoundation/tapi
RI/flask_server/tapi_server/models/tapi_connectivity_context_augmentation4.py
TapiConnectivityContextAugmentation4.__init__
python
def __init__(self, connectivity_context=None): self.openapi_types = { 'connectivity_context': TapiConnectivityConnectivityContext } self.attribute_map = { 'connectivity_context': 'connectivity-context' } self._connectivity_context = connectivity_context
TapiConnectivityContextAugmentation4 - a model defined in OpenAPI :param connectivity_context: The connectivity_context of this TapiConnectivityContextAugmentation4. # noqa: E501 :type connectivity_context: TapiConnectivityConnectivityContext
https://github.com/opennetworkingfoundation/tapi/blob/1f3fd9483d5674552c5a31206c97399c8c151897/RI/flask_server/tapi_server/models/tapi_connectivity_context_augmentation4.py#L19-L33
from __future__ import absolute_import from datetime import date, datetime from typing import List, Dict from tapi_server.models.base_model_ import Model from tapi_server.models.tapi_connectivity_connectivity_context import TapiConnectivityConnectivityContext from tapi_server import util class TapiConnectivityContextAugmentation4(Model):
Apache License 2.0
datadotworld/data.world-py
datadotworld/client/_swagger/models/oauth_token_reference.py
OauthTokenReference.owner
python
def owner(self, owner): if owner is None: raise ValueError("Invalid value for `owner`, must not be `None`") if owner is not None and len(owner) > 31: raise ValueError("Invalid value for `owner`, length must be less than or equal to `31`") if owner is not None and len(owner) < 3: raise ValueError("Invalid value for `owner`, length must be greater than or equal to `3`") if owner is not None and not re.search('[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]', owner): raise ValueError("Invalid value for `owner`, must be a follow pattern or equal to `/[a-z0-9](?:-(?!-)|[a-z0-9])+[a-z0-9]/`") self._owner = owner
Sets the owner of this OauthTokenReference. User name of the owner of the OAuth token within data.world. :param owner: The owner of this OauthTokenReference. :type: str
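A hedged sketch of the validation behaviour; `ref` stands for an existing OauthTokenReference instance and the owner values are placeholders:

ref.owner = 'example-user'         # accepted: 3-31 chars matching the slug pattern
try:
    ref.owner = 'ab'               # too short, raises ValueError
except ValueError as err:
    print(err)
try:
    ref.owner = None               # owner is required, also raises ValueError
except ValueError as err:
    print(err)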
https://github.com/datadotworld/data.world-py/blob/7e5f474b655f4f0c88cc6862353e4d52c0e0bb31/datadotworld/client/_swagger/models/oauth_token_reference.py#L93-L110
from pprint import pformat from six import iteritems import re class OauthTokenReference(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'id': 'str', 'owner': 'str', 'site': 'str' } attribute_map = { 'id': 'id', 'owner': 'owner', 'site': 'site' } def __init__(self, id=None, owner=None, site=None): self._id = None self._owner = None self._site = None self.id = id self.owner = owner self.site = site @property def id(self): return self._id @id.setter def id(self, id): if id is None: raise ValueError("Invalid value for `id`, must not be `None`") self._id = id @property def owner(self): return self._owner @owner.setter
Apache License 2.0
geoscienceaustralia/agdc
tests/test_landsat_tiler.py
TestLandsatTiler.get_tile_pathnames
python
def get_tile_pathnames(expected_conn, output_conn): sql = """-- Retrieve list of tile.tile_pathname from each database select tile_type_id, tile_pathname from tile where tile_class_id = 1 """ db_cursor = expected_conn.cursor() db_cursor.execute(sql) expected_dict = {} for record in db_cursor: expected_dict[os.path.basename(record[1])] = (record[0], record[1]) db_cursor = output_conn.cursor() db_cursor.execute(sql) output_dict = {} for record in db_cursor: output_dict[os.path.basename(record[1])] = (record[0], record[1]) return (expected_dict, output_dict)
From two different databases, get the tile pathnames from the tile table. Return each as a dictionary of {basename: (tile_type_id, full path)}
https://github.com/geoscienceaustralia/agdc/blob/2e22c6bdd9305555db3615305ff6a5df6219cd51/tests/test_landsat_tiler.py#L503-L521
import sys import os import subprocess import unittest import dbutil import dbcompare from osgeo import gdal import numpy import re class TestLandsatTiler(unittest.TestCase): def process_args(self): MODULE = 'new_ingest_benchmark' SUITE = 'benchmark' self.INPUT_DIR = dbutil.input_directory(MODULE, SUITE) self.OUTPUT_DIR = dbutil.output_directory(MODULE, SUITE) self.EXPECTED_DIR = dbutil.expected_directory(MODULE, SUITE) mode_desc_dict = {0: 'Initialise benchmark data in the expected directory', 1: 'Do not do ingestion. Compare existing ingestion ' 'in\n %s\n with benchmark\n %s\n' %(self.OUTPUT_DIR, self.EXPECTED_DIR), 2: 'Compare from this run with ' 'expected benchmark database exiting if ' 'they are different', 3: 'Compare databases and also compare tiles, even if ' 'the databases are different'} if len(sys.argv) < 2: mode = -1 else: try: mode = int(sys.argv[1]) except ValueError: mode = -1 msg = '' if mode not in [0, 1, 2, 3]: msg = 'Please specify a mode as follows:\n' for mode_num, desc in mode_desc_dict.items(): msg = msg + 'python test_landsat_tiler.py %d:\t%s\n' %(mode_num, desc) return mode, msg def setUp(self): self.PQA_CONTIGUITY_BIT = 8 self.test_dbname = None self.expected_dbname = None self.test_conn = None self.expected_conn = None self.logfile = None self.bands_expected = None self.bands_output = None self.mode, self.msg = self.process_args() if self.mode == 0: self.OUTPUT_DIR = self.EXPECTED_DIR print 'OUTPUT_DIR =%s' %self.OUTPUT_DIR print self.mode print self.msg def test_landsat_tiler(self): if self.mode not in [0, 1, 2, 3]: self.skipTest('Skipping test_landsat_tiler since flag is not in [0, 1, 2, 3]') logfile_path = os.path.join(self.OUTPUT_DIR, "test_landsat_tiler.log") self.logfile = open(logfile_path, "w") self.test_dbname = dbutil.random_name("test_tiler") print 'About to create dbase from %s' %(os.path.join(self.INPUT_DIR, "hypercube_empty.sql")) if self.mode != 1: dbutil.TESTSERVER.create(self.test_dbname, self.INPUT_DIR, "hypercube_empty.sql") tile_root = os.path.join(self.OUTPUT_DIR, "tiles") configuration_dict = {'dbname': self.test_dbname, 'tile_root': tile_root} config_file_path = dbutil.update_config_file2(configuration_dict, self.INPUT_DIR, self.OUTPUT_DIR, "test_datacube.conf") ingest_dir = os.path.join(self.INPUT_DIR, 'tiler_testing') dbupdater_cmd = ["python", "dbupdater.py", "--debug", "--config=%s" % config_file_path, "--source=%s" % ingest_dir, "--removedblist", "--followsymlinks"] if self.mode != 1: subprocess.check_call(dbupdater_cmd, stdout=self.logfile, stderr=subprocess.STDOUT) landsat_tiler_cmd = ["python", "landsat_tiler.py", "--config=%s" % config_file_path] if self.mode != 1: subprocess.check_call(landsat_tiler_cmd, stdout=self.logfile, stderr=subprocess.STDOUT) if self.mode != 1: dbutil.TESTSERVER.save(self.test_dbname, self.OUTPUT_DIR, "tiler_testing.sql") if self.mode > 0 and os.path.isfile(os.path.join(self.EXPECTED_DIR, "tiler_testing.sql")): print 'starting to check differences' if self.mode == 1: self.test_dbname = dbutil.random_name("tiler_testing") dbutil.TESTSERVER.create(self.test_dbname, self.OUTPUT_DIR, "tiler_testing.sql") self.expected_dbname = dbutil.random_name("expected_tiler_testing") dbutil.TESTSERVER.create(self.expected_dbname, self.EXPECTED_DIR, "tiler_testing.sql") self.test_conn = dbutil.TESTSERVER.connect(self.test_dbname) self.expected_conn = dbutil.TESTSERVER.connect(self.expected_dbname) dbases_agree = dbcompare.compare_databases(self.test_conn, self.expected_conn, output=self.logfile, verbosity=3) if self.mode 
== 2: assert dbases_agree, "Databases do not match." expected_tile_dict, output_tile_dict = self.get_tile_pathnames(self.expected_conn, self.test_conn) tiles_expected = set(expected_tile_dict.keys()) tiles_output = set(output_tile_dict.keys()) tiles_expected_or_output = tiles_expected | tiles_output self.bands_expected = self.construct_bands_source_dict(self.expected_conn) self.bands_output = self.construct_bands_source_dict(self.test_conn) file_pattern = [r'(?P<sat>\w+)_(?P<sensor>\w+)_', r'(?P<processing_level>\w+)_', r'(?P<xindex>-*\d+)_(?P<yindex>-*\d+)_' r'(?P<year>\d+)-(?P<month>\d+)-' r'(?P<day>\d+)T(?P<hour>\d+)-(?P<minute>\d+)-', r'(?P<second_whole>\d+)\.(?P<second_fraction>\d+)' r'\.(?P<file_extension>.+)'] pattern = re.compile(''.join(file_pattern)) pixel_count_dict = {} difference_count_dict = {} all_levels_info_dict = {} for tile_name in tiles_expected_or_output: print 'processing tile %s' %tile_name tile_type_id_expected = None tile_type_id_output = None fname_expected = None fname_output = None if tile_name in tiles_expected: tile_type_id_expected, fname_expected = expected_tile_dict[tile_name] if tile_name in tiles_output: tile_type_id_output, fname_output = output_tile_dict[tile_name] matchobj = re.match(pattern, tile_name) tile_name_dict = matchobj.groupdict() full_key_expected = self.get_tiletype_sat_sens_level(tile_type_id_expected, tile_name_dict) full_key_output = self.get_tiletype_sat_sens_level(tile_type_id_output, tile_name_dict) full_key = self.check_equal_or_null(full_key_expected, full_key_output) level_dict_expected = {} level_dict_output = {} if full_key in all_levels_info_dict: (level_dict_expected, level_dict_output) = all_levels_info_dict[full_key] if level_dict_expected == {} and full_key_expected != None: level_dict_expected = self.collect_source_bands(self.bands_expected, full_key) if level_dict_output == {} and full_key_output != None: level_dict_output = self.collect_source_bands(self.bands_output, full_key) if full_key not in all_levels_info_dict: all_levels_info_dict[full_key] = [level_dict_expected, level_dict_output] if all_levels_info_dict[full_key][0] == {} and level_dict_expected != {}: all_levels_info_dict[full_key][0] = level_dict_expected if all_levels_info_dict[full_key][1] == {} and level_dict_output != {}: all_levels_info_dict[full_key][1] = level_dict_output ([data_expected, data_output], number_layers) = self.load_and_check(fname_expected, fname_output, level_dict_expected, level_dict_output) assert bool(fname_expected) == (data_expected != None) and bool(fname_output) == (data_output != None), "data array should exist if and only if fname exists" for ilayer in range(number_layers): band_expected, dtype_expected = self.get_band_data(data_expected, ilayer) band_output, dtype_output = self.get_band_data(data_output, ilayer) assert (band_expected == None) == (dtype_expected == None) and (band_output == None) == (dtype_output == None), "band data should exist if and only if dtype exists" dtype_this = self.check_equal_or_null(dtype_expected, dtype_output) if tile_name_dict['processing_level'] == 'PQA': bin_count = 16 + 1 else: bin_count = numpy.iinfo(dtype_this).max - numpy.iinfo(dtype_this).min + 1 assert bin_count < 66000, "datatype is more than 16" "bits, need to add code to coarsen the" "histogram bins or use apriori max and" "min values of the data" result_key = (full_key[3], ilayer + 1) if result_key not in pixel_count_dict: pixel_count_dict[result_key] = numpy.zeros(shape=(5), dtype=numpy.uint64) difference_count_dict[result_key] = 
numpy.zeros(shape=(bin_count), dtype=numpy.uint64) pixel_count = pixel_count_dict[result_key] difference_count = difference_count_dict[result_key] if tile_name_dict['processing_level'] == 'PQA': if band_expected is None: band_expected = 0 if band_output is None: band_output = 0 index_expected = numpy.bitwise_and(band_expected, 1 << self.PQA_CONTIGUITY_BIT) > 0 index_output = numpy.bitwise_and(band_output, 1 << self.PQA_CONTIGUITY_BIT) > 0 else: nodata_value = level_dict_output[ilayer + 1]['nodata_value'] if band_expected is None: band_expected = nodata_value if band_output is None: band_output = nodata_value index_expected = band_expected != nodata_value index_output = band_output != nodata_value pixel_count[0] += numpy.count_nonzero(index_expected) pixel_count[1] += numpy.count_nonzero(index_output) pixel_count[2] += numpy.count_nonzero(numpy.logical_and(index_expected, index_output)) pixel_count[3] += numpy.count_nonzero(numpy.logical_and (index_expected, ~index_output)) pixel_count[4] += numpy.count_nonzero(numpy.logical_and (~index_expected, index_output)) index_both = numpy.logical_and(index_expected, index_output) if numpy.count_nonzero(index_both) == 0: continue valid_data_expected = band_expected[index_both].ravel() valid_data_output = band_output[index_both].ravel() if tile_name_dict['processing_level'] == 'PQA': difference = self.count_bitwise_diffs(valid_data_expected, valid_data_output) else: difference = abs(valid_data_output.astype(numpy.int64) - valid_data_expected.astype(numpy.int64)) hist, dummy_bin_edges = numpy.histogram(difference, numpy.array(range(bin_count + 1), dtype=numpy.uint64)) difference_count += hist band_expected = None band_output = None difference = None data_expected = None data_output = None fp = open(os.path.join(self.OUTPUT_DIR, 'Histogram_output.txt'), 'w') fp.writelines('##### COMPARISON OF TILED DATA IN FOLLOWING ' 'DIRECTORES\n%s\n%s\n' %(self.EXPECTED_DIR, self.OUTPUT_DIR)) result_keys_processed = [] for full_key in all_levels_info_dict.keys(): dummy, dummy, dummy, processing_level = full_key top_layer_result_key = (processing_level, 1) if top_layer_result_key in result_keys_processed: continue fp.writelines('#### Processing Level: %s\n' %processing_level) level_dict_expected, level_dict_output = all_levels_info_dict[full_key] assert set(level_dict_expected.keys()) == set(level_dict_output.keys()), "different key sets" number_layers = len(level_dict_output.keys()) for this_layer in range(1, number_layers + 1): result_key = (processing_level, this_layer) result_keys_processed.append(result_key) fp.writelines('### tile_layer = %d\n' %this_layer) for key, val in level_dict_expected[this_layer].items(): if key == 'tile_layer' or key == 'level_name': continue outline = '# %s = %s' %(key, val) if str(level_dict_output[this_layer][key]) != str(val): outline = '%s (%s in output database)' %(outline, level_dict_output[this_layer][key]) fp.writelines('%s\n' %outline) fp.writelines('#Valid data counts\n') pixel_count = pixel_count_dict[result_key] count_desc = ['Expected\t', 'Output\t\t', 'Common\t\t', 'Missing\t\t', 'Extra\t\t'] for desc, num in zip(count_desc, pixel_count): fp.writelines('\t\t%s%d\n' %(desc, num)) fp.writelines('#Histogram of differences in valid data\n') difference_count = difference_count_dict[result_key] index_nonzero_bins = difference_count > 0 for bin_no in range(len(difference_count)): if index_nonzero_bins[bin_no]: fp.writelines('\t\tDifference of %d: %d\n' %(bin_no, difference_count[bin_no])) fp.close() else: if self.mode > 0: 
self.skipTest("Expected database save file not found.") def tearDown(self): if self.test_conn: self.test_conn.close() if self.expected_conn: self.expected_conn.close() if self.test_dbname: dbutil.TESTSERVER.drop(self.test_dbname) if self.expected_dbname: dbutil.TESTSERVER.drop(self.expected_dbname) if self.logfile: self.logfile.close() @staticmethod
BSD 3-Clause New or Revised License
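A small illustration of the comparison this helper enables, using made-up basenames and paths: each database is reduced to a {basename: (tile_type_id, full path)} dict, so set operations on the keys reveal missing or extra tiles.

expected = {'tile_a.tif': (1, '/expected/tiles/tile_a.tif')}
output = {'tile_a.tif': (1, '/output/tiles/tile_a.tif'),
          'tile_b.tif': (1, '/output/tiles/tile_b.tif')}

missing = set(expected) - set(output)   # tiles the run failed to produce
extra = set(output) - set(expected)     # tiles the benchmark does not know about
print(missing, extra)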
faucetsdn/ryu
ryu/lib/bfdlib.py
ARPPacket.arp_packet
python
def arp_packet(opcode, src_mac, src_ip, dst_mac, dst_ip):
    pkt = packet.Packet()

    eth_pkt = ethernet.ethernet(dst_mac, src_mac, ETH_TYPE_ARP)
    pkt.add_protocol(eth_pkt)

    arp_pkt = arp.arp_ip(opcode, src_mac, src_ip, dst_mac, dst_ip)
    pkt.add_protocol(arp_pkt)

    pkt.serialize()
    return pkt.data
Generate an ARP packet encapsulated in an Ethernet frame.
https://github.com/faucetsdn/ryu/blob/537f35f4b2bc634ef05e3f28373eb5e24609f989/ryu/lib/bfdlib.py#L596-L610
import logging import time import random import six from ryu.base import app_manager from ryu.controller import event from ryu.controller import ofp_event from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER from ryu.controller.handler import set_ev_cls from ryu.exception import RyuException from ryu.ofproto.ether import ETH_TYPE_IP, ETH_TYPE_ARP from ryu.ofproto import ofproto_v1_3 from ryu.ofproto import inet from ryu.lib import hub from ryu.lib.packet import packet from ryu.lib.packet import ethernet from ryu.lib.packet import ipv4 from ryu.lib.packet import udp from ryu.lib.packet import bfd from ryu.lib.packet import arp from ryu.lib.packet.arp import ARP_REQUEST, ARP_REPLY LOG = logging.getLogger(__name__) UINT16_MAX = (1 << 16) - 1 UINT32_MAX = (1 << 32) - 1 BFD_CONTROL_UDP_PORT = 3784 BFD_ECHO_UDP_PORT = 3785 class BFDSession(object): def __init__(self, app, my_discr, dpid, ofport, src_mac, src_ip, src_port, dst_mac="FF:FF:FF:FF:FF:FF", dst_ip="255.255.255.255", detect_mult=3, desired_min_tx_interval=1000000, required_min_rx_interval=1000000, auth_type=0, auth_keys=None): auth_keys = auth_keys if auth_keys else {} assert not (auth_type and len(auth_keys) == 0) self.app = app self._session_state = bfd.BFD_STATE_DOWN self._remote_session_state = bfd.BFD_STATE_DOWN self._local_discr = my_discr self._remote_discr = 0 self._local_diag = 0 self._desired_min_tx_interval = 1000000 self._required_min_rx_interval = required_min_rx_interval self._remote_min_rx_interval = -1 self._demand_mode = 0 self._remote_demand_mode = 0 self._detect_mult = detect_mult self._auth_type = auth_type self._auth_keys = auth_keys if self._auth_type in [bfd.BFD_AUTH_KEYED_MD5, bfd.BFD_AUTH_METICULOUS_KEYED_MD5, bfd.BFD_AUTH_KEYED_SHA1, bfd.BFD_AUTH_METICULOUS_KEYED_SHA1]: self._rcv_auth_seq = 0 self._xmit_auth_seq = random.randint(0, UINT32_MAX) self._auth_seq_known = 0 self._cfg_desired_min_tx_interval = desired_min_tx_interval self._cfg_required_min_echo_rx_interval = 0 self._active_role = True self._detect_time = 0 self._xmit_period = None self._update_xmit_period() self._is_polling = True self._pending_final = False self._enable_send = True self._lock = None self.src_mac = src_mac self.dst_mac = dst_mac self.src_ip = src_ip self.dst_ip = dst_ip self.ipv4_id = random.randint(0, UINT16_MAX) self.src_port = src_port self.dst_port = BFD_CONTROL_UDP_PORT if dst_mac == "FF:FF:FF:FF:FF:FF" or dst_ip == "255.255.255.255": self._remote_addr_config = False else: self._remote_addr_config = True self.dpid = dpid self.datapath = None self.ofport = ofport hub.spawn(self._send_loop) LOG.info("[BFD][%s][INIT] BFD Session initialized.", hex(self._local_discr)) @property def my_discr(self): return self._local_discr @property def your_discr(self): return self._remote_discr def set_remote_addr(self, dst_mac, dst_ip): self.dst_mac = dst_mac self.dst_ip = dst_ip if not (dst_mac == "FF:FF:FF:FF:FF:FF" or dst_ip == "255.255.255.255"): self._remote_addr_config = True LOG.info("[BFD][%s][REMOTE] Remote address configured: %s, %s.", hex(self._local_discr), self.dst_ip, self.dst_mac) def recv(self, bfd_pkt): LOG.debug("[BFD][%s][RECV] BFD Control received: %s", hex(self._local_discr), six.binary_type(bfd_pkt)) self._remote_discr = bfd_pkt.my_discr self._remote_state = bfd_pkt.state self._remote_demand_mode = bfd_pkt.flags & bfd.BFD_FLAG_DEMAND if self._remote_min_rx_interval != bfd_pkt.required_min_rx_interval: self._remote_min_rx_interval = bfd_pkt.required_min_rx_interval self._update_xmit_period() if bfd_pkt.flags & 
bfd.BFD_FLAG_FINAL and self._is_polling: self._is_polling = False if self._session_state == bfd.BFD_STATE_ADMIN_DOWN: return if bfd_pkt.state == bfd.BFD_STATE_ADMIN_DOWN: if self._session_state != bfd.BFD_STATE_DOWN: self._set_state(bfd.BFD_STATE_DOWN, bfd.BFD_DIAG_NEIG_SIG_SESS_DOWN) else: if self._session_state == bfd.BFD_STATE_DOWN: if bfd_pkt.state == bfd.BFD_STATE_DOWN: self._set_state(bfd.BFD_STATE_INIT) elif bfd_pkt.state == bfd.BFD_STATE_INIT: self._set_state(bfd.BFD_STATE_UP) elif self._session_state == bfd.BFD_STATE_INIT: if bfd_pkt.state in [bfd.BFD_STATE_INIT, bfd.BFD_STATE_UP]: self._set_state(bfd.BFD_STATE_UP) else: if bfd_pkt.state == bfd.BFD_STATE_DOWN: self._set_state(bfd.BFD_STATE_DOWN, bfd.BFD_DIAG_NEIG_SIG_SESS_DOWN) if self._remote_demand_mode and self._session_state == bfd.BFD_STATE_UP and self._remote_session_state == bfd.BFD_STATE_UP: self._enable_send = False if not self._remote_demand_mode or self._session_state != bfd.BFD_STATE_UP or self._remote_session_state != bfd.BFD_STATE_UP: if not self._enable_send: self._enable_send = True hub.spawn(self._send_loop) if self._detect_time == 0: self._detect_time = bfd_pkt.desired_min_tx_interval * bfd_pkt.detect_mult / 1000000.0 hub.spawn(self._recv_timeout_loop) if bfd_pkt.flags & bfd.BFD_FLAG_POLL: self._pending_final = True self._detect_time = bfd_pkt.desired_min_tx_interval * bfd_pkt.detect_mult / 1000000.0 if self._auth_type in [bfd.BFD_AUTH_KEYED_MD5, bfd.BFD_AUTH_METICULOUS_KEYED_MD5, bfd.BFD_AUTH_KEYED_SHA1, bfd.BFD_AUTH_METICULOUS_KEYED_SHA1]: self._rcv_auth_seq = bfd_pkt.auth_cls.seq self._auth_seq_known = 1 if self._lock is not None: self._lock.set() def _set_state(self, new_state, diag=None): old_state = self._session_state LOG.info("[BFD][%s][STATE] State changed from %s to %s.", hex(self._local_discr), bfd.BFD_STATE_NAME[old_state], bfd.BFD_STATE_NAME[new_state]) self._session_state = new_state if new_state == bfd.BFD_STATE_DOWN: if diag is not None: self._local_diag = diag self._desired_min_tx_interval = 1000000 self._is_polling = True self._update_xmit_period() elif new_state == bfd.BFD_STATE_UP: self._desired_min_tx_interval = self._cfg_desired_min_tx_interval self._is_polling = True self._update_xmit_period() self.app.send_event_to_observers( EventBFDSessionStateChanged(self, old_state, new_state)) def _recv_timeout_loop(self): while self._detect_time: last_wait = time.time() self._lock = hub.Event() self._lock.wait(timeout=self._detect_time) if self._lock.is_set(): if getattr(self, "_auth_seq_known", 0): if last_wait > time.time() + 2 * self._detect_time: self._auth_seq_known = 0 else: LOG.info("[BFD][%s][RECV] BFD Session timed out.", hex(self._local_discr)) if self._session_state not in [bfd.BFD_STATE_DOWN, bfd.BFD_STATE_ADMIN_DOWN]: self._set_state(bfd.BFD_STATE_DOWN, bfd.BFD_DIAG_CTRL_DETECT_TIME_EXPIRED) if getattr(self, "_auth_seq_known", 0): self._auth_seq_known = 0 def _update_xmit_period(self): if self._desired_min_tx_interval > self._remote_min_rx_interval: xmit_period = self._desired_min_tx_interval else: xmit_period = self._remote_min_rx_interval if self._detect_mult == 1: xmit_period *= random.randint(75, 90) / 100.0 else: xmit_period *= random.randint(75, 100) / 100.0 self._xmit_period = xmit_period / 1000000.0 LOG.info("[BFD][%s][XMIT] Transmission period changed to %f", hex(self._local_discr), self._xmit_period) def _send_loop(self): while self._enable_send: hub.sleep(self._xmit_period) if self._remote_discr == 0 and not self._active_role: continue if self._remote_min_rx_interval == 0: 
continue if self._remote_demand_mode and self._session_state == bfd.BFD_STATE_UP and self._remote_session_state == bfd.BFD_STATE_UP and not self._is_polling: continue self._send() def _send(self): if self.datapath is None: return flags = 0 if self._pending_final: flags |= bfd.BFD_FLAG_FINAL self._pending_final = False self._is_polling = False if self._is_polling: flags |= bfd.BFD_FLAG_POLL auth_cls = None if self._auth_type: auth_key_id = list(self._auth_keys.keys())[ random.randint(0, len(list(self._auth_keys.keys())) - 1)] auth_key = self._auth_keys[auth_key_id] if self._auth_type == bfd.BFD_AUTH_SIMPLE_PASS: auth_cls = bfd.SimplePassword(auth_key_id=auth_key_id, password=auth_key) if self._auth_type in [bfd.BFD_AUTH_KEYED_MD5, bfd.BFD_AUTH_METICULOUS_KEYED_MD5, bfd.BFD_AUTH_KEYED_SHA1, bfd.BFD_AUTH_METICULOUS_KEYED_SHA1]: if self._auth_type in [bfd.BFD_AUTH_KEYED_MD5, bfd.BFD_AUTH_KEYED_SHA1]: if random.randint(0, 1): self._xmit_auth_seq = (self._xmit_auth_seq + 1) & UINT32_MAX else: self._xmit_auth_seq = (self._xmit_auth_seq + 1) & UINT32_MAX auth_cls = bfd.bfd._auth_parsers[self._auth_type]( auth_key_id=auth_key_id, seq=self._xmit_auth_seq, auth_key=auth_key) if auth_cls is not None: flags |= bfd.BFD_FLAG_AUTH_PRESENT if self._demand_mode and self._session_state == bfd.BFD_STATE_UP and self._remote_session_state == bfd.BFD_STATE_UP: flags |= bfd.BFD_FLAG_DEMAND diag = self._local_diag state = self._session_state detect_mult = self._detect_mult my_discr = self._local_discr your_discr = self._remote_discr desired_min_tx_interval = self._desired_min_tx_interval required_min_rx_interval = self._required_min_rx_interval required_min_echo_rx_interval = self._cfg_required_min_echo_rx_interval src_mac = self.src_mac dst_mac = self.dst_mac src_ip = self.src_ip dst_ip = self.dst_ip self.ipv4_id = (self.ipv4_id + 1) & UINT16_MAX ipv4_id = self.ipv4_id src_port = self.src_port dst_port = self.dst_port data = BFDPacket.bfd_packet( src_mac=src_mac, dst_mac=dst_mac, src_ip=src_ip, dst_ip=dst_ip, ipv4_id=ipv4_id, src_port=src_port, dst_port=dst_port, diag=diag, state=state, flags=flags, detect_mult=detect_mult, my_discr=my_discr, your_discr=your_discr, desired_min_tx_interval=desired_min_tx_interval, required_min_rx_interval=required_min_rx_interval, required_min_echo_rx_interval=required_min_echo_rx_interval, auth_cls=auth_cls) datapath = self.datapath ofproto = datapath.ofproto parser = datapath.ofproto_parser actions = [parser.OFPActionOutput(self.ofport)] out = parser.OFPPacketOut(datapath=datapath, buffer_id=ofproto.OFP_NO_BUFFER, in_port=ofproto.OFPP_CONTROLLER, actions=actions, data=data) datapath.send_msg(out) LOG.debug("[BFD][%s][SEND] BFD Control sent.", hex(self._local_discr)) class BFDPacket(object): class BFDUnknownFormat(RyuException): message = '%(msg)s' @staticmethod def bfd_packet(src_mac, dst_mac, src_ip, dst_ip, ipv4_id, src_port, dst_port, diag=0, state=0, flags=0, detect_mult=0, my_discr=0, your_discr=0, desired_min_tx_interval=0, required_min_rx_interval=0, required_min_echo_rx_interval=0, auth_cls=None): pkt = packet.Packet() eth_pkt = ethernet.ethernet(dst_mac, src_mac, ETH_TYPE_IP) pkt.add_protocol(eth_pkt) ipv4_pkt = ipv4.ipv4(proto=inet.IPPROTO_UDP, src=src_ip, dst=dst_ip, tos=192, identification=ipv4_id, ttl=255) pkt.add_protocol(ipv4_pkt) udp_pkt = udp.udp(src_port=src_port, dst_port=dst_port) pkt.add_protocol(udp_pkt) bfd_pkt = bfd.bfd( ver=1, diag=diag, state=state, flags=flags, detect_mult=detect_mult, my_discr=my_discr, your_discr=your_discr, 
desired_min_tx_interval=desired_min_tx_interval, required_min_rx_interval=required_min_rx_interval, required_min_echo_rx_interval=required_min_echo_rx_interval, auth_cls=auth_cls) pkt.add_protocol(bfd_pkt) pkt.serialize() return pkt.data @staticmethod def bfd_parse(data): pkt = packet.Packet(data) i = iter(pkt) eth_pkt = next(i) assert isinstance(eth_pkt, ethernet.ethernet) ipv4_pkt = next(i) assert isinstance(ipv4_pkt, ipv4.ipv4) udp_pkt = next(i) assert isinstance(udp_pkt, udp.udp) udp_payload = next(i) return bfd.bfd.parser(udp_payload)[0] class ARPPacket(object): class ARPUnknownFormat(RyuException): message = '%(msg)s' @staticmethod
Apache License 2.0
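A hedged usage sketch for the static helper above; it assumes Ryu is installed and uses placeholder MAC and IP addresses.

from ryu.lib.bfdlib import ARPPacket
from ryu.lib.packet.arp import ARP_REQUEST

# Build a broadcast ARP request asking who has 10.0.0.2.
frame = ARPPacket.arp_packet(
    ARP_REQUEST,
    '00:11:22:33:44:55', '10.0.0.1',
    'ff:ff:ff:ff:ff:ff', '10.0.0.2')
# `frame` is the serialized Ethernet+ARP payload, e.g. for an OFPPacketOut message.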
hewlettpackard/python-ilorest-library-old
src/redfish/ris/rmc_helper.py
RmcClient.get_cache_dirname
python
def get_cache_dirname(self):
    parts = urlparse2.urlparse(self.get_base_url())
    pathstr = '%s/%s' % (parts.netloc, parts.path)
    return pathstr.replace('//', '/')
The REST client's current base URL converted to a cache directory path
https://github.com/hewlettpackard/python-ilorest-library-old/blob/b00fd417024485a77c4f71f913135831d674a177/src/redfish/ris/rmc_helper.py#L224-L228
import os import json import errno import logging import hashlib import urlparse2 import redfish.rest from .ris import (RisMonolith) from .sharedtypes import (JSONEncoder) from .config import (AutoConfigParser) LOGGER = logging.getLogger(__name__) class RdmcError(Exception): errcode = 1 def __init__(self, message): Exception.__init__(self, message) class InvalidCommandLineError(RdmcError): pass class FailureDuringCommitError(RdmcError): pass class UserNotAdminError(RdmcError): pass class UndefinedClientError(Exception): pass class InstanceNotFoundError(Exception): pass class CurrentlyLoggedInError(Exception): pass class NothingSelectedError(Exception): pass class NothingSelectedFilterError(Exception): pass class NothingSelectedSetError(Exception): pass class InvalidSelectionError(Exception): pass class IdTokenError(Exception): pass class SessionExpired(Exception): pass class ValueChangedError(Exception): pass class LoadSkipSettingError(Exception): pass class InvalidPathError(Exception): pass class UnableToObtainIloVersionError(Exception): pass class ValidationError(Exception): def __init__(self, errlist): super(ValidationError, self).__init__(errlist) self._errlist = errlist def get_errors(self): return self._errlist class IloResponseError(Exception): pass class RmcClient(object): def __init__(self, url=None, username=None, password=None, sessionkey=None, typepath=None, biospassword=None, is_redfish=False): if is_redfish: self._rest_client = redfish.rest.v1.redfish_client(base_url=url, username=username, password=password, sessionkey=sessionkey, biospassword=biospassword, is_redfish=is_redfish) else: self._rest_client = redfish.rest.v1.rest_client(base_url=url, username=username, password=password, sessionkey=sessionkey, biospassword=biospassword, is_redfish=is_redfish) self.typepath = typepath self._get_cache = dict() self._monolith = RisMonolith(self) self._selector = None self._filter_attr = None self._filter_value = None def get_username(self): return self._rest_client.get_username() def set_username(self, username): self._rest_client.set_username(username) def get_password(self): return self._rest_client.get_password() def set_password(self, password): self._rest_client.set_password(password) def get_bios_password(self): return self._rest_client.get_biospassword() def set_bios_password(self, biospasswordword): self._rest_client.set_biospassword(biospasswordword) bios_password = property(get_bios_password, set_bios_password) def get_session_key(self): return self._rest_client.get_session_key() def get_session_location(self): return self._rest_client.get_session_location() def get_authorization_key(self): return self._rest_client.get_authorization_key() def get_base_url(self): return self._rest_client.get_base_url() base_url = property(get_base_url, None)
Apache License 2.0
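A standalone illustration of the path derivation used by get_cache_dirname, written against the standard library (the class itself relies on urlparse2, which behaves the same way for this input).

from urllib.parse import urlparse

parts = urlparse('https://10.0.0.5/rest/v1')
pathstr = '%s/%s' % (parts.netloc, parts.path)  # '10.0.0.5//rest/v1'
print(pathstr.replace('//', '/'))               # '10.0.0.5/rest/v1'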
sohamtriveous/dbdump
dbdump.py
del_folder
python
def del_folder(devicename, path):
    cmd = add_adb_device(devicename)
    cmd = cmd + 'shell rm -r '
    new_cmd = cmd + path
    (status, output) = commands.getstatusoutput(new_cmd)
    if status:
        print 'Could not delete', path, sys.stderr
        return False
    else:
        return True
Delete files at a particular path on the device via `adb shell rm -r`.

:param devicename: device
:param path: path to be deleted
:return: True if the deletion succeeded, False otherwise
https://github.com/sohamtriveous/dbdump/blob/f800723c40285df8dd22203990d902c4105ace7d/dbdump.py#L74-L91
__author__ = 'sohammondal' import sys import commands import re def exec_cmd(cmd): (status, output) = commands.getstatusoutput(cmd) if status: print 'Could not execute', cmd, sys.stderr return False return True def find_all(pat, string): matches = re.findall(pat, string) if matches: return matches else: return None def find(pat, string): match = re.search(pat, string) if match: return match.group(1) else: return None def find_basic(pat, string): match = re.search(pat, string) if match: return match else: return None def add_adb_device(devicename=None): cmd = 'adb ' if devicename: cmd = cmd + '-s ' + devicename + ' ' return cmd
Apache License 2.0
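A hedged usage sketch (Python 2, matching the module above); the device serial and path are placeholders. With these arguments the helper ends up running `adb -s emulator-5554 shell rm -r /sdcard/dumps`.

from dbdump import del_folder

if del_folder('emulator-5554', '/sdcard/dumps'):
    print 'deleted /sdcard/dumps'
else:
    print 'could not delete /sdcard/dumps'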
microsoft/dowhy
dowhy/causal_estimator.py
CausalEstimator._generate_bootstrap_estimates
python
def _generate_bootstrap_estimates(self, num_bootstrap_simulations, sample_size_fraction):
    simulation_results = np.zeros(num_bootstrap_simulations)
    sample_size = int(sample_size_fraction * len(self._data))
    if sample_size > len(self._data):
        self.logger.warning("WARN: The sample size is greater than the data being sampled")
    self.logger.info("INFO: The sample size: {}".format(sample_size))
    self.logger.info("INFO: The number of simulations: {}".format(num_bootstrap_simulations))

    for index in range(num_bootstrap_simulations):
        new_data = resample(self._data, n_samples=sample_size)
        new_estimator = type(self)(
            new_data,
            self._target_estimand,
            self._target_estimand.treatment_variable,
            self._target_estimand.outcome_variable,
            treatment_value=self._treatment_value,
            control_value=self._control_value,
            test_significance=False,
            evaluate_effect_strength=False,
            confidence_intervals=False,
            target_units=self._target_units,
            effect_modifiers=self._effect_modifier_names,
            params=self.method_params
        )
        new_effect = new_estimator.estimate_effect()
        simulation_results[index] = new_effect.value

    estimates = CausalEstimator.BootstrapEstimates(
        simulation_results,
        {'num_simulations': num_bootstrap_simulations,
         'sample_size_fraction': sample_size_fraction})
    return estimates
Helper function to generate causal estimates over bootstrapped samples. :param num_bootstrap_simulations: Number of simulations for the bootstrap method. :param sample_size_fraction: Fraction of the dataset to be resampled. :returns: A collections.namedtuple containing a list of bootstrapped estimates and a dictionary containing parameters used for the bootstrap.
https://github.com/microsoft/dowhy/blob/9c1371efc580fde142cd2017bf7789e1a8e53814/dowhy/causal_estimator.py#L269-L313
import logging import numpy as np import pandas as pd import sympy as sp from collections import namedtuple from sklearn.utils import resample import dowhy.interpreters as interpreters from dowhy.utils.api import parse_state class CausalEstimator: DEFAULT_NUMBER_OF_SIMULATIONS_STAT_TEST = 1000 DEFAULT_NUMBER_OF_SIMULATIONS_CI = 100 DEFAULT_SAMPLE_SIZE_FRACTION = 1 DEFAULT_CONFIDENCE_LEVEL = 0.95 NUM_QUANTILES_TO_DISCRETIZE_CONT_COLS = 5 TEMP_CAT_COLUMN_PREFIX = "__categorical__" DEFAULT_NOTIMPLEMENTEDERROR_MSG = "not yet implemented for {0}. If you would this to be implemented in the next version, please raise an issue at https://github.com/microsoft/dowhy/issues" BootstrapEstimates = namedtuple('BootstrapEstimates', ['estimates', 'params']) DEFAULT_INTERPRET_METHOD = ["textual_effect_interpreter"] def __init__(self, data, identified_estimand, treatment, outcome, control_value=0, treatment_value=1, test_significance=False, evaluate_effect_strength=False, confidence_intervals=False, target_units=None, effect_modifiers=None, params=None): self._data = data self._target_estimand = identified_estimand self._treatment_name = treatment self._outcome_name = outcome[0] self._control_value = control_value self._treatment_value = treatment_value self._significance_test = test_significance self._effect_strength_eval = evaluate_effect_strength self._target_units = target_units self._effect_modifier_names = effect_modifiers self._confidence_intervals = confidence_intervals self._bootstrap_estimates = None self._bootstrap_null_estimates = None self._effect_modifiers = None self.method_params = params self.interpret_method = CausalEstimator.DEFAULT_INTERPRET_METHOD if params is not None: for key, value in params.items(): setattr(self, key, value) self.logger = logging.getLogger(__name__) if not hasattr(self, 'num_null_simulations'): self.num_null_simulations = CausalEstimator.DEFAULT_NUMBER_OF_SIMULATIONS_STAT_TEST if not hasattr(self, 'num_simulations'): self.num_simulations = CausalEstimator.DEFAULT_NUMBER_OF_SIMULATIONS_CI if not hasattr(self, 'sample_size_fraction'): self.sample_size_fraction = CausalEstimator.DEFAULT_SAMPLE_SIZE_FRACTION if not hasattr(self, 'confidence_level'): self.confidence_level = CausalEstimator.DEFAULT_CONFIDENCE_LEVEL if not hasattr(self, 'num_quantiles_to_discretize_cont_cols'): self.num_quantiles_to_discretize_cont_cols = CausalEstimator.NUM_QUANTILES_TO_DISCRETIZE_CONT_COLS if not hasattr(self, 'need_conditional_estimates'): self.need_conditional_estimates = bool(self._effect_modifier_names) if self._data is not None: self._treatment = self._data[self._treatment_name] self._outcome = self._data[self._outcome_name] if self._effect_modifier_names: self._effect_modifiers = self._data[self._effect_modifier_names] self._effect_modifiers = pd.get_dummies(self._effect_modifiers, drop_first=True) self.logger.debug("Effect modifiers: " + ",".join(self._effect_modifier_names)) @staticmethod def get_estimator_object(new_data, identified_estimand, estimate): estimator_class = estimate.params['estimator_class'] new_estimator = estimator_class( new_data, identified_estimand, identified_estimand.treatment_variable, identified_estimand.outcome_variable, control_value=estimate.control_value, treatment_value=estimate.treatment_value, test_significance=False, evaluate_effect_strength=False, confidence_intervals=estimate.params["confidence_intervals"], target_units=estimate.params["target_units"], effect_modifiers=estimate.params["effect_modifiers"], params=estimate.params["method_params"] ) 
return new_estimator def _estimate_effect(self): raise NotImplementedError( ("Main estimation method is " + CausalEstimator.DEFAULT_NOTIMPLEMENTEDERROR_MSG).format(self.__class__)) def estimate_effect(self): est = self._estimate_effect() est.add_estimator(self) if self._significance_test: self.test_significance(est.value, method=self._significance_test) if self._confidence_intervals: self.estimate_confidence_intervals(est.value, confidence_level=self.confidence_level, method=self._confidence_intervals) if self._effect_strength_eval: effect_strength_dict = self.evaluate_effect_strength(est) est.add_effect_strength(effect_strength_dict) return est def estimate_effect_naive(self): df_withtreatment = self._data.loc[self._data[self._treatment_name] == 1] df_notreatment = self._data.loc[self._data[self._treatment_name] == 0] est = np.mean(df_withtreatment[self._outcome_name]) - np.mean(df_notreatment[self._outcome_name]) return CausalEstimate(est, None, None, control_value=0, treatment_value=1) def _estimate_effect_fn(self, data_df): raise NotImplementedError( ("Conditional treatment effects are " + CausalEstimator.DEFAULT_NOTIMPLEMENTEDERROR_MSG).format( self.__class__)) def _estimate_conditional_effects(self, estimate_effect_fn, effect_modifier_names=None, num_quantiles=None): if effect_modifier_names is None: effect_modifier_names = self._effect_modifier_names if num_quantiles is None: num_quantiles = self.num_quantiles_to_discretize_cont_cols if not effect_modifier_names: raise ValueError("At least one effect modifier should be specified to compute conditional effects.") effect_modifier_names = parse_state(effect_modifier_names) if not all(em in self._effect_modifier_names for em in effect_modifier_names): self.logger.warn( "At least one of the provided effect modifiers was not included while fitting the estimator. You may get incorrect results. To resolve, fit the estimator again by providing the updated effect modifiers in estimate_effect().") effect_modifier_names = effect_modifier_names.copy() prefix = CausalEstimator.TEMP_CAT_COLUMN_PREFIX for i in range(len(effect_modifier_names)): em = effect_modifier_names[i] if pd.api.types.is_numeric_dtype(self._data[em].dtypes): self._data[prefix + str(em)] = pd.qcut(self._data[em], num_quantiles, duplicates="drop") effect_modifier_names[i] = prefix + str(em) by_effect_mods = self._data.groupby(effect_modifier_names) cond_est_fn = lambda x: self._do(self._treatment_value, x) - self._do(self._control_value, x) conditional_estimates = by_effect_mods.apply(estimate_effect_fn) for em in effect_modifier_names: if em.startswith(prefix): self._data.pop(em) return conditional_estimates def _do(self, x, data_df=None): raise NotImplementedError( ("Do-operator is " + CausalEstimator.DEFAULT_NOTIMPLEMENTEDERROR_MSG).format(self.__class__)) def do(self, x, data_df=None): est = self._do(x, data_df) return est def construct_symbolic_estimator(self, estimand): raise NotImplementedError(("Symbolic estimator string is ").format(self.__class__))
MIT License
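A library-agnostic sketch of the bootstrap idea the method implements, not DoWhy's API: resample the data with replacement, re-estimate on each resample, and keep the collection of estimates.

import numpy as np

def bootstrap_estimates(data, estimate_fn, num_simulations=100, sample_size_fraction=1.0, seed=0):
    rng = np.random.default_rng(seed)
    sample_size = int(sample_size_fraction * len(data))
    results = np.empty(num_simulations)
    for i in range(num_simulations):
        idx = rng.integers(0, len(data), size=sample_size)  # sample with replacement
        results[i] = estimate_fn(data[idx])
    return results

draws = bootstrap_estimates(np.random.normal(loc=2.0, size=500), np.mean, num_simulations=200)
print(draws.mean(), np.percentile(draws, [2.5, 97.5]))  # point estimate and a 95% interval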
onshape-public/onshape-clients
python/onshape_client/oas/models/btp_statement_loop_for_in279.py
BTPStatementLoopForIn279.__init__
python
def __init__(
    self,
    _check_type=True,
    _from_server=False,
    _path_to_item=(),
    _configuration=None,
    **kwargs
):
    self._data_store = {}
    self._check_type = _check_type
    self._from_server = _from_server
    self._path_to_item = _path_to_item
    self._configuration = _configuration

    constant_args = {
        "_check_type": _check_type,
        "_path_to_item": _path_to_item,
        "_from_server": _from_server,
        "_configuration": _configuration,
    }
    required_args = {}
    required_arg_names = list(required_args.keys())
    for required_arg_name in required_arg_names:
        if required_args[required_arg_name] is nulltype.Null:
            del required_args[required_arg_name]
    model_args = {}
    model_args.update(required_args)
    model_args.update(kwargs)
    composed_info = validate_get_composed_info(constant_args, model_args, self)
    self._composed_instances = composed_info[0]
    self._var_name_to_model_instances = composed_info[1]
    self._additional_properties_model_instances = composed_info[2]
    unused_args = composed_info[3]

    for var_name, var_value in required_args.items():
        setattr(self, var_name, var_value)
    for var_name, var_value in six.iteritems(kwargs):
        if (
            var_name in unused_args
            and self._configuration is not None
            and self._configuration.discard_unknown_keys
            and not self._additional_properties_model_instances
        ):
            continue
        setattr(self, var_name, var_value)
btp_statement_loop_for_in279.BTPStatementLoopForIn279 - a model defined in OpenAPI Keyword Args: _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _from_server (bool): True if the data is from the server False if the data is from the client (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. bt_type (str): [optional] # noqa: E501 container (btp_expression9.BTPExpression9): [optional] # noqa: E501 is_var_declared_here (bool): [optional] # noqa: E501 name (btp_identifier8.BTPIdentifier8): [optional] # noqa: E501 space_before_var (btp_space10.BTPSpace10): [optional] # noqa: E501 standard_type (str): [optional] # noqa: E501 type_name (str): [optional] # noqa: E501 var (btp_identifier8.BTPIdentifier8): [optional] # noqa: E501 atomic (bool): [optional] # noqa: E501 documentation_type (str): [optional] # noqa: E501 end_source_location (int): [optional] # noqa: E501 node_id (str): [optional] # noqa: E501 short_descriptor (str): [optional] # noqa: E501 space_after (btp_space10.BTPSpace10): [optional] # noqa: E501 space_before (btp_space10.BTPSpace10): [optional] # noqa: E501 space_default (bool): [optional] # noqa: E501 start_source_location (int): [optional] # noqa: E501 annotation (btp_annotation231.BTPAnnotation231): [optional] # noqa: E501 body (btp_statement269.BTPStatement269): [optional] # noqa: E501 space_after_loop_type (btp_space10.BTPSpace10): [optional] # noqa: E501
https://github.com/onshape-public/onshape-clients/blob/20843a00c628e516e7219e17a23ec4ef2bf9f16f/python/onshape_client/oas/models/btp_statement_loop_for_in279.py#L195-L279
from __future__ import absolute_import import re import sys import six import nulltype from onshape_client.oas.model_utils import ( ModelComposed, ModelNormal, ModelSimple, date, datetime, file_type, int, none_type, str, validate_get_composed_info, ) try: from onshape_client.oas.models import btp_annotation231 except ImportError: btp_annotation231 = sys.modules["onshape_client.oas.models.btp_annotation231"] try: from onshape_client.oas.models import btp_expression9 except ImportError: btp_expression9 = sys.modules["onshape_client.oas.models.btp_expression9"] try: from onshape_client.oas.models import btp_identifier8 except ImportError: btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"] try: from onshape_client.oas.models import btp_space10 except ImportError: btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"] try: from onshape_client.oas.models import btp_statement269 except ImportError: btp_statement269 = sys.modules["onshape_client.oas.models.btp_statement269"] try: from onshape_client.oas.models import btp_statement_loop277 except ImportError: btp_statement_loop277 = sys.modules[ "onshape_client.oas.models.btp_statement_loop277" ] try: from onshape_client.oas.models import btp_statement_loop_for_in279_all_of except ImportError: btp_statement_loop_for_in279_all_of = sys.modules[ "onshape_client.oas.models.btp_statement_loop_for_in279_all_of" ] class BTPStatementLoopForIn279(ModelComposed): allowed_values = { ("standard_type",): { "UNDEFINED": "UNDEFINED", "BOOLEAN": "BOOLEAN", "NUMBER": "NUMBER", "STRING": "STRING", "ARRAY": "ARRAY", "MAP": "MAP", "BOX": "BOX", "BUILTIN": "BUILTIN", "FUNCTION": "FUNCTION", "UNKNOWN": "UNKNOWN", }, ("documentation_type",): { "FUNCTION": "FUNCTION", "PREDICATE": "PREDICATE", "CONSTANT": "CONSTANT", "ENUM": "ENUM", "USER_TYPE": "USER_TYPE", "FEATURE_DEFINITION": "FEATURE_DEFINITION", "FILE_HEADER": "FILE_HEADER", "UNDOCUMENTABLE": "UNDOCUMENTABLE", "UNKNOWN": "UNKNOWN", }, } validations = {} additional_properties_type = None @staticmethod def openapi_types(): return { "bt_type": (str,), "container": (btp_expression9.BTPExpression9,), "is_var_declared_here": (bool,), "name": (btp_identifier8.BTPIdentifier8,), "space_before_var": (btp_space10.BTPSpace10,), "standard_type": (str,), "type_name": (str,), "var": (btp_identifier8.BTPIdentifier8,), "atomic": (bool,), "documentation_type": (str,), "end_source_location": (int,), "node_id": (str,), "short_descriptor": (str,), "space_after": (btp_space10.BTPSpace10,), "space_before": (btp_space10.BTPSpace10,), "space_default": (bool,), "start_source_location": (int,), "annotation": (btp_annotation231.BTPAnnotation231,), "body": (btp_statement269.BTPStatement269,), "space_after_loop_type": (btp_space10.BTPSpace10,), } @staticmethod def discriminator(): return None attribute_map = { "bt_type": "btType", "container": "container", "is_var_declared_here": "isVarDeclaredHere", "name": "name", "space_before_var": "spaceBeforeVar", "standard_type": "standardType", "type_name": "typeName", "var": "var", "atomic": "atomic", "documentation_type": "documentationType", "end_source_location": "endSourceLocation", "node_id": "nodeId", "short_descriptor": "shortDescriptor", "space_after": "spaceAfter", "space_before": "spaceBefore", "space_default": "spaceDefault", "start_source_location": "startSourceLocation", "annotation": "annotation", "body": "body", "space_after_loop_type": "spaceAfterLoopType", } required_properties = set( [ "_data_store", "_check_type", "_from_server", "_path_to_item", 
"_configuration", "_composed_instances", "_var_name_to_model_instances", "_additional_properties_model_instances", ] )
MIT License
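A hedged instantiation sketch for the generated model above, passing a few of the optional keyword arguments listed in the docstring (all values are placeholders).

from onshape_client.oas.models.btp_statement_loop_for_in279 import BTPStatementLoopForIn279

stmt = BTPStatementLoopForIn279(bt_type='BTPStatementLoopForIn-279', atomic=False, node_id='n1')
print(stmt.bt_type, stmt.atomic)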
pelioniot/mbed-cloud-sdk-python
src/mbed_cloud/_backends/iam/models/account_info.py
AccountInfo.idle_timeout
python
def idle_timeout(self):
    return self._idle_timeout
Gets the idle_timeout of this AccountInfo. The reference token expiration time in minutes for this account. :return: The idle_timeout of this AccountInfo. :rtype: str
https://github.com/pelioniot/mbed-cloud-sdk-python/blob/71dc67fc2a8d1aff31e35ec781fb328e6a60639c/src/mbed_cloud/_backends/iam/models/account_info.py#L557-L565
from pprint import pformat from six import iteritems import re class AccountInfo(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'address_line1': 'str', 'address_line2': 'str', 'aliases': 'list[str]', 'city': 'str', 'company': 'str', 'contact': 'str', 'contract_number': 'str', 'country': 'str', 'created_at': 'datetime', 'custom_fields': 'dict(str, str)', 'customer_number': 'str', 'display_name': 'str', 'email': 'str', 'end_market': 'str', 'etag': 'str', 'expiration_warning_threshold': 'str', 'id': 'str', 'idle_timeout': 'str', 'limits': 'dict(str, str)', 'mfa_status': 'str', 'notification_emails': 'list[str]', 'object': 'str', 'parent_id': 'str', 'password_policy': 'PasswordPolicy', 'phone_number': 'str', 'policies': 'list[FeaturePolicy]', 'postal_code': 'str', 'reason': 'str', 'reference_note': 'str', 'sales_contact': 'str', 'state': 'str', 'status': 'str', 'sub_accounts': 'list[AccountInfo]', 'template_id': 'str', 'tier': 'str', 'updated_at': 'datetime', 'upgraded_at': 'datetime' } attribute_map = { 'address_line1': 'address_line1', 'address_line2': 'address_line2', 'aliases': 'aliases', 'city': 'city', 'company': 'company', 'contact': 'contact', 'contract_number': 'contract_number', 'country': 'country', 'created_at': 'created_at', 'custom_fields': 'custom_fields', 'customer_number': 'customer_number', 'display_name': 'display_name', 'email': 'email', 'end_market': 'end_market', 'etag': 'etag', 'expiration_warning_threshold': 'expiration_warning_threshold', 'id': 'id', 'idle_timeout': 'idle_timeout', 'limits': 'limits', 'mfa_status': 'mfa_status', 'notification_emails': 'notification_emails', 'object': 'object', 'parent_id': 'parent_id', 'password_policy': 'password_policy', 'phone_number': 'phone_number', 'policies': 'policies', 'postal_code': 'postal_code', 'reason': 'reason', 'reference_note': 'reference_note', 'sales_contact': 'sales_contact', 'state': 'state', 'status': 'status', 'sub_accounts': 'sub_accounts', 'template_id': 'template_id', 'tier': 'tier', 'updated_at': 'updated_at', 'upgraded_at': 'upgraded_at' } def __init__(self, address_line1=None, address_line2=None, aliases=None, city=None, company=None, contact=None, contract_number=None, country=None, created_at=None, custom_fields=None, customer_number=None, display_name=None, email=None, end_market=None, etag=None, expiration_warning_threshold=None, id=None, idle_timeout=None, limits=None, mfa_status=None, notification_emails=None, object=None, parent_id=None, password_policy=None, phone_number=None, policies=None, postal_code=None, reason=None, reference_note=None, sales_contact=None, state=None, status=None, sub_accounts=None, template_id=None, tier=None, updated_at=None, upgraded_at=None): self._address_line1 = address_line1 self._address_line2 = address_line2 self._aliases = aliases self._city = city self._company = company self._contact = contact self._contract_number = contract_number self._country = country self._created_at = created_at self._custom_fields = custom_fields self._customer_number = customer_number self._display_name = display_name self._email = email self._end_market = end_market self._etag = etag self._expiration_warning_threshold = expiration_warning_threshold self._id = id self._idle_timeout = idle_timeout self._limits = limits self._mfa_status = mfa_status self._notification_emails = notification_emails self._object = object 
self._parent_id = parent_id self._password_policy = password_policy self._phone_number = phone_number self._policies = policies self._postal_code = postal_code self._reason = reason self._reference_note = reference_note self._sales_contact = sales_contact self._state = state self._status = status self._sub_accounts = sub_accounts self._template_id = template_id self._tier = tier self._updated_at = updated_at self._upgraded_at = upgraded_at self.discriminator = None @property def address_line1(self): return self._address_line1 @address_line1.setter def address_line1(self, address_line1): self._address_line1 = address_line1 @property def address_line2(self): return self._address_line2 @address_line2.setter def address_line2(self, address_line2): self._address_line2 = address_line2 @property def aliases(self): return self._aliases @aliases.setter def aliases(self, aliases): if aliases is None: raise ValueError("Invalid value for `aliases`, must not be `None`") self._aliases = aliases @property def city(self): return self._city @city.setter def city(self, city): self._city = city @property def company(self): return self._company @company.setter def company(self, company): self._company = company @property def contact(self): return self._contact @contact.setter def contact(self, contact): self._contact = contact @property def contract_number(self): return self._contract_number @contract_number.setter def contract_number(self, contract_number): self._contract_number = contract_number @property def country(self): return self._country @country.setter def country(self, country): self._country = country @property def created_at(self): return self._created_at @created_at.setter def created_at(self, created_at): self._created_at = created_at @property def custom_fields(self): return self._custom_fields @custom_fields.setter def custom_fields(self, custom_fields): self._custom_fields = custom_fields @property def customer_number(self): return self._customer_number @customer_number.setter def customer_number(self, customer_number): self._customer_number = customer_number @property def display_name(self): return self._display_name @display_name.setter def display_name(self, display_name): self._display_name = display_name @property def email(self): return self._email @email.setter def email(self, email): self._email = email @property def end_market(self): return self._end_market @end_market.setter def end_market(self, end_market): if end_market is None: raise ValueError("Invalid value for `end_market`, must not be `None`") self._end_market = end_market @property def etag(self): return self._etag @etag.setter def etag(self, etag): if etag is None: raise ValueError("Invalid value for `etag`, must not be `None`") self._etag = etag @property def expiration_warning_threshold(self): return self._expiration_warning_threshold @expiration_warning_threshold.setter def expiration_warning_threshold(self, expiration_warning_threshold): self._expiration_warning_threshold = expiration_warning_threshold @property def id(self): return self._id @id.setter def id(self, id): if id is None: raise ValueError("Invalid value for `id`, must not be `None`") self._id = id @property
Apache License 2.0
paulgilmartin/graph_wrap
graph_wrap/tastypie/api_transformer.py
field_transformer
python
def field_transformer(tastypie_field):
    try:
        transformer_class = FieldTransformerMeta.registry[
            (tastypie_field.dehydrated_type, tastypie_field.is_m2m)]
    except KeyError:
        raise KeyError('Dehydrated type not recognized')
    return transformer_class(tastypie_field)
Instantiate the appropriate FieldTransformer class. This acts as a factory-type function, which, given a tastypie field as input, instantiates the appropriate concrete FieldTransformer class for that field.
https://github.com/paulgilmartin/graph_wrap/blob/7fff829dc7d2818c57de00d3055fc6c50fae7484/graph_wrap/tastypie/api_transformer.py#L42-L54
from __future__ import unicode_literals import json from abc import abstractmethod from decimal import Decimal as _Decimal import six from graphene import ( String, Int, Float, Boolean, Decimal, List, Field, Scalar, ObjectType, ) from graphene.types.generic import GenericScalar from graph_wrap.shared.query_resolver import JSONResolver def transform_api(tastypie_resource): class_attrs = dict() graphene_type_name = tastypie_resource._meta.resource_name + '_type' for field_name, field in tastypie_resource.fields.items(): transformer = field_transformer(field) class_attrs[field_name] = transformer.graphene_field() resolver_method_name = 'resolve_{}'.format(field_name) class_attrs[resolver_method_name] = ( transformer.graphene_field_resolver_method()) graphene_type = type( str(graphene_type_name), (ObjectType,), class_attrs, ) return graphene_type
MIT License
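A self-contained sketch of the registry-backed factory pattern field_transformer relies on; the metaclass and transformer class here are illustrative stand-ins, not graph_wrap's actual FieldTransformerMeta and its registered subclasses.

class TransformerRegistry(type):
    registry = {}

    def __new__(mcs, name, bases, attrs):
        cls = super().__new__(mcs, name, bases, attrs)
        key = attrs.get('key')
        if key is not None:
            mcs.registry[key] = cls  # register each concrete transformer by its key
        return cls

class StringTransformer(metaclass=TransformerRegistry):
    key = ('string', False)  # (dehydrated_type, is_m2m)

    def __init__(self, field):
        self.field = field

def transformer_for(dehydrated_type, is_m2m, field=None):
    try:
        cls = TransformerRegistry.registry[(dehydrated_type, is_m2m)]
    except KeyError:
        raise KeyError('Dehydrated type not recognized')
    return cls(field)

print(type(transformer_for('string', False)).__name__)  # StringTransformer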
ox-it/humfrey
humfrey/streaming/base.py
StreamingParser.get
python
def get(self):
    if self._cached_get is None:
        sparql_results_type = self.get_sparql_results_type()
        if sparql_results_type == 'resultset':
            self._cached_get = SparqlResultList(self.get_fields(), self.get_bindings())
        elif sparql_results_type == 'boolean':
            self._cached_get = self.get_boolean()
        elif sparql_results_type == 'graph':
            graph = rdflib.ConjunctiveGraph()
            for prefix, namespace_uri in NS.iteritems():
                graph.namespace_manager.bind(prefix, namespace_uri)
            graph += self.get_triples()
            self._cached_get = graph
        else:
            raise AssertionError("Unexpected results type: {0}".format(sparql_results_type))
        for name in ('query', 'duration'):
            if hasattr(self, name):
                setattr(self._cached_get, name, getattr(self, name))
    return self._cached_get
Returns an in-memory object representing the stream. You will either get a SparqlResultList, a bool, or a ConjunctiveGraph.
https://github.com/ox-it/humfrey/blob/c92e46a24a9bf28aa9638a612f166d209315e76b/humfrey/streaming/base.py#L109-L132
import abc import types import rdflib from humfrey.sparql.results import SparqlResultList from humfrey.utils.namespaces import NS from humfrey.utils.statsd import statsd class ModeError(Exception): pass class StreamingParser(object): __metaclass__ = abc.ABCMeta def __init__(self, stream, encoding='utf-8'): self._stream, self._encoding = stream, encoding self._mode, self._cached_get = None, None @property def mode(self): return self._mode @mode.setter def mode(self, mode): if self._mode == mode: return elif self._mode is not None: raise ModeError() else: self._mode = mode @abc.abstractproperty def media_type(self): @abc.abstractproperty def format_type(self): @abc.abstractmethod def get_sparql_results_type(self): @abc.abstractmethod def get_fields(self): @abc.abstractmethod def get_bindings(self): @abc.abstractmethod def get_boolean(self): @abc.abstractmethod def get_triples(self): def read(self, num=None): self.mode = 'stream' return self._stream.read(num) def readline(self): self.mode = 'stream' return self._stream.readline() def __iter__(self): self.mode = 'stream' return iter(self._stream)
BSD 3-Clause New or Revised License
retr0h/gilt
gilt/shell.py
overlay
python
def overlay(ctx):
    args = ctx.obj.get("args")
    filename = args.get("config")
    debug = args.get("debug")
    _setup(filename)

    for c in config.config(filename):
        with fasteners.InterProcessLock(c.lock_file):
            util.print_info("{}:".format(c.name))
            if not os.path.exists(c.src):
                git.clone(c.name, c.git, c.src, debug=debug)
            if c.dst:
                git.extract(c.src, c.dst, c.version, debug=debug)
                post_commands = {c.dst: c.post_commands}
            else:
                git.overlay(c.src, c.files, c.version, debug=debug)
                post_commands = {
                    conf.dst: conf.post_commands for conf in c.files
                }
            for dst, commands in post_commands.items():
                for command in commands:
                    msg = " - running `{}` in {}".format(command, dst)
                    util.print_info(msg)
                    cmd = util.build_sh_cmd(command, cwd=dst)
                    util.run_command(cmd, debug=debug)
Install gilt dependencies
https://github.com/retr0h/gilt/blob/afb6e22f74d2c8a4d064a684964dc5fba0414f7a/gilt/shell.py#L80-L106
import os import click import click_completion import fasteners import gilt from gilt import config from gilt import git from gilt import util click_completion.init() class NotFoundError(Exception): pass @click.group() @click.option( "--config", default="gilt.yml", help="Path to config file. Default gilt.yml", type=click.File("r"), ) @click.option( "--debug/--no-debug", default=False, help="Enable or disable debug mode. Default is disabled.", ) @click.version_option(version=gilt.__version__) @click.pass_context def main(ctx, config, debug): ctx.obj = {} ctx.obj["args"] = {} ctx.obj["args"]["debug"] = debug ctx.obj["args"]["config"] = config.name @click.command() @click.pass_context
MIT License
kuri65536/python-for-android
python-modules/twisted/twisted/lore/tree.py
addMtime
python
def addMtime(document, fullpath):
    for node in domhelpers.findElementsWithAttribute(document, "class", "mtime"):
        txt = dom.Text()
        txt.data = time.ctime(os.path.getmtime(fullpath))
        node.appendChild(txt)
Set the last modified time of the given document. @type document: A DOM Node or Document @param document: The output template which defines the presentation of the last modified time. @type fullpath: C{str} @param fullpath: The file name from which to take the last modified time. @return: C{None}
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/lore/tree.py#L63-L79
from itertools import count import re, os, cStringIO, time, cgi, string, urlparse from xml.dom import minidom as dom from xml.sax.handler import ErrorHandler, feature_validation from xml.dom.pulldom import SAX2DOM from xml.sax import make_parser from xml.sax.xmlreader import InputSource from twisted.python import htmlizer, text from twisted.python.filepath import FilePath from twisted.python.deprecate import deprecated from twisted.python.versions import Version from twisted.web import domhelpers import process, latex, indexer, numberer, htmlbook def fixLinks(document, ext): supported_schemes=['http', 'https', 'ftp', 'mailto'] for node in domhelpers.findElementsWithAttribute(document, 'href'): href = node.getAttribute("href") if urlparse.urlparse(href)[0] in supported_schemes: continue if node.getAttribute("class") == "absolute": continue if node.getAttribute("class").find('listing') != -1: continue if href.endswith('html') or href[:href.rfind('#')].endswith('html'): fname, fext = os.path.splitext(href) if '#' in fext: fext = ext+'#'+fext.split('#', 1)[1] else: fext = ext node.setAttribute("href", fname + fext)
Apache License 2.0
autofitcloud/isitfit
isitfit/tags/tagsCsvDiff.py
TagsCsvDiff.droppedTags
python
def droppedTags(self):
        if len(self.old_minus_new)==0:
            return

        logger.info("There are %i deleted tag(s)"%len(self.old_minus_new))
        logger.info("")

        old_processed = set()
        for ni in self.old_minus_new:
            confirm_msg = colored('Did you completely delete the tag "%s"? yes/[no] '%ni, 'cyan')
            confirm_del = input(confirm_msg)
            if confirm_del.lower() in ['y', 'yes']:
                mig_i = ('rm', ni, None)
                self.migrations.append(mig_i)
                old_processed.add(ni)

        self.old_minus_new -= old_processed
Identify if some tags are completely dropped.
Ask the user if indeed dropped, or accident.

Follows the idea of django/db/migrations/questioner.py
where django asks if fields are renamed or dropped
https://github.com/django/django/blob/e90af8bad44341cf8ebd469dac57b61a95667c1d/django/db/migrations/questioner.py
https://github.com/autofitcloud/isitfit/blob/6ffc0c67c00140120f5d5ad8dfe11c8f0f7dacc1/isitfit/tags/tagsCsvDiff.py#L121-L152
from isitfit.cli.click_descendents import IsitfitCliError from isitfit.utils import logger from termcolor import colored class TagsCsvDiff: def __init__(self, df_old, df_new): self.df_old = df_old self.df_new = df_new self.old_minus_new = set() self.new_minus_old = set() self.migrations = [] def noChanges(self): to_json = lambda x: x.sort_values('instance_id', ascending=True)[sorted(list(x.columns))].to_json(orient='records') json_old = to_json(self.df_old) json_new = to_json(self.df_new) if json_old == json_new: raise IsitfitCliError("Aborting `tags push` due to no changes detected.") def noNewInstances(self): inst_old = set(self.df_old.instance_id) inst_new = set(self.df_new.instance_id) inst_created = inst_new - inst_old inst_created = sorted(list(inst_created)) if len(inst_created)>0: msg_1 = "Found new instances IDs: %s%s" msg_2 = ", ".join(inst_created[:5]) msg_3 ="..." if len(inst_created)>5 else "" msg_4 = msg_1%(msg_2,msg_3) raise IsitfitCliError(msg_4) def getDiffCols(self): old_cols = set(self.df_old.columns) new_cols = set(self.df_new.columns) self.old_minus_new = old_cols - new_cols self.new_minus_old = new_cols - old_cols def renamedTags(self): import difflib corr_method = lambda s1, s2: 1 if s1 in s2 else (difflib.SequenceMatcher(None, s1, s2).ratio()) import pandas as pd import numpy as np col_dist = pd.DataFrame( np.zeros(shape=( len(self.old_minus_new), len(self.new_minus_old) )), index=self.old_minus_new, columns=self.new_minus_old ) new_processed = set() old_processed = set() for c1 in self.old_minus_new: for c2 in self.new_minus_old: d12 = corr_method(c1, c2) col_dist.loc[c1, c2] = d12 d1_maxD = col_dist.loc[c1].max() if d1_maxD > 0.7: d1_maxV = col_dist.loc[c1].idxmax() confirm_msg = colored('Did you rename the tag "%s" to "%s"? yes/[no] '%(c1, d1_maxV), 'cyan') confirm_mv = input(confirm_msg) if confirm_mv.lower() in ['y', 'yes']: mig_i = ('mv', c1, d1_maxV) self.migrations.append(mig_i) old_processed.add(c1) new_processed.add(d1_maxV) self.new_minus_old -= new_processed self.old_minus_new -= old_processed def newTags(self): if len(self.new_minus_old)==0: return logger.info("Found %i new tag(s)"%len(self.new_minus_old)) logger.info("") new_processed = set() for ni in self.new_minus_old: confirm_msg = colored('Did you add the tag "%s"? yes/[no] '%ni, 'cyan') confirm_new = input(confirm_msg) if confirm_new.lower() in ['y', 'yes']: mig_i = ('touch', None, ni) self.migrations.append(mig_i) new_processed.add(ni) self.new_minus_old -= new_processed
Apache License 2.0
azure/autorest.python
test/vanilla/legacy/Expected/AcceptanceTests/ModelFlattening/modelflattening/operations/_auto_rest_resource_flattening_test_service_operations.py
AutoRestResourceFlatteningTestServiceOperationsMixin.put_dictionary
python
def put_dictionary(
        self, resource_dictionary=None, **kwargs
    ):
        cls = kwargs.pop("cls", None)
        error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop("error_map", {}))

        content_type = kwargs.pop("content_type", "application/json")

        if resource_dictionary is not None:
            json = self._serialize.body(resource_dictionary, "{FlattenedProduct}")
        else:
            json = None

        request = build_put_dictionary_request(
            content_type=content_type,
            json=json,
            template_url=self.put_dictionary.metadata["url"],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
            raise HttpResponseError(response=response, model=error)

        if cls:
            return cls(pipeline_response, None, {})
Put External Resource as a Dictionary.

:param resource_dictionary: External Resource as a Dictionary to put.
:type resource_dictionary: dict[str, ~modelflattening.models.FlattenedProduct]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
https://github.com/azure/autorest.python/blob/90d60a965788e3b4c0809e6686bdc3525acac89c/test/vanilla/legacy/Expected/AcceptanceTests/ModelFlattening/modelflattening/operations/_auto_rest_resource_flattening_test_service_operations.py#L470-L513
import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request, _format_url_section if TYPE_CHECKING: from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() def build_put_array_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/array') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_array_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/array') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_wrapped_array_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/wrappedarray') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_wrapped_array_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/wrappedarray') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_dictionary_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/dictionary') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_dictionary_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/dictionary') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_resource_collection_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/resourcecollection') header_parameters = kwargs.pop("headers", {}) if content_type is not 
None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_resource_collection_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/resourcecollection') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_simple_product_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/customFlattening') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_post_flattened_simple_product_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/customFlattening') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="POST", url=url, headers=header_parameters, **kwargs ) def build_put_simple_product_with_grouping_request( name, **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/model-flatten/customFlattening/parametergrouping/{name}/') path_format_arguments = { "name": _SERIALIZER.url("name", name, 'str'), } url = _format_url_section(url, **path_format_arguments) header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) class AutoRestResourceFlatteningTestServiceOperationsMixin(object): @distributed_trace def put_array( self, resource_array=None, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if resource_array is not None: json = self._serialize.body(resource_array, "[Resource]") else: json = None request = build_put_array_request( content_type=content_type, json=json, template_url=self.put_array.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) put_array.metadata = {"url": "/model-flatten/array"} @distributed_trace def get_array( 
self, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_array_request( template_url=self.get_array.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize("[FlattenedProduct]", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_array.metadata = {"url": "/model-flatten/array"} @distributed_trace def put_wrapped_array( self, resource_array=None, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if resource_array is not None: json = self._serialize.body(resource_array, "[WrappedProduct]") else: json = None request = build_put_wrapped_array_request( content_type=content_type, json=json, template_url=self.put_wrapped_array.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) put_wrapped_array.metadata = {"url": "/model-flatten/wrappedarray"} @distributed_trace def get_wrapped_array( self, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_wrapped_array_request( template_url=self.get_wrapped_array.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize("[ProductWrapper]", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_wrapped_array.metadata = {"url": "/model-flatten/wrappedarray"} @distributed_trace
MIT License
berkeleyautomation/autolab_core
autolab_core/image.py
Image._preprocess_data
python
def _preprocess_data(self, data):
        original_type = data.dtype
        if len(data.shape) == 1:
            data = data[:, np.newaxis, np.newaxis]
        elif len(data.shape) == 2:
            data = data[:, :, np.newaxis]
        elif len(data.shape) == 0 or len(data.shape) > 3:
            raise ValueError(
                "Illegal data array passed to image. "
                "Must be 1, 2, or 3 dimensional numpy array"
            )
        return data.astype(original_type)
Converts a data array to the preferred 3D structure.

Parameters
----------
data : :obj:`numpy.ndarray`
    The data to process.

Returns
-------
:obj:`numpy.ndarray`
    The data re-formatted (if needed) as a 3D matrix

Raises
------
ValueError
    If the data is not 1, 2, or 3D to begin with.
https://github.com/berkeleyautomation/autolab_core/blob/cda081d2e07e3fe6cc9f3e8c86eea92330910d20/autolab_core/image.py#L137-L165
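A minimal sketch of the axis promotion described above, using plain numpy; the array contents are arbitrary illustration values:

import numpy as np

vector = np.arange(3.0)            # shape (3,)   -> treated as 1-D data
image_2d = np.zeros((4, 5))        # shape (4, 5) -> treated as 2-D data

# The same reshaping _preprocess_data applies: append singleton axes until 3-D.
promoted_vector = vector[:, np.newaxis, np.newaxis]
promoted_image = image_2d[:, :, np.newaxis]

print(promoted_vector.shape)  # (3, 1, 1)
print(promoted_image.shape)   # (4, 5, 1)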
from abc import ABCMeta, abstractmethod import logging import os import cv2 import numpy as np import PIL.Image as PImage import matplotlib.pyplot as plt import scipy.signal as ssg import scipy.ndimage.filters as sf import scipy.ndimage.interpolation as sni import scipy.ndimage.morphology as snm import scipy.spatial.distance as ssd import sklearn.cluster as sc import skimage.morphology as morph import skimage.transform as skt from .constants import MAX_DEPTH, MIN_DEPTH, MAX_IR, COLOR_IMAGE_EXTS from .points import PointCloud, NormalCloud, PointNormalCloud from .primitives import Contour BINARY_IM_MAX_VAL = np.iinfo(np.uint8).max BINARY_IM_DEFAULT_THRESH = BINARY_IM_MAX_VAL / 2 def imresize(image, size, interp="nearest"): skt_interp_map = { "nearest": 0, "bilinear": 1, "biquadratic": 2, "bicubic": 3, "biquartic": 4, "biquintic": 5, } if interp in ("lanczos", "cubic"): raise ValueError( '"lanczos" and "cubic"' " interpolation are no longer supported." ) assert ( interp in skt_interp_map ), 'Interpolation "{}" not' " supported.".format(interp) if isinstance(size, (tuple, list)): output_shape = size elif isinstance(size, (float)): np_shape = np.asarray(image.shape).astype(np.float32) np_shape[0:2] *= size output_shape = tuple(np_shape.astype(int)) elif isinstance(size, (int)): np_shape = np.asarray(image.shape).astype(np.float32) np_shape[0:2] *= size / 100.0 output_shape = tuple(np_shape.astype(int)) else: raise ValueError('Invalid type for size "{}".'.format(type(size))) return skt.resize( image.astype(np.float), output_shape, order=skt_interp_map[interp], anti_aliasing=False, mode="constant", ) class Image(object): __metaclass__ = ABCMeta def __init__(self, data, frame="unspecified"): if not isinstance(data, np.ndarray): raise ValueError("Must initialize image with a numpy ndarray") if not isinstance(frame, str): raise ValueError("Must provide string name of frame of data") self._check_valid_data(data) self._data = self._preprocess_data(data) self._frame = frame self._encoding = "passthrough"
Apache License 2.0
spyder-ide/spyder-unittest
spyder_unittest/widgets/configdialog.py
ConfigDialog.__init__
python
def __init__(self, frameworks, config, parent=None):
        super(ConfigDialog, self).__init__(parent)
        self.setWindowTitle(_('Configure tests'))
        layout = QVBoxLayout(self)

        framework_layout = QHBoxLayout()
        framework_label = QLabel(_('Test framework'))
        framework_layout.addWidget(framework_label)

        self.framework_combobox = QComboBox(self)
        for ix, (name, runner) in enumerate(sorted(frameworks.items())):
            installed = runner.is_installed()
            if installed:
                label = name
            else:
                label = '{} ({})'.format(name, _('not available'))
            self.framework_combobox.addItem(label)
            self.framework_combobox.model().item(ix).setEnabled(installed)

        framework_layout.addWidget(self.framework_combobox)
        layout.addLayout(framework_layout)

        layout.addSpacing(10)

        wdir_label = QLabel(_('Directory from which to run tests'))
        layout.addWidget(wdir_label)

        wdir_layout = QHBoxLayout()
        self.wdir_lineedit = QLineEdit(self)
        wdir_layout.addWidget(self.wdir_lineedit)
        self.wdir_button = QPushButton(ima.icon('DirOpenIcon'), '', self)
        self.wdir_button.setToolTip(_("Select directory"))
        self.wdir_button.clicked.connect(lambda: self.select_directory())
        wdir_layout.addWidget(self.wdir_button)
        layout.addLayout(wdir_layout)

        layout.addSpacing(20)

        self.buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        layout.addWidget(self.buttons)
        self.buttons.accepted.connect(self.accept)
        self.buttons.rejected.connect(self.reject)

        self.ok_button = self.buttons.button(QDialogButtonBox.Ok)
        self.ok_button.setEnabled(False)
        self.framework_combobox.currentIndexChanged.connect(
            self.framework_changed)

        self.framework_combobox.setCurrentIndex(-1)

        if config.framework:
            index = self.framework_combobox.findText(config.framework)
            if index != -1:
                self.framework_combobox.setCurrentIndex(index)
        self.wdir_lineedit.setText(config.wdir)
Construct a dialog window.

Parameters
----------
frameworks : dict of (str, type)
    Names of all supported frameworks with their associated class
    (assumed to be a subclass of RunnerBase)
config : Config
    Initial configuration
parent : QWidget
https://github.com/spyder-ide/spyder-unittest/blob/fc29baa9edd8614a341bbcfde93aa6fea5c4afb5/spyder_unittest/widgets/configdialog.py#L47-L112
from collections import namedtuple import os.path as osp from qtpy.compat import getexistingdirectory from qtpy.QtCore import Slot from qtpy.QtWidgets import (QApplication, QComboBox, QDialog, QDialogButtonBox, QHBoxLayout, QLabel, QLineEdit, QPushButton, QVBoxLayout) from spyder.config.base import get_translation from spyder.py3compat import getcwd, to_text_string from spyder.utils import icon_manager as ima try: _ = get_translation("unittest", dirname="spyder_unittest") except KeyError: import gettext _ = gettext.gettext Config = namedtuple('Config', ['framework', 'wdir']) Config.__new__.__defaults__ = (None, '') class ConfigDialog(QDialog):
MIT License
crespo-otero-group/fromage
fromage/utils/mol/_geom.py
coord_array
python
def coord_array(self):
        if self.geom.ignore_kinds:
            self.set_connectivity
        list_coord = []

        for atom in self:
            add_atom = True
            if self.geom.ignore_hydrogens:
                if atom.elem == 'H':
                    add_atom = False
            if self.geom.ignore_kinds:
                if atom.kind in self.geom.ignore_kinds:
                    add_atom = False
            if add_atom:
                new_row = [atom.x,atom.y,atom.z]
                list_coord.append(new_row)

        coord_arr = np.array(list_coord)

        return coord_arr
Return a numpy array of the coordinates

Returns
-------
coord_arr : Nat x 3 numpy array
    Array of the form [[x1,y1,z1],[x2,y2,z2],...]
https://github.com/crespo-otero-group/fromage/blob/9b4a80698ed1672268dde292d5512c72a23cb00a/fromage/utils/mol/_geom.py#L53-L81
import numpy as np import fromage.utils.array_operations as ao class GeomInfo(object): def __init__(self): self.coord_array = np.array([0]) self.plane_coeffs = np.array([0]) self.prin_ax = np.array([0]) self.sec_ax = np.array([0]) self.perp_ax = np.array([0]) self.ignore_kinds = [] self.ignore_hydrogens = False self.linear = False def __str__(self): out_str = "Coordinate array:\n" + str(self.coord_array) + "\nPlane coefficients:\n" + str( self.plane_coeffs) + "\nPrincipal axis:\n" + str(self.prin_ax) + "\nSecondary axis:\n" + str( self.sec_ax) + "\nPerpendicular axis:\n" + str(self.perp_ax) return out_str def __repr__(self): return self.__str__()
MIT License
kriaga/health-checker
HealthChecker/venv/Lib/site-packages/nltk/classify/maxent.py
MaxentFeatureEncodingI.labels
python
def labels(self):
        raise NotImplementedError()
:return: A list of the \"known labels\" -- i.e., all labels ``l`` such
    that ``self.encode(fs,l)`` can be a nonzero joint-feature vector
    for some value of ``fs``.
:rtype: list
https://github.com/kriaga/health-checker/blob/3d9ce933f131bcbb897103b0f509cc45393cae4a/HealthChecker/venv/Lib/site-packages/nltk/classify/maxent.py#L377-L384
from __future__ import print_function, unicode_literals try: import numpy except ImportError: pass import tempfile import os import re from collections import defaultdict from six import integer_types from nltk import compat from nltk.data import gzip_open_unicode from nltk.util import OrderedDict from nltk.probability import DictionaryProbDist from nltk.classify.api import ClassifierI from nltk.classify.util import CutoffChecker, accuracy, log_likelihood from nltk.classify.megam import (call_megam, write_megam_file, parse_megam_weights) from nltk.classify.tadm import call_tadm, write_tadm_file, parse_tadm_weights __docformat__ = 'epytext en' @compat.python_2_unicode_compatible class MaxentClassifier(ClassifierI): def __init__(self, encoding, weights, logarithmic=True): self._encoding = encoding self._weights = weights self._logarithmic = logarithmic assert encoding.length() == len(weights) def labels(self): return self._encoding.labels() def set_weights(self, new_weights): self._weights = new_weights assert self._encoding.length() == len(new_weights) def weights(self): return self._weights def classify(self, featureset): return self.prob_classify(featureset).max() def prob_classify(self, featureset): prob_dict = {} for label in self._encoding.labels(): feature_vector = self._encoding.encode(featureset, label) if self._logarithmic: total = 0.0 for (f_id, f_val) in feature_vector: total += self._weights[f_id] * f_val prob_dict[label] = total else: prod = 1.0 for (f_id, f_val) in feature_vector: prod *= self._weights[f_id] ** f_val prob_dict[label] = prod return DictionaryProbDist(prob_dict, log=self._logarithmic, normalize=True) def explain(self, featureset, columns=4): descr_width = 50 TEMPLATE = ' %-'+str(descr_width-2)+'s%s%8.3f' pdist = self.prob_classify(featureset) labels = sorted(pdist.samples(), key=pdist.prob, reverse=True) labels = labels[:columns] print(' Feature'.ljust(descr_width)+''.join( '%8s' % (("%s" % l)[:7]) for l in labels)) print(' '+'-'*(descr_width-2+8*len(labels))) sums = defaultdict(int) for i, label in enumerate(labels): feature_vector = self._encoding.encode(featureset, label) feature_vector.sort(key=lambda fid__: abs(self._weights[fid__[0]]), reverse=True) for (f_id, f_val) in feature_vector: if self._logarithmic: score = self._weights[f_id] * f_val else: score = self._weights[f_id] ** f_val descr = self._encoding.describe(f_id) descr = descr.split(' and label is ')[0] descr += ' (%s)' % f_val if len(descr) > 47: descr = descr[:44]+'...' 
print(TEMPLATE % (descr, i*8*' ', score)) sums[label] += score print(' '+'-'*(descr_width-1+8*len(labels))) print(' TOTAL:'.ljust(descr_width)+''.join( '%8.3f' % sums[l] for l in labels)) print(' PROBS:'.ljust(descr_width)+''.join( '%8.3f' % pdist.prob(l) for l in labels)) def most_informative_features(self, n=10): if hasattr(self, '_most_informative_features'): return self._most_informative_features[:n] else: self._most_informative_features = sorted(list(range(len(self._weights))), key=lambda fid: abs(self._weights[fid]), reverse=True) return self._most_informative_features[:n] def show_most_informative_features(self, n=10, show='all'): fids = self.most_informative_features(None) if show == 'pos': fids = [fid for fid in fids if self._weights[fid] > 0] elif show == 'neg': fids = [fid for fid in fids if self._weights[fid] < 0] for fid in fids[:n]: print('%8.3f %s' % (self._weights[fid], self._encoding.describe(fid))) def __repr__(self): return ('<ConditionalExponentialClassifier: %d labels, %d features>' % (len(self._encoding.labels()), self._encoding.length())) ALGORITHMS = ['GIS', 'IIS', 'MEGAM', 'TADM'] @classmethod def train(cls, train_toks, algorithm=None, trace=3, encoding=None, labels=None, gaussian_prior_sigma=0, **cutoffs): if algorithm is None: algorithm = 'iis' for key in cutoffs: if key not in ('max_iter', 'min_ll', 'min_lldelta', 'max_acc', 'min_accdelta', 'count_cutoff', 'norm', 'explicit', 'bernoulli'): raise TypeError('Unexpected keyword arg %r' % key) algorithm = algorithm.lower() if algorithm == 'iis': return train_maxent_classifier_with_iis( train_toks, trace, encoding, labels, **cutoffs) elif algorithm == 'gis': return train_maxent_classifier_with_gis( train_toks, trace, encoding, labels, **cutoffs) elif algorithm == 'megam': return train_maxent_classifier_with_megam( train_toks, trace, encoding, labels, gaussian_prior_sigma, **cutoffs) elif algorithm == 'tadm': kwargs = cutoffs kwargs['trace'] = trace kwargs['encoding'] = encoding kwargs['labels'] = labels kwargs['gaussian_prior_sigma'] = gaussian_prior_sigma return TadmMaxentClassifier.train(train_toks, **kwargs) else: raise ValueError('Unknown algorithm %s' % algorithm) ConditionalExponentialClassifier = MaxentClassifier class MaxentFeatureEncodingI(object): def encode(self, featureset, label): raise NotImplementedError() def length(self): raise NotImplementedError()
MIT License
ryry013/rai
cogs/owner.py
Owner.database
python
async def database(self, ctx, depth, *, args):
        config = self.bot.db
        if '=' in args:
            args = f"{depth} {args}"
            depth = 1
        split = args.split(' = ') + ['']
        path = split[0]
        set_to = split[1]

        def process_arg(arg):
            if arg.startswith('ctx'):
                obj = ctx
                for attr in arg.split('.'):
                    if attr == 'ctx':
                        continue
                    obj = getattr(obj, attr)
                return str(obj)
            else:
                return arg

        for arg in path.split()[:-1]:
            try:
                config = config[process_arg(arg)]
            except KeyError:
                await hf.safe_send(ctx, f"Invalid arg: `{arg}`")
                return

        if set_to:
            config[path.split()[-1]] = literal_eval(set_to)
            await hf.safe_send(ctx, f"```\n{config[path.split()[-1]]}"[:1997]+"```")
            return

        try:
            config = config[process_arg(path.split()[-1])]
        except KeyError:
            await ctx.send(f"Couldn't find {path.split()[-1]} in the database.")
            return

        msg = ''
        for key in config:
            if type(key) == str:
                key_str = f'\"{key}\"'
            else:
                key_str = key
            msg += f"{key_str}\n"
            if int(depth) >= 2:
                if isinstance(config[key], dict):
                    for key_2 in config[key]:
                        if type(key_2) == str:
                            key_2_str = f'\"{key_2}\"'
                        else:
                            key_2_str = key_2
                        msg += f"\t{key_2_str}\n"
                        if int(depth) >= 3:
                            if isinstance(config[key][key_2], dict):
                                for key_3 in config[key][key_2]:
                                    if type(key_3) == str:
                                        key_3_str = f'\"{key_3}\"'
                                    else:
                                        key_3_str = key_3
                                    msg += f"\t\t{key_3_str}\n"
                                    if int(depth) >= 4:
                                        if isinstance(config[key][key_2][key_3], dict):
                                            for key_4 in config[key][key_2][key_3]:
                                                if type(key_4) == str:
                                                    key_4_str = f'\"{key_4}\"'
                                                else:
                                                    key_4_str = key_4
                                                msg += f"\t\t\t{key_4_str}\n"
                                        else:
                                            if type(config[key][key_2][key_3]) == str:
                                                s = f"\"{config[key][key_2][key_3]}\""
                                            else:
                                                s = config[key][key_2][key_3]
                                            msg = msg[:-1] + f": {s}\n"
                            else:
                                if type(config[key][key_2]) == str:
                                    s = f"\"{config[key][key_2]}\""
                                else:
                                    s = config[key][key_2]
                                msg = msg[:-1] + f": {s}\n"
                else:
                    if type(config[key]) == str:
                        s = f"\"{config[key]}\""
                    else:
                        s = config[key]
                    msg = msg[:-1] + f": {s}\n"
        await hf.safe_send(ctx, f'```\n{msg[:1993]}```')
Shows or edits database
https://github.com/ryry013/rai/blob/8a8ce07c78a67967a8fc9cb5dabc68329bf85f9b/cogs/owner.py#L163-L255
from discord.ext import commands import asyncio import traceback import discord import textwrap from contextlib import redirect_stdout import io import sys import codecs import json from .utils import helper_functions as hf import re from ast import literal_eval import importlib import datetime from datetime import datetime, timedelta import os dir_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) RYRY_SPAM_CHAN = 275879535977955330 class Owner(commands.Cog): def __init__(self, bot): self.bot = bot self._last_result = None self.sessions = set() async def cog_check(self, ctx): return ctx.author.id in [202995638860906496, 414873201349361664, 528770932613971988] def get_syntax_error(self, e): if e.text is None: return f'```py\n{e.__class__.__name__}: {e}\n```' return f'```py\n{e.text}{"^":>{e.offset}}\n{e.__class__.__name__}: {e}```' @commands.Cog.listener() async def on_command(self, ctx): if not ctx.guild: return if str(ctx.guild.id) not in self.bot.db['guildstats']: self.bot.db['guildstats'][str(ctx.guild.id)] = {'messages': {}, 'commands': {}} config: dict = self.bot.db['guildstats'][str(ctx.guild.id)]['commands'] date_str = datetime.utcnow().strftime("%Y%m%d") config[date_str] = config.setdefault(date_str, 0) + 1 @commands.command() async def guildstats(self, ctx): config = self.bot.db['guildstats'] guild_info = {} id_to_guild = {str(g.id): g for g in self.bot.guilds} for guild_id in config: message_count = 0 for day in config[guild_id]['messages']: days_ago = (datetime.utcnow() - datetime.strptime(day, "%Y%m%d")).days if days_ago > 30: del(config[guild_id]['messages'][day]) else: message_count += config[guild_id]['messages'][day] command_count = 0 for day in config[guild_id]['commands']: days_ago = (datetime.utcnow() - datetime.strptime(day, "%Y%m%d")).days if days_ago > 30: del(config[guild_id]['commands'][day]) else: command_count += config[guild_id]['commands'][day] guild = id_to_guild[guild_id] bot_num = len([m for m in guild.members if m.bot]) human_num = len([m for m in guild.members if not m.bot]) guild_info[guild] = {"messages": message_count, "member_count": guild.member_count, "bots": bot_num, "humans": human_num, "commands": command_count} msg = '' for guild in guild_info: info = guild_info[guild] msg_addition = f"**{guild.name}: ({guild.id})**" f"\n{info['messages']} messages" f"\n{info['member_count']} members " f"({info['humans']} humans, {info['bots']} bots, " f"{round(info['humans']/info['member_count'], 2)})" f"\n{info['commands']} commands\n" if len(msg + msg_addition) < 2000: msg += msg_addition else: await hf.safe_send(ctx, msg) msg = msg_addition if msg: await hf.safe_send(ctx, msg) dead_guilds = [] for guild in self.bot.guilds: if guild not in guild_info: dead_guilds.append(guild) msg = '' for guild in dead_guilds: bots = len([m for m in guild.members if m.bot]) msg_addition = f"{guild.name} -- {guild.id} -- {bots}/{guild.member_count}\n" if len(msg + msg_addition) < 2000: msg += msg_addition else: await hf.safe_send(ctx, msg) msg = msg_addition if msg: await hf.safe_send(ctx, msg) @commands.command() async def edit(self, ctx, message_id, *, content): try: msg = await ctx.channel.fetch_message(int(message_id)) except discord.NotFound: await hf.safe_send(ctx, "Message not found") return await msg.edit(content=content) try: await ctx.message.delete() except discord.NotFound: pass @commands.command(aliases=['repl']) async def reply(self, ctx, index, *, reply=''): channel = self.bot.get_channel(RYRY_SPAM_CHAN) index_re = re.search("^(;repl|;reply) 
(\d) ", ctx.message.content) if not index_re: reply = f"{index} {reply}" index = 1 else: index = int(index_re.group(2)) if not reply: await hf.safe_send(ctx, "Include reply message") async for msg in channel.history(): result_channel_id = re.search('^(\d{17,22}) <@202995638860906496>$', msg.content) if not result_channel_id: continue else: result_channel_id = result_channel_id.group(1) if result_channel_id: if (datetime.utcnow() - msg.created_at).seconds > 1: if index > 1: index -= 1 else: send_channel = self.bot.get_channel(int(result_channel_id)) try: await send_channel.send(reply) except discord.Forbidden as e: await hf.safe_send(ctx, e) return @commands.command(aliases=['db'])
MIT License
uwdata/termite-visualizations
web2py/gluon/dal.py
ConnectionPool.after_connection_hook
python
def after_connection_hook(self):
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()
hook for the after_connection parameter
https://github.com/uwdata/termite-visualizations/blob/79da58bc607893bbd5db703f7d87a89b5e97c311/web2py/gluon/dal.py#L619-L623
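A small, self-contained sketch of the two-stage hook pattern used here; the class and callback names below are illustrative, not web2py's own:

class FakeAdapter(object):
    def __init__(self, after_connection=None):
        self._after_connection = after_connection

    def after_connection(self):
        print('adapter-level after_connection')

    def after_connection_hook(self):
        # A user-supplied callable runs first, then the adapter's own hook.
        if callable(self._after_connection):
            self._after_connection(self)
        self.after_connection()

FakeAdapter(after_connection=lambda db: print('user callback')).after_connection_hook()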
__all__ = ['DAL', 'Field'] DEFAULTLENGTH = {'string':512, 'password':512, 'upload':512, 'text':2**15, 'blob':2**31} TIMINGSSIZE = 100 SPATIALLIBS = { 'Windows':'libspatialite', 'Linux':'libspatialite.so', 'Darwin':'libspatialite.dylib' } DEFAULT_URI = 'sqlite://dummy.db' import re import sys import locale import os import types import datetime import threading import time import csv import cgi import copy import socket import logging import base64 import shutil import marshal import decimal import struct import urllib import hashlib import uuid import glob import traceback import platform PYTHON_VERSION = sys.version_info[:3] if PYTHON_VERSION[0] == 2: import cPickle as pickle import cStringIO as StringIO import copy_reg as copyreg hashlib_md5 = hashlib.md5 bytes, unicode = str, unicode else: import pickle from io import StringIO as StringIO import copyreg long = int hashlib_md5 = lambda s: hashlib.md5(bytes(s,'utf8')) bytes, unicode = bytes, str if PYTHON_VERSION[:2] < (2, 7): from gluon.contrib.ordereddict import OrderedDict else: from collections import OrderedDict CALLABLETYPES = (types.LambdaType, types.FunctionType, types.BuiltinFunctionType, types.MethodType, types.BuiltinMethodType) TABLE_ARGS = set( ('migrate','primarykey','fake_migrate','format','redefine', 'singular','plural','trigger_name','sequence_name','fields', 'common_filter','polymodel','table_class','on_define','rname')) SELECT_ARGS = set( ('orderby', 'groupby', 'limitby','required', 'cache', 'left', 'distinct', 'having', 'join','for_update', 'processor','cacheable', 'orderby_on_limitby')) ogetattr = object.__getattribute__ osetattr = object.__setattr__ exists = os.path.exists pjoin = os.path.join try: from gluon.utils import web2py_uuid except (ImportError, SystemError): import uuid def web2py_uuid(): return str(uuid.uuid4()) try: import portalocker have_portalocker = True except ImportError: have_portalocker = False try: from gluon import serializers have_serializers = True except ImportError: have_serializers = False try: import json as simplejson except ImportError: try: import gluon.contrib.simplejson as simplejson except ImportError: simplejson = None LOGGER = logging.getLogger("web2py.dal") DEFAULT = lambda:0 GLOBAL_LOCKER = threading.RLock() THREAD_LOCAL = threading.local() REGEX_TYPE = re.compile('^([\w\_\:]+)') REGEX_DBNAME = re.compile('^(\w+)(\:\w+)*') REGEX_W = re.compile('^\w+$') REGEX_TABLE_DOT_FIELD = re.compile('^(\w+)\.(\w+)$') REGEX_UPLOAD_PATTERN = re.compile('(?P<table>[\w\-]+)\.(?P<field>[\w\-]+)\.(?P<uuidkey>[\w\-]+)(\.(?P<name>\w+))?\.\w+$') REGEX_CLEANUP_FN = re.compile('[\'"\s;]+') REGEX_UNPACK = re.compile('(?<!\|)\|(?!\|)') REGEX_PYTHON_KEYWORDS = re.compile('^(and|del|from|not|while|as|elif|global|or|with|assert|else|if|pass|yield|break|except|import|print|class|exec|in|raise|continue|finally|is|return|def|for|lambda|try)$') REGEX_SELECT_AS_PARSER = re.compile("\s+AS\s+(\S+)") REGEX_CONST_STRING = re.compile('(\"[^\"]*?\")|(\'[^\']*?\')') REGEX_SEARCH_PATTERN = re.compile('^{[^\.]+\.[^\.]+(\.(lt|gt|le|ge|eq|ne|contains|startswith|year|month|day|hour|minute|second))?(\.not)?}$') REGEX_SQUARE_BRACKETS = re.compile('^.+\[.+\]$') REGEX_STORE_PATTERN = re.compile('\.(?P<e>\w{1,5})$') REGEX_QUOTES = re.compile("'[^']*'") REGEX_ALPHANUMERIC = re.compile('^[0-9a-zA-Z]\w*$') REGEX_PASSWORD = re.compile('\://([^:@]*)\:') REGEX_NOPASSWD = re.compile('\/\/[\w\.\-]+[\:\/](.+)(?=@)') DRIVERS = [] try: from new import classobj from google.appengine.ext import db as gae from google.appengine.ext import ndb 
from google.appengine.api import namespace_manager, rdbms from google.appengine.api.datastore_types import Key from google.appengine.ext.db.polymodel import PolyModel from google.appengine.ext.ndb.polymodel import PolyModel as NDBPolyModel DRIVERS.append('google') except ImportError: pass if not 'google' in DRIVERS: try: from pysqlite2 import dbapi2 as sqlite2 DRIVERS.append('SQLite(sqlite2)') except ImportError: LOGGER.debug('no SQLite drivers pysqlite2.dbapi2') try: from sqlite3 import dbapi2 as sqlite3 DRIVERS.append('SQLite(sqlite3)') except ImportError: LOGGER.debug('no SQLite drivers sqlite3') try: try: import gluon.contrib.pymysql as pymysql pymysql.ESCAPE_REGEX = re.compile("'") pymysql.ESCAPE_MAP = {"'": "''"} except ImportError: import pymysql DRIVERS.append('MySQL(pymysql)') except ImportError: LOGGER.debug('no MySQL driver pymysql') try: import MySQLdb DRIVERS.append('MySQL(MySQLdb)') except ImportError: LOGGER.debug('no MySQL driver MySQLDB') try: import mysql.connector as mysqlconnector DRIVERS.append("MySQL(mysqlconnector)") except ImportError: LOGGER.debug("no driver mysql.connector") try: import psycopg2 from psycopg2.extensions import adapt as psycopg2_adapt DRIVERS.append('PostgreSQL(psycopg2)') except ImportError: LOGGER.debug('no PostgreSQL driver psycopg2') try: try: import gluon.contrib.pg8000.dbapi as pg8000 except ImportError: import pg8000.dbapi as pg8000 DRIVERS.append('PostgreSQL(pg8000)') except ImportError: LOGGER.debug('no PostgreSQL driver pg8000') try: import cx_Oracle DRIVERS.append('Oracle(cx_Oracle)') except ImportError: LOGGER.debug('no Oracle driver cx_Oracle') try: try: import pyodbc except ImportError: try: import gluon.contrib.pypyodbc as pyodbc except Exception, e: raise ImportError(str(e)) DRIVERS.append('MSSQL(pyodbc)') DRIVERS.append('DB2(pyodbc)') DRIVERS.append('Teradata(pyodbc)') DRIVERS.append('Ingres(pyodbc)') except ImportError: LOGGER.debug('no MSSQL/DB2/Teradata/Ingres driver pyodbc') try: import Sybase DRIVERS.append('Sybase(Sybase)') except ImportError: LOGGER.debug('no Sybase driver') try: import kinterbasdb DRIVERS.append('Interbase(kinterbasdb)') DRIVERS.append('Firebird(kinterbasdb)') except ImportError: LOGGER.debug('no Firebird/Interbase driver kinterbasdb') try: import fdb DRIVERS.append('Firebird(fdb)') except ImportError: LOGGER.debug('no Firebird driver fdb') try: import firebirdsql DRIVERS.append('Firebird(firebirdsql)') except ImportError: LOGGER.debug('no Firebird driver firebirdsql') try: import informixdb DRIVERS.append('Informix(informixdb)') LOGGER.warning('Informix support is experimental') except ImportError: LOGGER.debug('no Informix driver informixdb') try: import sapdb DRIVERS.append('SQL(sapdb)') LOGGER.warning('SAPDB support is experimental') except ImportError: LOGGER.debug('no SAP driver sapdb') try: import cubriddb DRIVERS.append('Cubrid(cubriddb)') LOGGER.warning('Cubrid support is experimental') except ImportError: LOGGER.debug('no Cubrid driver cubriddb') try: from com.ziclix.python.sql import zxJDBC import java.sql from org.sqlite import JDBC zxJDBC_sqlite = java.sql.DriverManager DRIVERS.append('PostgreSQL(zxJDBC)') DRIVERS.append('SQLite(zxJDBC)') LOGGER.warning('zxJDBC support is experimental') is_jdbc = True except ImportError: LOGGER.debug('no SQLite/PostgreSQL driver zxJDBC') is_jdbc = False try: import couchdb DRIVERS.append('CouchDB(couchdb)') except ImportError: LOGGER.debug('no Couchdb driver couchdb') try: import pymongo DRIVERS.append('MongoDB(pymongo)') except: LOGGER.debug('no MongoDB driver 
pymongo') try: import imaplib DRIVERS.append('IMAP(imaplib)') except: LOGGER.debug('no IMAP driver imaplib') PLURALIZE_RULES = [ (re.compile('child$'), re.compile('child$'), 'children'), (re.compile('oot$'), re.compile('oot$'), 'eet'), (re.compile('ooth$'), re.compile('ooth$'), 'eeth'), (re.compile('l[eo]af$'), re.compile('l([eo])af$'), 'l\\1aves'), (re.compile('sis$'), re.compile('sis$'), 'ses'), (re.compile('man$'), re.compile('man$'), 'men'), (re.compile('ife$'), re.compile('ife$'), 'ives'), (re.compile('eau$'), re.compile('eau$'), 'eaux'), (re.compile('lf$'), re.compile('lf$'), 'lves'), (re.compile('[sxz]$'), re.compile('$'), 'es'), (re.compile('[^aeioudgkprt]h$'), re.compile('$'), 'es'), (re.compile('(qu|[^aeiou])y$'), re.compile('y$'), 'ies'), (re.compile('$'), re.compile('$'), 's'), ] def pluralize(singular, rules=PLURALIZE_RULES): for line in rules: re_search, re_sub, replace = line plural = re_search.search(singular) and re_sub.sub(replace, singular) if plural: return plural def hide_password(uri): if isinstance(uri,(list,tuple)): return [hide_password(item) for item in uri] return REGEX_NOPASSWD.sub('******',uri) def OR(a,b): return a|b def AND(a,b): return a&b def IDENTITY(x): return x def varquote_aux(name,quotestr='%s'): return name if REGEX_W.match(name) else quotestr % name def quote_keyword(a,keyword='timestamp'): regex = re.compile('\.keyword(?=\w)') a = regex.sub('."%s"' % keyword,a) return a if 'google' in DRIVERS: is_jdbc = False class GAEDecimalProperty(gae.Property): data_type = decimal.Decimal def __init__(self, precision, scale, **kwargs): super(GAEDecimalProperty, self).__init__(self, **kwargs) d = '1.' for x in range(scale): d += '0' self.round = decimal.Decimal(d) def get_value_for_datastore(self, model_instance): value = super(GAEDecimalProperty, self) .get_value_for_datastore(model_instance) if value is None or value == '': return None else: return str(value) def make_value_from_datastore(self, value): if value is None or value == '': return None else: return decimal.Decimal(value).quantize(self.round) def validate(self, value): value = super(GAEDecimalProperty, self).validate(value) if value is None or isinstance(value, decimal.Decimal): return value elif isinstance(value, basestring): return decimal.Decimal(value) raise gae.BadValueError("Property %s must be a Decimal or string." % self.name) class NDBDecimalProperty(ndb.StringProperty): data_type = decimal.Decimal def __init__(self, precision, scale, **kwargs): d = '1.' for x in range(scale): d += '0' self.round = decimal.Decimal(d) def _to_base_type(self, value): if value is None or value == '': return None else: return str(value) def _from_base_type(self, value): if value is None or value == '': return None else: return decimal.Decimal(value).quantize(self.round) def _validate(self, value): if value is None or isinstance(value, decimal.Decimal): return value elif isinstance(value, basestring): return decimal.Decimal(value) raise TypeError("Property %s must be a Decimal or string." 
% self._name) class ConnectionPool(object): POOLS = {} check_active_connection = True @staticmethod def set_folder(folder): THREAD_LOCAL.folder = folder def close(self,action='commit',really=True): if action: if callable(action): action(self) else: getattr(self, action)() if self.pool_size: GLOBAL_LOCKER.acquire() pool = ConnectionPool.POOLS[self.uri] if len(pool) < self.pool_size: pool.append(self.connection) really = False GLOBAL_LOCKER.release() if really: self.close_connection() self.connection = None @staticmethod def close_all_instances(action): dbs = getattr(THREAD_LOCAL,'db_instances',{}).items() for db_uid, db_group in dbs: for db in db_group: if hasattr(db,'_adapter'): db._adapter.close(action) getattr(THREAD_LOCAL,'db_instances',{}).clear() getattr(THREAD_LOCAL,'db_instances_zombie',{}).clear() if callable(action): action(None) return def find_or_make_work_folder(self): self.folder = getattr(THREAD_LOCAL,'folder','') if (os.path.isabs(self.folder) and isinstance(self, UseDatabaseStoredFile) and self.folder.startswith(os.getcwd())): self.folder = os.path.relpath(self.folder, os.getcwd()) if False and self.folder and not exists(self.folder): os.mkdir(self.folder)
BSD 3-Clause New or Revised License
yeti-platform/yeti
core/web/api/file.py
File.get_id
python
def get_id(self, id):
        try:
            fileobj = self.objectmanager.objects.get(id=id)
            return Response(fileobj.body.stream_contents(), mimetype=fileobj.mime_type)
        except DoesNotExist:
            abort(404)
Retrieves a file's content.

:<id ObjectId corresponding to the file ObjectId
https://github.com/yeti-platform/yeti/blob/fcd3ee3d3d064df772d0392c20c22aad2bc4c8e6/core/web/api/file.py#L57-L66
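The route above streams a stored file back with its recorded mime type; a minimal Flask-only sketch of that streaming pattern (the route and payload below are made up):

from flask import Flask, Response

app = Flask(__name__)

@app.route('/files/demo')
def serve_demo_file():
    def chunks():
        # Stands in for fileobj.body.stream_contents() in the real handler.
        yield b'first chunk, '
        yield b'second chunk'
    return Response(chunks(), mimetype='text/plain')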
from __future__ import unicode_literals import zipfile import magic from flask import request, Response, abort from flask_classy import route from mongoengine import DoesNotExist from core import observables from core.database import AttachedFile from core.helpers import stream_sha256 from core.web.api.api import render from core.web.api.crud import CrudApi from core.web.helpers import requires_permissions def save_file(uploaded_file, filename=None): value = "FILE:{}".format(stream_sha256(uploaded_file)) mime_type = magic.from_buffer(uploaded_file.read(100), mime=True) uploaded_file.seek(0) body = AttachedFile.from_upload(uploaded_file, force_mime=mime_type) f = observables.File.get_or_create(value=value, body=body, mime_type=mime_type) if not filename: filename = uploaded_file.filename if filename not in f.filenames: f.filenames.append(filename) return f.save() def save_uploaded_files(): files = [] unzip = bool(request.form.get("unzip") in ["true", "on"]) for uploaded_file in request.files.getlist("files"): if unzip and zipfile.is_zipfile(uploaded_file): with zipfile.ZipFile(uploaded_file, "r") as zf: for info in zf.infolist(): name = info.filename size = info.file_size data = zf.read(name) if size > 0: files.append(save_file(data, filename=name.split("/")[-1])) else: files.append(save_file(uploaded_file)) return files class File(CrudApi): objectmanager = observables.File @route("/get/id/<id>", methods=["GET"]) @requires_permissions("read")
Apache License 2.0
thriftrw/thriftrw-python
thriftrw/compile/scope.py
Scope.resolve_service_spec
python
def resolve_service_spec(self, name, lineno):
        if name in self.service_specs:
            return self.service_specs[name].link(self)

        if '.' in name:
            include_name, component = name.split('.', 2)
            if include_name in self.included_scopes:
                return self.included_scopes[
                    include_name
                ].resolve_service_spec(component, lineno)

        raise ThriftCompilerError(
            'Unknown service "%s" referenced at line %d%s' % (
                name, lineno, self.__in_path()
            )
        )
Finds and links the ServiceSpec with the given name.
https://github.com/thriftrw/thriftrw-python/blob/22f6ab645f5af48cae2fee0dc1431dfacb971348/thriftrw/compile/scope.py#L114-L131
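A simplified, standalone sketch of the lookup rule: a bare name resolves locally, while a dotted prefix delegates the remainder to the matching included scope. The tuples below are toy stand-ins for Scope, not the real class:

def lookup(scope, name):
    local, includes = scope
    if name in local:
        return local[name]
    if '.' in name:
        prefix, rest = name.split('.', 1)
        if prefix in includes:
            return lookup(includes[prefix], rest)
    raise KeyError(name)

shared = ({'UserService': 'spec-from-shared'}, {})
root = ({}, {'shared': shared})
print(lookup(root, 'shared.UserService'))  # spec-from-shared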
from __future__ import absolute_import, unicode_literals, print_function import types from ..errors import ThriftCompilerError __all__ = ['Scope'] class Scope(object): __slots__ = ( 'const_specs', 'type_specs', 'module', 'service_specs', 'included_scopes', 'path' ) def __init__(self, name, path=None): self.path = path self.type_specs = {} self.const_specs = {} self.service_specs = {} self.included_scopes = {} self.module = types.ModuleType(str(name)) def __str__(self): return "Scope(%r)" % { 'const_specs': self.const_specs, 'type_specs': self.type_specs, 'service_specs': self.service_specs, 'module': self.module, } __repr__ = __str__ def __in_path(self): if self.path: return ' in "%s"' % self.path else: return '' def resolve_const_spec(self, name, lineno): if name in self.const_specs: return self.const_specs[name].link(self) if '.' in name: include_name, component = name.split('.', 1) if include_name in self.included_scopes: return self.included_scopes[include_name].resolve_const_spec( component, lineno ) raise ThriftCompilerError( 'Unknown constant "%s" referenced at line %d%s' % ( name, lineno, self.__in_path() ) ) def resolve_type_spec(self, name, lineno): if name in self.type_specs: return self.type_specs[name].link(self) if '.' in name: include_name, component = name.split('.', 1) if include_name in self.included_scopes: return self.included_scopes[include_name].resolve_type_spec( component, lineno ) raise ThriftCompilerError( 'Unknown type "%s" referenced at line %d%s' % ( name, lineno, self.__in_path() ) )
MIT License
nastools/homeassistant
homeassistant/components/light/isy994.py
ISYLightDevice.is_on
python
def is_on(self) -> bool:
        return self.state == STATE_ON
Get whether the ISY994 light is on.
https://github.com/nastools/homeassistant/blob/7ca1180bd42713f2d77bbc3f0b27b231ba8784aa/homeassistant/components/light/isy994.py#L53-L55
import logging from typing import Callable from homeassistant.components.light import ( Light, SUPPORT_BRIGHTNESS, ATTR_BRIGHTNESS) import homeassistant.components.isy994 as isy from homeassistant.const import STATE_ON, STATE_OFF, STATE_UNKNOWN from homeassistant.helpers.typing import ConfigType _LOGGER = logging.getLogger(__name__) VALUE_TO_STATE = { False: STATE_OFF, True: STATE_ON, } UOM = ['2', '51', '78'] STATES = [STATE_OFF, STATE_ON, 'true', 'false', '%'] def setup_platform(hass, config: ConfigType, add_devices: Callable[[list], None], discovery_info=None): if isy.ISY is None or not isy.ISY.connected: _LOGGER.error('A connection has not been made to the ISY controller.') return False devices = [] for node in isy.filter_nodes(isy.NODES, units=UOM, states=STATES): if node.dimmable or '51' in node.uom: devices.append(ISYLightDevice(node)) add_devices(devices) class ISYLightDevice(isy.ISYDevice, Light): def __init__(self, node: object) -> None: isy.ISYDevice.__init__(self, node) @property
MIT License
placeware/thisplace
thisplace.py
WordHasher.to_bytes
python
def to_bytes(self, integer):
        bytes = [integer & 0b11111111]
        for n in range(1,6):
            div = 2**(n*8)
            bytes.append((integer//div) & 0b11111111)
        bytes.reverse()
        return bytes
Convert a 48bit `integer` to a list of 6bytes
https://github.com/placeware/thisplace/blob/8c69fbc494a7fa4261398f0fdc0b3821a4e9f89a/thisplace.py#L168-L176
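A worked example of the byte extraction above, restated without the class; the input value is arbitrary:

integer = 0x010203040506                 # a 48-bit value

byte_list = [integer & 0b11111111]       # low byte first: 0x06
for n in range(1, 6):
    byte_list.append((integer // 2 ** (n * 8)) & 0b11111111)
byte_list.reverse()

print(byte_list)  # [1, 2, 3, 4, 5, 6] -- most significant byte first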
import random import geohash def get_words(fname): lines = open(fname) words = [] for word in lines: words.append(word.strip()) lines.close() random.seed(634634) random.shuffle(words) words = words[:2**15] assert len(words) == len(set(words)) return words GOOGLE_WORDLIST = get_words("words/google-ngram-list") GOOGLE_4096WORDS = get_words("words/google-ngram-list-4096") WORDNET_LEMMAS = get_words("words/wordnet-list") HUMAN_WORDLIST = ( 'ack', 'alabama', 'alanine', 'alaska', 'alpha', 'angel', 'apart', 'april', 'arizona', 'arkansas', 'artist', 'asparagus', 'aspen', 'august', 'autumn', 'avocado', 'bacon', 'bakerloo', 'batman', 'beer', 'berlin', 'beryllium', 'black', 'blossom', 'blue', 'bluebird', 'bravo', 'bulldog', 'burger', 'butter', 'california', 'carbon', 'cardinal', 'carolina', 'carpet', 'cat', 'ceiling', 'charlie', 'chicken', 'coffee', 'cola', 'cold', 'colorado', 'comet', 'connecticut', 'crazy', 'cup', 'dakota', 'december', 'delaware', 'delta', 'diet', 'don', 'double', 'early', 'earth', 'east', 'echo', 'edward', 'eight', 'eighteen', 'eleven', 'emma', 'enemy', 'equal', 'failed', 'fanta', 'fifteen', 'fillet', 'finch', 'fish', 'five', 'fix', 'floor', 'florida', 'football', 'four', 'fourteen', 'foxtrot', 'freddie', 'friend', 'fruit', 'gee', 'georgia', 'glucose', 'golf', 'green', 'grey', 'hamper', 'happy', 'harry', 'hawaii', 'helium', 'high', 'hot', 'hotel', 'hydrogen', 'idaho', 'illinois', 'india', 'indigo', 'ink', 'iowa', 'island', 'item', 'jersey', 'jig', 'johnny', 'juliet', 'july', 'jupiter', 'kansas', 'kentucky', 'kilo', 'king', 'kitten', 'lactose', 'lake', 'lamp', 'lemon', 'leopard', 'lima', 'lion', 'lithium', 'london', 'louisiana', 'low', 'magazine', 'magnesium', 'maine', 'mango', 'march', 'mars', 'maryland', 'massachusetts', 'may', 'mexico', 'michigan', 'mike', 'minnesota', 'mirror', 'mississippi', 'missouri', 'mobile', 'mockingbird', 'monkey', 'montana', 'moon', 'mountain', 'muppet', 'music', 'nebraska', 'neptune', 'network', 'nevada', 'nine', 'nineteen', 'nitrogen', 'north', 'november', 'nuts', 'october', 'ohio', 'oklahoma', 'one', 'orange', 'oranges', 'oregon', 'oscar', 'oven', 'oxygen', 'papa', 'paris', 'pasta', 'pennsylvania', 'pip', 'pizza', 'pluto', 'potato', 'princess', 'purple', 'quebec', 'queen', 'quiet', 'red', 'river', 'robert', 'robin', 'romeo', 'rugby', 'sad', 'salami', 'saturn', 'september', 'seven', 'seventeen', 'shade', 'sierra', 'single', 'sink', 'six', 'sixteen', 'skylark', 'snake', 'social', 'sodium', 'solar', 'south', 'spaghetti', 'speaker', 'spring', 'stairway', 'steak', 'stream', 'summer', 'sweet', 'table', 'tango', 'ten', 'tennessee', 'tennis', 'texas', 'thirteen', 'three', 'timing', 'triple', 'twelve', 'twenty', 'two', 'uncle', 'undress', 'uniform', 'uranus', 'utah', 'vegan', 'venus', 'vermont', 'victor', 'video', 'violet', 'virginia', 'washington', 'west', 'whiskey', 'white', 'william', 'winner', 'winter', 'wisconsin', 'wolfram', 'wyoming', 'xray', 'yankee', 'yellow', 'zebra', 'zulu') class WordHasher(object): def __init__(self): self._symbols = "0123456789bcdefghjkmnpqrstuvwxyz" self._decode_symbols = dict((ch, i) for (i, ch) in enumerate(self._symbols)) self._encode_symbols = dict((i, ch) for (i, ch) in enumerate(self._symbols)) self.six_wordlist = HUMAN_WORDLIST self.four_wordlist = GOOGLE_4096WORDS self.three_wordlist = GOOGLE_WORDLIST def three_words(self, lat_long): lat, lon = lat_long gh = geohash.encode(lat, lon, 9) words = "-".join(self.three_wordlist[p] for p in self.to_rugbits(self.geo_to_int(gh))) return words def four_words(self, lat_long): lat, 
lon = lat_long gh = geohash.encode(lat, lon, 9) words = "-".join(self.four_wordlist[p] for p in self.to_quads(self.pad(gh))) return words def six_words(self, lat_long): lat, lon = lat_long gh = geohash.encode(lat, lon, 9) words = "-".join(self.six_wordlist[p] for p in self.to_bytes(self.pad(gh))) return words def decode(self, words): words = words.split("-") if len(words) == 3: i = self.rugbits_to_int([self.three_wordlist.index(w) for w in words]) elif len(words) == 4: i = self.quads_to_int([self.four_wordlist.index(w) for w in words]) i = self.unpad(i) elif len(words) == 6: i = self.bytes_to_int([self.six_wordlist.index(w) for w in words]) i = self.unpad(i) else: raise RuntimeError("Do not know how to decode a set of %i words."%(len(words))) geo_hash = self.int_to_geo(i) return geohash.decode(geo_hash) def geo_to_int(self, geo_hash): base = len(self._symbols) number = 0 for symbol in geo_hash: number = number*base + self._decode_symbols[symbol] return number def int_to_geo(self, integer): base = len(self._symbols) symbols = [] while integer > 0: remainder = integer % base integer //= base symbols.append(self._encode_symbols[remainder]) return ''.join(reversed(symbols)) def pad(self, geo_hash): assert len(geo_hash) == 9 return self.geo_to_int(geo_hash) * 8 def unpad(self, integer): return integer>>3
MIT License
quantmind/lux
lux/utils/files.py
Filehandler.open
python
def open(self, name, mode='rb'):
        raise NotImplementedError()
Retrieves the specified file from storage, using the optional mixin class to customize what features are available on the File returned.
https://github.com/quantmind/lux/blob/7318fcd86c77616aada41d8182a04339680a554c/lux/utils/files.py#L54-L58
import os import re import itertools __all__ = ['Filehandler'] def skipfile(name): return name.startswith('.') or name.startswith('_') def directory(dir): bd, fname = os.path.split(dir) return dir if fname else bd def get_rel_dir(dir, base, res=''): dir = directory(dir) base = directory(base) if len(base) > len(dir): raise RuntimeError('Base directory not in path') if dir == base: return res dir, fname = os.path.split(dir) if res: fname = os.path.join(fname, res) return get_rel_dir(dir, base, fname) def get_valid_filename(s): s = s.strip().replace(' ', '_') return re.sub(r'(?u)[^-\w.]', '', s) class Filehandler:
BSD 3-Clause New or Revised License
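Filehandler.open above is only the abstract hook; the sketch below shows how a concrete handler might override it. The LocalFilehandler class and its storage root are illustrative and are not part of lux.

import os
from lux.utils.files import Filehandler  # assuming lux is importable as in the record

class LocalFilehandler(Filehandler):
    # Hypothetical handler that serves files from a local directory.
    def __init__(self, root):
        self.root = root

    def open(self, name, mode='rb'):
        return open(os.path.join(self.root, name), mode)

handler = LocalFilehandler('/tmp')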
square/mimicandrephrase
src/utils/token_mapper.py
TokenMapper.map_token
python
def map_token(self, token: str) -> int:
        offset = sum([mapping.output_size() for mapping in self.unk_mappings])
        for mapping in self.mappings:
            if mapping.match(token):
                return offset + mapping.map(token)
            offset += mapping.output_size()
        return -1
This attempts to map a token to one of the special mappings we have in this TokenMapper.
The first mapping that triggers wins ties. If no mappings fire, this returns -1.

:param token: the token to map
:return: an offset into the output matrix, or -1 if no match
https://github.com/square/mimicandrephrase/blob/bd29a995b211cb4f7933fa990b0bba1564c22450/src/utils/token_mapper.py#L175-L188
from typing import List, Sequence, Dict from abc import ABC, abstractmethod import re import TensorflowModel_pb2 as proto def simple_hash(token: str, output_size: int) -> int: encoded = token.encode("utf-8") hash_sum = 0 for letter in encoded: hash_sum = ((31 * hash_sum) + letter) % output_size return hash_sum class TokenMapping(ABC): @abstractmethod def match(self, token: str) -> bool: pass @abstractmethod def map(self, token: str) -> int: pass @abstractmethod def output_size(self) -> int: pass @abstractmethod def debug_token(self, offset: int) -> str: pass @abstractmethod def serialize(self, serialized: proto.TokenMapping) -> proto.TokenMapping: pass class RegexTokenMapping(TokenMapping): def __init__(self, regex: str, num_hash: int, debug_base: str): self.regex = re.compile(regex) self.num_hash = num_hash self.debug_base = debug_base def match(self, token: str): return self.regex.match(token) is not None def map(self, token: str) -> int: return simple_hash(token, self.num_hash) def output_size(self) -> int: return self.num_hash def debug_token(self, offset: int): return self.debug_base+'$'+str(offset) def serialize(self, serialized: proto.TokenMapping): serialized.type = proto.REGEX serialized.regex = self.regex serialized.num_hash = self.num_hash serialized.debug_base = self.debug_base return serialized class HashTokenMapping(TokenMapping): def __init__(self, num_hash: int): self.num_hash = num_hash def match(self, token: str): return True def map(self, token: str): return simple_hash(token.lower(), self.num_hash) def output_size(self): return self.num_hash def debug_token(self, offset: int): return "HASH$"+str(offset) def serialize(self, serialized: proto.TokenMapping): serialized.type = proto.HASH serialized.num_hash = self.num_hash return serialized class ExactTokenMapping(TokenMapping): def __init__(self, vocab: Sequence[str]): self.tokens = list(vocab) self.token_to_idx = {token: idx for idx, token in enumerate(self.tokens)} def match(self, token: str): return token in self.token_to_idx def map(self, token: str): return self.token_to_idx[token] def output_size(self): return len(self.tokens) def debug_token(self, offset: int): return self.tokens[offset] def serialize(self, serialized: proto.TokenMapping): serialized.type = proto.TOKEN serialized.tokens.extend(self.tokens) class TokenMapper: def __init__(self, mappings: List[TokenMapping], unk_mappings: List[TokenMapping]): self.mappings = mappings self.unk_mappings = unk_mappings def mapped_output_size(self) -> int: return sum([mapping.output_size() for mapping in self.unk_mappings + self.mappings])
MIT License
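To make the offset arithmetic in map_token above easier to follow, here is a small self-contained sketch of the same first-match-wins layout; it does not use the repository's classes, so every name in it is illustrative.

def simple_hash(token, output_size):
    # Same rolling hash as the record: ((31 * h) + byte) mod output_size.
    h = 0
    for byte in token.encode("utf-8"):
        h = (31 * h + byte) % output_size
    return h

def map_token(token, mappings):
    # mappings is a list of (match_fn, size) pairs; the first match wins and
    # each later group is offset by the sizes of the groups before it.
    offset = 0
    for match_fn, size in mappings:
        if match_fn(token):
            return offset + simple_hash(token, size)
        offset += size
    return -1

mappings = [(str.isdigit, 16), (lambda t: True, 64)]
print(map_token("42", mappings))     # falls in slots 0..15
print(map_token("hello", mappings))  # falls in slots 16..79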
craylabs/smartsim
smartsim/launcher/local/local.py
LocalLauncher.run
python
def run(self, step):
        if not self.task_manager.actively_monitoring:
            self.task_manager.start()
        out, err = step.get_output_files()
        output = open(out, "w+")
        error = open(err, "w+")
        cmd = step.get_launch_cmd()
        task_id = self.task_manager.start_task(
            cmd, step.cwd, env=step.env, out=output, err=error
        )
        self.step_mapping.add(step.name, task_id=task_id, managed=False)
        return task_id
Run a local step created by this launcher. Utilize the shell
library to execute the command with a Popen. Output and error
files will be written to the entity path.

:param step: LocalStep instance to run
:type step: LocalStep
https://github.com/craylabs/smartsim/blob/0c4b198650a026d7bd960f38b1866fb3b8c59a96/smartsim/launcher/local/local.py#L84-L103
from ...error import LauncherError from ...settings import RunSettings from ...utils import get_logger from ..step import LocalStep from ..stepInfo import UnmanagedStepInfo from ..stepMapping import StepMapping from ..taskManager import TaskManager logger = get_logger(__name__) class LocalLauncher: def __init__(self): self.task_manager = TaskManager() self.step_mapping = StepMapping() def create_step(self, name, cwd, step_settings): if not isinstance(step_settings, RunSettings): raise TypeError( f"Local Launcher only supports entities with RunSettings, not {type(step_settings)}" ) step = LocalStep(name, cwd, step_settings) return step def get_step_update(self, step_names): updates = [] s_names, s_ids = self.step_mapping.get_ids(step_names, managed=False) for step_name, step_id in zip(s_names, s_ids): status, rc, out, err = self.task_manager.get_task_update(step_id) step_info = UnmanagedStepInfo(status, rc, out, err) update = (step_name, step_info) updates.append(update) return updates def get_step_nodes(self, step_names): return [["127.0.0.1"] * len(step_names)]
BSD 2-Clause Simplified License
wolph/python-progressbar
progressbar/bar.py
ProgressBar.__init__
python
def __init__(self, min_value=0, max_value=None, widgets=None,
                 left_justify=True, initial_value=0, poll_interval=None,
                 widget_kwargs=None, custom_len=utils.len_color,
                 max_error=True, prefix=None, suffix=None, variables=None,
                 min_poll_interval=None, **kwargs):
        StdRedirectMixin.__init__(self, **kwargs)
        ResizableMixin.__init__(self, **kwargs)
        ProgressBarBase.__init__(self, **kwargs)
        if not max_value and kwargs.get('maxval') is not None:
            warnings.warn('The usage of `maxval` is deprecated, please use '
                          '`max_value` instead', DeprecationWarning)
            max_value = kwargs.get('maxval')
        if not poll_interval and kwargs.get('poll'):
            warnings.warn('The usage of `poll` is deprecated, please use '
                          '`poll_interval` instead', DeprecationWarning)
            poll_interval = kwargs.get('poll')
        if max_value:
            if min_value > max_value:
                raise ValueError('Max value needs to be bigger than the min '
                                 'value')
        self.min_value = min_value
        self.max_value = max_value
        self.max_error = max_error
        if widgets is None:
            self.widgets = widgets
        else:
            self.widgets = []
            for widget in widgets:
                if getattr(widget, 'copy', True):
                    widget = deepcopy(widget)
                self.widgets.append(widget)
        self.widgets = widgets
        self.prefix = prefix
        self.suffix = suffix
        self.widget_kwargs = widget_kwargs or {}
        self.left_justify = left_justify
        self.value = initial_value
        self._iterable = None
        self.custom_len = custom_len
        self.initial_start_time = kwargs.get('start_time')
        self.init()
        poll_interval = utils.deltas_to_seconds(poll_interval, default=None)
        min_poll_interval = utils.deltas_to_seconds(min_poll_interval,
                                                    default=None)
        self._MINIMUM_UPDATE_INTERVAL = utils.deltas_to_seconds(
            self._MINIMUM_UPDATE_INTERVAL)
        self.poll_interval = poll_interval
        self.min_poll_interval = max(
            min_poll_interval or self._MINIMUM_UPDATE_INTERVAL,
            self._MINIMUM_UPDATE_INTERVAL,
            float(os.environ.get('PROGRESSBAR_MINIMUM_UPDATE_INTERVAL', 0)),
        )
        self.variables = utils.AttributeDict(variables or {})
        for widget in (self.widgets or []):
            if isinstance(widget, widgets_module.VariableMixin):
                if widget.name not in self.variables:
                    self.variables[widget.name] = None
Initializes a progress bar with sane defaults
https://github.com/wolph/python-progressbar/blob/8eb963c6cc97949bc7ac3fc57e645506a2c9ae0c/progressbar/bar.py#L283-L361
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from __future__ import with_statement import sys import math import os import time import timeit import logging import warnings from datetime import datetime from copy import deepcopy try: from collections import abc except ImportError: import collections as abc from python_utils import converters import six from . import widgets from . import widgets as widgets_module from . import base from . import utils logger = logging.getLogger(__name__) class ProgressBarMixinBase(object): def __init__(self, **kwargs): self._finished = False def start(self, **kwargs): pass def update(self, value=None): pass def finish(self): self._finished = True def __del__(self): if not self._finished: try: self.finish() except Exception: pass class ProgressBarBase(abc.Iterable, ProgressBarMixinBase): pass class DefaultFdMixin(ProgressBarMixinBase): def __init__(self, fd=sys.stderr, is_terminal=None, line_breaks=None, enable_colors=None, **kwargs): if fd is sys.stdout: fd = utils.streams.original_stdout elif fd is sys.stderr: fd = utils.streams.original_stderr self.fd = fd self.is_ansi_terminal = utils.is_ansi_terminal(fd) self.is_terminal = utils.is_terminal( fd, is_terminal or self.is_ansi_terminal) if line_breaks is None: line_breaks = utils.env_flag('PROGRESSBAR_LINE_BREAKS', not self.is_terminal) self.line_breaks = line_breaks if enable_colors is None: enable_colors = utils.env_flag('PROGRESSBAR_ENABLE_COLORS', self.is_ansi_terminal) self.enable_colors = enable_colors ProgressBarMixinBase.__init__(self, **kwargs) def update(self, *args, **kwargs): ProgressBarMixinBase.update(self, *args, **kwargs) line = converters.to_unicode(self._format_line()) if not self.enable_colors: line = utils.no_color(line) if self.line_breaks: line = line.rstrip() + '\n' else: line = '\r' + line try: self.fd.write(line) except UnicodeEncodeError: self.fd.write(line.encode('ascii', 'replace')) def finish(self, *args, **kwargs): if self._finished: return end = kwargs.pop('end', '\n') ProgressBarMixinBase.finish(self, *args, **kwargs) if end and not self.line_breaks: self.fd.write(end) self.fd.flush() class ResizableMixin(ProgressBarMixinBase): def __init__(self, term_width=None, **kwargs): ProgressBarMixinBase.__init__(self, **kwargs) self.signal_set = False if term_width: self.term_width = term_width else: try: self._handle_resize() import signal self._prev_handle = signal.getsignal(signal.SIGWINCH) signal.signal(signal.SIGWINCH, self._handle_resize) self.signal_set = True except Exception: pass def _handle_resize(self, signum=None, frame=None): w, h = utils.get_terminal_size() self.term_width = w def finish(self): ProgressBarMixinBase.finish(self) if self.signal_set: try: import signal signal.signal(signal.SIGWINCH, self._prev_handle) except Exception: pass class StdRedirectMixin(DefaultFdMixin): def __init__(self, redirect_stderr=False, redirect_stdout=False, **kwargs): DefaultFdMixin.__init__(self, **kwargs) self.redirect_stderr = redirect_stderr self.redirect_stdout = redirect_stdout self._stdout = self.stdout = sys.stdout self._stderr = self.stderr = sys.stderr def start(self, *args, **kwargs): if self.redirect_stdout: utils.streams.wrap_stdout() if self.redirect_stderr: utils.streams.wrap_stderr() self._stdout = utils.streams.original_stdout self._stderr = utils.streams.original_stderr self.stdout = utils.streams.stdout self.stderr = utils.streams.stderr utils.streams.start_capturing(self) DefaultFdMixin.start(self, 
*args, **kwargs) def update(self, value=None): if not self.line_breaks and utils.streams.needs_clear(): self.fd.write('\r' + ' ' * self.term_width + '\r') utils.streams.flush() DefaultFdMixin.update(self, value=value) def finish(self, end='\n'): DefaultFdMixin.finish(self, end=end) utils.streams.stop_capturing(self) if self.redirect_stdout: utils.streams.unwrap_stdout() if self.redirect_stderr: utils.streams.unwrap_stderr() class ProgressBar(StdRedirectMixin, ResizableMixin, ProgressBarBase): _DEFAULT_MAXVAL = base.UnknownLength _MINIMUM_UPDATE_INTERVAL = 0.050
BSD 3-Clause New or Revised License
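For orientation, a short usage sketch of the constructor documented above, written against the progressbar2 package; the exact keyword set accepted can vary between releases, so treat this as an assumption about a compatible version.

import time
import progressbar  # the progressbar2 distribution

bar = progressbar.ProgressBar(max_value=10, prefix='work: ')
for i in range(10):
    time.sleep(0.05)
    bar.update(i + 1)
bar.finish()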
tzhangwps/turbulence-and-systemic-risk
src/main.py
MainProcess.append_prices_and_returns
python
def append_prices_and_returns(self):
        print('\nRequesting data from Yahoo Finance...')
        self.prices = pd.read_pickle(path.prices_path_historical)
        self.prices = get.GetPrices().update_weekly_prices(self.prices)
        self.prices.to_pickle(path.prices_path_current)
        self.prices = get.CalculateReturns().add_curve_slope(self.prices)
        self.returns = get.CalculateReturns().calculate_returns(self.prices)
Appends new data to the prices dataset and the returns dataset.
https://github.com/tzhangwps/turbulence-and-systemic-risk/blob/ceb7d1c6a1914da5a2316603f289238a4bb6a826/src/main.py#L26-L35
import pandas as pd
import os

import TurbulenceSuite_paths as path
import src.get_data as get
import src.calculate as calc


class MainProcess:

    def __init__(self):
        self.prices = pd.DataFrame()
        self.returns = pd.DataFrame()
        self.turbulence = pd.DataFrame()
        self.systemic_risk = pd.DataFrame()
MIT License
brython-dev/brython
www/speed/benchmarks/util.py
run_benchmark
python
def run_benchmark(options, num_runs, bench_func, *args):
    if options.profile:
        import cProfile
        prof = cProfile.Profile()
        prof.runcall(bench_func, num_runs, *args)
        prof.print_stats(sort=options.profile_sort)
    else:
        data = bench_func(num_runs, *args)
        if options.take_geo_mean:
            product = reduce(operator.mul, data, 1)
            print(math.pow(product, 1.0 / len(data)))
        else:
            for x in data:
                print(x)
Run the given benchmark, print results to stdout.

Args:
    options: optparse.Values instance.
    num_runs: number of times to run the benchmark
    bench_func: benchmark function. `num_runs, *args` will be passed to this
        function. This should return a list of floats (benchmark execution
        times).
https://github.com/brython-dev/brython/blob/33aeaab551f1b73209326c5a0aecf98642d4c126/www/speed/benchmarks/util.py#L10-L32
__author__ = "collinwinter@google.com (Collin Winter)"

import math
import operator
from functools import reduce
BSD 3-Clause New or Revised License
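A hedged driver sketch for run_benchmark above. The function only needs an options object exposing profile, profile_sort and take_geo_mean, so argparse is used here for brevity even though the docstring mentions optparse; the import of util assumes the benchmark directory is on sys.path.

import argparse
import timeit
from util import run_benchmark  # assumes www/speed/benchmarks is on sys.path

def bench_func(num_runs):
    # Return one wall-clock measurement per run, as run_benchmark expects.
    times = []
    for _ in range(num_runs):
        start = timeit.default_timer()
        sum(i * i for i in range(10000))
        times.append(timeit.default_timer() - start)
    return times

parser = argparse.ArgumentParser()
parser.add_argument('--profile', action='store_true')
parser.add_argument('--profile_sort', default='time')
parser.add_argument('--take_geo_mean', action='store_true')
options = parser.parse_args([])

run_benchmark(options, 5, bench_func)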
openmined/pyariesfl
aries_cloudagent/admin/server.py
WebhookTarget.__init__
python
def __init__(
        self, endpoint: str, topic_filter: Sequence[str] = None, retries: int = None
    ):
        self.endpoint = endpoint
        self._topic_filter = None
        self.retries = retries
        self.topic_filter = topic_filter
Initialize the webhook target.
https://github.com/openmined/pyariesfl/blob/dd78dcebc771971abfee301b80cdd5d246c14840/aries_cloudagent/admin/server.py#L81-L89
import asyncio import logging from typing import Coroutine, Sequence, Set import uuid from aiohttp import web, ClientSession from aiohttp_apispec import docs, response_schema, setup_aiohttp_apispec import aiohttp_cors from marshmallow import fields, Schema from ..classloader import ClassLoader from ..config.base import ConfigError from ..config.injection_context import InjectionContext from ..messaging.outbound_message import OutboundMessage from ..messaging.responder import BaseResponder from ..stats import Collector from ..task_processor import TaskProcessor from ..transport.outbound.queue.base import BaseOutboundMessageQueue from .base_server import BaseAdminServer from .error import AdminSetupError from .routes import register_module_routes LOGGER = logging.getLogger(__name__) class AdminModulesSchema(Schema): result = fields.List( fields.Str(description="admin module"), description="List of admin modules", ) class AdminStatusSchema(Schema): class AdminResponder(BaseResponder): def __init__(self, send: Coroutine, webhook: Coroutine, **kwargs): super().__init__(**kwargs) self._send = send self._webhook = webhook async def send_outbound(self, message: OutboundMessage): await self._send(message) async def send_webhook(self, topic: str, payload: dict): await self._webhook(topic, payload) class WebhookTarget:
Apache License 2.0
ifding/wavenet-speech-to-text
model/networks.py
ResidualBlock.__init__
python
def __init__(self, res_channels, skip_channels, dilation):
        super(ResidualBlock, self).__init__()
        self.dilated = DilatedCausalConv1d(res_channels, dilation=dilation)
        self.conv_res = torch.nn.Conv1d(res_channels, res_channels, 1)
        self.conv_skip = torch.nn.Conv1d(res_channels, skip_channels, 1)
        self.gate_tanh = torch.nn.Tanh()
        self.gate_sigmoid = torch.nn.Sigmoid()
Residual block

:param res_channels: number of residual channel for input, output
:param skip_channels: number of skip channel for output
:param dilation:
https://github.com/ifding/wavenet-speech-to-text/blob/4d786c2280527ff38ba615974dd227c4f44c93b2/model/networks.py#L61-L75
import torch import numpy as np from utils.exceptions import InputSizeError class DilatedCausalConv1d(torch.nn.Module): def __init__(self, channels, dilation=1): super(DilatedCausalConv1d, self).__init__() self.conv = torch.nn.Conv1d(channels, channels, kernel_size=2, stride=1, dilation=dilation, padding=0, bias=False) def init_weights_for_test(self): for m in self.modules(): if isinstance(m, torch.nn.Conv1d): m.weight.data.fill_(1) def forward(self, x): output = self.conv(x) return output class CausalConv1d(torch.nn.Module): def __init__(self, in_channels, out_channels): super(CausalConv1d, self).__init__() self.conv = torch.nn.Conv1d(in_channels, out_channels, kernel_size=2, stride=1, padding=1, bias=False) def init_weights_for_test(self): for m in self.modules(): if isinstance(m, torch.nn.Conv1d): m.weight.data.fill_(1) def forward(self, x): output = self.conv(x) return output[:, :, :-1] class ResidualBlock(torch.nn.Module):
MIT License
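A quick shape check for the block constructed above; it only exercises the layers the record shows (the forward pass is outside the excerpt), and the import path simply mirrors the record's function_path.

import torch
from model.networks import ResidualBlock  # path taken from the record

block = ResidualBlock(res_channels=32, skip_channels=64, dilation=2)
x = torch.randn(1, 32, 16)   # (batch, res_channels, time)
out = block.dilated(x)       # kernel 2 with dilation 2 trims 2 time steps
print(out.shape)             # torch.Size([1, 32, 14])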
adafruit/adafruit_python_gpio
Adafruit_GPIO/SPI.py
BitBang.set_clock_hz
python
def set_clock_hz(self, hz):
        pass
Set the speed of the SPI clock. This is unsupported with the bit bang SPI class and will be ignored.
https://github.com/adafruit/adafruit_python_gpio/blob/a12fee39839665966bd124fd22588b2c87ced9d2/Adafruit_GPIO/SPI.py#L173-L177
import operator import time import Adafruit_GPIO as GPIO MSBFIRST = 0 LSBFIRST = 1 class SpiDev(object): def __init__(self, port, device, max_speed_hz=500000): import spidev self._device = spidev.SpiDev() self._device.open(port, device) self._device.max_speed_hz=max_speed_hz self._device.mode = 0 self._device.cshigh = False def set_clock_hz(self, hz): self._device.max_speed_hz=hz def set_mode(self, mode): if mode < 0 or mode > 3: raise ValueError('Mode must be a value 0, 1, 2, or 3.') self._device.mode = mode def set_bit_order(self, order): if order == MSBFIRST: self._device.lsbfirst = False elif order == LSBFIRST: self._device.lsbfirst = True else: raise ValueError('Order must be MSBFIRST or LSBFIRST.') def close(self): self._device.close() def write(self, data): self._device.writebytes(data) def read(self, length): return bytearray(self._device.readbytes(length)) def transfer(self, data): return bytearray(self._device.xfer2(data)) class SpiDevMraa(object): def __init__(self, port, device, max_speed_hz=500000): import mraa self._device = mraa.Spi(0) self._device.mode(0) def set_clock_hz(self, hz): self._device.frequency(hz) def set_mode(self,mode): if mode < 0 or mode > 3: raise ValueError('Mode must be a value 0, 1, 2, or 3.') self._device.mode(mode) def set_bit_order(self, order): if order == MSBFIRST: self._device.lsbmode(False) elif order == LSBFIRST: self._device.lsbmode(True) else: raise ValueError('Order must be MSBFIRST or LSBFIRST.') def close(self): self._device.Spi() def write(self, data): self._device.write(bytearray(data)) class BitBang(object): def __init__(self, gpio, sclk, mosi=None, miso=None, ss=None): self._gpio = gpio self._sclk = sclk self._mosi = mosi self._miso = miso self._ss = ss gpio.setup(sclk, GPIO.OUT) if mosi is not None: gpio.setup(mosi, GPIO.OUT) if miso is not None: gpio.setup(miso, GPIO.IN) if ss is not None: gpio.setup(ss, GPIO.OUT) gpio.set_high(ss) self.set_mode(0) self.set_bit_order(MSBFIRST)
MIT License
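A usage sketch for the bit-bang class whose set_clock_hz above is a no-op. The pin numbers are made up, and get_platform_gpio assumes a board supported by Adafruit_GPIO.

import Adafruit_GPIO as GPIO
import Adafruit_GPIO.SPI as SPI

SCLK, MOSI, MISO, SS = 18, 23, 24, 25      # hypothetical pin assignments

gpio = GPIO.get_platform_gpio()
device = SPI.BitBang(gpio, SCLK, MOSI, MISO, SS)
device.set_mode(0)
device.set_bit_order(SPI.MSBFIRST)
device.set_clock_hz(1000000)               # accepted but ignored by BitBang
device.write([0x01, 0x02])                 # bits are clocked out on the GPIO pins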
datadotworld/data.world-py
datadotworld/client/_swagger/models/file_summary_response.py
FileSummaryResponse.created
python
def created(self, created):
        if created is None:
            raise ValueError("Invalid value for `created`, must not be `None`")
        self._created = created
Sets the created of this FileSummaryResponse.
Date and time when file was created.

:param created: The created of this FileSummaryResponse.
:type: str
https://github.com/datadotworld/data.world-py/blob/7e5f474b655f4f0c88cc6862353e4d52c0e0bb31/datadotworld/client/_swagger/models/file_summary_response.py#L90-L101
from pprint import pformat from six import iteritems import re class FileSummaryResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'created': 'str', 'description': 'str', 'labels': 'list[str]', 'name': 'str', 'size_in_bytes': 'int', 'source': 'FileSourceSummaryResponse', 'updated': 'str' } attribute_map = { 'created': 'created', 'description': 'description', 'labels': 'labels', 'name': 'name', 'size_in_bytes': 'sizeInBytes', 'source': 'source', 'updated': 'updated' } def __init__(self, created=None, description=None, labels=None, name=None, size_in_bytes=None, source=None, updated=None): self._created = None self._description = None self._labels = None self._name = None self._size_in_bytes = None self._source = None self._updated = None self.created = created if description is not None: self.description = description if labels is not None: self.labels = labels self.name = name if size_in_bytes is not None: self.size_in_bytes = size_in_bytes if source is not None: self.source = source self.updated = updated @property def created(self): return self._created @created.setter
Apache License 2.0
twisted/axiom
axiom/item.py
Empowered._getPowerupInterfaces
python
def _getPowerupInterfaces(self):
        powerupInterfaces = getattr(self.__class__, "powerupInterfaces", ())
        pifs = []
        for x in powerupInterfaces:
            if isinstance(x, type(Interface)):
                pifs.append((x, 0))
            else:
                pifs.append(x)
        m = getattr(self, "__getPowerupInterfaces__", None)
        if m is not None:
            pifs = m(pifs)
            try:
                pifs = [(i, p) for (i, p) in pifs]
            except ValueError:
                raise ValueError("return value from %r.__getPowerupInterfaces__"
                                 " not an iterable of 2-tuples" % (self,))
        return pifs
Collect powerup interfaces this object declares that it can be installed on.
https://github.com/twisted/axiom/blob/28191ede99287e9a87c1ff561b831f7d80aaa2fe/axiom/item.py#L379-L402
__metaclass__ = type import gc from zope.interface import implementer, Interface from inspect import getabsfile from weakref import WeakValueDictionary from twisted.python import log from twisted.python.reflect import qual, namedAny from twisted.python.util import mergeFunctionMetadata from twisted.application.service import ( IService, IServiceCollection, MultiService) from axiom import slotmachine, _schema, iaxiom from axiom.errors import ChangeRejected, DeletionDisallowed from axiom.iaxiom import IColumn, IPowerupIndirector from axiom.attributes import ( SQLAttribute, _ComparisonOperatorMuxer, _MatchingOperationMuxer, _OrderingMixin, _ContainableMixin, Comparable, compare, inmemory, reference, text, integer, AND, _cascadingDeletes, _disallows) import six from six.moves import zip _typeNameToMostRecentClass = WeakValueDictionary() def normalize(qualName): return qualName.lower().replace('.', '_') class NoInheritance(RuntimeError): class NotInStore(RuntimeError): class CantInstantiateItem(RuntimeError): class MetaItem(slotmachine.SchemaMetaMachine): def __new__(meta, name, bases, dictionary): T = slotmachine.SchemaMetaMachine.__new__(meta, name, bases, dictionary) if T.__name__ == 'Item' and T.__module__ == __name__: return T T.__already_inherited__ += 1 if T.__already_inherited__ >= 2: raise NoInheritance("already inherited from item once: " "in-database inheritance not yet supported") if T.typeName is None: T.typeName = normalize(qual(T)) if T.schemaVersion is None: T.schemaVersion = 1 if not T.__legacy__ and T.typeName in _typeNameToMostRecentClass: gc.collect() if T.typeName in _typeNameToMostRecentClass: if T.__legacy__: return T otherT = _typeNameToMostRecentClass[T.typeName] if (otherT.__name__ == T.__name__ and getabsfile(T) == getabsfile(otherT) and T.__module__ != otherT.__module__): if len(T.__module__) < len(otherT.__module__): relmod = T.__module__ else: relmod = otherT.__module__ raise RuntimeError( "Use absolute imports; relative import" " detected for type %r (imported from %r)" % ( T.typeName, relmod)) raise RuntimeError("2 definitions of axiom typename {!r}: {!r} {!r}".format( T.typeName, T, _typeNameToMostRecentClass[T.typeName])) _typeNameToMostRecentClass[T.typeName] = T return T def __cmp__(self, other): if isinstance(other, MetaItem): return cmp((self.typeName, self.schemaVersion), (other.typeName, other.schemaVersion)) return NotImplemented def noop(): pass @implementer(IColumn) class _StoreIDComparer(Comparable): def __init__(self, type): self.type = type def __repr__(self): return '<storeID ' + qual(self.type) + '.storeID>' def fullyQualifiedName(self): return qual(self.type)+'.storeID' def infilter(self, pyval, oself, store): return pyval def outfilter(self, dbval, oself): return dbval def getShortColumnName(self, store): return store.getShortColumnName(self) def getColumnName(self, store): return store.getColumnName(self) def __get__(self, item, type=None): if item is None: return self else: return getattr(item, 'storeID') class _SpecialStoreIDAttribute(slotmachine.SetOnce): def __get__(self, oself, type=None): if type is not None and oself is None: if type._storeIDComparer is None: type._storeIDComparer = _StoreIDComparer(type) return type._storeIDComparer return super(_SpecialStoreIDAttribute, self).__get__(oself, type) def serviceSpecialCase(item, pups): if item._axiom_service is not None: return item._axiom_service svc = MultiService() for subsvc in pups: subsvc.setServiceParent(svc) item._axiom_service = svc return svc class Empowered(object): 
aggregateInterfaces = { IService: serviceSpecialCase, IServiceCollection: serviceSpecialCase} def inMemoryPowerUp(self, powerup, interface): self._inMemoryPowerups[interface] = powerup def powerUp(self, powerup, interface=None, priority=0): if interface is None: for iface, priority in powerup._getPowerupInterfaces(): self.powerUp(powerup, iface, priority) elif interface is IPowerupIndirector: raise TypeError( "You cannot install a powerup for IPowerupIndirector: " + powerup) else: forc = self.store.findOrCreate(_PowerupConnector, item=self, interface=unicode(qual(interface)), powerup=powerup) forc.priority = priority def powerDown(self, powerup, interface=None): if interface is None: for interface, priority in powerup._getPowerupInterfaces(): self.powerDown(powerup, interface) else: for cable in self.store.query(_PowerupConnector, AND(_PowerupConnector.item == self, _PowerupConnector.interface == unicode(qual(interface)), _PowerupConnector.powerup == powerup)): cable.deleteFromStore() return raise ValueError("Not powered up for {!r} with {!r}".format(interface, powerup)) def __conform__(self, interface): if interface is IPowerupIndirector: return pups = self.powerupsFor(interface) aggregator = self.aggregateInterfaces.get(interface, None) if aggregator is not None: return aggregator(self, pups) for pup in pups: return pup def powerupsFor(self, interface): inMemoryPowerup = self._inMemoryPowerups.get(interface, None) if inMemoryPowerup is not None: yield inMemoryPowerup if self.store is None: return name = unicode(qual(interface), 'ascii') for cable in self.store.query( _PowerupConnector, AND(_PowerupConnector.interface == name, _PowerupConnector.item == self), sort=_PowerupConnector.priority.descending): pup = cable.powerup if pup is None: cable.deleteFromStore() else: indirector = IPowerupIndirector(pup, None) if indirector is not None: yield indirector.indirect(interface) else: yield pup def interfacesFor(self, powerup): pc = _PowerupConnector for iface in self.store.query(pc, AND(pc.item == self, pc.powerup == powerup)).getColumn('interface'): yield namedAny(iface)
MIT License
user-cont/conu
conu/backend/nspawn/container.py
NspawnContainer.inspect
python
def inspect(self, refresh=True):
        return self.get_metadata(refresh=refresh)
return cached metadata by default (a convenience method)

:param refresh: bool, returns up to date metadata if set to True
:return: dict
https://github.com/user-cont/conu/blob/0d8962560f6f7f17fe1be0d434a4809e2a0ea51d/conu/backend/nspawn/container.py#L108-L116
import logging import subprocess import time from copy import deepcopy from conu.apidefs.container import Container from conu.exceptions import ConuException from conu.utils import run_cmd, random_str, convert_kv_to_dict, command_exists from conu.backend.nspawn import constants logger = logging.getLogger(__name__) class NspawnContainer(Container): def __init__(self, image, container_id, name=None, popen_instance=None, start_process=None, start_action=None): self.systemd_wait_support = None self.system_requirements() super(NspawnContainer, self).__init__(image, container_id, name) self.popen_instance = popen_instance self.start_process = start_process self.start_action = start_action @staticmethod def machined_restart(): logger.debug("restart systemd-machined") return run_cmd("systemctl restart systemd-machined", ignore_status=True) @staticmethod def system_requirements(): command_exists("systemd-run", ["systemd-run", "--help"], "Command systemd-run does not seems to be present on your system. " "Do you have system with systemd?") command_exists( "machinectl", ["machinectl", "--no-pager", "--help"], "Command machinectl does not seems to be present on your system. " "Do you have system with systemd?") def __repr__(self): return "%s(image=%s, name=%s)" % ( self.__class__, self.image, self.name) def __str__(self): return self.name def start(self): self.start_process = NspawnContainer.internal_run_container( name=self.name, callback_method=self.start_action) return self.start_process def get_id(self): return self._id def get_image_name(self): return None
MIT License
facebookresearch/mephisto
mephisto/operations/utils.py
get_extra_argument_dicts
python
def get_extra_argument_dicts(customizable_class: Any) -> List[Dict[str, Any]]:
    dict_fields = fields(customizable_class.ArgsClass)
    usable_fields = []
    group_field = None
    for f in dict_fields:
        if not f.name.startswith("_"):
            usable_fields.append(f)
        elif f.name == "_group":
            group_field = f
    parsed_fields = [get_dict_from_field(f) for f in usable_fields]
    help_text = ""
    if group_field is not None:
        help_text = group_field.metadata.get("help", "")
    return [{"desc": help_text, "args": {f["dest"]: f for f in parsed_fields}}]
Produce the argument dicts for the given customizable class (Blueprint, Architect, etc)
https://github.com/facebookresearch/mephisto/blob/ff81d7c8ef1f90ef400fc102cc0312a83d848764/mephisto/operations/utils.py#L218-L235
import os import sys, glob, importlib import shlex from distutils.dir_util import copy_tree import functools from mephisto.data_model.constants import NO_PROJECT_NAME from mephisto.operations.config_handler import ( add_config_arg, get_config_arg, CORE_SECTION, DATA_STORAGE_KEY, DEFAULT_CONFIG_FILE, ) from omegaconf import OmegaConf, MISSING, DictConfig from dataclasses import fields, Field from typing import Optional, Dict, Any, List, Type, TYPE_CHECKING if TYPE_CHECKING: from mephisto.abstractions.crowd_provider import CrowdProvider from mephisto.data_model.task_runner import TaskRunner from mephisto.abstractions.architect import Architect from mephisto.data_model.task_run import TaskRun from mephisto.data_model.requester import Requester loaded_data_dir = None def ensure_user_confirm(display_text, skip_input=False) -> None: if skip_input: return res = input(f'{display_text}\nEnter "n" to exit and anything else to continue:') if res == "n": raise SystemExit(0) return def get_root_dir() -> str: return os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) def get_mock_requester(db) -> "Requester": mock_requesters = db.find_requesters(provider_type="mock") if len(mock_requesters) == 0: db.new_requester("MOCK_REQUESTER", "mock") mock_requesters = db.find_requesters(provider_type="mock") return mock_requesters[0] def get_provider_dir() -> str: return os.path.join(get_root_dir(), "mephisto/abstractions/providers") def get_gallery_dir() -> str: return os.path.join(get_root_dir(), "gallery") def get_dir_for_task(task_name: str, not_exists_ok: bool = False) -> Optional[str]: dir_path = os.path.join(get_tasks_dir(), task_name) if os.path.exists(dir_path) or not_exists_ok: return dir_path dir_path = os.path.join(get_gallery_dir(), task_name) if os.path.exists(dir_path) or not_exists_ok: return dir_path return None def get_tasks_dir() -> str: return os.path.join(get_root_dir(), "mephisto/tasks") def get_root_data_dir() -> str: global loaded_data_dir if loaded_data_dir is None: default_data_dir = os.path.join(get_root_dir(), "data") actual_data_dir = get_config_arg(CORE_SECTION, DATA_STORAGE_KEY) if actual_data_dir is None: data_dir_location = input( "Please enter the full path to a location to store Mephisto run data. By default this " f"would be at '{default_data_dir}'. This dir should NOT be on a distributed file " "store. Press enter to use the default: " ).strip() if len(data_dir_location) == 0: data_dir_location = default_data_dir data_dir_location = os.path.expanduser(data_dir_location) os.makedirs(data_dir_location, exist_ok=True) database_loc = os.path.join(default_data_dir, "database.db") if os.path.exists(database_loc) and data_dir_location != default_data_dir: should_migrate = ( input( "We have found an existing database in the default data directory, do you want to " f"copy any existing data from the default location to {data_dir_location}? (y)es/no: " ) .lower() .strip() ) if len(should_migrate) == 0 or should_migrate[0] == "y": copy_tree(default_data_dir, data_dir_location) print( "Mephisto data successfully copied, once you've confirmed the migration worked, " "feel free to remove all of the contents in " f"{default_data_dir} EXCEPT for `README.md`." ) add_config_arg(CORE_SECTION, DATA_STORAGE_KEY, data_dir_location) loaded_data_dir = get_config_arg(CORE_SECTION, DATA_STORAGE_KEY) if not os.path.isdir(loaded_data_dir): raise NotADirectoryError( f"The provided Mephisto data directory {loaded_data_dir} as set in " f"{DEFAULT_CONFIG_FILE} is not a directory! 
Please locate your Mephisto " f"data directory and update {DEFAULT_CONFIG_FILE} to point to it." ) return loaded_data_dir def get_data_dir(root_dir: Optional[str] = None) -> str: if root_dir is None: return get_root_data_dir() return os.path.join(root_dir, "data") def get_mephisto_tmp_dir() -> str: return os.path.join(get_root_dir(), "tmp") def get_dir_for_run(task_run: "TaskRun", project_name: str = NO_PROJECT_NAME) -> str: run_id = task_run.db_id root_dir = task_run.db.db_root return os.path.join(get_data_dir(root_dir), "runs", project_name, run_id) def build_arg_list_from_dict(in_dict: Dict[str, Any]) -> List[str]: arg_list = [] for key, val in in_dict.items(): arg_list.append(f"--{key.replace('_', '-')}") arg_list.append(str(val)) return arg_list def find_or_create_qualification(db, qualification_name) -> str: found_qualifications = db.find_qualifications(qualification_name) if len(found_qualifications) == 0: return db.make_qualification(qualification_name) else: return found_qualifications[0].db_id def get_dict_from_field(in_field: Field) -> Dict[str, Any]: found_type = "str" try: found_type = in_field.type.__name__ except AttributeError: found_type = "unknown" return { "dest": in_field.name, "type": found_type, "default": in_field.default, "help": in_field.metadata.get("help"), "choices": in_field.metadata.get("choices"), "required": in_field.metadata.get("required", False), }
MIT License
danielnyga/pracmln
python2/pracmln/logic/common.py
Logic.templ_atoms
python
def templ_atoms(self):
        templ_atoms = []
        for literal in self.literals():
            for templ in literal.template_variants():
                templ_atoms.append(templ)
        return templ_atoms
Returns a list of template variants of all atoms that can be generated from this
formula and the given mln.

:Example:

foo(?x, +?y) ^ bar(?x, +?z) --> [foo(?x, X1), foo(?x, X2), ..., bar(?x, Z1), bar(?x, Z2), ...]
https://github.com/danielnyga/pracmln/blob/bbda65696fb8753b11ff007e991280ebe42d78f9/python2/pracmln/logic/common.py#L234-L248
import sys from dnutils import logs, ifnone from pracmln.mln.util import fstr, dict_union, colorize from pracmln.mln.errors import NoSuchDomainError, NoSuchPredicateError from collections import defaultdict import itertools from pracmln.mln.constants import HARD, auto, predicate_color, inherit from grammar import StandardGrammar, PRACGrammar logger = logs.getlogger(__name__) def latexsym(sym): return r'\textit{%s}' % str(sym) class Logic(object): def __init__(self, grammar, mln): if grammar not in ('StandardGrammar', 'PRACGrammar'): raise Exception('Invalid grammar: %s' % grammar) self.grammar = eval(grammar)(self) self.mln = mln def __getstate__(self): d = self.__dict__.copy() d['grammar'] = type(self.grammar).__name__ return d def __setstate__(self, d): self.__dict__ = d self.grammar = eval(d['grammar'])(self) class Constraint(object): def template_variants(self, mln): raise Exception("%s does not implement getTemplateVariants" % str(type(self))) def truth(self, world): raise Exception("%s does not implement truth" % str(type(self))) def islogical(self): raise Exception("%s does not implement islogical" % str(type(self))) def itergroundings(self, mrf, simplify=False, domains=None): raise Exception("%s does not implement itergroundings" % str(type(self))) def idx_gndatoms(self, l=None): raise Exception("%s does not implement idxgndatoms" % str(type(self))) def gndatoms(self, l=None): raise Exception("%s does not implement gndatoms" % str(type(self))) class Formula(Constraint): def __init__(self, mln=None, idx=None): self.mln = mln if idx == auto and mln is not None: self.idx = len(mln.formulas) else: self.idx = idx @property def idx(self): return self._idx @idx.setter def idx(self, idx): self._idx = idx @property def mln(self): return self._mln @mln.setter def mln(self, mln): if hasattr(self, 'children'): for child in self.children: child.mln = mln self._mln = mln @property def weight(self): return self.mln.weight(self.idx) @weight.setter def weight(self, w): if self.idx is None: raise Exception('%s does not have an index' % str(self)) self.mln.weight(self.idx, w) @property def ishard(self): return self.weight == HARD def contains_gndatom(self, gndatomidx): if not hasattr(self, "children"): return False for child in self.children: if child.contains_gndatom(gndatomidx): return True return False def gndatom_indices(self, l=None): if l == None: l = [] if not hasattr(self, "children"): return l for child in self.children: child.gndatom_indices(l) return l def gndatoms(self, l=None): if l is None: l = [] if not hasattr(self, "children"): return l for child in self.children: child.gndatoms(l) return l
BSD 2-Clause Simplified License
netzkolchose/django-computedfields
computedfields/graph.py
Graph.get_cycles
python
def get_cycles(self):
        left_edges = OrderedDict()
        cycles = {}
        for edge in self.edges:
            left_edges.setdefault(edge.left, []).append(edge)
        for edge in self.edges:
            self._get_cycles(edge, left_edges, cycles)
        return cycles
Gets all cycles in graph.

This is not optimised by any means, it simply walks the whole graph
recursively and aborts as soon a seen edge gets entered again.
Therefore use this and all dependent properties (``edge_cycles`` and
``node_cycles``) for in-depth cycle inspection only.

As a start node any node on the left side of an edge will be tested.

Returns a mapping of

.. code:: python

    {frozenset(<cycle edges>): {
        'entries': set(edges leading to the cycle),
        'path': list(cycle edges in last seen order)
    }}

An edge in ``entries`` is not necessarily part of the cycle itself,
but once entered it will lead to the cycle.
https://github.com/netzkolchose/django-computedfields/blob/ffa7c963cb0e70d2afe5954f2fdca241c0407b3f/computedfields/graph.py#L268-L298
from collections import OrderedDict from django.core.exceptions import FieldDoesNotExist from django.db.models import ForeignKey from computedfields.helper import pairwise, is_sublist, modelname, parent_to_inherited_path, skip_equal_segments class ComputedFieldsException(Exception): class CycleException(ComputedFieldsException): class CycleEdgeException(CycleException): class CycleNodeException(CycleException): class Edge: instances = {} def __new__(cls, *args): key = (args[0], args[1]) if key in cls.instances: return cls.instances[key] instance = super(Edge, cls).__new__(cls) cls.instances[key] = instance return instance def __init__(self, left, right, data=None): self.left = left self.right = right self.data = data def __str__(self): return 'Edge %s-%s' % (self.left, self.right) def __repr__(self): return str(self) def __eq__(self, other): return self is other def __ne__(self, other): return self is not other def __hash__(self): return id(self) class Node: instances = {} def __new__(cls, *args): if args[0] in cls.instances: return cls.instances[args[0]] instance = super(Node, cls).__new__(cls) cls.instances[args[0]] = instance return instance def __init__(self, data): self.data = data def __str__(self): return self.data if isinstance(self.data, str) else '.'.join(self.data) def __repr__(self): return str(self) def __eq__(self, other): return self is other def __ne__(self, other): return self is not other def __hash__(self): return id(self) class Graph: def __init__(self): self.nodes = set() self.edges = set() self._removed = set() def add_node(self, node): self.nodes.add(node) def remove_node(self, node): self.nodes.remove(node) def add_edge(self, edge): self.edges.add(edge) self.nodes.add(edge.left) self.nodes.add(edge.right) def remove_edge(self, edge): self.edges.remove(edge) def get_dot(self, format='pdf', mark_edges=None, mark_nodes=None): from graphviz import Digraph if not mark_edges: mark_edges = {} if not mark_nodes: mark_nodes = {} dot = Digraph(format=format) for node in self.nodes: dot.node(str(node), str(node), **mark_nodes.get(node, {})) for edge in self.edges: dot.edge(str(edge.left), str(edge.right), **mark_edges.get(edge, {})) return dot def render(self, filename=None, format='pdf', mark_edges=None, mark_nodes=None): self.get_dot(format, mark_edges, mark_nodes).render( filename=filename, cleanup=True) def view(self, format='pdf', mark_edges=None, mark_nodes=None): self.get_dot(format, mark_edges, mark_nodes).view(cleanup=True) def edgepath_to_nodepath(self, path): return [edge.left for edge in path] + [path[-1].right] def nodepath_to_edgepath(self, path): return [Edge(*pair) for pair in pairwise(path)] def _get_edge_paths(self, edge, left_edges, paths, seen=None): if not seen: seen = [] if edge in seen: raise CycleEdgeException(seen[seen.index(edge):]) seen.append(edge) if edge.right in left_edges: for new_edge in left_edges[edge.right]: self._get_edge_paths(new_edge, left_edges, paths, seen[:]) paths.append(seen) def get_edgepaths(self): left_edges = OrderedDict() paths = [] for edge in self.edges: left_edges.setdefault(edge.left, []).append(edge) for edge in self.edges: self._get_edge_paths(edge, left_edges, paths) return paths def get_nodepaths(self): try: paths = self.get_edgepaths() except CycleEdgeException as exc: raise CycleNodeException(self.edgepath_to_nodepath(exc.args[0])) node_paths = [] for path in paths: node_paths.append(self.edgepath_to_nodepath(path)) return node_paths def _get_cycles(self, edge, left_edges, cycles, seen=None): if not seen: seen = [] if 
edge in seen: cycle = frozenset(seen[seen.index(edge):]) data = cycles.setdefault(cycle, {'entries': set(), 'path': []}) if seen: data['entries'].add(seen[0]) data['path'] = seen[seen.index(edge):] return seen.append(edge) if edge.right in left_edges: for new_edge in left_edges[edge.right]: self._get_cycles(new_edge, left_edges, cycles, seen[:])
MIT License
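To make get_cycles above concrete, a tiny driver that builds one cycle plus an entry edge using the Edge and Node classes from the record; the import path follows the record's function_path and may differ in a packaged install.

from computedfields.graph import Graph, Edge, Node

graph = Graph()
a, b, c = Node('a'), Node('b'), Node('c')
graph.add_edge(Edge(a, b))   # edge leading into the cycle
graph.add_edge(Edge(b, c))
graph.add_edge(Edge(c, b))   # closes the b -> c -> b cycle

for cycle, info in graph.get_cycles().items():
    print(sorted(str(edge) for edge in cycle), len(info['entries']))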
wayneweiqiang/gmma
gmma/utils/validation.py
check_memory
python
def check_memory(memory):
    if memory is None or isinstance(memory, str):
        if parse_version(joblib.__version__) < parse_version('0.12'):
            memory = joblib.Memory(cachedir=memory, verbose=0)
        else:
            memory = joblib.Memory(location=memory, verbose=0)
    elif not hasattr(memory, 'cache'):
        raise ValueError("'memory' should be None, a string or have the same"
                         " interface as joblib.Memory."
                         " Got memory='{}' instead.".format(memory))
    return memory
Check that ``memory`` is joblib.Memory-like.

joblib.Memory-like means that ``memory`` can be converted into a
joblib.Memory instance (typically a str denoting the ``location``)
or has the same interface (has a ``cache`` method).

Parameters
----------
memory : None, str or object with the joblib.Memory interface

Returns
-------
memory : object with the joblib.Memory interface

Raises
------
ValueError
    If ``memory`` is not joblib.Memory-like.
https://github.com/wayneweiqiang/gmma/blob/30b116edb83f495341fef8e9ad4baa50e4e1f76a/gmma/utils/validation.py#L208-L238
from functools import wraps import warnings import numbers import numpy as np import scipy.sparse as sp from inspect import signature, isclass, Parameter from numpy.core.numeric import ComplexWarning import joblib from contextlib import suppress from .fixes import _object_dtype_isnan, parse_version from .. import get_config as _get_config from ..exceptions import NonBLASDotWarning, PositiveSpectrumWarning from ..exceptions import NotFittedError from ..exceptions import DataConversionWarning FLOAT_DTYPES = (np.float64, np.float32, np.float16) warnings.simplefilter('ignore', NonBLASDotWarning) def _deprecate_positional_args(f): sig = signature(f) kwonly_args = [] all_args = [] for name, param in sig.parameters.items(): if param.kind == Parameter.POSITIONAL_OR_KEYWORD: all_args.append(name) elif param.kind == Parameter.KEYWORD_ONLY: kwonly_args.append(name) @wraps(f) def inner_f(*args, **kwargs): extra_args = len(args) - len(all_args) if extra_args > 0: args_msg = ['{}={}'.format(name, arg) for name, arg in zip(kwonly_args[:extra_args], args[-extra_args:])] warnings.warn("Pass {} as keyword args. From version 0.25 " "passing these as positional arguments will " "result in an error".format(", ".join(args_msg)), FutureWarning) kwargs.update({k: arg for k, arg in zip(sig.parameters, args)}) return f(**kwargs) return inner_f def _assert_all_finite(X, allow_nan=False, msg_dtype=None): from .extmath import _safe_accumulator_op if _get_config()['assume_finite']: return X = np.asanyarray(X) is_float = X.dtype.kind in 'fc' if is_float and (np.isfinite(_safe_accumulator_op(np.sum, X))): pass elif is_float: msg_err = "Input contains {} or a value too large for {!r}." if (allow_nan and np.isinf(X).any() or not allow_nan and not np.isfinite(X).all()): type_err = 'infinity' if allow_nan else 'NaN, infinity' raise ValueError( msg_err.format (type_err, msg_dtype if msg_dtype is not None else X.dtype) ) elif X.dtype == np.dtype('object') and not allow_nan: if _object_dtype_isnan(X).any(): raise ValueError("Input contains NaN") @_deprecate_positional_args def assert_all_finite(X, *, allow_nan=False): _assert_all_finite(X.data if sp.issparse(X) else X, allow_nan) @_deprecate_positional_args def as_float_array(X, *, copy=True, force_all_finite=True): if isinstance(X, np.matrix) or (not isinstance(X, np.ndarray) and not sp.issparse(X)): return check_array(X, accept_sparse=['csr', 'csc', 'coo'], dtype=np.float64, copy=copy, force_all_finite=force_all_finite, ensure_2d=False) elif sp.issparse(X) and X.dtype in [np.float32, np.float64]: return X.copy() if copy else X elif X.dtype in [np.float32, np.float64]: return X.copy('F' if X.flags['F_CONTIGUOUS'] else 'C') if copy else X else: if X.dtype.kind in 'uib' and X.dtype.itemsize <= 4: return_dtype = np.float32 else: return_dtype = np.float64 return X.astype(return_dtype) def _is_arraylike(x): return (hasattr(x, '__len__') or hasattr(x, 'shape') or hasattr(x, '__array__')) def _num_samples(x): message = 'Expected sequence or array-like, got %s' % type(x) if hasattr(x, 'fit') and callable(x.fit): raise TypeError(message) if not hasattr(x, '__len__') and not hasattr(x, 'shape'): if hasattr(x, '__array__'): x = np.asarray(x) else: raise TypeError(message) if hasattr(x, 'shape') and x.shape is not None: if len(x.shape) == 0: raise TypeError("Singleton array %r cannot be considered" " a valid collection." % x) if isinstance(x.shape[0], numbers.Integral): return x.shape[0] try: return len(x) except TypeError: raise TypeError(message)
MIT License
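check_memory above normalizes the three shapes a caller may pass; the sketch below shows those shapes directly with joblib rather than importing the gmma copy of the helper, which may not be installed.

import tempfile
import joblib

# A string location is what check_memory wraps into joblib.Memory(location=...).
memory = joblib.Memory(location=tempfile.mkdtemp(), verbose=0)

# None is also accepted and simply disables caching.
no_cache = joblib.Memory(location=None, verbose=0)

# Anything else must already expose a .cache method, as joblib.Memory does.
@memory.cache
def square(x):
    return x * x

print(square(3), hasattr(no_cache, 'cache'))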
ethereum/trinity
p2p/kademlia.py
KademliaRoutingTable.get_least_recently_updated_log_distance
python
def get_least_recently_updated_log_distance(self) -> int:
        try:
            bucket_index = self.bucket_update_order[-1]
        except IndexError:
            raise ValueError("Routing table is empty")
        else:
            return bucket_index + 1
Get the log distance whose corresponding bucket was updated least recently. Only non-empty buckets are considered. If all buckets are empty, a `ValueError` is raised.
https://github.com/ethereum/trinity/blob/6383280c5044feb06695ac2f7bc1100b7bcf4fe0/p2p/kademlia.py#L443-L453
import collections import functools import ipaddress import itertools import operator import random import struct from typing import ( Any, Dict, Iterable, List, Type, TypeVar, Tuple, Deque, Iterator) from urllib import parse as urlparse from cached_property import cached_property from eth_utils import ( big_endian_to_int, decode_hex, remove_0x_prefix, encode_hex) from eth_keys import ( datatypes, keys, ) from eth_enr import ENR, ENRAPI, V4CompatIdentityScheme from eth_enr.constants import ( IP_V4_ADDRESS_ENR_KEY, UDP_PORT_ENR_KEY, TCP_PORT_ENR_KEY, IDENTITY_SCHEME_ENR_KEY, ) from eth_hash.auto import keccak from eth_typing import NodeID from p2p.abc import AddressAPI, NodeAPI from p2p.constants import NUM_ROUTING_TABLE_BUCKETS from p2p._utils import get_logger from p2p.validation import validate_enode_uri def int_to_big_endian4(integer: int) -> bytes: return struct.pack('>I', integer) def enc_port(p: int) -> bytes: return int_to_big_endian4(p)[-2:] TAddress = TypeVar('TAddress', bound=AddressAPI) class Address(AddressAPI): def __init__(self, ip: str, udp_port: int, tcp_port: int) -> None: self.udp_port = udp_port self.tcp_port = tcp_port self._ip = ipaddress.ip_address(ip) @property def is_loopback(self) -> bool: return self._ip.is_loopback @property def is_unspecified(self) -> bool: return self._ip.is_unspecified @property def is_reserved(self) -> bool: return self._ip.is_reserved @property def is_private(self) -> bool: return self._ip.is_private @property def ip(self) -> str: return str(self._ip) @cached_property def ip_packed(self) -> str: return self._ip.packed def __eq__(self, other: Any) -> bool: return (self.ip, self.udp_port) == (other.ip, other.udp_port) def __repr__(self) -> str: return 'Address(%s:udp:%s|tcp:%s)' % (self.ip, self.udp_port, self.tcp_port) def to_endpoint(self) -> List[bytes]: return [self._ip.packed, enc_port(self.udp_port), enc_port(self.tcp_port)] @classmethod def from_endpoint(cls: Type[TAddress], ip: str, udp_port: bytes, tcp_port: bytes = b'\x00\x00') -> TAddress: return cls(ip, big_endian_to_int(udp_port), big_endian_to_int(tcp_port)) TNode = TypeVar('TNode', bound=NodeAPI) @functools.total_ordering class Node(NodeAPI): def __init__(self, enr: ENRAPI) -> None: self._init(enr) def _init(self, enr: ENRAPI) -> None: try: ip = enr[IP_V4_ADDRESS_ENR_KEY] udp_port = enr[UDP_PORT_ENR_KEY] except KeyError: self._address = None else: tcp_port = enr.get(TCP_PORT_ENR_KEY, udp_port) self._address = Address(ip, udp_port, tcp_port) self._pubkey = keys.PublicKey.from_compressed_bytes(enr.public_key) self._id = NodeID(keccak(self.pubkey.to_bytes())) self._id_int = big_endian_to_int(self.id) self._enr = enr @property def id(self) -> NodeID: return self._id @property def pubkey(self) -> keys.PublicKey: return self._pubkey @property def address(self) -> Address: return self._address @classmethod def from_pubkey_and_addr( cls: Type[TNode], pubkey: datatypes.PublicKey, address: AddressAPI) -> TNode: return cls(create_stub_enr(pubkey, address)) @classmethod def from_uri(cls: Type[TNode], uri: str) -> TNode: if uri.startswith("enr:"): return cls.from_enr_repr(uri) else: return cls.from_enode_uri(uri) @classmethod def from_enr_repr(cls: Type[TNode], uri: str) -> TNode: return cls(ENR.from_repr(uri)) @classmethod def from_enode_uri(cls: Type[TNode], uri: str) -> TNode: validate_enode_uri(uri) parsed = urlparse.urlparse(uri) pubkey = keys.PublicKey(decode_hex(parsed.username)) return cls.from_pubkey_and_addr(pubkey, Address(parsed.hostname, parsed.port, parsed.port)) @property def 
enr(self) -> ENRAPI: return self._enr def uri(self) -> str: hexstring = self.pubkey.to_hex() hexstring = remove_0x_prefix(hexstring) if self.address is not None: ip = self.address.ip tcp_port = self.address.tcp_port else: ip = None tcp_port = None return f'enode://{hexstring}@{ip}:{tcp_port}' def __str__(self) -> str: if self.address is not None: ip = self.address.ip else: ip = None return f"<Node({self.pubkey.to_hex()[:8]}@{ip})>" def __repr__(self) -> str: if self.address is not None: ip = self.address.ip tcp_port = self.address.tcp_port else: ip = None tcp_port = None return f"<Node({self.pubkey.to_hex()}@{ip}:{tcp_port})>" def distance_to(self, id: int) -> int: return self._id_int ^ id def __lt__(self, other: Any) -> bool: if not isinstance(other, self.__class__): return super().__lt__(other) return self._id_int < other._id_int def __eq__(self, other: Any) -> bool: if not isinstance(other, self.__class__): return super().__eq__(other) return self.pubkey == other.pubkey def __ne__(self, other: Any) -> bool: return not self == other def __hash__(self) -> int: return hash(self.pubkey) def __getstate__(self) -> Dict[Any, Any]: return {'enr': repr(self.enr)} def __setstate__(self, state: Dict[Any, Any]) -> None: self._init(ENR.from_repr(state.pop('enr'))) def check_relayed_addr(sender: AddressAPI, addr: AddressAPI) -> bool: if addr.is_unspecified or addr.is_reserved: return False if addr.is_private and not sender.is_private: return False if addr.is_loopback and not sender.is_loopback: return False return True def sort_by_distance(nodes: Iterable[NodeAPI], target_id: NodeID) -> List[NodeAPI]: target_id_int = big_endian_to_int(target_id) return sorted(nodes, key=operator.methodcaller('distance_to', target_id_int)) def create_stub_enr(pubkey: datatypes.PublicKey, address: AddressAPI) -> ENRAPI: return ENR( 0, { IDENTITY_SCHEME_ENR_KEY: V4CompatIdentityScheme.id, V4CompatIdentityScheme.public_key_enr_key: pubkey.to_compressed_bytes(), IP_V4_ADDRESS_ENR_KEY: address.ip_packed, UDP_PORT_ENR_KEY: address.udp_port, TCP_PORT_ENR_KEY: address.tcp_port, }, signature=b'' ) def compute_distance(left_node_id: NodeID, right_node_id: NodeID) -> int: left_int = big_endian_to_int(left_node_id) right_int = big_endian_to_int(right_node_id) return left_int ^ right_int def compute_log_distance(left_node_id: NodeID, right_node_id: NodeID) -> int: if left_node_id == right_node_id: raise ValueError("Cannot compute log distance between identical nodes") distance = compute_distance(left_node_id, right_node_id) return distance.bit_length() class KademliaRoutingTable: def __init__(self, center_node_id: NodeID, bucket_size: int) -> None: self.logger = get_logger("p2p.kademlia.KademliaRoutingTable") self.center_node_id = center_node_id self.bucket_size = bucket_size self.buckets: Tuple[Deque[NodeID], ...] = tuple( collections.deque(maxlen=bucket_size) for _ in range(NUM_ROUTING_TABLE_BUCKETS) ) self.replacement_caches: Tuple[Deque[NodeID], ...] 
= tuple( collections.deque() for _ in range(NUM_ROUTING_TABLE_BUCKETS) ) self.bucket_update_order: Deque[int] = collections.deque() def _contains(self, node_id: NodeID, include_replacement_cache: bool) -> bool: _, bucket, replacement_cache = self.get_index_bucket_and_replacement_cache(node_id) if include_replacement_cache: nodes = bucket + replacement_cache else: nodes = bucket return node_id in nodes def get_index_bucket_and_replacement_cache(self, node_id: NodeID, ) -> Tuple[int, Deque[NodeID], Deque[NodeID]]: index = compute_log_distance(self.center_node_id, node_id) - 1 bucket = self.buckets[index] replacement_cache = self.replacement_caches[index] return index, bucket, replacement_cache def update(self, node_id: NodeID) -> NodeID: if node_id == self.center_node_id: raise ValueError("Cannot insert center node into routing table") bucket_index, bucket, replacement_cache = self.get_index_bucket_and_replacement_cache( node_id, ) is_bucket_full = len(bucket) >= self.bucket_size is_node_in_bucket = node_id in bucket if not is_node_in_bucket and not is_bucket_full: self.logger.debug2("Adding %s to bucket %d", encode_hex(node_id), bucket_index) self.update_bucket_unchecked(node_id) eviction_candidate = None elif is_node_in_bucket: self.logger.debug2("Updating %s in bucket %d", encode_hex(node_id), bucket_index) self.update_bucket_unchecked(node_id) eviction_candidate = None elif not is_node_in_bucket and is_bucket_full: if node_id not in replacement_cache: self.logger.debug2( "Adding %s to replacement cache of bucket %d", encode_hex(node_id), bucket_index, ) else: self.logger.debug2( "Updating %s in replacement cache of bucket %d", encode_hex(node_id), bucket_index, ) replacement_cache.remove(node_id) replacement_cache.appendleft(node_id) eviction_candidate = bucket[-1] else: raise Exception("unreachable") return eviction_candidate def update_bucket_unchecked(self, node_id: NodeID) -> None: bucket_index, bucket, replacement_cache = self.get_index_bucket_and_replacement_cache( node_id, ) for container in (bucket, replacement_cache): try: container.remove(node_id) except ValueError: pass bucket.appendleft(node_id) try: self.bucket_update_order.remove(bucket_index) except ValueError: pass self.bucket_update_order.appendleft(bucket_index) def remove(self, node_id: NodeID) -> None: bucket_index, bucket, replacement_cache = self.get_index_bucket_and_replacement_cache( node_id, ) in_bucket = node_id in bucket in_replacement_cache = node_id in replacement_cache if in_bucket: bucket.remove(node_id) if replacement_cache: replacement_node_id = replacement_cache.popleft() self.logger.debug( "Replacing %s from bucket %d with %s from replacement cache", encode_hex(node_id), bucket_index, encode_hex(replacement_node_id), ) bucket.append(replacement_node_id) else: self.logger.debug( "Removing %s from bucket %d without replacement", encode_hex(node_id), bucket_index, ) if in_replacement_cache: self.logger.debug( "Removing %s from replacement cache of bucket %d", encode_hex(node_id), bucket_index, ) replacement_cache.remove(node_id) if not in_bucket and not in_replacement_cache: self.logger.debug( "Not removing %s as it is neither present in the bucket nor the replacement cache", encode_hex(node_id), bucket_index, ) if not bucket: try: self.bucket_update_order.remove(bucket_index) except ValueError: pass def get_nodes_at_log_distance(self, log_distance: int) -> Tuple[NodeID, ...]: if log_distance <= 0: raise ValueError(f"Log distance must be positive, got {log_distance}") elif log_distance > len(self.buckets): 
raise ValueError( f"Log distance must not be greater than {len(self.buckets)}, got {log_distance}" ) return tuple(self.buckets[log_distance - 1]) @property def is_empty(self) -> bool: return all(len(bucket) == 0 for bucket in self.buckets)
MIT License
slicermorph/slicermorph
IDAVLMConverter/IDAVLMConverter.py
IDAVLMConverterLogic.isValidInputOutputData
python
def isValidInputOutputData(self, inputVolumeNode, outputVolumeNode): if not inputVolumeNode: logging.debug('isValidInputOutputData failed: no input volume node defined') return False if not outputVolumeNode: logging.debug('isValidInputOutputData failed: no output volume node defined') return False if inputVolumeNode.GetID()==outputVolumeNode.GetID(): logging.debug('isValidInputOutputData failed: input and output volume is the same. Create a new volume for output to avoid this error.') return False return True
Validates that the output volume is not the same as the input volume.
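A minimal usage sketch for the validator above, assuming a 3D Slicer session; the volume node names are placeholders, not part of the original module.
import slicer

logic = IDAVLMConverterLogic()
inputVolume = slicer.util.getNode('InputVolume')    # placeholder node name
outputVolume = slicer.util.getNode('OutputVolume')  # placeholder node name
if not logic.isValidInputOutputData(inputVolume, outputVolume):
    print('Choose an output volume that is different from the input volume.')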
https://github.com/slicermorph/slicermorph/blob/67c622c4ab15f0a1dee2bb00dffde8dbcd5a91be/IDAVLMConverter/IDAVLMConverter.py#L148-L160
import os import unittest import vtk, qt, ctk, slicer from slicer.ScriptedLoadableModule import * import logging class IDAVLMConverter(ScriptedLoadableModule): def __init__(self, parent): ScriptedLoadableModule.__init__(self, parent) self.parent.title = "IDAVLMConverter" self.parent.categories = ["SlicerMorph.SlicerMorph Utilities"] self.parent.dependencies = [] self.parent.contributors = ["Murat Maga (UW), Sara Rolfe (UW)"] self.parent.helpText = """ This module converts raw landmark coordinates (.pts format) exported from the IDAV Landmark Editor into fcsv format. It does not accept the Landmark Editor's project files (.land format). <p>For more information about usage and potential issues, please see <a href="https://github.com/SlicerMorph/SlicerMorph/tree/master/Docs/IDAVLMConverter">online documentation.</A> """ self.parent.acknowledgementText = """ This module was developed by Sara Rolfe for SlicerMorph. SlicerMorph was originally supported by an NSF/DBI grant, "An Integrated Platform for Retrieval, Visualization and Analysis of 3D Morphology From Digital Biological Collections" awarded to Murat Maga (1759883), Adam Summers (1759637), and Douglas Boyer (1759839). https://nsf.gov/awardsearch/showAward?AWD_ID=1759883&HistoricalAwards=false """ class IDAVLMConverterWidget(ScriptedLoadableModuleWidget): def setup(self): ScriptedLoadableModuleWidget.setup(self) parametersCollapsibleButton = ctk.ctkCollapsibleButton() parametersCollapsibleButton.text = "Parameters" self.layout.addWidget(parametersCollapsibleButton) parametersFormLayout = qt.QFormLayout(parametersCollapsibleButton) self.inputFileSelector = ctk.ctkPathLineEdit() self.inputFileSelector.filters = ctk.ctkPathLineEdit().Files self.inputFileSelector.setToolTip( "Select landmark file for import" ) parametersFormLayout.addRow("Select file containing landmark names and coordinates to load:", self.inputFileSelector) self.outputDirectory = ctk.ctkDirectoryButton() self.outputDirectory.directory = slicer.mrmlScene.GetCacheManager().GetRemoteCacheDirectory() parametersFormLayout.addRow("Output Directory:", self.outputDirectory) self.headerLengthWidget = ctk.ctkDoubleSpinBox() self.headerLengthWidget.value = 2 self.headerLengthWidget.minimum = 0 self.headerLengthWidget.singleStep = 1 self.headerLengthWidget.setToolTip("Input the number of lines in header") parametersFormLayout.addRow("Header length:", self.headerLengthWidget) self.loadLandmarkNode = qt.QCheckBox() self.loadLandmarkNode.checked = 0 self.loadLandmarkNode.setToolTip("After conversion, load landmarks into the scene.") parametersFormLayout.addRow("Load landmarks into scene", self.loadLandmarkNode) self.applyButton = qt.QPushButton("Apply") self.applyButton.toolTip = "Run the algorithm." 
self.applyButton.enabled = False parametersFormLayout.addRow(self.applyButton) self.applyButton.connect('clicked(bool)', self.onApplyButton) self.inputFileSelector.connect('validInputChanged(bool)', self.onSelect) self.layout.addStretch(1) self.onSelect() def cleanup(self): pass def onSelect(self): self.applyButton.enabled = bool(self.inputFileSelector.currentPath) def onApplyButton(self): logic = IDAVLMConverterLogic() loadFileOption = self.loadLandmarkNode.checked logic.run(self.inputFileSelector.currentPath, self.outputDirectory.directory, self.headerLengthWidget.value, loadFileOption) class IDAVLMConverterLogic(ScriptedLoadableModuleLogic): def hasImageData(self,volumeNode): if not volumeNode: logging.debug('hasImageData failed: no volume node') return False if volumeNode.GetImageData() is None: logging.debug('hasImageData failed: no image data in volume node') return False return True
BSD 2-Clause Simplified License
geertj/python-ad
lib/ad/protocol/asn1.py
Decoder.leave
python
def leave(self): if self.m_stack is None: raise Error, 'No input selected. Call start() first.' if len(self.m_stack) == 1: raise Error, 'Tag stack is empty.' del self.m_stack[-1] self.m_tag = None
Leave the last entered constructed tag.
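A usage sketch of the enter/read/leave cycle under the module's Python 2 target; the DER bytes below encode a SEQUENCE of two INTEGERs, and the import path is inferred from the file location.
from ad.protocol.asn1 import Decoder  # import path assumed from lib/ad/protocol/asn1.py

der_bytes = '\x30\x06\x02\x01\x05\x02\x01\x07'  # SEQUENCE { INTEGER 5, INTEGER 7 }

dec = Decoder()
dec.start(der_bytes)
dec.enter()                    # descend into the outer constructed SEQUENCE
values = []
while not dec.eof():
    tag, value = dec.read()    # read each primitive child
    values.append(value)
dec.leave()                    # step back out to the enclosing level
assert values == [5, 7]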
https://github.com/geertj/python-ad/blob/3089eae072bd2e871c11251961ec35a09b83dd38/lib/ad/protocol/asn1.py#L283-L290
Boolean = 0x01 Integer = 0x02 OctetString = 0x04 Null = 0x05 ObjectIdentifier = 0x06 Enumerated = 0x0a Sequence = 0x10 Set = 0x11 TypeConstructed = 0x20 TypePrimitive = 0x00 ClassUniversal = 0x00 ClassApplication = 0x40 ClassContext = 0x80 ClassPrivate = 0xc0 import re class Error(Exception): class Encoder(object): def __init__(self): self.m_stack = None def start(self): self.m_stack = [[]] def enter(self, nr, cls=None): if self.m_stack is None: raise Error, 'Encoder not initialized. Call start() first.' if cls is None: cls = ClassUniversal self._emit_tag(nr, TypeConstructed, cls) self.m_stack.append([]) def leave(self): if self.m_stack is None: raise Error, 'Encoder not initialized. Call start() first.' if len(self.m_stack) == 1: raise Error, 'Tag stack is empty.' value = ''.join(self.m_stack[-1]) del self.m_stack[-1] self._emit_length(len(value)) self._emit(value) def write(self, value, nr=None, typ=None, cls=None): if self.m_stack is None: raise Error, 'Encoder not initialized. Call start() first.' if nr is None: if isinstance(value, int) or isinstance(value, long): nr = Integer elif isinstance(value, str) or isinstance(value, unicode): nr = OctetString elif value is None: nr = Null if typ is None: typ = TypePrimitive if cls is None: cls = ClassUniversal value = self._encode_value(nr, value) self._emit_tag(nr, typ, cls) self._emit_length(len(value)) self._emit(value) def output(self): if self.m_stack is None: raise Error, 'Encoder not initialized. Call start() first.' if len(self.m_stack) != 1: raise Error, 'Stack is not empty.' output = ''.join(self.m_stack[0]) return output def _emit_tag(self, nr, typ, cls): if nr < 31: self._emit_tag_short(nr, typ, cls) else: self._emit_tag_long(nr, typ, cls) def _emit_tag_short(self, nr, typ, cls): assert nr < 31 self._emit(chr(nr | typ | cls)) def _emit_tag_long(self, nr, typ, cls): head = chr(typ | cls | 0x1f) self._emit(head) values = [] values.append((nr & 0x7f)) nr >>= 7 while nr: values.append((nr & 0x7f) | 0x80) nr >>= 7 values.reverse() values = map(chr, values) for val in values: self._emit(val) def _emit_length(self, length): if length < 128: self._emit_length_short(length) else: self._emit_length_long(length) def _emit_length_short(self, length): assert length < 128 self._emit(chr(length)) def _emit_length_long(self, length): values = [] while length: values.append(length & 0xff) length >>= 8 values.reverse() values = map(chr, values) assert len(values) < 127 head = chr(0x80 | len(values)) self._emit(head) for val in values: self._emit(val) def _emit(self, s): assert isinstance(s, str) self.m_stack[-1].append(s) def _encode_value(self, nr, value): if nr in (Integer, Enumerated): value = self._encode_integer(value) elif nr == OctetString: value = self._encode_octet_string(value) elif nr == Boolean: value = self._encode_boolean(value) elif nr == Null: value = self._encode_null() elif nr == ObjectIdentifier: value = self._encode_object_identifier(value) return value def _encode_boolean(self, value): return value and '\xff' or '\x00' def _encode_integer(self, value): if value < 0: value = -value negative = True limit = 0x80 else: negative = False limit = 0x7f values = [] while value > limit: values.append(value & 0xff) value >>= 8 values.append(value & 0xff) if negative: for i in range(len(values)): values[i] = 0xff - values[i] for i in range(len(values)): values[i] += 1 if values[i] <= 0xff: break assert i != len(values)-1 values[i] = 0x00 values.reverse() values = map(chr, values) return ''.join(values) def _encode_octet_string(self, 
value): return value def _encode_null(self): return '' _re_oid = re.compile('^[0-9]+(\.[0-9]+)+$') def _encode_object_identifier(self, oid): if not self._re_oid.match(oid): raise Error, 'Illegal object identifier' cmps = map(int, oid.split('.')) if cmps[0] > 39 or cmps[1] > 39: raise Error, 'Illegal object identifier' cmps = [40 * cmps[0] + cmps[1]] + cmps[2:] cmps.reverse() result = [] for cmp in cmps: result.append(cmp & 0x7f) while cmp > 0x7f: cmp >>= 7 result.append(0x80 | (cmp & 0x7f)) result.reverse() result = map(chr, result) return ''.join(result) class Decoder(object): def __init__(self): self.m_stack = None self.m_tag = None def start(self, data): if not isinstance(data, str): raise Error, 'Expecting string instance.' self.m_stack = [[0, data]] self.m_tag = None def peek(self): if self.m_stack is None: raise Error, 'No input selected. Call start() first.' if self._end_of_input(): return None if self.m_tag is None: self.m_tag = self._read_tag() return self.m_tag def read(self): if self.m_stack is None: raise Error, 'No input selected. Call start() first.' if self._end_of_input(): return None tag = self.peek() length = self._read_length() value = self._read_value(tag[0], length) self.m_tag = None return (tag, value) def eof(self): return self._end_of_input() def enter(self): if self.m_stack is None: raise Error, 'No input selected. Call start() first.' nr, typ, cls = self.peek() if typ != TypeConstructed: raise Error, 'Cannot enter a non-constructed tag.' length = self._read_length() bytes = self._read_bytes(length) self.m_stack.append([0, bytes]) self.m_tag = None
MIT License
ucam-smt/sgnmt
cam/sgnmt/predictors/tf_t2t.py
T2TPredictor.predict_next
python
def predict_next(self): log_probs = self.mon_sess.run(self._log_probs, {self._inputs_var: self.src_sentence, self._targets_var: utils.oov_to_unk( self.consumed + [text_encoder.PAD_ID], self.trg_vocab_size, self._t2t_unk_id)}) log_probs[text_encoder.PAD_ID] = utils.NEG_INF return log_probs
Call the T2T model in self.mon_sess.
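A hedged decoding sketch; the constructor arguments are placeholders for a real checkpoint and registered problem, and only attributes visible in this sample are touched.
import numpy as np

predictor = T2TPredictor(src_vocab_size=32000, trg_vocab_size=32000,
                         model_name="transformer",              # placeholder
                         problem_name="my_translate_problem",   # placeholder
                         hparams_set_name="transformer_base",   # placeholder
                         t2t_usr_dir="/path/to/t2t_usr_dir",    # placeholder
                         checkpoint_dir="/path/to/checkpoints") # placeholder
predictor.src_sentence = [42, 7, 93]   # toy source token ids
predictor.consumed = []                # no target tokens consumed yet
log_probs = predictor.predict_next()   # scores for the next target token
next_token = int(np.argmax(log_probs))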
https://github.com/ucam-smt/sgnmt/blob/c663ec7b251552e36b6b4f992f0ac21aad87cb7b/cam/sgnmt/predictors/tf_t2t.py#L332-L341
import logging import os from cam.sgnmt import utils, tf_utils from cam.sgnmt.predictors.core import Predictor from cam.sgnmt.misc.trie import SimpleTrie POP = "##POP##" try: from tensor2tensor import models from tensor2tensor import problems as problems_lib from tensor2tensor.utils import usr_dir from tensor2tensor.utils import registry from tensor2tensor.utils import devices from tensor2tensor.utils import trainer_lib from tensor2tensor.data_generators.text_encoder import TextEncoder from tensor2tensor.data_generators import problem from tensor2tensor.data_generators import text_encoder import tensorflow as tf import numpy as np class DummyTextEncoder(TextEncoder): def __init__(self, vocab_size, pop_id=None): super(DummyTextEncoder, self).__init__(num_reserved_ids=None) self._vocab_size = vocab_size def encode(self, s): raise NotImplementedError("Dummy encoder cannot be used to encode.") def decode(self, ids): raise NotImplementedError("Dummy encoder cannot be used to decode.") @property def vocab_size(self): return self._vocab_size flags = tf.flags FLAGS = flags.FLAGS flags.DEFINE_string("schedule", "train_and_evaluate", "Method of tf.contrib.learn.Experiment to run.") except ImportError: pass T2T_INITIALIZED = False def _initialize_t2t(t2t_usr_dir): global T2T_INITIALIZED if not T2T_INITIALIZED: logging.info("Setting up tensor2tensor library...") tf.logging.set_verbosity(tf.logging.INFO) usr_dir.import_usr_dir(t2t_usr_dir) T2T_INITIALIZED = True def log_prob_from_logits(logits): return logits - tf.reduce_logsumexp(logits, keepdims=True, axis=-1) def expand_input_dims_for_t2t(t, batched=False): if not batched: t = tf.expand_dims(t, 0) t = tf.expand_dims(t, -1) t = tf.expand_dims(t, -1) return t def gather_2d(params, indices): batch_size = tf.shape(params)[0] num_indices = tf.shape(indices)[1] batch_indices = tf.tile(tf.expand_dims(tf.range(batch_size), 1), [1, num_indices]) gather_nd_indices = tf.stack([batch_indices, indices], axis=2) return tf.gather_nd(params, gather_nd_indices) class _BaseTensor2TensorPredictor(Predictor): def __init__(self, t2t_usr_dir, checkpoint_dir, src_vocab_size, trg_vocab_size, t2t_unk_id, n_cpu_threads, max_terminal_id=-1, pop_id=-1): super(_BaseTensor2TensorPredictor, self).__init__() self._n_cpu_threads = n_cpu_threads self._t2t_unk_id = utils.UNK_ID if t2t_unk_id < 0 else t2t_unk_id self._checkpoint_dir = checkpoint_dir try: self.pop_id = int(pop_id) except ValueError: logging.warn("t2t predictor only supports single POP IDs. 
" "Reset to -1") self.pop_id = -1 self.max_terminal_id = max_terminal_id self.src_vocab_size = src_vocab_size self.trg_vocab_size = trg_vocab_size _initialize_t2t(t2t_usr_dir) def _add_problem_hparams(self, hparams, problem_name): if self.pop_id >= 0: try: hparams.add_hparam("pop_id", self.pop_id) except: if hparams.pop_id != self.pop_id: logging.warn("T2T pop_id does not match (%d!=%d)" % (hparams.pop_id, self.pop_id)) try: hparams.add_hparam("max_terminal_id", self.max_terminal_id) except: if hparams.max_terminal_id != self.max_terminal_id: logging.warn("T2T max_terminal_id does not match (%d!=%d)" % (hparams.max_terminal_id, self.max_terminal_id)) try: hparams.add_hparam("closing_bracket_id", self.pop_id) except: if hparams.closing_bracket_id != self.pop_id: logging.warn("T2T closing_bracket_id does not match (%d!=%d)" % (hparams.closing_bracket_id, self.pop_id)) problem = registry.problem(problem_name) problem._encoders = { "inputs": DummyTextEncoder(vocab_size=self.src_vocab_size), "targets": DummyTextEncoder(vocab_size=self.trg_vocab_size) } p_hparams = problem.get_hparams(hparams) hparams.problem = problem hparams.problem_hparams = p_hparams return hparams def create_session(self): return tf_utils.create_session(self._checkpoint_dir, self._n_cpu_threads) def get_unk_probability(self, posterior): return utils.common_get(posterior, self._t2t_unk_id, utils.NEG_INF) class T2TPredictor(_BaseTensor2TensorPredictor): def __init__(self, src_vocab_size, trg_vocab_size, model_name, problem_name, hparams_set_name, t2t_usr_dir, checkpoint_dir, t2t_unk_id=None, n_cpu_threads=-1, max_terminal_id=-1, pop_id=-1): super(T2TPredictor, self).__init__(t2t_usr_dir, checkpoint_dir, src_vocab_size, trg_vocab_size, t2t_unk_id, n_cpu_threads, max_terminal_id, pop_id) if not model_name or not problem_name or not hparams_set_name: logging.fatal( "Please specify t2t_model, t2t_problem, and t2t_hparams_set!") raise AttributeError self.consumed = [] self.src_sentence = [] predictor_graph = tf.Graph() with predictor_graph.as_default() as g: hparams = trainer_lib.create_hparams(hparams_set_name) self._add_problem_hparams(hparams, problem_name) translate_model = registry.model(model_name)( hparams, tf.estimator.ModeKeys.PREDICT) self._inputs_var = tf.placeholder(dtype=tf.int32, shape=[None], name="sgnmt_inputs") self._targets_var = tf.placeholder(dtype=tf.int32, shape=[None], name="sgnmt_targets") features = {"inputs": expand_input_dims_for_t2t(self._inputs_var), "targets": expand_input_dims_for_t2t(self._targets_var)} translate_model.prepare_features_for_infer(features) translate_model._fill_problem_hparams_features(features) logits, _ = translate_model(features) logits = tf.squeeze(logits, [0, 1, 2, 3]) self._log_probs = log_prob_from_logits(logits) self.mon_sess = self.create_session()
Apache License 2.0
sphinx-toolbox/sphinx-toolbox
sphinx_toolbox/testing.py
Sphinx.add_enumerable_node
python
def add_enumerable_node( self, node: Type[nodes.Element], figtype: str, title_getter: Optional[TitleGetter] = None, override: bool = False, **kwargs: Tuple[Callable, Callable], ) -> None: self.registry.add_enumerable_node( node, figtype, title_getter, override=override, ) self.add_node(node, override=override, **kwargs)
Register a Docutils node class as a numfig target.
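A small sketch of registering a custom enumerable node against this stub Sphinx class; the node and visitor names are illustrative only.
from docutils import nodes
from sphinx_toolbox.testing import Sphinx

class example_node(nodes.General, nodes.Element):
    pass  # placeholder node type for the sketch

def visit_example_html(translator, node):
    pass  # no-op HTML visitor

def depart_example_html(translator, node):
    pass

app = Sphinx()
app.add_enumerable_node(example_node, "example",
                        html=(visit_example_html, depart_example_html))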
https://github.com/sphinx-toolbox/sphinx-toolbox/blob/cee88c6bceac20a9ae0e381ada2fb2453ca3fc0b/sphinx_toolbox/testing.py#L236-L255
import copy import sys import tempfile from functools import partial from types import SimpleNamespace from typing import Any, Callable, Dict, List, NamedTuple, Optional, Set, Tuple, Type, Union, cast import pytest import sphinx.application from bs4 import BeautifulSoup from coincidence.regressions import check_file_output, check_file_regression from docutils import nodes from docutils.parsers.rst import Directive, roles from docutils.transforms import Transform from domdf_python_tools.doctools import prettify_docstrings from domdf_python_tools.paths import PathPlus from domdf_python_tools.stringlist import StringList from domdf_python_tools.typing import PathLike from jinja2 import Template from pygments.lexer import Lexer from pytest_regressions.common import check_text_files from pytest_regressions.file_regression import FileRegressionFixture from sphinx.builders import Builder from sphinx.config import Config from sphinx.domains import Domain, Index from sphinx.domains.python import PythonDomain from sphinx.environment.collectors import EnvironmentCollector from sphinx.events import EventListener from sphinx.events import EventManager as BaseEventManager from sphinx.ext.autodoc.directive import AutodocDirective from sphinx.highlighting import lexer_classes from sphinx.registry import SphinxComponentRegistry from sphinx.roles import XRefRole from sphinx.util import docutils from sphinx.util.typing import RoleFunction, TitleGetter from sphinx_toolbox.utils import SphinxExtMetadata __all__ = [ "Sphinx", "run_setup", "RunSetupOutput", "remove_html_footer", "check_html_regression", "remove_html_link_tags", "check_asset_copy", "HTMLRegressionFixture", "html_regression", ] class FakeBuilder(Builder): pass class EventManager(BaseEventManager): def connect(self, name: str, callback: Callable, priority: int) -> int: listener_id = self.next_listener_id self.next_listener_id += 1 self.listeners[name].append(EventListener(listener_id, callback, priority)) return listener_id class Sphinx: registry: SphinxComponentRegistry config: Config events: EventManager html_themes: Dict[str, str] def __init__(self): self.registry = SphinxComponentRegistry() self.config = Config({}, {}) self.events = EventManager(self) self.html_themes: Dict[str, str] = {} def add_builder(self, builder: Type[Builder], override: bool = False) -> None: self.registry.add_builder(builder, override=override) def add_config_value( self, name: str, default: Any, rebuild: Union[bool, str], types: Any = (), ) -> None: if rebuild in {False, True}: rebuild = "env" if rebuild else '' self.config.add(name, default, rebuild, types) def add_event(self, name: str) -> None: self.events.add(name) def set_translator( self, name: str, translator_class: Type[nodes.NodeVisitor], override: bool = False, ) -> None: self.registry.add_translator(name, translator_class, override=override) def add_node( self, node: Type[nodes.Element], override: bool = False, **kwargs: Tuple[Callable, Callable], ) -> None: if not override and docutils.is_node_registered(node): raise ValueError( f"node class {node.__name__!r} is already registered, its visitors will be overridden" ) docutils.register_node(node) self.registry.add_translation_handlers(node, **kwargs)
MIT License
thingsboard/python_tb_rest_client
tb_rest_client/models/models_pe/dashboard.py
Dashboard.owner_id
python
def owner_id(self, owner_id): self._owner_id = owner_id
Sets the owner_id of this Dashboard. :param owner_id: The owner_id of this Dashboard. # noqa: E501 :type: EntityId
https://github.com/thingsboard/python_tb_rest_client/blob/87c6a3703974fc8a86e4c72c444168ee2b758ecb/tb_rest_client/models/models_pe/dashboard.py#L300-L308
import pprint import re import six class Dashboard(object): swagger_types = { 'assigned_customers': 'list[ShortCustomerInfo]', 'configuration': 'str', 'created_time': 'int', 'customer_id': 'CustomerId', 'id': 'DashboardId', 'image': 'str', 'mobile_hide': 'bool', 'mobile_order': 'int', 'name': 'str', 'owner_id': 'EntityId', 'tenant_id': 'TenantId', 'title': 'str' } attribute_map = { 'assigned_customers': 'assignedCustomers', 'configuration': 'configuration', 'created_time': 'createdTime', 'customer_id': 'customerId', 'id': 'id', 'image': 'image', 'mobile_hide': 'mobileHide', 'mobile_order': 'mobileOrder', 'name': 'name', 'owner_id': 'ownerId', 'tenant_id': 'tenantId', 'title': 'title' } def __init__(self, assigned_customers=None, configuration=None, created_time=None, customer_id=None, id=None, image=None, mobile_hide=None, mobile_order=None, name=None, owner_id=None, tenant_id=None, title=None): self._assigned_customers = None self._configuration = None self._created_time = None self._customer_id = None self._id = None self._image = None self._mobile_hide = None self._mobile_order = None self._name = None self._owner_id = None self._tenant_id = None self._title = None self.discriminator = None if assigned_customers is not None: self.assigned_customers = assigned_customers if configuration is not None: self.configuration = configuration if created_time is not None: self.created_time = created_time if customer_id is not None: self.customer_id = customer_id if id is not None: self.id = id if image is not None: self.image = image if mobile_hide is not None: self.mobile_hide = mobile_hide if mobile_order is not None: self.mobile_order = mobile_order if name is not None: self.name = name if owner_id is not None: self.owner_id = owner_id if tenant_id is not None: self.tenant_id = tenant_id if title is not None: self.title = title @property def assigned_customers(self): return self._assigned_customers @assigned_customers.setter def assigned_customers(self, assigned_customers): self._assigned_customers = assigned_customers @property def configuration(self): return self._configuration @configuration.setter def configuration(self, configuration): self._configuration = configuration @property def created_time(self): return self._created_time @created_time.setter def created_time(self, created_time): self._created_time = created_time @property def customer_id(self): return self._customer_id @customer_id.setter def customer_id(self, customer_id): self._customer_id = customer_id @property def id(self): return self._id @id.setter def id(self, id): self._id = id @property def image(self): return self._image @image.setter def image(self, image): self._image = image @property def mobile_hide(self): return self._mobile_hide @mobile_hide.setter def mobile_hide(self, mobile_hide): self._mobile_hide = mobile_hide @property def mobile_order(self): return self._mobile_order @mobile_order.setter def mobile_order(self, mobile_order): self._mobile_order = mobile_order @property def name(self): return self._name @name.setter def name(self, name): self._name = name @property def owner_id(self): return self._owner_id @owner_id.setter
Apache License 2.0
nrel/floris
floris/tools/power_rose.py
PowerRose.__init__
python
def __init__(self,):
Instantiate a PowerRose object. No explicit arguments required, and an additional method will need to be called to populate the PowerRose object with data.
https://github.com/nrel/floris/blob/ef4934ec7feb7afd2615772d364a1eaa28db93e9/floris/tools/power_rose.py#L43-L48
import os import pickle import numpy as np import pandas as pd import matplotlib.pyplot as plt from floris.utilities import wrap_180 class PowerRose:
Apache License 2.0
georgebrock/git-browse
gitbrowse/git.py
GitFileHistory.line_mapping
python
def line_mapping(self, start, finish): key = start + '/' + finish if key in self._line_mappings: return self._line_mappings[key] forward, backward = self._build_line_mappings(start, finish) self._line_mappings[start + '/' + finish] = forward self._line_mappings[finish + '/' + start] = backward return forward
Returns a dict that represents how lines have moved between versions of a file. The keys are the line numbers in the version of the file at start, the values are where those lines have ended up in the version at finish. For example if at start the file is two lines, and at finish a new line has been inserted between the two the mapping would be: {1:1, 2:3} Deleted lines are represented by None. For example, if at start the file were two lines, and the first had been deleted by finish: {1:None, 2:1}
https://github.com/georgebrock/git-browse/blob/a77031683f08bfded5959bed9f836503b3a1219a/gitbrowse/git.py#L130-L155
import os class GitCommit(object): def __init__(self, sha, author, message): self.sha = sha self.author = author self.message = message class GitBlameLine(object): def __init__(self, sha, line, current, original_line, final_line): self.sha = sha self.line = line self.current = current self.original_line = original_line self.final_line = final_line class GitFileHistory(object): def __init__(self, path, start_commit): if not verify_revision(start_commit): raise ValueError('%s is not a valid commit, branch, tag, etc.' % ( start_commit, )) if not verify_file(path): raise ValueError('"%s" is not tracked by git' % (path, )) self.path = path p = os.popen('git log %s --follow --pretty="%s" -- %s' % ( start_commit, '%H%n%an%n%s%n', self.path, )) output = p.read().split('\n\n') self.commits = [GitCommit(*c.split('\n', 2)) for c in output if c] self._index = 0 self._blame = None self._line_mappings = {} @property def current_commit(self): return self.commits[self._index] def next(self): if self._index <= 0: return False self._index -= 1 self._blame = None return True def prev(self): if self._index >= len(self.commits) - 1: return False self._index += 1 self._blame = None return True def blame(self): if self._blame: return self._blame lines = [] p = os.popen('git blame -p %s %s' % ( self.path, self.current_commit.sha, )) while True: header = p.readline() if not header: break sha, original_line, final_line = header.split(' ')[:3] line = p.readline() while not line.startswith('\t'): line = p.readline() lines.append(GitBlameLine( sha=sha, line=line[1:], current=(sha == self.current_commit.sha), original_line=original_line, final_line=final_line, )) self._blame = lines return self._blame
MIT License
pyansys/pymapdl
ansys/mapdl/core/_commands/solution/nonlinear_options.py
NonLinearOptions.arclen
python
def arclen(self, key="", maxarc="", minarc="", **kwargs): command = f"ARCLEN,{key},{maxarc},{minarc}" return self.run(command, **kwargs)
Activates the arc-length method. APDL Command: ARCLEN Parameters ---------- key Arc-length key: OFF - Do not use the arc-length method (default). ON - Use the arc-length method. maxarc Maximum multiplier of the reference arc-length radius (default = 25). minarc Minimum multiplier of the reference arc-length radius (default = 1/1000). Notes ----- Activates the arc-length method and sets the minimum and maximum multipliers for controlling the arc-length radius based on the initial arc-length radius. The initial arc-length radius, t0, is proportional (in absolute value) to the initial load factor. The initial load factor is given by: Initial Load Factor = TIME / NSBSTP where TIME is the time specified by the TIME command for the arc-length load step, and NSBSTP is the number of substeps specified by the NSUBST command. The factors MAXARC and MINARC are used to define the range for the arc- length radius to expand and shrink during the substep solution: In each substep, the arc-length radius is kept constant throughout the equilibrium iterations. After each converged substep, the arc-length radius for the next substep is modified depending on the convergence behavior. If the substep converges and the program heuristic predicts an easy convergence, the arc-length radius is enlarged. If the enlarged value is greater than tMAX, the arc-length radius is reset to tMAX. If the substep does not converge, bisection will take place until the arc- length radius is reduced to tMIN. If further nonconvergence is encountered, the solution terminates. The arc-length method predicts the next time increment (that is, load factor increment). Therefore, the AUTOTS and PRED commands are ignored when the arc-length method is used. The STABILIZE and LNSRCH commands are also ignored. The arc-length method cannot be used in a multiframe restart. For difficult problems, one suggestion is to increase the initial number of substeps (NSUBST), and to prevent the arc-length radius from increasing too rapidly (MAXARC = 1). ARCLEN cannot be used for any load step that has no applied load or displacement. The arc-length method does not support tabular loads. In order to use the arc-length method, you must replace tabular loads by other load types and then run the analysis again.
https://github.com/pyansys/pymapdl/blob/e5cc21471c3a8fcef1f7b88359e38aa89cd63f73/ansys/mapdl/core/_commands/solution/nonlinear_options.py#L2-L72
class NonLinearOptions:
MIT License
komuw/sewer
sewer/dns_providers/aliyundns.py
AliyunDns.delete_dns_record
python
def delete_dns_record(self, domain_name, domain_dns_value): self.logger.info("delete_dns_record start: %s", (domain_name, domain_dns_value)) root, _, acme_txt = self.extract_zone(domain_name) record_id = self.query_recored_id(root, acme_txt) if not record_id: msg = "failed to find record_id of domain: %s, value: %s", domain_name, domain_dns_value self.logger.warning(msg) return self.logger.info("start to delete dns record, id: %s", record_id) request = DeleteDomainRecordRequest.DeleteDomainRecordRequest() request.set_RecordId(record_id) resp = self._send_reqeust(request) self.logger.info("delete_dns_record end: %s", (domain_name, domain_dns_value, resp.json())) return resp
delete a txt record we created just now. :param str domain_name: the value sewer client passed in, like *.menduo.example.com :param str domain_dns_value: the value sewer client passed in. we do not use this. :return _ResponseForAliyun: :return:
https://github.com/komuw/sewer/blob/056ac64fe294fb284ec5b920ec1a9425dd254e92/sewer/dns_providers/aliyundns.py#L180-L205
import json from aliyunsdkcore import client import aliyunsdkalidns.request.v20150109 from aliyunsdkalidns.request.v20150109 import ( DescribeDomainRecordsRequest, AddDomainRecordRequest, DeleteDomainRecordRequest, ) from . import common class _ResponseForAliyun(object): def __init__(self, status_code=200, content=None, headers=None): self.status_code = status_code self.headers = headers or {} self.content = content or {} self.content = json.dumps(content) super(_ResponseForAliyun, self).__init__() def json(self): return json.loads(self.content) class AliyunDns(common.BaseDns): def __init__(self, key, secret, endpoint="cn-beijing", debug=False, **kwargs): super().__init__(**kwargs) self._key = key self._secret = secret self._endpoint = endpoint self._debug = debug self.clt = client.AcsClient(self._key, self._secret, self._endpoint, debug=self._debug) def _send_reqeust(self, request): request.set_accept_format("json") try: status, headers, result = self.clt.implementation_of_do_action(request) if isinstance(result, bytes): result = result.decode() result = json.loads(result) if "Message" in result or "Code" in result: result["Success"] = False self.logger.warning("aliyundns resp error: %s", result) except Exception as exc: self.logger.warning("aliyundns failed to send request: %s, %s", str(exc), request) status, headers, result = 502, {}, b'{"Success": false}' result = json.loads(result) if self._debug: self.logger.info("aliyundns request name: %s", request.__class__.__name__) self.logger.info("aliyundns request query: %s", request.get_query_params()) return _ResponseForAliyun(status, result, headers) def query_recored_items(self, host, zone=None, tipe=None, page=1, psize=200): request = DescribeDomainRecordsRequest.DescribeDomainRecordsRequest() request.get_action_name() request.set_DomainName(host) request.set_PageNumber(page) request.set_PageSize(psize) if zone: request.set_RRKeyWord(zone) if tipe: request.set_TypeKeyWord(tipe) resp = self._send_reqeust(request) body = resp.json() return body def query_recored_id(self, root, zone, tipe="TXT"): record_id = None recoreds = self.query_recored_items(root, zone, tipe=tipe) recored_list = recoreds.get("DomainRecords", {}).get("Record", []) recored_item_list = [i for i in recored_list if i["RR"] == zone] if len(recored_item_list): record_id = recored_item_list[0]["RecordId"] return record_id @staticmethod def extract_zone(domain_name): if domain_name.count(".") > 1: zone, middle, last = str(domain_name).rsplit(".", 2) root = ".".join([middle, last]) acme_txt = "_acme-challenge.%s" % zone else: zone = "" root = domain_name acme_txt = "_acme-challenge" return root, zone, acme_txt def create_dns_record(self, domain_name, domain_dns_value): self.logger.info("create_dns_record start: %s", (domain_name, domain_dns_value)) root, _, acme_txt = self.extract_zone(domain_name) request = AddDomainRecordRequest.AddDomainRecordRequest() request.set_DomainName(root) request.set_TTL(600) request.set_RR(acme_txt) request.set_Type("TXT") request.set_Value(domain_dns_value) resp = self._send_reqeust(request) self.logger.info("create_dns_record end: %s", (domain_name, domain_dns_value, resp.json())) return resp
MIT License
kuri65536/python-for-android
python-build/python-libs/gdata/build/lib/gdata/Crypto/Util/RFC1751.py
_key2bin
python
def _key2bin(s): kl=map(lambda x: ord(x), s) kl=map(lambda x: binary[x/16]+binary[x&15], kl) return ''.join(kl)
Convert a key into a string of binary digits
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-build/python-libs/gdata/build/lib/gdata/Crypto/Util/RFC1751.py#L15-L19
__revision__ = "$Id: RFC1751.py,v 1.6 2003/04/04 15:15:10 akuchling Exp $" import string, binascii binary={0:'0000', 1:'0001', 2:'0010', 3:'0011', 4:'0100', 5:'0101', 6:'0110', 7:'0111', 8:'1000', 9:'1001', 10:'1010', 11:'1011', 12:'1100', 13:'1101', 14:'1110', 15:'1111'}
Apache License 2.0
jhuapl-boss/boss
django/bosscore/test/setup_db.py
SetupTestDB.insert_downsample_data
python
def insert_downsample_data(self): self.add_coordinate_frame('cf_ds_aniso', 'Description for cf2', 0, 4096, 0, 4096, 0, 128, 4, 4, 35) self.add_experiment('col1', 'exp_ds_aniso', 'cf_ds_aniso', 5, 500, 1) aniso_chan = self.add_channel('col1', 'exp_ds_aniso', 'channel1', 0, 0, 'uint8', 'image') self.add_coordinate_frame('cf_ds_iso', 'Description for cf2', 0, 4096, 0, 4096, 0, 128, 6, 6, 6) self.add_experiment('col1', 'exp_ds_iso', 'cf_ds_iso', 3, 500, 1, hierarchy_method="isotropic") iso_chan = self.add_channel('col1', 'exp_ds_iso', 'channel1', 0, 0, 'uint8', 'image') return (aniso_chan, iso_chan)
Some resources for small downsample tests Returns: (Tuple[Channel, Channel]): The channels created for the downsample test.
https://github.com/jhuapl-boss/boss/blob/c2e26d272bd7b8d54abdc2948193163537e31291/django/bosscore/test/setup_db.py#L219-L232
from django.contrib.auth.models import User from django.contrib.auth.models import Group from django.contrib.contenttypes.models import ContentType from guardian.shortcuts import assign_perm from ..models import Collection, Experiment, CoordinateFrame, Channel, BossLookup, BossRole, BossGroup from ..views.views_resource import ChannelDetail from ..constants import ADMIN_USER, ADMIN_GRP, PUBLIC_GRP from ..permissions import BossPermissionManager from spdb.spatialdb.test.setup import AWSSetupLayer test_user = 'testuser' test_group = 'testuser-primary' BASE_RESOLUTION = 2 NUM_HIERARCHY_LEVELS = 7 EXP1 = 'exp1' EXP22 = 'exp22' EXP_BASE_RES = 'exp-base-res-test' TEST_DATA_EXPERIMENTS = [EXP1, EXP22, EXP_BASE_RES] CHAN_BASE_RES = 'chan-with-base-res' class SetupTestDB: def __init__(self, super_user=None): self.super_user = super_user self.user = super_user def create_user(self, username=None): if self.super_user is None: self.create_super_user() if not username: username = test_user self.user = User.objects.create_user(username=username, email=username+'@test.com', password=username) user_primary_group, created = Group.objects.get_or_create(name=username + '-primary') public_group, created = Group.objects.get_or_create(name=PUBLIC_GRP) self.user.groups.add(user_primary_group) public_group.user_set.add(self.user) return self.user def add_role(self, role_name, user=None): if user is None: user = self.user BossRole.objects.create(user=user, role=role_name) def create_super_user(self, username=ADMIN_USER, email=None, password=ADMIN_USER): if self.super_user is not None: return if email is None: full_email = username + '@boss.io' else: full_email = email self.super_user = User.objects.create_superuser(username=username, email=full_email, password=password) user_primary_group, created = Group.objects.get_or_create(name=ADMIN_USER+'-primary') public_group, created = Group.objects.get_or_create(name=PUBLIC_GRP) admin_group, created = Group.objects.get_or_create(name=ADMIN_GRP) self.super_user.groups.add(user_primary_group) self.super_user.groups.add(public_group) self.super_user.groups.add(admin_group) self.add_role('admin', self.super_user) self.user = self.super_user return self.super_user def get_user(self): return self.user def set_user(self, user): self.user = user def create_group(self, group_name): group, created = Group.objects.get_or_create(name=group_name) if created: self.user.groups.add(group) bgrp, created = BossGroup.objects.get_or_create(group=group, creator=self.user) group_name = self.user.username + "-primary" user_primary_group = Group.objects.get(name=group_name) assign_perm('maintain_group', user_primary_group, bgrp) return created def insert_test_data(self): self.add_collection('col1', 'Description for collection1') self.add_collection('col1-22', 'Description for collection1-22') self.add_collection('col2', 'Description for collection2') self.add_coordinate_frame('cf1', 'Description for cf1', 0, 1000, 0, 1000, 0, 1000, 4, 4, 4) self.add_experiment('col1', EXP1, 'cf1', NUM_HIERARCHY_LEVELS, 10, 1) self.add_experiment('col1', EXP22, 'cf1', NUM_HIERARCHY_LEVELS, 500, 1) self.add_experiment('col1', EXP_BASE_RES, 'cf1', NUM_HIERARCHY_LEVELS, 10, 1) self.add_channel('col1', EXP1, 'channel1', 0, 0, 'uint8', 'image') self.add_channel('col1', EXP1, 'channel2', 0, 0, 'uint8', 'image') self.add_channel('col1', EXP1, 'channel3', 0, 0, 'uint64', 'annotation', ['channel1']) self.add_channel('col1', EXP_BASE_RES, CHAN_BASE_RES, 0, BASE_RESOLUTION, 'uint8', 'image') self.add_channel('col1', EXP1, 
'layer1', 0, 0, 'uint64', 'annotation', ['channel1']) def insert_lookup_test_data(self): self.add_collection('col1', 'Description for collection1') self.add_collection('col2', 'Description for collection2') self.add_coordinate_frame('cf1', 'Description for cf1', 0, 1000, 0, 1000, 0, 1000, 4, 4, 4) self.add_experiment('col1', 'exp1', 'cf1', 10, 10, 1) self.add_experiment('col2', 'exp1', 'cf1', 10, 500, 1) self.add_channel('col1', 'exp1', 'channel1', 0, 0, 'uint8', 'image') self.add_channel('col1', 'exp1', 'channel2', 0, 0, 'uint8', 'image') self.add_channel('col1', 'exp1', 'channel3', 0, 0, 'uint64', 'annotation', ['channel1']) self.add_channel('col1', 'exp1', 'layer1', 0, 0, 'uint64', 'annotation', ['channel1']) self.add_channel('col2', 'exp1', 'channel1', 0, 0, 'uinit8', 'image') def insert_spatialdb_test_data(self): self.add_collection('col1', 'Description for collection1') self.add_coordinate_frame('cf1', 'Description for cf1', 0, 100000, 0, 100000, 0, 100000, 4, 4, 4) self.add_experiment('col1', 'exp1', 'cf1', 10, 500, 1) self.add_channel('col1', 'exp1', 'channel1', 0, 0, 'uint8', 'image') self.add_channel('col1', 'exp1', 'channel2', 0, 0, 'uint16', 'image') self.add_channel('col1', 'exp1', 'layer1', 0, 0, 'uint64', 'annotation', ['channel1']) self.add_channel('col1', 'exp1', 'bbchan1', 0, 0, 'uint64', 'annotation', ['channel1']) def insert_cloudvolume_test_data(self): self.add_collection('col1', 'Description for collection1') self.add_coordinate_frame('cf1', 'Description for cf1', 0, 100000, 0, 100000, 0, 100000, 4, 4, 4) self.add_experiment('col1', 'exp1', 'cf1', 10, 500, 1) self.add_channel('col1', 'exp1', 'chan1', 0, 0, 'uint8', 'image', storage_type='cloudvol', bucket='bossdb-test-data', cv_path='col1/exp1/chan1') self.add_channel('col1', 'exp1', 'chan2', 0, 0, 'uint16', 'image', storage_type='cloudvol', bucket='bossdb-test-data', cv_path='col1/exp1/chan2') self.add_channel('col1', 'exp1', 'anno1', 0, 0, 'uint64', 'annotation', storage_type='cloudvol', bucket='bossdb-test-data', cv_path='col1/exp1/anno1') def insert_ingest_test_data(self): self.add_collection('my_col_1', 'Description for collection1') self.add_coordinate_frame('cf1', 'Description for cf1', 0, 100000, 0, 100000, 0, 100000, 4, 4, 4) self.add_experiment('my_col_1', 'my_exp_1', 'cf1', 10, 500, 1) self.add_channel('my_col_1', 'my_exp_1', 'my_ch_1', 0, 0, 'uint8', 'image') def insert_iso_data(self): self.add_coordinate_frame('cf2aniso', 'Description for cf2', 0, 2000, 0, 5000, 0, 200, 4, 4, 35) self.add_experiment('col1', 'exp_aniso', 'cf2aniso', 8, 500, 1) self.add_channel('col1', 'exp_aniso', 'channel1', 0, 0, 'uint8', 'image') self.add_coordinate_frame('cf2iso', 'Description for cf2', 0, 2000, 0, 5000, 0, 200, 6, 6, 6) self.add_experiment('col1', 'exp_iso', 'cf2iso', 8, 500, 1, hierarchy_method="isotropic") self.add_channel('col1', 'exp_iso', 'channel1', 0, 0, 'uint8', 'image')
Apache License 2.0
purestorage-openconnect/py-pure-client
pypureclient/pure1/Pure1_1_0/models/drive_get_response.py
DriveGetResponse.to_dict
python
def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): if hasattr(self, attr): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(DriveGetResponse, dict): for key, value in self.items(): result[key] = value return result
Returns the model properties as a dict
https://github.com/purestorage-openconnect/py-pure-client/blob/2d9fdef0b73321cea9613e7d1eb881b42845099b/pypureclient/pure1/Pure1_1_0/models/drive_get_response.py#L78-L104
import pprint import re import six import typing from ....properties import Property if typing.TYPE_CHECKING: from pypureclient.pure1.Pure1_1_0 import models class DriveGetResponse(object): swagger_types = { 'continuation_token': 'str', 'total_item_count': 'int', 'items': 'list[Drive]' } attribute_map = { 'continuation_token': 'continuation_token', 'total_item_count': 'total_item_count', 'items': 'items' } required_args = { } def __init__( self, continuation_token=None, total_item_count=None, items=None, ): if continuation_token is not None: self.continuation_token = continuation_token if total_item_count is not None: self.total_item_count = total_item_count if items is not None: self.items = items def __setattr__(self, key, value): if key not in self.attribute_map: raise KeyError("Invalid key `{}` for `DriveGetResponse`".format(key)) self.__dict__[key] = value def __getattribute__(self, item): value = object.__getattribute__(self, item) if isinstance(value, Property): raise AttributeError else: return value
BSD 2-Clause Simplified License
superkogito/pydiogment
pydiogment/utils/filters.py
butter_bandpass
python
def butter_bandpass(low_cut, high_cut, fs, order=5): nyq = 0.5 * fs low = low_cut / nyq high = high_cut / nyq b, a = butter(order, [low, high], btype='band') return b, a
Design band pass filter. Args: - low_cut (float) : the low cutoff frequency of the filter. - high_cut (float) : the high cutoff frequency of the filter. - fs (float) : the sampling rate. - order (int) : order of the filter, by default defined to 5.
https://github.com/superkogito/pydiogment/blob/000a07b2ad8d3480535e7d900aed1ed3358a5d4a/pydiogment/utils/filters.py#L49-L68
from scipy.signal import butter, lfilter def butter_lowpass(cutoff, fs, order=5): nyq = 0.5 * fs low = cutoff / nyq b, a = butter(order, low, btype='low', analog=False) return b, a def butter_highpass(cutoff, fs, order=5): nyq = 0.5 * fs high = cutoff / nyq b, a = butter(order, high, btype='high', analog=False) return b, a
BSD 3-Clause New or Revised License
voxel51/eta
eta/core/module.py
ModuleMetadata.get_input
python
def get_input(self, name): return self.inputs[name]
Returns the ModuleInput instance for input `name`.
https://github.com/voxel51/eta/blob/e51510fda0722ac7cadb17b109bad413a6602ed3/eta/core/module.py#L629-L631
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from builtins import * from future.utils import iteritems from collections import OrderedDict from glob import glob import logging import os import eta from eta.core.config import Config, ConfigError, Configurable from eta.core.diagram import HasBlockDiagram, BlockdiagModule import eta.core.logging as etal import eta.core.serial as etas import eta.core.types as etat import eta.core.utils as etau logger = logging.getLogger(__name__) def run(module_name, module_config_or_path): if etau.is_str(module_config_or_path): return _run(module_name, module_config_or_path) with etau.TempDir() as d: module_config_path = os.path.join(d, "config.json") etas.write_json(module_config_or_path, module_config_path) return _run(module_name, module_config_path) def _run(module_name, module_config_path): module_exe = find_exe(module_name) args = ["python", module_exe, module_config_path] return etau.call(args) def load_all_metadata(): return {k: _load_metadata(v) for k, v in iteritems(find_all_metadata())} def load_metadata(module_name): return _load_metadata(find_metadata(module_name)) def _load_metadata(config): metadata = ModuleMetadata.from_json(config) name = os.path.splitext(os.path.basename(config))[0] if metadata.info.name != name: raise ModuleMetadataError( "Name '%s' from ModuleMetadata must match module name '%s'" % (metadata.info.name, name) ) return metadata def find_all_metadata(): d = {} mdirs = etau.make_search_path(eta.config.module_dirs) for mdir in mdirs: for path in glob(os.path.join(mdir, "*.json")): name = os.path.splitext(os.path.basename(path))[0] if name not in d: d[name] = path else: logger.debug( "Module '%s' already exists; ignoring %s", name, path ) return d def find_metadata(module_name): try: return find_all_metadata()[module_name] except KeyError: raise ModuleMetadataError("Could not find module '%s'" % module_name) def find_exe(module_name=None, module_metadata=None): if module_metadata is None: module_metadata = load_metadata(module_name) meta_path = find_metadata(module_metadata.info.name) exe_path = os.path.join( os.path.dirname(meta_path), module_metadata.info.exe ) if not os.path.isfile(exe_path): raise ModuleMetadataError( "Could not find module executable '%s'" % exe_path ) return exe_path def setup(module_config, pipeline_config_path=None): if pipeline_config_path: from eta.core.pipeline import PipelineConfig pipeline_config = PipelineConfig.from_json(pipeline_config_path) module_config.base.eta_config.update(pipeline_config.eta_config) module_config.base.logging_config = pipeline_config.logging_config etal.custom_setup(module_config.base.logging_config) eta.set_config_settings(**module_config.base.eta_config) class BaseModuleConfig(Config): def __init__(self, d): self.base = self.parse_object( d, "base", BaseModuleConfigSettings, default=None ) if self.base is None: self.base = BaseModuleConfigSettings.default() class BaseModuleConfigSettings(Config): def __init__(self, d): self.eta_config = self.parse_dict(d, "eta_config", default={}) self.logging_config = self.parse_object( d, "logging_config", etal.LoggingConfig, default=etal.LoggingConfig.default(), ) class ModuleMetadataConfig(Config): def __init__(self, d): self.info = self.parse_object(d, "info", ModuleInfoConfig) self.inputs = self.parse_object_array(d, "inputs", ModuleInputConfig) self.outputs = self.parse_object_array( d, "outputs", ModuleOutputConfig ) 
self.parameters = self.parse_object_array( d, "parameters", ModuleParameterConfig ) def attributes(self): return ["info", "inputs", "outputs", "parameters"] class ModuleInfoConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.version = self.parse_string(d, "version") self.description = self.parse_string(d, "description") self.exe = self.parse_string(d, "exe") def attributes(self): return ["name", "type", "version", "description", "exe"] class ModuleInputConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.description = self.parse_string(d, "description") self.required = self.parse_bool(d, "required", default=True) def attributes(self): return ["name", "type", "description", "required"] class ModuleOutputConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.description = self.parse_string(d, "description") self.required = self.parse_bool(d, "required", default=True) def attributes(self): return ["name", "type", "description", "required"] class ModuleParameterConfig(Config): def __init__(self, d): self.name = self.parse_string(d, "name") self.type = self.parse_string(d, "type") self.description = self.parse_string(d, "description") self.required = self.parse_bool(d, "required", default=True) if not self.required: self.default = self.parse_raw(d, "default") elif "default" in d: raise ConfigError( "Module parameter '%s' is required, so it should not have a " "default value" % self.name ) def attributes(self): attrs = ["name", "type", "description", "required"] if not self.required: attrs.append("default") return attrs class ModuleInfo(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.type) self.version = config.version self.description = config.description self.exe = config.exe @staticmethod def _parse_type(type_str): type_ = etat.parse_type(type_str) if not etat.is_module(type_): raise ModuleMetadataError( "'%s' is not a valid module type" % type_ ) return type_ class ModuleInput(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.type) self.description = config.description self.required = config.required def is_valid_path(self, path): return self.type.is_valid_path(path) @property def is_required(self): return self.required def _parse_type(self, type_str): type_ = etat.parse_type(type_str) if not etat.is_data(type_): raise ModuleMetadataError( ( "Module input '%s' has type '%s' but must be a subclass " "of Data" ) % (self.name, type_) ) return type_ class ModuleOutput(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.type) self.description = config.description self.required = config.required def is_valid_path(self, path): return self.type.is_valid_path(path) @property def is_required(self): return self.required def _parse_type(self, type_str): type_ = etat.parse_type(type_str) if not etat.is_concrete_data(type_): raise ModuleMetadataError( ( "Module output '%s' has type '%s' but must be a subclass " "of ConcreteData" ) % (self.name, type_) ) return type_ class ModuleParameter(Configurable): def __init__(self, config): self.validate(config) self.name = config.name self.type = self._parse_type(config.name, config.type) self.description = config.description 
self.required = config.required if not self.required: self._default = config.default self._validate_default() def is_valid_value(self, val): if self.is_builtin: return self.type.is_valid_value(val) return self.type.is_valid_path(val) @property def is_required(self): return self.required @property def is_builtin(self): return etat.is_builtin(self.type) @property def is_data(self): return etat.is_data(self.type) @property def default_value(self): if self.is_required: raise ModuleMetadataError( "Module parameter '%s' is required, so it has no default " "value" % self.name ) return self._default @staticmethod def _parse_type(name, type_str): type_ = etat.parse_type(type_str) if not etat.is_builtin(type_) and not etat.is_concrete_data(type_): raise ModuleMetadataError( "Module parameter '%s' has type '%s' but must be a subclass " "of Builtin or ConcreteData" % (name, type_) ) return type_ def _validate_default(self): if self._default is None: is_valid = True elif self.is_builtin: is_valid = self.type.is_valid_value(self._default) else: is_valid = self.type.is_valid_path(self._default) if not is_valid: raise ModuleMetadataError( "Default value '%s' is invalid for module parameter '%s' of " "'%s'" % (self._default, self.name, self.type) ) class ModuleMetadata(Configurable, HasBlockDiagram): def __init__(self, config): self.validate(config) self.config = config self.info = None self.inputs = OrderedDict() self.outputs = OrderedDict() self.parameters = OrderedDict() self._parse_metadata(config) def has_input(self, name): return name in self.inputs def has_output(self, name): return name in self.outputs def has_parameter(self, name): return name in self.parameters def is_valid_input(self, name, path): return self.get_input(name).is_valid_path(path) def is_valid_output(self, name, path): return self.get_output(name).is_valid_path(path) def is_valid_parameter(self, name, val): return self.get_parameter(name).is_valid_value(val)
Apache License 2.0
nikcub/cexbot
cexbot/appdirs.py
site_config_dir
python
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False): if sys.platform in [ "win32", "darwin" ]: path = site_data_dir(appname, appauthor) if appname and version: path = os.path.join(path, version) else: path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') pathlist = [ os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep) ] if appname: if version: appname = os.path.join(appname, version) pathlist = [ os.sep.join([x, appname]) for x in pathlist ] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path
Return full path to the user-shared data dir for this application. "appname" is the name of application. If None, just the system directory is returned. "appauthor" (only required and used on Windows) is the name of the appauthor or distributing body for this application. Typically it is the owning company name. This falls back to appname. "version" is an optional version path element to append to the path. You might want to use this if you want multiple versions of your app to be able to run independently. If used, this would typically be "<major>.<minor>". Only applied when appname is present. "multipath" is an optional parameter only applicable to *nix which indicates that the entire list of config dirs should be returned. By default, the first item from XDG_CONFIG_DIRS is returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set Typical user data directories are: Mac OS X: same as site_data_dir Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in $XDG_CONFIG_DIRS Win *: same as site_data_dir Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.) For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
https://github.com/nikcub/cexbot/blob/0dd0b60415afd9c1feb959186d32b1a683887975/cexbot/appdirs.py#L182-L229
__version_info__ = (1, 3, 0) __version__ = '.'.join(map(str, __version_info__)) import sys import os PY3 = sys.version_info[0] == 3 if PY3: unicode = str def user_data_dir(appname=None, appauthor=None, version=None, roaming=False): if sys.platform == "win32": if appauthor is None: appauthor = appname const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" path = os.path.normpath(_get_win_folder(const)) if appname: path = os.path.join(path, appauthor, appname) elif sys.platform == 'darwin': path = os.path.expanduser('~/Library/Application Support/') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path def site_data_dir(appname=None, appauthor=None, version=None, multipath=False): if sys.platform == "win32": if appauthor is None: appauthor = appname path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) if appname: path = os.path.join(path, appauthor, appname) elif sys.platform == 'darwin': path = os.path.expanduser('/Library/Application Support') if appname: path = os.path.join(path, appname) else: path = os.getenv('XDG_DATA_DIRS', os.pathsep.join(['/usr/local/share', '/usr/share'])) pathlist = [ os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep) ] if appname: if version: appname = os.path.join(appname, version) pathlist = [ os.sep.join([x, appname]) for x in pathlist ] if multipath: path = os.pathsep.join(pathlist) else: path = pathlist[0] return path if appname and version: path = os.path.join(path, version) return path def user_config_dir(appname=None, appauthor=None, version=None, roaming=False): if sys.platform in [ "win32", "darwin" ]: path = user_data_dir(appname, appauthor, None, roaming) else: path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")) if appname: path = os.path.join(path, appname) if appname and version: path = os.path.join(path, version) return path
MIT License