# ============================================================================
# Code-corpus dump (The Stack-style). Per-record fields: hexsha, size, ext,
# lang, repo path/name/head_hexsha/licenses, star/issue/fork counts and event
# datetimes, the file content, avg_line_length, max_line_length, and
# alphanum_fraction.
# ============================================================================

# ----------------------------------------------------------------------------
# File: docs/core/howto/listings/ssl/check_echo_certificate.py
# Repo: giadram/twisted @ 4771b1340b822d20d0664bb7d8334e8fb7e52863
# Licenses: MIT, Unlicense | Size: 1,010 bytes | Stars: 1
# ----------------------------------------------------------------------------
import sys
from twisted.internet import defer, endpoints, protocol, ssl, task
with open("../../../examples/server.pem") as f:
certificate = ssl.Certificate.loadPEM(f.read())
def main(reactor, host, port=443):
options = ssl.optionsForClientTLS(host.decode("utf-8"), trustRoot=certificate)
port = int(port)
done = defer.Deferred()
class ShowCertificate(protocol.Protocol):
def connectionMade(self):
self.transport.write(b"GET / HTTP/1.0\r\n\r\n")
def dataReceived(self, data):
certificate = ssl.Certificate(self.transport.getPeerCertificate())
print(certificate)
self.transport.loseConnection()
def connectionLost(self, reason):
if reason.check(ssl.SSL.Error):
print(reason.value)
done.callback(None)
endpoints.connectProtocol(
endpoints.SSL4ClientEndpoint(reactor, host, port, options), ShowCertificate()
)
return done
task.react(main, sys.argv[1:])
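
# Typical invocation sketch, assuming a TLS echo server from the same docs
# tree is listening locally (host and port below are placeholders):
#     python check_echo_certificate.py localhost 8000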

# ----------------------------------------------------------------------------
# File: Sources/01XXX/1000/1000.py
# Repo: DDManager/Baekjoon-Online-Judge @ 7dd6d76838d3309bfe5bef46f1778c5776ebdf2a
# Licenses: MIT | Size: 229 bytes | Stars: 1
# ----------------------------------------------------------------------------
##
# BOJ No. 1000 Python 3 source code
# Author: 동동매니저 (DDManager)
#
# ※ Execution results
# Memory used : 29,056 KB / 294,912 KB
# Time taken  : 60 ms / 8,000 ms
#
# Copyright 2019. DDManager all rights reserved.
##
a,b=map(int,input().split())
print(a+b)

# ----------------------------------------------------------------------------
# File: ucscentralsdk/mometa/vnic/VnicBootIpPolicy.py
# Repo: ragupta-git/ucscentralsdk @ 2678008b5fb6b0fafafec388d0874147e95a1086
# Licenses: Apache-2.0 | Size: 3,377 bytes
# ----------------------------------------------------------------------------
"""This module contains the general information for VnicBootIpPolicy ManagedObject."""
from ...ucscentralmo import ManagedObject
from ...ucscentralcoremeta import UcsCentralVersion, MoPropertyMeta, MoMeta
from ...ucscentralmeta import VersionMeta
class VnicBootIpPolicyConsts():
INT_ID_NONE = "none"
POLICY_OWNER_LOCAL = "local"
POLICY_OWNER_PENDING_POLICY = "pending-policy"
POLICY_OWNER_POLICY = "policy"
POLICY_OWNER_UNSPECIFIED = "unspecified"
class VnicBootIpPolicy(ManagedObject):
"""This is VnicBootIpPolicy class."""
consts = VnicBootIpPolicyConsts()
naming_props = set([])
mo_meta = MoMeta("VnicBootIpPolicy", "vnicBootIpPolicy", "", VersionMeta.Version111a, "InputOutput", 0x7f, [], ["admin", "ls-config", "ls-network", "ls-server"], [], [], ["Get"])
prop_meta = {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version111a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
"descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x2, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []),
"int_id": MoPropertyMeta("int_id", "intId", "string", VersionMeta.Version111a, MoPropertyMeta.INTERNAL, None, None, None, None, ["none"], ["0-4294967295"]),
"name": MoPropertyMeta("name", "name", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
"policy_level": MoPropertyMeta("policy_level", "policyLevel", "uint", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
"policy_owner": MoPropertyMeta("policy_owner", "policyOwner", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["local", "pending-policy", "policy", "unspecified"], []),
"pool_name": MoPropertyMeta("pool_name", "poolName", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x20, 0, 256, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x40, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
}
prop_map = {
"childAction": "child_action",
"descr": "descr",
"dn": "dn",
"intId": "int_id",
"name": "name",
"policyLevel": "policy_level",
"policyOwner": "policy_owner",
"poolName": "pool_name",
"rn": "rn",
"status": "status",
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.child_action = None
self.descr = None
self.int_id = None
self.name = None
self.policy_level = None
self.policy_owner = None
self.pool_name = None
self.status = None
ManagedObject.__init__(self, "VnicBootIpPolicy", parent_mo_or_dn, **kwargs)
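
# A minimal usage sketch for the managed object above; the parent DN and the
# pool name are placeholder values, and the keyword args are routed onto the
# instance through prop_meta:
if __name__ == "__main__":
    bip = VnicBootIpPolicy(parent_mo_or_dn="org-root/ls-sample",
                           descr="sample boot ip policy",
                           pool_name="sample-ip-pool")
    print(bip.pool_name, bip.descr)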

# ----------------------------------------------------------------------------
# File: tests/artificial/transf_BoxCox/trend_MovingAverage/cycle_7/ar_12/
#       test_artificial_1024_BoxCox_MovingAverage_7_12_100.py
# Repo: jmabry/pyaf @ afbc15a851a2445a7824bf255af612dc429265af
# Licenses: BSD-3-Clause | Size: 273 bytes
# ----------------------------------------------------------------------------
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 7, transform = "BoxCox", sigma = 0.0, exog_count = 100, ar_order = 12);

# ----------------------------------------------------------------------------
# File: discord/audit_logs.py
# Repo: mccoderpy/discord.py-with-message-components @ 2945eec9c9ef79d3bcfeaaa8687cba3d60fd3a85
# Licenses: MIT | Size: 13,789 bytes | Stars: 22
# ----------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from . import utils, enums
from .object import Object
from .permissions import PermissionOverwrite, Permissions
from .colour import Colour
from .invite import Invite
from .mixins import Hashable
def _transform_verification_level(entry, data):
return enums.try_enum(enums.VerificationLevel, data)
def _transform_default_notifications(entry, data):
return enums.try_enum(enums.NotificationLevel, data)
def _transform_explicit_content_filter(entry, data):
return enums.try_enum(enums.ContentFilter, data)
def _transform_permissions(entry, data):
return Permissions(data)
def _transform_color(entry, data):
return Colour(data)
def _transform_snowflake(entry, data):
return int(data)
def _transform_channel(entry, data):
if data is None:
return None
return entry.guild.get_channel(int(data)) or Object(id=data)
def _transform_owner_id(entry, data):
if data is None:
return None
return entry._get_member(int(data))
def _transform_inviter_id(entry, data):
if data is None:
return None
return entry._get_member(int(data))
def _transform_overwrites(entry, data):
overwrites = []
for elem in data:
allow = Permissions(elem['allow'])
deny = Permissions(elem['deny'])
ow = PermissionOverwrite.from_pair(allow, deny)
ow_type = elem['type']
ow_id = int(elem['id'])
if ow_type == 'role':
target = entry.guild.get_role(ow_id)
else:
target = entry._get_member(ow_id)
if target is None:
target = Object(id=ow_id)
overwrites.append((target, ow))
return overwrites
class AuditLogDiff:
def __len__(self):
return len(self.__dict__)
def __iter__(self):
return iter(self.__dict__.items())
def __repr__(self):
values = ' '.join('%s=%r' % item for item in self.__dict__.items())
return '<AuditLogDiff %s>' % values
class AuditLogChanges:
TRANSFORMERS = {
'verification_level': (None, _transform_verification_level),
'explicit_content_filter': (None, _transform_explicit_content_filter),
'allow': (None, _transform_permissions),
'deny': (None, _transform_permissions),
'permissions': (None, _transform_permissions),
'id': (None, _transform_snowflake),
'color': ('colour', _transform_color),
'owner_id': ('owner', _transform_owner_id),
'inviter_id': ('inviter', _transform_inviter_id),
'channel_id': ('channel', _transform_channel),
'afk_channel_id': ('afk_channel', _transform_channel),
'system_channel_id': ('system_channel', _transform_channel),
'widget_channel_id': ('widget_channel', _transform_channel),
'permission_overwrites': ('overwrites', _transform_overwrites),
'splash_hash': ('splash', None),
'icon_hash': ('icon', None),
'avatar_hash': ('avatar', None),
'rate_limit_per_user': ('slowmode_delay', None),
'default_message_notifications': ('default_notifications', _transform_default_notifications),
}
def __init__(self, entry, data):
self.before = AuditLogDiff()
self.after = AuditLogDiff()
for elem in data:
attr = elem['key']
# special cases for role add/remove
if attr == '$add':
self._handle_role(self.before, self.after, entry, elem['new_value'])
continue
elif attr == '$remove':
self._handle_role(self.after, self.before, entry, elem['new_value'])
continue
transformer = self.TRANSFORMERS.get(attr)
if transformer:
key, transformer = transformer
if key:
attr = key
try:
before = elem['old_value']
except KeyError:
before = None
else:
if transformer:
before = transformer(entry, before)
setattr(self.before, attr, before)
try:
after = elem['new_value']
except KeyError:
after = None
else:
if transformer:
after = transformer(entry, after)
setattr(self.after, attr, after)
# add an alias
if hasattr(self.after, 'colour'):
self.after.color = self.after.colour
self.before.color = self.before.colour
def __repr__(self):
return '<AuditLogChanges before=%r after=%r>' % (self.before, self.after)
def _handle_role(self, first, second, entry, elem):
if not hasattr(first, 'roles'):
setattr(first, 'roles', [])
data = []
g = entry.guild
for e in elem:
role_id = int(e['id'])
role = g.get_role(role_id)
if role is None:
role = Object(id=role_id)
role.name = e['name']
data.append(role)
setattr(second, 'roles', data)
class AuditLogEntry(Hashable):
r"""Represents an Audit Log entry.
You retrieve these via :meth:`Guild.audit_logs`.
.. container:: operations
.. describe:: x == y
Checks if two entries are equal.
.. describe:: x != y
Checks if two entries are not equal.
.. describe:: hash(x)
Returns the entry's hash.
.. versionchanged:: 1.7
Audit log entries are now comparable and hashable.
Attributes
-----------
action: :class:`AuditLogAction`
The action that was done.
user: :class:`abc.User`
The user who initiated this action. Usually a :class:`Member`\, unless gone
then it's a :class:`User`.
id: :class:`int`
The entry ID.
target: Any
The target that got changed. The exact type of this depends on
the action being done.
reason: Optional[:class:`str`]
The reason this action was done.
extra: Any
Extra information that this entry has that might be useful.
For most actions, this is ``None``. However in some cases it
contains extra information. See :class:`AuditLogAction` for
which actions have this field filled out.
"""
def __init__(self, *, users, data, guild):
self._state = guild._state
self.guild = guild
self._users = users
self._from_data(data)
def _from_data(self, data):
self.action = enums.try_enum(enums.AuditLogAction, data['action_type'])
self.id = int(data['id'])
# this key is technically not usually present
self.reason = data.get('reason')
self.extra = data.get('options')
if isinstance(self.action, enums.AuditLogAction) and self.extra:
if self.action is enums.AuditLogAction.member_prune:
# member prune has two keys with useful information
self.extra = type('_AuditLogProxy', (), {k: int(v) for k, v in self.extra.items()})()
elif self.action is enums.AuditLogAction.member_move or self.action is enums.AuditLogAction.message_delete:
channel_id = int(self.extra['channel_id'])
elems = {
'count': int(self.extra['count']),
'channel': self.guild.get_channel(channel_id) or Object(id=channel_id)
}
self.extra = type('_AuditLogProxy', (), elems)()
elif self.action is enums.AuditLogAction.member_disconnect:
# The member disconnect action has a dict with some information
elems = {
'count': int(self.extra['count']),
}
self.extra = type('_AuditLogProxy', (), elems)()
elif self.action.name.endswith('pin'):
# the pin actions have a dict with some information
channel_id = int(self.extra['channel_id'])
message_id = int(self.extra['message_id'])
elems = {
'channel': self.guild.get_channel(channel_id) or Object(id=channel_id),
'message_id': message_id
}
self.extra = type('_AuditLogProxy', (), elems)()
elif self.action.name.startswith('overwrite_'):
# the overwrite_ actions have a dict with some information
instance_id = int(self.extra['id'])
the_type = self.extra.get('type')
if the_type == 'member':
self.extra = self._get_member(instance_id)
else:
role = self.guild.get_role(instance_id)
if role is None:
role = Object(id=instance_id)
role.name = self.extra.get('role_name')
self.extra = role
# this key is not present when the above is present, typically.
# It's a list of { new_value: a, old_value: b, key: c }
# where new_value and old_value are not guaranteed to be there depending
# on the action type, so let's just fetch it for now and only turn it
# into meaningful data when requested
self._changes = data.get('changes', [])
self.user = self._get_member(utils._get_as_snowflake(data, 'user_id'))
self._target_id = utils._get_as_snowflake(data, 'target_id')
def _get_member(self, user_id):
return self.guild.get_member(user_id) or self._users.get(user_id)
def __repr__(self):
return '<AuditLogEntry id={0.id} action={0.action} user={0.user!r}>'.format(self)
@utils.cached_property
def created_at(self):
""":class:`datetime.datetime`: Returns the entry's creation time in UTC."""
return utils.snowflake_time(self.id)
@utils.cached_property
def target(self):
try:
converter = getattr(self, '_convert_target_' + self.action.target_type)
except AttributeError:
return Object(id=self._target_id)
else:
return converter(self._target_id)
@utils.cached_property
def category(self):
"""Optional[:class:`AuditLogActionCategory`]: The category of the action, if applicable."""
return self.action.category
@utils.cached_property
def changes(self):
""":class:`AuditLogChanges`: The list of changes this entry has."""
obj = AuditLogChanges(self, self._changes)
del self._changes
return obj
@utils.cached_property
def before(self):
""":class:`AuditLogDiff`: The target's prior state."""
return self.changes.before
@utils.cached_property
def after(self):
""":class:`AuditLogDiff`: The target's subsequent state."""
return self.changes.after
def _convert_target_guild(self, target_id):
return self.guild
def _convert_target_channel(self, target_id):
ch = self.guild.get_channel(target_id)
if ch is None:
return Object(id=target_id)
return ch
def _convert_target_user(self, target_id):
return self._get_member(target_id)
def _convert_target_role(self, target_id):
role = self.guild.get_role(target_id)
if role is None:
return Object(id=target_id)
return role
def _convert_target_invite(self, target_id):
# invites have target_id set to null
# so figure out which change has the full invite data
changeset = self.before if self.action is enums.AuditLogAction.invite_delete else self.after
fake_payload = {
'max_age': changeset.max_age,
'max_uses': changeset.max_uses,
'code': changeset.code,
'temporary': changeset.temporary,
'channel': changeset.channel,
'uses': changeset.uses,
'guild': self.guild,
}
obj = Invite(state=self._state, data=fake_payload)
try:
obj.inviter = changeset.inviter
except AttributeError:
pass
return obj
def _convert_target_emoji(self, target_id):
return self._state.get_emoji(target_id) or Object(id=target_id)
def _convert_target_message(self, target_id):
return self._get_member(target_id)
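
# A minimal consumption sketch for the classes above, assuming a connected
# client and a Guild object in hand; the function name, limit, and action
# below are illustrative choices, not part of this module:
async def _print_recent_bans(guild):
    # Guild.audit_logs yields AuditLogEntry objects, newest first.
    async for entry in guild.audit_logs(limit=20, action=enums.AuditLogAction.ban):
        print(entry.user, "banned", entry.target, "reason:", entry.reason)
        # AuditLogDiff is iterable, so its recorded attributes dump cleanly:
        print("before:", dict(entry.changes.before), "after:", dict(entry.changes.after))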

# ----------------------------------------------------------------------------
# File: conans/model/editable_layout.py
# Repo: Ignition/conan @ 84a38590987ecb9f3011f73babc95598ea62535f
# Licenses: MIT | Size: 4,536 bytes | Stars: 2
# ----------------------------------------------------------------------------
# coding=utf-8
import os
from collections import OrderedDict
import six
from six.moves import configparser
from conans.errors import ConanException
from conans.model.ref import ConanFileReference
from conans.util.files import load
from conans.util.templates import render_layout_file
DEFAULT_LAYOUT_FILE = "default"
LAYOUTS_FOLDER = 'layouts'
def get_editable_abs_path(path, cwd, cache_folder):
# Check the layout file exists, is correct, and get its abs-path
if path:
layout_abs_path = os.path.normpath(os.path.join(cwd, path))
if not os.path.isfile(layout_abs_path):
layout_abs_path = os.path.join(cache_folder, LAYOUTS_FOLDER, path)
if not os.path.isfile(layout_abs_path):
raise ConanException("Couldn't find layout file: %s" % path)
return layout_abs_path
# Default only in cache
layout_default_path = os.path.join(cache_folder, LAYOUTS_FOLDER, DEFAULT_LAYOUT_FILE)
if os.path.isfile(layout_default_path):
return layout_default_path
class EditableLayout(object):
BUILD_FOLDER = "build_folder"
SOURCE_FOLDER = "source_folder"
cpp_info_dirs = ['includedirs', 'libdirs', 'resdirs', 'bindirs', 'builddirs', 'srcdirs']
folders = [BUILD_FOLDER, SOURCE_FOLDER]
def __init__(self, filepath):
self._filepath = filepath
def folder(self, ref, name, settings, options):
_, folders = self._load_data(ref, settings=settings, options=options)
try:
path = folders.get(str(ref)) or folders.get(None) or {}
return path[name]
except KeyError:
return None
@staticmethod
def _work_on_item(value):
value = value.replace('\\', '/')
return value
def _parse_layout_file(self, ref, settings, options):
content = load(self._filepath)
try:
content = render_layout_file(content, ref=ref, settings=settings, options=options)
parser = configparser.ConfigParser(allow_no_value=True)
parser.optionxform = str
if six.PY3:
parser.read_string(content)
else:
parser.readfp(six.StringIO(content))
except (configparser.Error, ConanException) as e:
raise ConanException("Error parsing layout file '%s' (for reference '%s')\n%s" %
(self._filepath, str(ref), str(e)))
return parser
def _load_data(self, ref, settings, options):
parser = self._parse_layout_file(ref, settings, options)
# Build a convenient data structure
data = OrderedDict()
folders = {}
for section in parser.sections():
reference, section_name = section.rsplit(":", 1) if ':' in section else (None, section)
if section_name in EditableLayout.folders:
items = [k for k, _ in parser.items(section)] or [""]
if len(items) > 1:
raise ConanException("'%s' with more than one value in layout file: %s"
% (section_name, self._filepath))
folders.setdefault(reference, {})[section_name] = self._work_on_item(items[0])
continue
if section_name not in EditableLayout.cpp_info_dirs:
raise ConanException("Wrong cpp_info field '%s' in layout file: %s"
% (section_name, self._filepath))
if reference:
try:
r = ConanFileReference.loads(reference, validate=True)
if r.revision:
raise ConanException("Don't provide revision in Editable layouts")
except ConanException:
raise ConanException("Wrong package reference '%s' in layout file: %s"
% (reference, self._filepath))
data.setdefault(reference, {})[section_name] =\
[self._work_on_item(k) for k, _ in parser.items(section)]
return data, folders
def apply_to(self, ref, cpp_info, settings=None, options=None):
data, _ = self._load_data(ref, settings=settings, options=options)
# Apply the data to the cpp_info
data = data.get(str(ref)) or data.get(None) or {}
try:
for key, items in data.items():
setattr(cpp_info, key, items)
except Exception as e:
raise ConanException("Error applying layout in '%s': %s" % (str(ref), str(e)))

# ----------------------------------------------------------------------------
# File: ceilometerclient/tests/v2/test_shell.py
# Repo: citrix-openstack-build/python-ceilometerclient @ c3283ec1e861f29a53c59c1a09f2a1bc21a713e4
# Licenses: Apache-2.0 | Size: 6,090 bytes
# ----------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import cStringIO
import mock
import re
import sys
from testtools import matchers
from ceilometerclient.tests import utils
from ceilometerclient.v2 import alarms
from ceilometerclient.v2 import shell as ceilometer_shell
class ShellAlarmStateCommandsTest(utils.BaseTestCase):
ALARM_ID = 'foobar'
def setUp(self):
super(ShellAlarmStateCommandsTest, self).setUp()
self.cc = mock.Mock()
self.cc.alarms = mock.Mock()
self.args = mock.Mock()
self.args.alarm_id = self.ALARM_ID
def test_alarm_state_get(self):
ceilometer_shell.do_alarm_state_get(self.cc, self.args)
self.cc.alarms.get_state.assert_called_once_with(self.ALARM_ID)
self.assertFalse(self.cc.alarms.set_state.called)
def test_alarm_state_set(self):
self.args.state = 'ok'
ceilometer_shell.do_alarm_state_set(self.cc, self.args)
self.cc.alarms.set_state.assert_called_once_with(self.ALARM_ID, 'ok')
self.assertFalse(self.cc.alarms.get_state.called)
class ShellAlarmHistoryCommandTest(utils.BaseTestCase):
ALARM_ID = '768ff714-8cfb-4db9-9753-d484cb33a1cc'
FULL_DETAIL = ('{"alarm_actions": [], '
'"user_id": "8185aa72421a4fd396d4122cba50e1b5", '
'"name": "scombo", '
'"timestamp": "2013-10-03T08:58:33.647912", '
'"enabled": true, '
'"state_timestamp": "2013-10-03T08:58:33.647912", '
'"rule": {"operator": "or", "alarm_ids": '
'["062cc907-3a9f-4867-ab3b-fa83212b39f7"]}, '
'"alarm_id": "768ff714-8cfb-4db9-9753-d484cb33a1cc", '
'"state": "insufficient data", '
'"insufficient_data_actions": [], '
'"repeat_actions": false, '
'"ok_actions": [], '
'"project_id": "57d04f24d0824b78b1ea9bcecedbda8f", '
'"type": "combination", '
'"description": "Combined state of alarms '
'062cc907-3a9f-4867-ab3b-fa83212b39f7"}')
ALARM_HISTORY = [{'on_behalf_of': '57d04f24d0824b78b1ea9bcecedbda8f',
'user_id': '8185aa72421a4fd396d4122cba50e1b5',
'event_id': 'c74a8611-6553-4764-a860-c15a6aabb5d0',
'timestamp': '2013-10-03T08:59:28.326000',
'detail': '{"state": "alarm"}',
'alarm_id': '768ff714-8cfb-4db9-9753-d484cb33a1cc',
'project_id': '57d04f24d0824b78b1ea9bcecedbda8f',
'type': 'state transition'},
{'on_behalf_of': '57d04f24d0824b78b1ea9bcecedbda8f',
'user_id': '8185aa72421a4fd396d4122cba50e1b5',
'event_id': 'c74a8611-6553-4764-a860-c15a6aabb5d0',
'timestamp': '2013-10-03T08:59:28.326000',
'detail': '{"description": "combination of one"}',
'alarm_id': '768ff714-8cfb-4db9-9753-d484cb33a1cc',
'project_id': '57d04f24d0824b78b1ea9bcecedbda8f',
'type': 'rule change'},
{'on_behalf_of': '57d04f24d0824b78b1ea9bcecedbda8f',
'user_id': '8185aa72421a4fd396d4122cba50e1b5',
'event_id': '4fd7df9e-190d-4471-8884-dc5a33d5d4bb',
'timestamp': '2013-10-03T08:58:33.647000',
'detail': FULL_DETAIL,
'alarm_id': '768ff714-8cfb-4db9-9753-d484cb33a1cc',
'project_id': '57d04f24d0824b78b1ea9bcecedbda8f',
'type': 'creation'}]
TIMESTAMP_RE = (' +\| (\d{4})-(\d{2})-(\d{2})T'
'(\d{2})\:(\d{2})\:(\d{2})\.(\d{6}) \| +')
def setUp(self):
super(ShellAlarmHistoryCommandTest, self).setUp()
self.cc = mock.Mock()
self.cc.alarms = mock.Mock()
self.args = mock.Mock()
self.args.alarm_id = self.ALARM_ID
def _do_test_alarm_history(self, raw_query=None, parsed_query=None):
self.args.query = raw_query
orig = sys.stdout
sys.stdout = cStringIO.StringIO()
history = [alarms.AlarmChange(mock.Mock(), change)
for change in self.ALARM_HISTORY]
self.cc.alarms.get_history.return_value = history
try:
ceilometer_shell.do_alarm_history(self.cc, self.args)
self.cc.alarms.get_history.assert_called_once_with(
q=parsed_query,
alarm_id=self.ALARM_ID
)
out = sys.stdout.getvalue()
required = [
'.*creation%sname: scombo.*' % self.TIMESTAMP_RE,
'.*rule change%sdescription: combination of one.*' %
self.TIMESTAMP_RE,
'.*state transition%sstate: alarm.*' % self.TIMESTAMP_RE,
]
for r in required:
self.assertThat(out, matchers.MatchesRegex(r, re.DOTALL))
finally:
sys.stdout.close()
sys.stdout = orig
def test_alarm_all_history(self):
self._do_test_alarm_history()
def test_alarm_constrained_history(self):
parsed_query = [dict(field='timestamp',
value='2013-10-03T08:59:28',
op='gt')]
self._do_test_alarm_history(raw_query='timestamp>2013-10-03T08:59:28',
parsed_query=parsed_query)

# ----------------------------------------------------------------------------
# File: keras/baseline/src/train.py
# Repo: Angus1996/HuaweiCloud_AI_Competition2019 @ 08f4a262a7563bc26561acae8bfe3b41aab9af6b
# Licenses: MIT | Size: 5,982 bytes | Stars: 3
# ----------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import os
import multiprocessing
from glob import glob
import moxing as mox  # moxing is Huawei's in-house module; it cannot be installed on a local machine and is only usable on the Huawei Cloud ModelArts platform.
# For moxing documentation, see https://github.com/huaweicloud/ModelArts-Lab/tree/master/docs/moxing_api_doc
import numpy as np
from keras import backend
from keras.models import Model
from keras.optimizers import adam
from keras.layers import Flatten, Dense, Dropout
from keras.callbacks import TensorBoard, Callback, EarlyStopping
from keras import regularizers
from data_gen import data_flow
from models.resnet50 import ResNet50
backend.set_image_data_format('channels_last')
def model_fn(FLAGS, objective, optimizer, metrics):
"""
pre-trained resnet50 model
"""
base_model = ResNet50(weights="imagenet",
include_top=False,
pooling=None,
input_shape=(FLAGS.input_size, FLAGS.input_size, 3),
classes=FLAGS.num_classes)
for layer in base_model.layers:
layer.trainable = False
x = base_model.output
x = Flatten()(x)
x = Dense(256, activation='sigmoid', kernel_regularizer=regularizers.l1(0.0001))(x)
x = Dropout(rate=0.3)(x)
predictions = Dense(FLAGS.num_classes, activation='softmax')(x)
model = Model(inputs=base_model.input, outputs=predictions)
model.compile(loss=objective, optimizer=optimizer, metrics=metrics)
return model
class LossHistory(Callback):
def __init__(self, FLAGS):
super(LossHistory, self).__init__()
self.FLAGS = FLAGS
def on_epoch_end(self, epoch, logs={}):
if epoch % self.FLAGS.snapshot_freq == 0:
save_path = os.path.join(self.FLAGS.train_local, 'weights_%03d_%.4f.h5' % (epoch, logs.get('val_acc')))
self.model.save_weights(save_path)
if self.FLAGS.train_url.startswith('s3://'):
save_url = os.path.join(self.FLAGS.train_url, 'weights_%03d_%.4f.h5' % (epoch, logs.get('val_acc')))
mox.file.copy(save_path, save_url)
print('save weights file', save_path)
if self.FLAGS.keep_weights_file_num > -1:
weights_files = glob(os.path.join(self.FLAGS.train_local, '*.h5'))
if len(weights_files) >= self.FLAGS.keep_weights_file_num:
weights_files.sort(key=lambda file_name: os.stat(file_name).st_ctime, reverse=True)
for file_path in weights_files[self.FLAGS.keep_weights_file_num:]:
os.remove(file_path) # only remove weights files on local path
def train_model(FLAGS):
# data flow generator
train_sequence, validation_sequence = data_flow(FLAGS.data_local, FLAGS.batch_size,
FLAGS.num_classes, FLAGS.input_size)
optimizer = adam(lr=FLAGS.learning_rate, decay=1e-6,clipnorm=0.001)
objective = 'categorical_crossentropy'
metrics = ['accuracy']
model = model_fn(FLAGS, objective, optimizer, metrics)
if FLAGS.restore_model_path != '' and mox.file.exists(FLAGS.restore_model_path):
if FLAGS.restore_model_path.startswith('s3://'):
restore_model_name = FLAGS.restore_model_path.rsplit('/', 1)[1]
mox.file.copy(FLAGS.restore_model_path, '/cache/tmp/' + restore_model_name)
model.load_weights('/cache/tmp/' + restore_model_name)
os.remove('/cache/tmp/' + restore_model_name)
else:
model.load_weights(FLAGS.restore_model_path)
print('restore parameters from %s success' % FLAGS.restore_model_path)
if not os.path.exists(FLAGS.train_local):
os.makedirs(FLAGS.train_local)
tensorboard = TensorBoard(log_dir=FLAGS.train_local, batch_size=FLAGS.batch_size)
early_stopping = EarlyStopping(monitor='val_loss', patience=4, verbose=2)
history = LossHistory(FLAGS)
model.fit_generator(
train_sequence,
steps_per_epoch=len(train_sequence),
epochs=FLAGS.max_epochs,
verbose=1,
callbacks=[history, tensorboard, early_stopping],
validation_data=validation_sequence,
max_queue_size=10,
workers=int(multiprocessing.cpu_count() * 0.7),
use_multiprocessing=True,
shuffle=True
)
print('training done!')
    # Copy the training logs to OBS; training progress can then be viewed with the TensorBoard built into ModelArts training jobs
if FLAGS.train_url.startswith('s3://'):
files = mox.file.list_directory(FLAGS.train_local)
for file_name in files:
            if file_name.startswith('events'):
mox.file.copy(os.path.join(FLAGS.train_local, file_name), os.path.join(FLAGS.train_url, file_name))
print('save events log file to OBS path: ', FLAGS.train_url)
pb_save_dir_local = ''
if FLAGS.deploy_script_path != '':
from save_model import save_pb_model
        # By default, save the latest model as a pb model; you can run "python run.py --mode=save_pb ..." to convert a specified h5 model to pb
pb_save_dir_local = save_pb_model(FLAGS, model)
if FLAGS.deploy_script_path != '' and FLAGS.test_data_url != '':
print('test dataset predicting...')
from inference import infer_on_dataset
accuracy, result_file_path = infer_on_dataset(FLAGS.test_data_local, FLAGS.test_data_local, os.path.join(pb_save_dir_local, 'model'))
if accuracy is not None:
metric_file_name = os.path.join(FLAGS.train_url, 'metric.json')
metric_file_content = '{"total_metric": {"total_metric_values": {"accuracy": %0.4f}}}' % accuracy
with mox.file.File(metric_file_name, "w") as f:
f.write(metric_file_content + '\n')
if FLAGS.train_url.startswith('s3://'):
result_file_path_obs = os.path.join(FLAGS.train_url, 'model', os.path.basename(result_file_path))
mox.file.copy(result_file_path, result_file_path_obs)
print('accuracy result file has been copied to %s' % result_file_path_obs)
else:
print('accuracy is None')
print('end')
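
# A minimal invocation sketch, assuming train_model() is driven by a plain
# namespace carrying the FLAGS fields read above; every value below is a
# placeholder (the real project parses these from run.py command-line flags):
if __name__ == '__main__':
    from argparse import Namespace
    FLAGS = Namespace(
        data_local='/cache/data', train_local='/cache/train_out',
        train_url='', test_data_url='', test_data_local='',
        deploy_script_path='', restore_model_path='',
        num_classes=40, input_size=224, batch_size=32,
        learning_rate=1e-4, max_epochs=10,
        snapshot_freq=1, keep_weights_file_num=5)
    train_model(FLAGS)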

# ----------------------------------------------------------------------------
# File: LeetCode/Count Largest Group.py
# Repo: UtkarshPathrabe/Competitive-Coding @ ba322fbb1b88682d56a9b80bdd92a853f1caa84e
# Licenses: MIT | Size: 505 bytes | Stars: 13
# ----------------------------------------------------------------------------
from collections import defaultdict  # needed for the defaultdict used below

class Solution:
def countLargestGroup(self, n: int) -> int:
hashMap, maxGroupSize = defaultdict(int), 0
def getDigitSum(x):
currentSum = 0
while x:
currentSum += (x % 10)
x //= 10
return currentSum
for i in range(1, n + 1):
hashMap[getDigitSum(i)] += 1
maxGroupSize = max(maxGroupSize, hashMap[getDigitSum(i)])
        return sum(1 for _, val in hashMap.items() if val == maxGroupSize)
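
# Quick check of the solution above, using LeetCode's sample: for n = 13 the
# digit sums 1-4 each label a group of two numbers ({1,10}, {2,11}, {3,12},
# {4,13}), so four groups share the largest size.
assert Solution().countLargestGroup(13) == 4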

# ----------------------------------------------------------------------------
# File: tests/core/activity/registry_test.py
# Repo: SymphonyOSF/symphony-api-client-python @ 70137a893f4385381a3158ef80e1be156e0fc4bd
# Licenses: Apache-2.0 | Size: 10,073 bytes
# ----------------------------------------------------------------------------
from unittest.mock import MagicMock, AsyncMock
import pytest
from symphony.bdk.core.activity.parsing.command_token import MatchingUserIdMentionToken, StaticCommandToken
from symphony.bdk.gen.agent_model.v4_user import V4User
from symphony.bdk.core.activity.command import CommandActivity, SlashCommandActivity
from symphony.bdk.core.activity.form import FormReplyActivity
from symphony.bdk.core.activity.registry import ActivityRegistry
from symphony.bdk.core.service.session.session_service import SessionService
from symphony.bdk.gen.agent_model.v4_initiator import V4Initiator
from symphony.bdk.gen.agent_model.v4_message import V4Message
from symphony.bdk.gen.agent_model.v4_message_sent import V4MessageSent
from symphony.bdk.gen.agent_model.v4_stream import V4Stream
from symphony.bdk.core.activity.user_joined_room import UserJoinedRoomActivity
from symphony.bdk.gen.agent_model.v4_user_joined_room import V4UserJoinedRoom
from symphony.bdk.gen.agent_model.v4_symphony_elements_action import V4SymphonyElementsAction
@pytest.fixture(name="session_service")
def fixture_session_service():
session_service = MagicMock(SessionService)
session_service.get_session = AsyncMock()
return session_service
@pytest.fixture(name="command")
def fixture_command():
return MagicMock(CommandActivity)
@pytest.fixture(name="form")
def fixture_form():
return MagicMock(FormReplyActivity)
@pytest.fixture(name="user")
def fixture_user():
return MagicMock(UserJoinedRoomActivity)
@pytest.fixture(name="message_sent")
def fixture_message_sent():
return V4MessageSent(message=V4Message(message_id="message_id",
message="<div><p><span>hello world</span></p></div>",
stream=V4Stream(stream_id="stream_id")))
@pytest.fixture(name="elements_action")
def fixture_elements_action():
return V4SymphonyElementsAction(form_id="test_form",
form_values={"key": "value"})
@pytest.fixture(name="user_joined_room")
def fixture_user_joined_room():
return V4UserJoinedRoom(stream=V4Stream(stream_id="12345678"),
affected_user=V4User(user_id=0))
@pytest.fixture(name="activity_registry")
def fixture_activity_registry(session_service):
return ActivityRegistry(session_service)
@pytest.mark.asyncio
async def test_register(activity_registry, session_service, message_sent):
# call on_message_sent a first time
await activity_registry.on_message_sent(V4Initiator(), message_sent)
session_service.get_session.assert_called_once()
session_service.get_session.reset_mock()
# call on_message_sent a second time, get_session() is not performed twice
await activity_registry.on_message_sent(V4Initiator(), message_sent)
session_service.get_session.assert_not_called()
@pytest.mark.asyncio
async def test_register_different_activities_instance(activity_registry, command, form, user, message_sent,
elements_action, user_joined_room):
command.on_activity = AsyncMock()
form.on_activity = AsyncMock()
user.on_activity = AsyncMock()
activity_registry.register(command)
activity_registry.register(form)
activity_registry.register(user)
assert len(activity_registry._activity_list) == 3
await activity_registry.on_message_sent(V4Initiator(), message_sent)
command.before_matcher.assert_called_once()
form.before_matcher.assert_not_called()
user.before_matcher.assert_not_called()
command.reset_mock()
form.reset_mock()
user.reset_mock()
await activity_registry.on_symphony_elements_action(V4Initiator(), elements_action)
form.before_matcher.assert_called_once()
command.before_matcher.assert_not_called()
user.before_matcher.assert_not_called()
command.reset_mock()
form.reset_mock()
user.reset_mock()
await activity_registry.on_user_joined_room(V4Initiator(), user_joined_room)
user.before_matcher.assert_called_once()
form.before_matcher.assert_not_called()
command.before_matcher.assert_not_called()
@pytest.mark.asyncio
async def test_on_message_sent(activity_registry, message_sent, command):
command.on_activity = AsyncMock()
activity_registry.register(command)
await activity_registry.on_message_sent(V4Initiator(), message_sent)
assert len(activity_registry._activity_list) == 1
command.before_matcher.assert_called_once()
command.matches.assert_called_once()
command.on_activity.assert_called_once()
@pytest.mark.asyncio
async def test_on_message_sent_false_match(activity_registry, message_sent, command):
command.on_activity = AsyncMock()
command.matches.return_value = False
activity_registry.register(command)
await activity_registry.on_message_sent(V4Initiator(), message_sent)
command.before_matcher.assert_called_once()
command.matches.assert_called_once()
command.on_activity.assert_not_called()
@pytest.mark.asyncio
async def test_on_symphony_elements_action(activity_registry, elements_action, form):
form.on_activity = AsyncMock()
activity_registry.register(form)
await activity_registry.on_symphony_elements_action(V4Initiator(), elements_action)
assert len(activity_registry._activity_list) == 1
form.before_matcher.assert_called_once()
form.matches.assert_called_once()
form.on_activity.assert_called_once()
@pytest.mark.asyncio
async def test_on_symphony_elements_action_false_match(activity_registry, elements_action, form):
form.on_activity = AsyncMock()
form.matches.return_value = False
activity_registry.register(form)
await activity_registry.on_symphony_elements_action(V4Initiator(), elements_action)
form.before_matcher.assert_called_once()
form.matches.assert_called_once()
form.on_activity.assert_not_called()
@pytest.mark.asyncio
async def test_on_user_joined_room(activity_registry, user_joined_room, user):
user.on_activity = AsyncMock()
activity_registry.register(user)
await activity_registry.on_user_joined_room(V4UserJoinedRoom, user_joined_room)
assert len(activity_registry._activity_list) == 1
user.before_matcher.assert_called_once()
user.matches.assert_called_once()
user.on_activity.assert_called_once()
@pytest.mark.asyncio
async def test_on_user_joined_room_false_match(activity_registry, user_joined_room, user):
user.on_activity = AsyncMock()
user.matches.return_value = False
activity_registry.register(user)
await activity_registry.on_user_joined_room(V4Initiator(), user_joined_room)
user.before_matcher.assert_called_once()
user.matches.assert_called_once()
user.on_activity.assert_not_called()
@pytest.mark.asyncio
async def test_slash_command_decorator(activity_registry, message_sent):
@activity_registry.slash("/command")
async def listener(context):
pass
assert len(activity_registry._activity_list) == 1
assert isinstance(activity_registry._activity_list[0], SlashCommandActivity)
@pytest.mark.asyncio
async def test_slash_command_without_mention_bot(activity_registry, message_sent):
listener = AsyncMock()
mention_bot = False
command_name = "/command"
activity_registry.slash(command_name, mention_bot)(listener)
assert len(activity_registry._activity_list) == 1
slash_activity = activity_registry._activity_list[0]
assert isinstance(slash_activity, SlashCommandActivity)
assert slash_activity._name == command_name
assert slash_activity._requires_mention_bot == mention_bot
assert slash_activity._callback == listener
assert isinstance(slash_activity._command_pattern.tokens[0], StaticCommandToken)
@pytest.mark.asyncio
async def test_slash_command_with_mention_bot(activity_registry, message_sent):
listener = AsyncMock()
mention_bot = True
command_name = "/command"
activity_registry.slash(command=command_name, mention_bot=mention_bot)(listener)
assert len(activity_registry._activity_list) == 1
slash_activity = activity_registry._activity_list[0]
assert isinstance(slash_activity, SlashCommandActivity)
assert slash_activity._name == command_name
assert slash_activity._requires_mention_bot == mention_bot
assert slash_activity._callback == listener
assert isinstance(slash_activity._command_pattern.tokens[0], MatchingUserIdMentionToken)
@pytest.mark.asyncio
async def test_slash_same_command_name_and_mention(activity_registry, message_sent):
listener1 = AsyncMock()
listener2 = AsyncMock()
mention_bot = True
command_name = "/command"
activity_registry.slash(command_name, mention_bot)(listener1)
activity_registry.slash(command_name, mention_bot)(listener2)
assert len(activity_registry._activity_list) == 1
slash_activity = activity_registry._activity_list[0]
assert isinstance(slash_activity, SlashCommandActivity)
assert slash_activity._name == command_name
assert slash_activity._requires_mention_bot
assert slash_activity._callback == listener2
@pytest.mark.asyncio
async def test_slash_same_command_name_different_mention(activity_registry, message_sent):
listener1 = AsyncMock()
listener2 = AsyncMock()
command_name = "/command"
activity_registry.slash(command_name, True)(listener1)
activity_registry.slash(command_name, False)(listener2)
assert len(activity_registry._activity_list) == 2
slash_activity1 = activity_registry._activity_list[0]
assert isinstance(slash_activity1, SlashCommandActivity)
assert slash_activity1._name == command_name
assert slash_activity1._requires_mention_bot
assert slash_activity1._callback == listener1
slash_activity2 = activity_registry._activity_list[1]
assert isinstance(slash_activity2, SlashCommandActivity)
assert slash_activity2._name == command_name
assert not slash_activity2._requires_mention_bot
assert slash_activity2._callback == listener2

# ----------------------------------------------------------------------------
# File: toyClassification/HMC/train.py
# Repo: dataflowr/evaluating_bdl @ b7d7e3f2b8095a0ec43118d2b69b4b49e0b910f2
# Licenses: MIT | Size: 3,371 bytes | Stars: 110
# ----------------------------------------------------------------------------
# code-checked
# server-checked
from datasets import ToyDataset # (this needs to be imported before torch, because cv2 needs to be imported before torch for some reason)
import torch
import torch.utils.data
import torch.nn as nn
from torch.autograd import Variable
import pyro
from pyro.infer import EmpiricalMarginal
from pyro.infer.mcmc import MCMC, NUTS
from model_pyro import model, det_net
import pickle
pyro.enable_validation(True)
pyro.set_rng_seed(0)
train_dataset = ToyDataset()
train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=len(train_dataset))
for step, (x, y) in enumerate(train_loader):
x = Variable(x).cuda() # (shape: (batch_size, 2))
y = Variable(y).cuda() # (shape: (batch_size, ))
nuts_kernel = NUTS(model, jit_compile=False,)
posterior = MCMC(nuts_kernel, num_samples=1000, warmup_steps=1000, num_chains=1).run(x, y) # num_samples=1000, warmup_steps=1000
fc1_weight_samples = EmpiricalMarginal(posterior, sites=["module$$$fc1.weight"])._get_samples_and_weights()[0].cpu().numpy() # (shape: (num_samples, 1, shape1, shape2))
fc1_bias_samples = EmpiricalMarginal(posterior, sites=["module$$$fc1.bias"])._get_samples_and_weights()[0].cpu().numpy() # (shape: (num_samples, 1, shape1, shape2))
fc2_weight_samples = EmpiricalMarginal(posterior, sites=["module$$$fc2.weight"])._get_samples_and_weights()[0].cpu().numpy() # (shape: (num_samples, 1, shape1, shape2))
fc2_bias_samples = EmpiricalMarginal(posterior, sites=["module$$$fc2.bias"])._get_samples_and_weights()[0].cpu().numpy() # (shape: (num_samples, 1, shape1, shape2))
fc3_weight_samples = EmpiricalMarginal(posterior, sites=["module$$$fc3.weight"])._get_samples_and_weights()[0].cpu().numpy() # (shape: (num_samples, 1, shape1, shape2))
fc3_bias_samples = EmpiricalMarginal(posterior, sites=["module$$$fc3.bias"])._get_samples_and_weights()[0].cpu().numpy() # (shape: (num_samples, 1, shape1, shape2))
print ("fc1_weight_samples.shape:")
print (fc1_weight_samples.shape)
print ("fc1_bias_samples.shape:")
print (fc1_bias_samples.shape)
print ("###")
print ("fc2_weight_samples.shape:")
print (fc2_weight_samples.shape)
print ("fc2_bias_samples.shape:")
print (fc2_bias_samples.shape)
print ("###")
print ("fc3_weight_samples.shape:")
print (fc3_weight_samples.shape)
print ("fc3_bias_samples.shape:")
print (fc3_bias_samples.shape)
with open("%s/fc1_weight_samples.pkl" % det_net.model_dir, "wb") as file:
pickle.dump(fc1_weight_samples, file)
with open("%s/fc1_bias_samples.pkl" % det_net.model_dir, "wb") as file:
pickle.dump(fc1_bias_samples, file)
with open("%s/fc2_weight_samples.pkl" % det_net.model_dir, "wb") as file:
pickle.dump(fc2_weight_samples, file)
with open("%s/fc2_bias_samples.pkl" % det_net.model_dir, "wb") as file:
pickle.dump(fc2_bias_samples, file)
with open("%s/fc3_weight_samples.pkl" % det_net.model_dir, "wb") as file:
pickle.dump(fc3_weight_samples, file)
with open("%s/fc3_bias_samples.pkl" % det_net.model_dir, "wb") as file:
pickle.dump(fc3_bias_samples, file)
# with open("%s/fc1_weight_samples.pkl" % det_net.model_dir, "rb") as file: # (needed for python3)
# test = pickle.load(file)
# print (test)
# print (test.shape)

# ----------------------------------------------------------------------------
# File: src/lesson4/samples/randomMoveBall.py
# Repo: saji-ryu/pyxel-study @ b10ef781a86cfea4dad28efee89f851195189560
# Licenses: MIT | Size: 265 bytes
# ----------------------------------------------------------------------------
import pyxel
import random
pyxel.init(200, 200)
pyxel.cls(7)
for a in range(0, 10):
x = random.randint(0, 199)
y = random.randint(0, 199)
r = random.randint(5, 20)
c = random.randint(0, 15)
pyxel.circ(x, y, r, c)
pyxel.flip()
pyxel.show()

# ----------------------------------------------------------------------------
# File: smoke-test/tests/tokens/revokable_access_token_test.py
# Repo: Ankit-Keshari-Vituity/datahub @ 1b50709e20a2d4f0079bbb6cda14889d6e2d3680
# Licenses: Apache-2.0 | Size: 13,006 bytes | Stars: 1,603
# ----------------------------------------------------------------------------
import pytest
import time
import requests
from tests.utils import FRONTEND_ENDPOINT
from time import sleep
from tests.utils import ingest_file_via_rest
from datahub.cli.ingest_cli import get_session_and_host
@pytest.fixture(autouse=True)
def test_setup():
"""Fixture to execute asserts before and after a test is run"""
admin_session = loginAs("datahub", "datahub")
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] == 0
assert not res_data["data"]["listAccessTokens"]["tokens"]
ingest_file_via_rest("tests/tokens/revokable_test_data.json")
sleep(5)
yield
sleep(5)
# Clean up
res_data = listAccessTokens(admin_session)
for metadata in res_data["data"]["listAccessTokens"]["tokens"]:
revokeAccessToken(admin_session, metadata["id"])
@pytest.mark.dependency(depends=["test_healthchecks", "test_run_ingestion"])
def test_admin_can_create_list_and_revoke_tokens():
admin_session = loginAs("datahub", "datahub")
# Using a super account, there should be no tokens
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
# Using a super account, generate a token for itself.
res_data = generateAccessToken_v2(admin_session, "urn:li:corpuser:datahub")
assert res_data
assert res_data["data"]
assert res_data["data"]["createAccessToken"]
assert res_data["data"]["createAccessToken"]["accessToken"]
assert res_data["data"]["createAccessToken"]["metadata"]["actorUrn"] == "urn:li:corpuser:datahub"
admin_tokenId = res_data["data"]["createAccessToken"]["metadata"]["id"]
# Using a super account, list the previously created token.
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 1
assert res_data["data"]["listAccessTokens"]["tokens"][1]["actorUrn"] == "urn:li:corpuser:datahub"
assert res_data["data"]["listAccessTokens"]["tokens"][1]["ownerUrn"] == "urn:li:corpuser:datahub"
# Check that the super account can revoke tokens that it created
res_data = revokeAccessToken(admin_session, admin_tokenId)
assert res_data
assert res_data["data"]
assert res_data["data"]["revokeAccessToken"]
assert res_data["data"]["revokeAccessToken"] == True
# Using a super account, there should be no tokens
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
@pytest.mark.dependency(depends=["test_healthchecks", "test_run_ingestion"])
def test_admin_can_create_and_revoke_tokens_for_other_user():
admin_session = loginAs("datahub", "datahub")
# Using a super account, there should be no tokens
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
# Using a super account, generate a token for another user.
res_data = generateAccessToken_v2(admin_session, "urn:li:corpuser:user")
assert res_data
assert res_data["data"]
assert res_data["data"]["createAccessToken"]
assert res_data["data"]["createAccessToken"]["accessToken"]
assert res_data["data"]["createAccessToken"]["metadata"]["actorUrn"] == "urn:li:corpuser:user"
user_tokenId = res_data["data"]["createAccessToken"]["metadata"]["id"]
# Using a super account, list the previously created tokens.
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 1
assert res_data["data"]["listAccessTokens"]["tokens"][0]["actorUrn"] == "urn:li:corpuser:user"
assert res_data["data"]["listAccessTokens"]["tokens"][0]["ownerUrn"] == "urn:li:corpuser:datahub"
# Check that the super account can revoke tokens that it created for another user
res_data = revokeAccessToken(admin_session, user_tokenId)
assert res_data
assert res_data["data"]
assert res_data["data"]["revokeAccessToken"]
assert res_data["data"]["revokeAccessToken"] == True
# Using a super account, there should be no tokens
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
"""
@pytest.mark.dependency(depends=["test_healthchecks", "test_run_ingestion"])
def test_non_admin_can_create_list_revoke_tokens():
user_session = loginAs("user", "user")
admin_session = loginAs("datahub", "datahub")
# Normal user should be able to generate token for himself.
res_data = generateAccessToken_v2(user_session, "urn:li:corpuser:user")
assert res_data
assert res_data["data"]
assert res_data["data"]["createAccessToken"]
assert res_data["data"]["createAccessToken"]["accessToken"]
assert res_data["data"]["createAccessToken"]["metadata"]["actorUrn"] == "urn:li:corpuser:user"
user_tokenId = res_data["data"]["createAccessToken"]["metadata"]["id"]
# User should be able to list his own token
res_data = listAccessTokens(user_session, [{"field": "ownerUrn","value": "urn:li:corpuser:user"}])
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 1
assert res_data["data"]["listAccessTokens"]["tokens"][0]["actorUrn"] == "urn:li:corpuser:user"
assert res_data["data"]["listAccessTokens"]["tokens"][0]["ownerUrn"] == "urn:li:corpuser:user"
assert res_data["data"]["listAccessTokens"]["tokens"][0]["id"] == user_tokenId
# User should be able to revoke his own token
res_data = revokeAccessToken(user_session, user_tokenId)
assert res_data
assert res_data["data"]
assert res_data["data"]["revokeAccessToken"]
assert res_data["data"]["revokeAccessToken"] == True
    # Using a normal account, check that all its tokens were removed.
res_data = listAccessTokens(user_session, [{"field": "ownerUrn","value": "urn:li:corpuser:user"}])
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
@pytest.mark.dependency(depends=["test_healthchecks", "test_run_ingestion"])
def test_admin_can_manage_tokens_generated_by_other_user():
user_session = loginAs("user", "user")
admin_session = loginAs("datahub", "datahub")
# Using a super account, there should be no tokens
res_data = listAccessTokens(admin_session)
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
# Normal user should be able to generate token for himself.
res_data = generateAccessToken_v2(user_session, "urn:li:corpuser:user")
assert res_data
assert res_data["data"]
assert res_data["data"]["createAccessToken"]
assert res_data["data"]["createAccessToken"]["accessToken"]
assert res_data["data"]["createAccessToken"]["metadata"]["actorUrn"] == "urn:li:corpuser:user"
user_tokenId = res_data["data"]["createAccessToken"]["metadata"]["id"]
# Admin should be able to list other tokens
res_data = listAccessTokens(admin_session, [{"field": "actorUrn","value": "urn:li:corpuser:user"}])
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 1
assert res_data["data"]["listAccessTokens"]["tokens"][0]["actorUrn"] == "urn:li:corpuser:user"
assert res_data["data"]["listAccessTokens"]["tokens"][0]["ownerUrn"] == "urn:li:corpuser:user"
assert res_data["data"]["listAccessTokens"]["tokens"][0]["id"] == user_tokenId
# Admin can delete token created by someone else.
res_data = revokeAccessToken(admin_session, user_tokenId)
assert res_data
assert res_data["data"]
assert res_data["data"]["revokeAccessToken"]
assert res_data["data"]["revokeAccessToken"] == True
    # Using a normal account, check that all its tokens were removed.
res_data = listAccessTokens(user_session, [{"field": "actorUrn","value": "urn:li:corpuser:user"}])
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
    # Using the super account, check that all tokens were removed.
res_data = listAccessTokens(admin_session, [{"field": "actorUrn","value": "urn:li:corpuser:user"}])
assert res_data
assert res_data["data"]
assert res_data["data"]["listAccessTokens"]["total"] is not None
assert len(res_data["data"]["listAccessTokens"]["tokens"]) == 0
@pytest.mark.dependency(depends=["test_healthchecks", "test_run_ingestion"])
def test_non_admin_can_not_generate_tokens_for_others():
user_session = loginAs("user", "user")
# Normal user should not be able to generate token for another user
res_data = generateAccessToken_v2(user_session, "urn:li:corpuser:datahub")
assert res_data
assert res_data["errors"]
assert res_data["errors"][0]["message"] == "Unauthorized to perform this action. Please contact your DataHub administrator."
"""
def generateAccessToken_v1(session, actorUrn):
# Create new token
json = {
"query": """query getAccessToken($input: GetAccessTokenInput!) {\n
getAccessToken(input: $input) {\n
accessToken\n
}\n
}""",
"variables": {
"input": {
"type": "PERSONAL",
"actorUrn": actorUrn,
"duration": "ONE_HOUR"
}
}
}
response = session.post(
f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=json
)
response.raise_for_status()
return response.json()
def generateAccessToken_v2(session, actorUrn):
# Create new token
json = {
"query": """mutation createAccessToken($input: CreateAccessTokenInput!) {\n
createAccessToken(input: $input) {\n
accessToken\n
metadata {\n
id\n
actorUrn\n
ownerUrn\n
name\n
description\n
}
}\n
}""",
"variables": {
"input": {
"type": "PERSONAL",
"actorUrn": actorUrn,
"duration": "ONE_HOUR",
"name": "my token"
}
}
}
response = session.post(
f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=json
)
response.raise_for_status()
sleep(5)
return response.json()
def listAccessTokens(session, filters=None):
# Get count of existing tokens
    list_input = {
        "start": "0",
        "count": "20",
    }
    if filters:
        list_input['filters'] = filters
json = {
"query": """query listAccessTokens($input: ListAccessTokenInput!) {\n
listAccessTokens(input: $input) {\n
start\n
count\n
total\n
tokens {\n
urn\n
id\n
actorUrn\n
ownerUrn\n
}\n
}\n
}""",
"variables": {
"input": input
}
}
response = session.post(
f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=json
)
response.raise_for_status()
return response.json()
def revokeAccessToken(session, tokenId):
# Revoke token
json = {
"query": """mutation revokeAccessToken($tokenId: String!) {\n
revokeAccessToken(tokenId: $tokenId)
}""",
"variables": {
"tokenId": tokenId
}
}
response = session.post(
f"{FRONTEND_ENDPOINT}/api/v2/graphql", json=json
)
sleep(5)
response.raise_for_status()
return response.json()
def loginAs(username, password):
session = requests.Session()
    # Let requests serialize the payload so quotes in credentials are escaped.
    response = session.post(
        f"{FRONTEND_ENDPOINT}/logIn",
        json={"username": username, "password": password},
    )
response.raise_for_status()
return session | 38.365782 | 128 | 0.663002 |
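# --- Hedged usage sketch (added for illustration, not part of the suite) ---
# Shows how the helpers above chain into a full token round trip. It reuses
# FRONTEND_ENDPOINT and the "datahub"/"datahub" credentials the tests assume;
# the function name is hypothetical and prefixed so pytest does not collect it.
def _example_token_round_trip():
    session = loginAs("datahub", "datahub")
    created = generateAccessToken_v2(session, "urn:li:corpuser:datahub")
    token_id = created["data"]["createAccessToken"]["metadata"]["id"]
    listed = listAccessTokens(session)
    assert listed["data"]["listAccessTokens"]["total"] >= 1
    revokeAccessToken(session, token_id)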
24ea70916a97caebfa79c222926c5a5dea999aa8 | 3,786 | py | Python | MyFirstApps/PySys/acts/mbox/Box.py | EnzoH8321/Minis | 75c94c865e26128f35bdab481c4cad792beb91b2 | [
"MIT"
] | null | null | null | MyFirstApps/PySys/acts/mbox/Box.py | EnzoH8321/Minis | 75c94c865e26128f35bdab481c4cad792beb91b2 | [
"MIT"
] | null | null | null | MyFirstApps/PySys/acts/mbox/Box.py | EnzoH8321/Minis | 75c94c865e26128f35bdab481c4cad792beb91b2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# PyLibs
from PyQt5 import QtCore, QtGui, QtWidgets
# Class
class Ui_MainWindow(object):
# App
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(250, 150)
MainWindow.setMinimumSize(QtCore.QSize(250, 150))
MainWindow.setMaximumSize(QtCore.QSize(250, 150))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("other\icon.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
MainWindow.setWindowIcon(icon)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.close = QtWidgets.QPushButton(self.centralwidget)
self.close.setGeometry(QtCore.QRect(170, 120, 75, 25))
self.close.setObjectName("close")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(0, 10, 41, 41))
self.label.setText("")
self.label.setPixmap(QtGui.QPixmap("other\left-image.png"))
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(60, 10, 71, 31))
font = QtGui.QFont()
font.setPointSize(15)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(10, 50, 241, 16))
font = QtGui.QFont()
font.setPointSize(8)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(10, 70, 221, 16))
font = QtGui.QFont()
font.setPointSize(9)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.label_5 = QtWidgets.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(10, 90, 171, 20))
self.label_5.setObjectName("label_5")
        self.label_6 = QtWidgets.QLabel(self.centralwidget)
        self.label_6.setGeometry(QtCore.QRect(10, 120, 141, 21))
        font.setBold(False)
        font.setWeight(50)
        font.setKerning(False)
        self.label_6.setFont(font)
        self.label_6.setObjectName("label_6")
        self.label_7 = QtWidgets.QLabel(self.centralwidget)
        self.label_7.setGeometry(QtCore.QRect(150, 5, 65, 41))
        self.label_7.setObjectName("label_7")
MainWindow.setCentralWidget(self.centralwidget)
# Signals
self.retranslateUi(MainWindow)
self.close.clicked.connect(MainWindow.close)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
# Main Window Setting
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Help"))
self.close.setText(_translate("MainWindow", "TNX !"))
self.label_2.setText(_translate("MainWindow", "PySys"))
self.label_3.setText(_translate("MainWindow", "This Program Can Does Things With Your System"))
self.label_4.setText(_translate("MainWindow", "Developer => Amirhossein Mohammadi"))
self.label_5.setText(_translate("MainWindow", "Choose One Option And Press OK !"))
self.label_6.setText(_translate("MainWindow", "Developed By PyQT"))
self.label_7.setText(_translate("MainWindow", "Version 1.0.0"))
# End
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| 43.517241 | 104 | 0.65663 |
83bbccc79630bb9ca04aacbf8b2cd9fc66660825 | 1,280 | py | Python | api.py | OscarMelin/Boten-Anna | 4924dd62f962bee5761f716475528fc8a5469bf1 | [
"MIT"
] | 1 | 2015-06-16T11:24:27.000Z | 2015-06-16T11:24:27.000Z | api.py | OscarMelin/Boten-Anna | 4924dd62f962bee5761f716475528fc8a5469bf1 | [
"MIT"
] | null | null | null | api.py | OscarMelin/Boten-Anna | 4924dd62f962bee5761f716475528fc8a5469bf1 | [
"MIT"
] | null | null | null | import json
import urllib.request
API_URL = "https://slack.com/api/"
with open("config.json") as config_file:
settings = json.load(config_file)
USERTOKENSTRING = settings["USERTOKENSTRING"]
URLTOKENSTRING = settings["URLTOKENSTRING"]
TEAMNAMESTRING = settings["TEAMNAMESTRING"]
def get_users():
with urllib.request.urlopen("{0}users.list?token={1}".format(API_URL, USERTOKENSTRING)) as response:
return json.loads(response.read().decode("utf-8"))["members"]
def get_channels():
with urllib.request.urlopen("{0}channels.list?token={1}".format(API_URL, USERTOKENSTRING)) as response:
return json.loads(response.read().decode("utf-8"))["channels"]
def post_message(message):
#channel "test" id:C06E3DG6S
with urllib.request.urlopen("{0}chat.postMessage?token={1}&channel=C06E3DG6S&text={2}&username=Boten%20Anna".format( \
API_URL, USERTOKENSTRING, message)) as response:
return response.read().decode("utf-8")
def get_latest_messages(amount):
#channel "test" id:C06E3DG6S
with urllib.request.urlopen("{0}channels.history?token={1}&channel=C06E3DG6S&count={2}".format( \
API_URL, USERTOKENSTRING, str(amount))) as response:
return json.loads(response.read().decode("utf-8"))["messages"]
| 38.787879 | 122 | 0.709375 |
56bf6ba851b4345c57af1e01bb7465fc3a09d976 | 190 | py | Python | LifeIsShort/c/callback.py | loopyme/Life-is-Short | bd37e8597971283aa35bc31e29543c071f03acba | [
"MIT"
] | 1 | 2020-04-02T02:03:21.000Z | 2020-04-02T02:03:21.000Z | LifeIsShort/c/callback.py | loopyme/Life-is-Short | bd37e8597971283aa35bc31e29543c071f03acba | [
"MIT"
] | null | null | null | LifeIsShort/c/callback.py | loopyme/Life-is-Short | bd37e8597971283aa35bc31e29543c071f03acba | [
"MIT"
] | null | null | null | words = "Life is short"
class TaskRunner:
@staticmethod
def run(func, *args, **kwargs):
func(*args, **kwargs)
runner = TaskRunner()
runner.run(lambda x: print(x), words)
| 15.833333 | 37 | 0.631579 |
d92ba1f6d8629a1942755d037af319ee02b03c64 | 295 | py | Python | src/config.py | Mathematical-Olympiads-Discord-Server/mods-website | de0fd5b107a1f40f289c72d8e370bf3dc4f77d1c | [
"MIT"
] | 2 | 2020-03-04T04:33:55.000Z | 2020-03-04T06:20:59.000Z | src/config.py | Mathematical-Olympiads-Discord-Server/mods-website | de0fd5b107a1f40f289c72d8e370bf3dc4f77d1c | [
"MIT"
] | 25 | 2020-04-03T12:34:12.000Z | 2020-08-11T03:27:38.000Z | src/config.py | Mathematical-Olympiads-Discord-Server/mods-website | de0fd5b107a1f40f289c72d8e370bf3dc4f77d1c | [
"MIT"
] | 1 | 2020-06-20T11:58:26.000Z | 2020-06-20T11:58:26.000Z | #!/usr/bin/python
email = "mo.discord.server@gmail.com"
# Technically, these can be non-consecutive
previous_month = "2021-07-modsmo-int"
next_month = "2021-07-modsmo-adv"
# On GitHub, the links don't need the .html suffix,
# and directories forward to index.html inside.
github = True
| 24.583333 | 55 | 0.735593 |
2c73725ca3b39956b4bf1a70c8be35bda0f04b78 | 3,592 | py | Python | berserker_resolver/resolver.py | DmitryFillo/berserker_resolver | 8f6aa8530b54d19de4bde3f3c84f566d875c3673 | [
"BSD-2-Clause"
] | 19 | 2015-02-28T17:45:06.000Z | 2022-03-22T10:36:44.000Z | berserker_resolver/resolver.py | DmitryFillo/berserker_resolver | 8f6aa8530b54d19de4bde3f3c84f566d875c3673 | [
"BSD-2-Clause"
] | 5 | 2015-08-08T09:57:50.000Z | 2018-01-03T15:47:48.000Z | berserker_resolver/resolver.py | DmitryFillo/berserker_resolver | 8f6aa8530b54d19de4bde3f3c84f566d875c3673 | [
"BSD-2-Clause"
] | 5 | 2015-04-09T17:09:15.000Z | 2020-11-02T15:04:46.000Z | import re
import random
import threading
import dns.resolver
import dns.exception
from berserker_resolver.utils import locked_iterator
class BaseResolver(object):
_regexp_www = re.compile(r'(?:www\.){1}(.+\..+)', re.I)
_regexp_www_combine = re.compile(r'(?:www\.)?(.+\..+)', re.I)
def __init__(self, *args, **kwargs):
self.tries = kwargs.get('tries', 48)
self.timeout = kwargs.get('timeout', 3)
self.qname = kwargs.get('qname', 'A')
self.nameservers = kwargs.get('nameservers', ['8.8.8.8', '8.8.4.4', '77.88.8.8', '77.88.8.1', '84.200.69.80', '84.200.70.40',])
self.verbose = kwargs.get('verbose', False)
self.www = kwargs.get('www', False)
self.www_combine = kwargs.get('www_combine', False)
self._backend = dns.resolver.Resolver(configure=False)
self._backend.lifetime = self.timeout
def resolve(self, domains):
domains = self._bind(domains)
result, result_exception = self._run(domains)
if self.verbose:
return {
'success' : result,
'error' : result_exception,
}
else:
return result
def query(self, domain, ns=None):
self._backend.nameservers = [ns or random.choice(self.nameservers)]
return self._backend.query(domain, self.qname)
def _bind(self, domains):
for d in domains:
for t in range(self.tries):
for n in self.nameservers:
if self.www:
r = self._regexp_www.match(d)
if not r:
for i in (d, 'www.'+d):
yield i, n
else:
for i in (d, r.group(1)):
yield i, n
else:
yield d, n
def _build(self, d, answer, result, result_exception):
domain, ns = d
if self.www_combine:
domain = self._regexp_www_combine.match(domain).group(1)
if not isinstance(answer, Exception):
result.setdefault(domain, set()).update(iter(answer))
elif self.verbose:
result_exception.setdefault(domain, dict()).update({ns: answer})
def _process(self, domains, result, result_exception):
for d in domains:
try:
answer = self.query(*d)
except dns.exception.DNSException as e:
answer = e
self._build(d, answer, result, result_exception)
def _run(self, domains):
result = {}
result_exception = {}
self._process(domains, result, result_exception)
return result, result_exception
class Resolver(BaseResolver):
def __init__(self, *args, **kwargs):
super(Resolver, self).__init__(*args, **kwargs)
self.threads = kwargs.get('threads', 512)
self._lock = threading.Lock()
@locked_iterator
def _bind(self, *args, **kwargs):
return super(Resolver, self)._bind(*args, **kwargs)
def _build(self, *args, **kwargs):
with self._lock:
return super(Resolver, self)._build(*args, **kwargs)
def _run(self, domains):
result = {}
result_exception = {}
threads = []
for i in range(self.threads):
t = threading.Thread(target=self._process, args=(domains, result, result_exception))
t.start()
threads.append(t)
for i in threads:
i.join()
return result, result_exception
| 34.538462 | 135 | 0.552339 |
b04edeca78fa27350a14f300615eb714a12b9b9d | 5,440 | py | Python | invite/admin.py | lmarvaud/django-invite | 86db0fa278f373cc9481aa9af22fb927ebc1a5d9 | [
"MIT"
] | null | null | null | invite/admin.py | lmarvaud/django-invite | 86db0fa278f373cc9481aa9af22fb927ebc1a5d9 | [
"MIT"
] | 4 | 2019-04-21T19:46:33.000Z | 2019-05-26T11:30:01.000Z | invite/admin.py | lmarvaud/django-invite | 86db0fa278f373cc9481aa9af22fb927ebc1a5d9 | [
"MIT"
] | null | null | null | """
Admin configurations for django-invite project
"""
from django.conf import settings
from django.contrib import admin, messages
from django.forms import BooleanField, ModelForm
from django.urls import reverse
from django.utils.html import format_html
from django.utils.translation import gettext as _
from invite.join_and import join_and
from .models import Family, Guest, Accompany, Event, MailTemplate
from .send_mass_html_mail import send_mass_html_mail
class InviteInline(admin.TabularInline):
"""
Family guest admin view
    A family requires at least one guest
"""
model = Guest
extra = 2
min_num = 1
class AccompanyInline(admin.TabularInline):
"""Family accompanies admin view"""
model = Accompany
extra = 1
min_num = 0
class FamilyInvitationForm(ModelForm):
"""Form to permit Family Invitation to be sent"""
send_mail = BooleanField(label=_('Send the mail'), required=False)
class FamilyInvitationInline(admin.TabularInline):
"""Invitation families admin view"""
autocomplete_fields = ("family", "event")
model = Event.families.through
readonly_fields = ('show_mail',)
form = FamilyInvitationForm
extra = 1
min_num = 0
@staticmethod
def show_mail(instance):
"""Extra field adding a link to preview the email"""
if instance.pk:
if instance.event.has_mailtemplate:
url = reverse('show_mail', kwargs={"event_id": instance.event_id,
"family_id": instance.family_id})
return format_html(u'<a href="{}">{}</a>'.format(url, _("Preview the mail")))
return _("The event has no email template set")
return ""
class MailTemplateInline(admin.StackedInline):
"""
MailTemplate admin view
An event can only have one mail template (for now ?)
"""
model = MailTemplate
class FamilyInvitationModelAdminMixin(admin.ModelAdmin):
"""
Mixin model admin for family invitation management
Inlines is preset to add FamilyInvitation Inline and saving it will send the email from the
formset
"""
inlines = [FamilyInvitationInline]
def save_formset(self, request, form, formset, change):
"""Send FamilyInvitation mail after saving the formset"""
super().save_formset(request, form, formset, change)
if 'send_mail' in formset.form.declared_fields and \
'event' in formset.form.base_fields and \
'family' in formset.form.base_fields:
self._send_mail(request, formset)
@staticmethod
def _send_mail(request, formset):
"""Send the emails for a formset from a FamilyInvitationForm"""
family_invitations = {(data['family'], data['event'])
for data in formset.cleaned_data
if data and data["send_mail"]}
if family_invitations:
to_send = (
event.gen_mass_email(family)
for family, event in family_invitations
)
send_result = send_mass_html_mail(
to_send,
reply_to=["{host} <{email}>".format(host=host, email=settings.INVITE_HOSTS[host])
for host in settings.INVITE_HOSTS]
)
messages.add_message(request, messages.INFO,
_("%(result)d messages send") % {"result": send_result})
@admin.register(Family, site=admin.site)
class FamilyAdmin(FamilyInvitationModelAdminMixin):
"""
Family admin view
    This view uses FamilyInvitationInline to send an invitation to a selection of guests
"""
inlines = [InviteInline, AccompanyInline] + FamilyInvitationModelAdminMixin.inlines
search_fields = ("guests__name", "accompanies__name")
@admin.register(Event, site=admin.site)
class EventAdmin(FamilyInvitationModelAdminMixin):
"""
Event admin view
    This view uses FamilyInvitationInline to send an invitation to a selection of guests
"""
exclude = ('families', )
actions = ["send_mail"]
search_fields = ("name", "date")
inlines = [MailTemplateInline] + FamilyInvitationModelAdminMixin.inlines
def send_mail(self, request, events):
"""
        Email action: send the invitation email to the guests of the
        selected events
        :param request: the admin request
        :param events: the queryset of selected events to send the mail for
:return:
"""
events_without_mail = [str(event) for event in events if not event.has_mailtemplate]
if events_without_mail:
self.message_user(request, _("The %(events)s has no email template set") %
{"events": join_and(events_without_mail)},
messages.ERROR)
return
to_send = (
invitation.gen_mass_email(family, request=request)
for invitation in events
for family in invitation.families.all()
)
result = send_mass_html_mail(
to_send,
reply_to=["{host} <{email}>".format(host=host, email=settings.INVITE_HOSTS[host])
for host in settings.INVITE_HOSTS]
)
self.message_user(request, _("%(result)d messages send") % {"result": result})
send_mail.short_description = _("Send the email")
| 35.096774 | 97 | 0.641544 |
4c64283c13360b047a884cf4b6ced0d0bce12f46 | 7,719 | py | Python | tests/handlers/auth_logging_test.py | lsst/jwt_authorizer | 76b984c809ad10843f2cd6f543e88ee1002bdb96 | [
"MIT"
] | null | null | null | tests/handlers/auth_logging_test.py | lsst/jwt_authorizer | 76b984c809ad10843f2cd6f543e88ee1002bdb96 | [
"MIT"
] | 5 | 2019-06-18T17:28:52.000Z | 2020-04-20T22:15:54.000Z | tests/handlers/auth_logging_test.py | lsst/gafaelfawr | a9dd91c7770e194d609f64fee6fd955a27231bd6 | [
"MIT"
] | 1 | 2019-06-25T16:04:57.000Z | 2019-06-25T16:04:57.000Z | """Tests for logging in the /auth route."""
from __future__ import annotations
import base64
import pytest
from _pytest.logging import LogCaptureFixture
from httpx import AsyncClient
from gafaelfawr.factory import Factory
from ..support.constants import TEST_HOSTNAME
from ..support.logging import parse_log
from ..support.tokens import create_session_token
@pytest.mark.asyncio
async def test_success(
client: AsyncClient, factory: Factory, caplog: LogCaptureFixture
) -> None:
token_data = await create_session_token(factory, scopes=["exec:admin"])
# Successful request with X-Forwarded-For and a bearer token.
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:admin"},
headers={
"Authorization": f"Bearer {token_data.token}",
"X-Original-Uri": "/foo",
"X-Forwarded-For": "192.0.2.1",
},
)
assert r.status_code == 200
expected_log = {
"auth_uri": "/foo",
"event": "Token authorized",
"httpRequest": {
"requestMethod": "GET",
"requestUrl": f"https://{TEST_HOSTNAME}/auth?scope=exec%3Aadmin",
"remoteIp": "192.0.2.1",
},
"required_scopes": ["exec:admin"],
"satisfy": "all",
"scopes": ["exec:admin"],
"severity": "info",
"token": token_data.token.key,
"token_source": "bearer",
"user": token_data.username,
}
assert parse_log(caplog) == [expected_log]
# Successful request with HTTP Basic authentication in the username.
basic = f"{token_data.token}:x-oauth-basic".encode()
basic_b64 = base64.b64encode(basic).decode()
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:admin"},
headers={
"Authorization": f"Basic {basic_b64}",
"X-Original-Uri": "/foo",
"X-Forwarded-For": "192.0.2.1",
},
)
assert r.status_code == 200
expected_log["token_source"] = "basic-username"
assert parse_log(caplog) == [expected_log]
# The same with HTTP Basic in the password.
basic = f"x-oauth-basic:{token_data.token}".encode()
basic_b64 = base64.b64encode(basic).decode()
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:admin"},
headers={
"Authorization": f"Basic {basic_b64}",
"X-Original-Uri": "/foo",
"X-Forwarded-For": "192.0.2.1",
},
)
assert r.status_code == 200
expected_log["token_source"] = "basic-password"
assert parse_log(caplog) == [expected_log]
@pytest.mark.asyncio
async def test_authorization_failed(
client: AsyncClient, factory: Factory, caplog: LogCaptureFixture
) -> None:
token_data = await create_session_token(factory, scopes=["exec:admin"])
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:test", "satisfy": "any"},
headers={
"Authorization": f"Bearer {token_data.token}",
"X-Original-Uri": "/foo",
},
)
assert r.status_code == 403
assert parse_log(caplog) == [
{
"auth_uri": "/foo",
"error": "Token missing required scope",
"event": "Permission denied",
"httpRequest": {
"requestMethod": "GET",
"requestUrl": (
f"https://{TEST_HOSTNAME}/auth"
"?scope=exec%3Atest&satisfy=any"
),
"remoteIp": "127.0.0.1",
},
"required_scopes": ["exec:test"],
"satisfy": "any",
"scopes": ["exec:admin"],
"severity": "warning",
"token": token_data.token.key,
"token_source": "bearer",
"user": token_data.username,
}
]
@pytest.mark.asyncio
async def test_original_url(
client: AsyncClient, factory: Factory, caplog: LogCaptureFixture
) -> None:
token_data = await create_session_token(factory)
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:admin"},
headers={
"Authorization": f"bearer {token_data.token}",
"X-Original-Url": "https://example.com/test",
},
)
assert r.status_code == 403
expected_log = {
"auth_uri": "https://example.com/test",
"error": "Token missing required scope",
"event": "Permission denied",
"httpRequest": {
"requestMethod": "GET",
"requestUrl": f"https://{TEST_HOSTNAME}/auth?scope=exec%3Aadmin",
"remoteIp": "127.0.0.1",
},
"required_scopes": ["exec:admin"],
"satisfy": "all",
"scopes": ["user:token"],
"severity": "warning",
"token": token_data.token.key,
"token_source": "bearer",
"user": token_data.username,
}
assert parse_log(caplog) == [expected_log]
# Check with both X-Original-URI and X-Original-URL. The former should
# override the latter.
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:admin"},
headers={
"Authorization": f"bearer {token_data.token}",
"X-Original-URI": "/foo",
"X-Original-URL": "https://example.com/test",
},
)
assert r.status_code == 403
expected_log["auth_uri"] = "/foo"
assert parse_log(caplog) == [expected_log]
@pytest.mark.asyncio
async def test_chained_x_forwarded(
client: AsyncClient, factory: Factory, caplog: LogCaptureFixture
) -> None:
token_data = await create_session_token(factory)
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:admin"},
headers={
"Authorization": f"bearer {token_data.token}",
"X-Forwarded-For": "2001:db8:85a3:8d3:1319:8a2e:370:734, 10.0.0.1",
"X-Forwarded-Proto": "https, http",
"X-Original-Uri": "/foo",
},
)
assert r.status_code == 403
assert parse_log(caplog) == [
{
"auth_uri": "/foo",
"error": "Token missing required scope",
"event": "Permission denied",
"httpRequest": {
"requestMethod": "GET",
"requestUrl": (
f"https://{TEST_HOSTNAME}/auth?scope=exec%3Aadmin"
),
"remoteIp": "2001:db8:85a3:8d3:1319:8a2e:370:734",
},
"required_scopes": ["exec:admin"],
"satisfy": "all",
"scopes": ["user:token"],
"severity": "warning",
"token": token_data.token.key,
"token_source": "bearer",
"user": token_data.username,
}
]
@pytest.mark.asyncio
async def test_invalid_token(
client: AsyncClient, caplog: LogCaptureFixture
) -> None:
caplog.clear()
r = await client.get(
"/auth",
params={"scope": "exec:admin"},
headers={"Authorization": "Bearer blah"},
)
assert r.status_code == 401
assert parse_log(caplog) == [
{
"auth_uri": "NONE",
"error": "Token does not start with gt-",
"event": "Invalid token",
"httpRequest": {
"requestMethod": "GET",
"requestUrl": (
f"https://{TEST_HOSTNAME}/auth?scope=exec%3Aadmin"
),
"remoteIp": "127.0.0.1",
},
"required_scopes": ["exec:admin"],
"satisfy": "all",
"severity": "warning",
"token_source": "bearer",
}
]
| 30.389764 | 79 | 0.545278 |
ba2f37ae5a77a00c92d022170097c184e46bbf5d | 1,351 | py | Python | pyosu/types/game_modes.py | obayemi/Osu.py | 3d6e5390230732e716c9251d9e978fbf1f939ca4 | [
"MIT"
] | null | null | null | pyosu/types/game_modes.py | obayemi/Osu.py | 3d6e5390230732e716c9251d9e978fbf1f939ca4 | [
"MIT"
] | null | null | null | pyosu/types/game_modes.py | obayemi/Osu.py | 3d6e5390230732e716c9251d9e978fbf1f939ca4 | [
"MIT"
] | null | null | null | # MIT License
# Copyright (c) 2018 Renondedju
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
class GameMode():
"""
    Game mode class. Used to represent the different game modes.
    Osu (osu!standard)      = 0
    Taiko                   = 1
    Catch (Catch the Beat)  = 2
    Mania (osu!mania)       = 3
"""
Osu = 0
Taiko = 1
Catch = 2
Mania = 3 | 36.513514 | 80 | 0.717987 |
d3032b63d387c5498a63b9d777cc69a7666c4d24 | 32,933 | py | Python | libs/blocks/blocks/algorithms/__init__.py | dendisuhubdy/attention-lvcsr | 598d487c118e66875fdd625baa84ed29d283b800 | [
"MIT"
] | 295 | 2015-09-25T21:15:04.000Z | 2022-01-13T01:16:18.000Z | libs/blocks/blocks/algorithms/__init__.py | shenshenzhanzhan/attention-lvcsr | 598d487c118e66875fdd625baa84ed29d283b800 | [
"MIT"
] | 21 | 2015-10-28T19:06:32.000Z | 2022-03-11T23:13:05.000Z | libs/blocks/blocks/algorithms/__init__.py | shenshenzhanzhan/attention-lvcsr | 598d487c118e66875fdd625baa84ed29d283b800 | [
"MIT"
] | 114 | 2015-09-26T21:23:02.000Z | 2021-11-19T02:36:41.000Z | """Training algorithms."""
import logging
import itertools
from abc import ABCMeta, abstractmethod
from collections import OrderedDict
from six.moves import reduce
from picklable_itertools.extras import equizip
import theano
from six import add_metaclass
from theano import tensor
from blocks.graph import ComputationGraph
from blocks.roles import add_role, ALGORITHM_HYPERPARAMETER, ALGORITHM_BUFFER
from blocks.theano_expressions import l2_norm
from blocks.utils import (dict_subset, pack, shared_floatx,
shared_floatx_zeros_matching)
logger = logging.getLogger(__name__)
@add_metaclass(ABCMeta)
class TrainingAlgorithm(object):
"""Base class for training algorithms.
A training algorithm object has a simple life-cycle.
First it is initialized by calling its :meth:`initialize` method.
At this stage, for instance, Theano functions can be compiled.
After that the :meth:`process_batch` method is repeatedly
called with a batch of training data as a parameter.
"""
@abstractmethod
def initialize(self, **kwargs):
"""Initialize the training algorithm."""
pass
@abstractmethod
def process_batch(self, batch):
"""Process a batch of training data.
Attributes
----------
batch : dict
A dictionary of (source name, data) pairs.
"""
pass
class DifferentiableCostMinimizer(TrainingAlgorithm):
"""Minimizes a differentiable cost given as a Theano expression.
Very often the goal of training is to minimize the expected value of a
Theano expression. Batch processing in this cases typically consists of
running a (or a few) Theano functions.
:class:`DifferentiableCostMinimizer` is the base class for such
algorithms.
Parameters
----------
cost : :class:`~tensor.TensorVariable`
The objective to be minimized.
parameters : list of :class:`~tensor.TensorSharedVariable`
The parameters to be tuned.
Attributes
----------
updates : list of :class:`~tensor.TensorSharedVariable` updates
Updates to be done for every batch. It is required that the
updates are done using the old values of optimized parameters.
cost : :class:`~tensor.TensorVariable`
The objective to be minimized.
parameters : list of :class:`~tensor.TensorSharedVariable`
The parameters to be tuned.
Notes
-----
Changing `updates` attribute or calling `add_updates` after
the `initialize` method is called will have no effect.
.. todo::
Some shared variables are not parameters (e.g. those created by
random streams).
.. todo::
Due to a rather premature status of the :class:`ComputationGraph`
class the parameter used only inside scans are not fetched
currently.
"""
def __init__(self, cost, parameters):
self.cost = cost
self.parameters = parameters
self._cost_computation_graph = ComputationGraph(self.cost)
self._updates = []
@property
def inputs(self):
"""Return inputs of the cost computation graph.
Returns
-------
inputs : list of :class:`~tensor.TensorVariable`
Inputs to this graph.
"""
return self._cost_computation_graph.inputs
@property
def updates(self):
return self._updates
@updates.setter
def updates(self, value):
self._updates = value
def add_updates(self, updates):
"""Add updates to the training process.
The updates will be done _before_ the parameters are changed.
Parameters
----------
updates : list of tuples or :class:`~collections.OrderedDict`
The updates to add.
"""
if isinstance(updates, OrderedDict):
updates = list(updates.items())
if not isinstance(updates, list):
raise ValueError
self.updates.extend(updates)
variable_mismatch_error = """
Blocks tried to match the sources ({sources}) of the training dataset to \
the names of the Theano variables ({variables}), but failed to do so. \
If you want to train on a subset of the sources that your dataset provides, \
pass the `sources` keyword argument to its constructor. Or pass \
on_unused_sources='warn' or on_unused_sources='ignore' to \
the GradientDescent algorithm."""
source_missing_error = """
Blocks didn't find all the sources ({sources}) of the training dataset \
that match the names of the Theano variables ({variables})."""
class GradientDescent(DifferentiableCostMinimizer):
"""A base class for all gradient descent algorithms.
By "gradient descent" we mean a training algorithm of the following
form:
.. code-block:: python
for batch in data:
steps = step_rule.compute_steps(parameters,
gradients_wr_parameters)
for parameter in parameters:
parameter -= steps[parameter]
Note, that the step is *subtracted, not added*! This is done in order
to make step rule chaining possible.
Parameters
----------
step_rule : instance of :class:`StepRule`, optional
An object encapsulating most of the algorithm's logic. Its
`compute_steps` method is called to get Theano expression for
steps. Note, that the step rule might have a state, e.g. to
remember a weighted sum of gradients from previous steps like it is
done in gradient descent with momentum. If ``None``, an instance of
:class:`Scale` is created.
gradients : dict, optional
A dictionary mapping a parameter to an expression for the cost's
gradient with respect to the parameter. If ``None``, the gradient
are taken automatically using :func:`theano.gradient.grad`.
known_grads : dict, optional
A passthrough to `theano.tensor.grad`'s `known_grads` argument.
Useful when you know the [approximate] gradients of some
sub-expressions and would like Theano to use that information
to compute parameter gradients. Only makes sense when `gradients`
is `None`.
consider_constant : list, optional
A passthrough to `theano.tensor.grad`'s `consider_constant`
argument. A list of expressions through which gradients will not
be backpropagated. Only makes sense when `gradients` is `None`.
on_unused_sources : str, one of 'raise' (default), 'ignore', 'warn'
Controls behavior when not all sources are used.
theano_func_kwargs : dict, optional
A passthrough to `theano.function` for additional arguments.
Useful for passing `profile` or `mode` arguments to the theano
function that will be compiled for the algorithm.
Attributes
----------
gradients : dict
The gradient dictionary.
step_rule : instance of :class:`StepRule`
The step rule.
"""
def __init__(self, step_rule=None, gradients=None, known_grads=None,
consider_constant=None, on_unused_sources='raise',
theano_func_kwargs=None, **kwargs):
if gradients:
kwargs.setdefault("parameters", gradients.keys())
super(GradientDescent, self).__init__(**kwargs)
self.gradients = gradients
if not self.gradients:
logger.info("Taking the cost gradient")
self.gradients = dict(
equizip(self.parameters, tensor.grad(
self.cost, self.parameters,
known_grads=known_grads,
consider_constant=consider_constant)))
logger.info("The cost gradient computation graph is built")
else:
if known_grads:
raise ValueError("known_grads has no effect when gradients "
"are passed in")
if consider_constant is not None:
raise ValueError("consider_constant has no effect when "
"gradients are passed in")
self.step_rule = step_rule if step_rule else Scale()
self.total_gradient_norm = l2_norm(
self.gradients.values()).copy(name="total_gradient_norm")
self.steps, self.step_rule_updates = (
self.step_rule.compute_steps(self.gradients))
self.total_step_norm = l2_norm(
self.steps.values()).copy(name="total_step_norm")
self.on_unused_sources = on_unused_sources
self.theano_func_kwargs = (theano_func_kwargs if theano_func_kwargs
is not None else dict())
def initialize(self):
logger.info("Initializing the training algorithm")
all_updates = self.updates
# Note: the gradients are computed in the same order in which
# the parameters were given. Keep it like that to ensure
# reproducibility.
for parameter in self.parameters:
all_updates.append((parameter, parameter - self.steps[parameter]))
all_updates += self.step_rule_updates
self._function = theano.function(
self.inputs, [], updates=all_updates, **self.theano_func_kwargs)
logger.info("The training algorithm is initialized")
def _validate_source_names(self, batch):
in_names = [v.name for v in self.inputs]
if not set(in_names).issubset(set(batch.keys())):
raise ValueError("Didn't find all sources: " +
source_missing_error.format(
sources=batch.keys(),
variables=in_names))
if not set(batch.keys()).issubset(set(in_names)):
if self.on_unused_sources == 'ignore':
pass
elif self.on_unused_sources == 'warn':
if not hasattr(self, '_unused_source_warned'):
                    logger.warning(variable_mismatch_error.format(
sources=batch.keys(),
variables=in_names))
self._unused_source_warned = True
elif self.on_unused_sources == 'raise':
raise ValueError(
"mismatch of variable names and data sources" +
variable_mismatch_error.format(
sources=batch.keys(),
variables=in_names))
else:
raise ValueError("Wrong value of on_unused_sources: {}."
.format(self.on_unused_sources))
def process_batch(self, batch):
self._validate_source_names(batch)
ordered_batch = [batch[v.name] for v in self.inputs]
self._function(*ordered_batch)
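# Hedged usage sketch (illustrative, not part of the original module):
# wiring GradientDescent to a toy quadratic cost. The names x, W and the
# batch values are placeholders; Scale is defined further below.
def _gradient_descent_example():
    import numpy
    x = tensor.vector('x')
    W = shared_floatx([1., 1.], 'W')
    cost = tensor.sqr(x - W).sum()
    algorithm = GradientDescent(cost=cost, parameters=[W],
                                step_rule=Scale(learning_rate=0.1))
    algorithm.initialize()
    batch = {'x': numpy.array([0.5, -0.5], dtype=theano.config.floatX)}
    algorithm.process_batch(batch)  # applies one scaled-gradient update to W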
@add_metaclass(ABCMeta)
class StepRule(object):
"""A rule to compute steps for a gradient descent algorithm."""
def compute_step(self, parameter, previous_step):
"""Build a Theano expression for the step for a parameter.
This method is called by default implementation of
:meth:`compute_steps`, it relieves from writing a loop each time.
Parameters
----------
parameter : :class:`~tensor.TensorSharedVariable`
The parameter.
previous_step : :class:`~tensor.TensorVariable`
Some quantity related to the gradient of the cost with respect
to the parameter, either the gradient itself or a step in a
related direction.
Returns
-------
step : :class:`~theano.Variable`
Theano variable for the step to take.
updates : list
A list of tuples representing updates to be performed. This
is useful for stateful rules such as :class:`Momentum` which
            need to update shared variables after iterations.
"""
raise NotImplementedError
def compute_steps(self, previous_steps):
"""Build a Theano expression for steps for all parameters.
Override this method if you want to process the steps
with respect to all parameters as a whole, not parameter-wise.
Parameters
----------
previous_steps : OrderedDict
An :class:`~OrderedDict` of
(:class:`~tensor.TensorSharedVariable`
:class:`~tensor.TensorVariable`) pairs. The keys are the
parameters being trained, the values are the expressions for
quantities related to gradients of the cost with respect to
the parameters, either the gradients themselves or steps in
related directions.
Returns
-------
steps : OrderedDict
A dictionary of the proposed steps in the same form as
`previous_steps`.
updates : list
A list of tuples representing updates to be performed.
"""
parameter_wise = [self.compute_step(parameter,
previous_steps[parameter])
for parameter in previous_steps]
steps, updates = equizip(*parameter_wise)
steps = OrderedDict((parameter, step) for parameter, step
in equizip(previous_steps.keys(), steps))
updates = list(itertools.chain(*updates))
return steps, updates
class CompositeRule(StepRule):
"""Chains several step rules.
Parameters
----------
components : list of :class:`StepRule`
The learning rules to be chained. The rules will be applied in the
order as given.
"""
def __init__(self, components):
self.components = components
def compute_steps(self, previous_steps):
steps = previous_steps
updates = []
for rule in self.components:
steps, more_updates = rule.compute_steps(steps)
updates += more_updates
return steps, updates
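# Hedged example (illustrative): the common "clip, then scale" chain built
# from rules defined later in this module. The gradient values are toys.
def _composite_rule_example():
    W = shared_floatx([3., 4.], 'W')
    gradients = OrderedDict([(W, tensor.constant([3., 4.]))])
    rule = CompositeRule([StepClipping(threshold=1.0),
                          Scale(learning_rate=0.1)])
    steps, _ = rule.compute_steps(gradients)
    return steps[W]  # symbolic step whose L2 norm is at most 0.1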
class Scale(StepRule):
"""A step in the direction proportional to the previous step.
If used in :class:`GradientDescent` alone, this step rule implements
steepest descent.
Parameters
----------
learning_rate : float
The learning rate by which the previous step is multiplied to
produce the step.
Attributes
----------
learning_rate : :class:`~tensor.TensorSharedVariable`
The shared variable storing the learning rate used.
"""
def __init__(self, learning_rate=1.0):
self.learning_rate = shared_floatx(learning_rate, "learning_rate")
add_role(self.learning_rate, ALGORITHM_HYPERPARAMETER)
def compute_step(self, parameter, previous_step):
return self.learning_rate * previous_step, []
class BasicMomentum(StepRule):
"""Accumulates step with exponential discount.
Parameters
----------
momentum : float, optional
The momentum coefficient. Defaults to 0.
Notes
-----
This step rule is intended to be used in conjunction with another
step rule, _e.g._ :class:`Scale`. For an all-batteries-included
experience, look at :class:`Momentum`.
"""
def __init__(self, momentum=0.):
self.momentum = shared_floatx(momentum, "momentum")
add_role(self.momentum, ALGORITHM_HYPERPARAMETER)
def compute_step(self, parameter, previous_step):
velocity = shared_floatx_zeros_matching(parameter, "velocity")
add_role(velocity, ALGORITHM_BUFFER)
step = self.momentum * velocity + previous_step
updates = [(velocity, step)]
return step, updates
class Momentum(CompositeRule):
"""Accumulates step with exponential discount.
Combines :class:`BasicMomentum` and :class:`Scale` to form the
usual momentum step rule.
Parameters
----------
learning_rate : float, optional
        The learning rate by which the previous step is scaled. Defaults to 1.
momentum : float, optional
The momentum coefficient. Defaults to 0.
Attributes
----------
learning_rate : :class:`~tensor.SharedVariable`
A variable for learning rate.
momentum : :class:`~tensor.SharedVariable`
A variable for momentum.
See Also
--------
:class:`SharedVariableModifier`
"""
def __init__(self, learning_rate=1.0, momentum=0.):
scale = Scale(learning_rate=learning_rate)
basic_momentum = BasicMomentum(momentum=momentum)
self.learning_rate = scale.learning_rate
self.momentum = basic_momentum.momentum
self.components = [scale, basic_momentum]
class AdaDelta(StepRule):
"""Adapts the step size over time using only first order information.
Parameters
----------
decay_rate : float, optional
Decay rate in [0, 1]. Defaults to 0.95.
epsilon : float, optional
Stabilizing constant for RMS. Defaults to 1e-6.
Notes
-----
For more information, see [ADADELTA]_.
.. [ADADELTA] Matthew D. Zeiler, *ADADELTA: An Adaptive Learning
Rate Method*, arXiv:1212.5701.
"""
def __init__(self, decay_rate=0.95, epsilon=1e-6):
if not 0.0 <= decay_rate <= 1.0:
raise ValueError("decay rate needs to be in [0, 1]")
self.decay_rate = shared_floatx(decay_rate, "decay_rate")
add_role(self.decay_rate, ALGORITHM_HYPERPARAMETER)
self.epsilon = shared_floatx(epsilon, "epsilon")
add_role(self.epsilon, ALGORITHM_HYPERPARAMETER)
def compute_step(self, parameter, previous_step):
mean_square_step_tm1 = shared_floatx_zeros_matching(
parameter, "mean_square_step_tm1")
add_role(mean_square_step_tm1, ALGORITHM_BUFFER)
mean_square_delta_x_tm1 = shared_floatx_zeros_matching(
parameter, "mean_square_delta_x_tm1")
add_role(mean_square_delta_x_tm1, ALGORITHM_BUFFER)
mean_square_step_t = (
self.decay_rate * mean_square_step_tm1 +
(1 - self.decay_rate) * tensor.sqr(previous_step)
)
rms_delta_x_tm1 = tensor.sqrt(mean_square_delta_x_tm1 + self.epsilon)
rms_step_t = tensor.sqrt(mean_square_step_t + self.epsilon)
delta_x_t = rms_delta_x_tm1 / rms_step_t * previous_step
mean_square_delta_x_t = (
self.decay_rate * mean_square_delta_x_tm1 +
(1 - self.decay_rate) * tensor.sqr(delta_x_t)
)
step = delta_x_t
updates = [(mean_square_step_tm1, mean_square_step_t),
(mean_square_delta_x_tm1, mean_square_delta_x_t)]
return step, updates
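# Hedged numeric sketch of the ADADELTA recurrences above in plain Python
# (no Theano), with a constant toy gradient g and the default constants.
def _adadelta_numeric_sketch(g=0.5, decay=0.95, eps=1e-6, n_steps=3):
    ms_g = ms_dx = 0.0
    dx = 0.0
    for _ in range(n_steps):
        ms_g = decay * ms_g + (1 - decay) * g * g
        dx = ((ms_dx + eps) ** 0.5) / ((ms_g + eps) ** 0.5) * g
        ms_dx = decay * ms_dx + (1 - decay) * dx * dx
    return dx  # the step that would be subtracted from the parameter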
class BasicRMSProp(StepRule):
"""Scales the step size by a running average of the recent step norms.
Parameters
----------
decay_rate : float, optional
How fast the running average decays, value in [0, 1]
(lower is faster). Defaults to 0.9.
max_scaling : float, optional
Maximum scaling of the step size, in case the running average is
really small. Needs to be greater than 0. Defaults to 1e5.
Notes
-----
This step rule is intended to be used in conjunction with another
step rule, _e.g._ :class:`Scale`. For an all-batteries-included
experience, look at :class:`RMSProp`.
In general, this step rule should be used _before_ other step rules,
because it has normalization properties that may undo their work.
For instance, it should be applied first when used in conjunction
with :class:`Scale`.
For more information, see [Hint2014]_.
"""
def __init__(self, decay_rate=0.9, max_scaling=1e5):
if not 0.0 <= decay_rate <= 1.0:
raise ValueError("decay rate needs to be in [0, 1]")
if max_scaling <= 0:
raise ValueError("max. scaling needs to be greater than 0")
self.decay_rate = shared_floatx(decay_rate, "decay_rate")
add_role(self.decay_rate, ALGORITHM_HYPERPARAMETER)
self.epsilon = 1. / max_scaling
def compute_step(self, parameter, previous_step):
mean_square_step_tm1 = shared_floatx_zeros_matching(
parameter, "mean_square_step_tm1")
add_role(mean_square_step_tm1, ALGORITHM_BUFFER)
mean_square_step_t = (
self.decay_rate * mean_square_step_tm1 +
(1 - self.decay_rate) * tensor.sqr(previous_step))
add_role(mean_square_step_t, ALGORITHM_BUFFER)
rms_step_t = tensor.maximum(
tensor.sqrt(mean_square_step_t), self.epsilon)
step = previous_step / rms_step_t
updates = [(mean_square_step_tm1, mean_square_step_t)]
return step, updates
class RMSProp(CompositeRule):
"""Scales the step size by a running average of the recent step norms.
Combines :class:`BasicRMSProp` and :class:`Scale` to form the step rule
described in [Hint2014]_.
.. [Hint2014] Geoff Hinton, *Neural Networks for Machine Learning*,
lecture 6a,
http://cs.toronto.edu/~tijmen/csc321/slides/lecture_slides_lec6.pdf
Parameters
----------
learning_rate : float, optional
        The learning rate by which the previous step is scaled. Defaults to 1.
decay_rate : float, optional
How fast the running average decays (lower is faster).
Defaults to 0.9.
max_scaling : float, optional
Maximum scaling of the step size, in case the running average is
really small. Defaults to 1e5.
Attributes
----------
learning_rate : :class:`~tensor.SharedVariable`
A variable for learning rate.
decay_rate : :class:`~tensor.SharedVariable`
A variable for decay rate.
See Also
--------
:class:`SharedVariableModifier`
"""
def __init__(self, learning_rate=1.0, decay_rate=0.9, max_scaling=1e5):
basic_rms_prop = BasicRMSProp(decay_rate=decay_rate,
max_scaling=max_scaling)
scale = Scale(learning_rate=learning_rate)
self.learning_rate = scale.learning_rate
self.decay_rate = basic_rms_prop.decay_rate
self.components = [basic_rms_prop, scale]
class StepClipping(StepRule):
"""Rescales an entire step if its L2 norm exceeds a threshold.
When the previous steps are the gradients, this step rule performs
gradient clipping.
Parameters
----------
threshold : float, optional
The maximum permitted L2 norm for the step. The step
will be rescaled to be not higher than this quanity.
If ``None``, no rescaling will be applied.
Attributes
----------
threshold : :class:`.tensor.TensorSharedVariable`
The shared variable storing the clipping threshold used.
"""
def __init__(self, threshold=None):
if threshold:
self.threshold = shared_floatx(threshold, "threshold")
add_role(self.threshold, ALGORITHM_HYPERPARAMETER)
def compute_steps(self, previous_steps):
if not hasattr(self, 'threshold'):
            return previous_steps, []
norm = l2_norm(previous_steps.values())
multiplier = tensor.switch(norm < self.threshold,
1, self.threshold / norm)
steps = OrderedDict(
(parameter, step * multiplier)
for parameter, step in previous_steps.items())
return steps, []
class VariableClipping(StepRule):
"""Clip the maximum norm of individual variables along certain axes.
This :class:`StepRule` can be used to implement L2 norm constraints on
e.g. the weight vectors of individual hidden units, convolutional
filters or entire weight tensors. Combine with :class:`Restrict`
(and possibly :class:`CompositeRule`), to apply such constraints only
to certain variables and/or apply different norm constraints to
different variables.
Parameters
----------
threshold : float
Maximum norm for a given (portion of a) tensor.
axis : int or iterable, optional
An integer single axis, or an iterable collection of integer
axes over which to sum in order to calculate the L2 norm. If
`None` (the default), the norm is computed over all elements
of the tensor.
Notes
-----
Because of the way the :class:`StepRule` API works, this particular
rule implements norm clipping of the value *after* update in the
following way: it computes ``parameter - previous_step``, scales it
to have (possibly axes-wise) norm(s) of at most `threshold`,
then subtracts *that* value from `parameter` to yield an 'equivalent
step' that respects the desired norm constraints. This procedure
implicitly assumes one is doing simple (stochastic) gradient descent,
and so steps computed by this step rule may not make sense for use
in other contexts.
Investigations into max-norm regularization date from [Srebro2005]_.
The first appearance of this technique as a regularization method
for the weight vectors of individual hidden units in feed-forward
neural networks may be [Hinton2012]_.
.. [Srebro2005] Nathan Srebro and Adi Shraibman.
"Rank, Trace-Norm and Max-Norm". *18th Annual Conference
on Learning Theory (COLT)*, June 2005.
.. [Hinton2012] Geoffrey E. Hinton, Nitish Srivastava,
Alex Krizhevsky, Ilya Sutskever, Ruslan R. Salakhutdinov.
"Improving neural networks by preventing co-adaptation of
feature detectors". arXiv:1207.0580.
"""
def __init__(self, threshold, axis=None):
axis = pack(axis) if axis is not None else ()
self.axis = set(axis)
self.threshold = shared_floatx(threshold, "threshold")
add_role(self.threshold, ALGORITHM_HYPERPARAMETER)
if len(axis) != len(self.axis):
raise ValueError("axis must be unique")
def compute_step(self, parameter, previous_step):
if any(ax >= previous_step.ndim for ax in self.axis):
raise ValueError("Invalid axis {} for {}, ndim={}".format(
self.axis, parameter, previous_step.ndim))
if len(self.axis) == 0:
norms = l2_norm([parameter - previous_step])
else:
squares = tensor.sqr(parameter - previous_step)
norms = tensor.sqrt(
reduce(lambda t, a: t.sum(axis=a, keepdims=True),
sorted(self.axis), squares))
# We want a step s* that is the same as scaling
# (parameter - previous_step) by threshold / norm
# when threshold < norm.
shrinking_step = (parameter -
(self.threshold / norms) *
(parameter - previous_step))
return tensor.switch(norms > self.threshold,
shrinking_step,
previous_step), ()
class AdaGrad(StepRule):
"""Implements the AdaGrad learning rule.
Parameters
----------
learning_rate : float, optional
Step size.
        Default value is set to 0.002.
epsilon : float, optional
Stabilizing constant for one over root of sum of squares.
Defaults to 1e-6.
Notes
-----
For more information, see [ADAGRAD]_.
    .. [ADAGRAD] Duchi J, Hazan E, Singer Y.,
*Adaptive subgradient methods for online learning and
stochastic optimization*,
http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf
"""
def __init__(self, learning_rate=0.002, epsilon=1e-6):
self.learning_rate = shared_floatx(learning_rate, "learning_rate")
self.epsilon = shared_floatx(epsilon, "epsilon")
add_role(self.learning_rate, ALGORITHM_HYPERPARAMETER)
add_role(self.epsilon, ALGORITHM_HYPERPARAMETER)
def compute_step(self, parameter, previous_step):
name = 'adagrad_sqs'
if parameter.name:
name += '_' + parameter.name
ssq = shared_floatx_zeros_matching(parameter, name=name)
add_role(ssq, ALGORITHM_BUFFER)
ssq_t = (tensor.sqr(previous_step) + ssq)
step = (self.learning_rate * previous_step /
(tensor.sqrt(ssq_t) + self.epsilon))
updates = [(ssq, ssq_t)]
return step, updates
class Adam(StepRule):
"""Adam optimizer as described in [King2014]_.
.. [King2014] Diederik Kingma, Jimmy Ba,
*Adam: A Method for Stochastic Optimization*,
http://arxiv.org/abs/1412.6980
Parameters
----------
learning_rate : float, optional
Step size.
        Default value is set to 0.002.
beta1 : float, optional
Exponential decay rate for the first moment estimates.
Default value is set to 0.1.
beta2 : float, optional
Exponential decay rate for the second moment estimates.
Default value is set to 0.001.
epsilon : float, optional
Default value is set to 1e-8.
decay_factor : float, optional
Default value is set to 1 - 1e-8.
"""
def __init__(self, learning_rate=0.002,
beta1=0.1, beta2=0.001, epsilon=1e-8,
decay_factor=(1 - 1e-8)):
self.learning_rate = shared_floatx(learning_rate, "learning_rate")
self.beta1 = shared_floatx(beta1, "beta1")
self.beta2 = shared_floatx(beta2, "beta2")
self.epsilon = shared_floatx(epsilon, "epsilon")
self.decay_factor = shared_floatx(decay_factor, "decay_factor")
for param in [self.learning_rate, self.beta1, self.beta2, self.epsilon,
self.decay_factor]:
add_role(param, ALGORITHM_HYPERPARAMETER)
def compute_step(self, parameter, previous_step):
mean = shared_floatx_zeros_matching(parameter, 'mean')
add_role(mean, ALGORITHM_BUFFER)
variance = shared_floatx_zeros_matching(parameter, 'variance')
add_role(variance, ALGORITHM_BUFFER)
time = shared_floatx(0., 'time')
add_role(time, ALGORITHM_BUFFER)
t1 = time + 1
learning_rate = (self.learning_rate *
tensor.sqrt((1. - (1. - self.beta2)**t1)) /
(1. - (1. - self.beta1)**t1))
beta_1t = 1 - (1 - self.beta1) * self.decay_factor ** (t1 - 1)
mean_t = beta_1t * previous_step + (1. - beta_1t) * mean
variance_t = (self.beta2 * tensor.sqr(previous_step) +
(1. - self.beta2) * variance)
step = (learning_rate * mean_t /
(tensor.sqrt(variance_t) + self.epsilon))
updates = [(mean, mean_t),
(variance, variance_t),
(time, t1)]
return step, updates
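# Hedged note, in code form: beta1/beta2 here are *one minus* the paper's
# decay rates, as the (1 - self.beta1) terms above show. An illustrative
# configuration matching the defaults recommended in [King2014]:
def _adam_paper_defaults():
    return Adam(learning_rate=0.001, beta1=0.1, beta2=0.001, epsilon=1e-8)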
class RemoveNotFinite(StepRule):
"""A step rule that skips steps with non-finite elements.
Replaces a step (the parameter update of a single shared variable)
which contains non-finite elements (such as ``inf`` or ``NaN``) with a
step rescaling the parameters.
Parameters
----------
scaler : float, optional
The scaling applied to the parameter in case the step contains
non-finite elements. Defaults to 1, which means that parameters
will not be changed.
Notes
-----
This rule should be applied last!
This trick was originally used in the GroundHog_ framework.
.. _GroundHog: https://github.com/lisa-groundhog/GroundHog
"""
def __init__(self, scaler=1):
self.scaler = scaler
def compute_step(self, parameter, previous_step):
step_sum = tensor.sum(previous_step)
not_finite = (tensor.isnan(step_sum) +
tensor.isinf(step_sum))
step = tensor.switch(
not_finite > 0, (1 - self.scaler) * parameter, previous_step)
return step, []
class Restrict(StepRule):
"""Applies a given :class:`StepRule` only to certain variables.
Example applications include clipping steps on only certain parameters,
or scaling a certain kind of parameter's updates (e.g. adding an
additional scalar multiplier to the steps taken on convolutional
filters).
Parameters
----------
step_rule : :class:`StepRule`
The :class:`StepRule` to be applied on the given variables.
variables : iterable
A collection of Theano variables on which to apply `step_rule`.
Variables not appearing in this collection will not have
`step_rule` applied to them.
"""
def __init__(self, step_rule, variables):
self.step_rule = step_rule
self.variables = frozenset(variables)
def compute_steps(self, previous_steps):
filtered_previous_steps = dict_subset(previous_steps, self.variables)
steps, updates = self.step_rule.compute_steps(filtered_previous_steps)
actual = OrderedDict((parameter, steps[parameter])
if parameter in steps
else (parameter, previous_steps[parameter])
for parameter in previous_steps)
return actual, updates
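# Hedged example (illustrative): clip only some parameters while scaling
# every step. `conv_filters` stands for any iterable of shared variables.
def _restrict_example(conv_filters):
    return CompositeRule([Restrict(StepClipping(threshold=5.0), conv_filters),
                          Scale(learning_rate=0.01)])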
| 36.837808 | 79 | 0.642426 |
24dec208e253c34bc6db32e687a9a080b0f99af3 | 5,219 | py | Python | neo/contrib/nex/test_withdraw.py | iNomaD/neo-python | 3abb49b3f28cc1457f246b158167f664eaf37a8e | ["MIT"] | 3 | 2018-08-07T23:05:34.000Z | 2021-01-25T03:55:18.000Z | neo/contrib/nex/test_withdraw.py | iNomaD/neo-python | 3abb49b3f28cc1457f246b158167f664eaf37a8e | ["MIT"] | null | null | null | neo/contrib/nex/test_withdraw.py | iNomaD/neo-python | 3abb49b3f28cc1457f246b158167f664eaf37a8e | ["MIT"] | 2 | 2021-01-17T23:13:49.000Z | 2021-04-03T17:13:10.000Z |
from neo.Utils.WalletFixtureTestCase import WalletFixtureTestCase
from neo.Implementations.Wallets.peewee.UserWallet import UserWallet
from neo.Core.Blockchain import Blockchain
from neo.Core.TX.InvocationTransaction import InvocationTransaction
from neo.Core.TX.Transaction import ContractTransaction
from neocore.UInt160 import UInt160
from neocore.UInt256 import UInt256
from neo.Implementations.Wallets.peewee.Models import VINHold
from neo.contrib.nex.withdraw import WithdrawAll, WithdrawOne, PrintHolds,\
CleanupCompletedHolds, ShowCompletedHolds, RequestWithdrawFrom, DeleteHolds
import json
class WithdrawWalletTestCase(WalletFixtureTestCase):
FIXTURE_REMOTE_LOC = 'https://s3.us-east-2.amazonaws.com/cityofzion/fixtures/withdraw_fixture.tar.gz'
FIXTURE_FILENAME = './Chains/withdraw_fixture.tar.gz'
@classmethod
def leveldb_testpath(self):
return './withdraw_fixtures'
@classmethod
def wallet_1_path(cls):
return './fixtures/withdraw_wallet.db3'
@classmethod
def wallet_1_dest(cls):
return './withdraw_wallet.db3'
@classmethod
def wallet_1_pass(cls):
return 'testpassword'
@classmethod
def wallet_2_path(cls):
return './fixtures/withdraw_wallet2.db3'
@classmethod
def wallet_2_dest(cls):
return './withdraw_wallet2.db3'
@classmethod
def wallet_2_pass(cls):
return 'testpassword'
_wallet1 = None
_wallet2 = None
wallet_1_script_hash = UInt160(data=b')\x96S\xb5\xe3e\xcb3\xb4\xea:\xd1\xd7\xe1\xb3\xf5\xe6\x81N/')
wallet_1_addr = 'AKZmSGPD7ytJBbxpRPmobYGLNxdWH3Jiqs'
wallet_2_script_hash = UInt160(data=b'4\xd0=k\x80TF\x9e\xa8W\x83\xfa\x9eIv\x0b\x9bs\x9d\xb6')
wallet_2_addr = 'ALb8FEhEmtSqv97fuNVuoLmcmrSKckffRf'
withdraw_hash = 'c5a6485dc64174e1ced6ac041b6b591074f795e4'
@property
def GAS(self):
return Blockchain.Default().SystemCoin().Hash
@property
def NEO(self):
return Blockchain.Default().SystemShare().Hash
@classmethod
def GetWallet1(cls, recreate=False):
if cls._wallet1 is None or recreate:
cls._wallet1 = UserWallet.Open(WithdrawWalletTestCase.wallet_1_dest(), WithdrawWalletTestCase.wallet_1_pass())
return cls._wallet1
@classmethod
def GetWallet2(cls, recreate=False):
if cls._wallet2 is None or recreate:
cls._wallet2 = UserWallet.Open(WithdrawWalletTestCase.wallet_2_dest(), WithdrawWalletTestCase.wallet_2_pass())
return cls._wallet2
def test_1_initial_setup(self):
wallet = self.GetWallet1()
self.assertEqual(wallet.WalletHeight, 203437)
holds = wallet._holds
self.assertEqual(len(holds), 2)
count = 0
for item in holds: # type:VINHold
self.assertIsInstance(item, VINHold)
self.assertFalse(item.IsComplete)
self.assertIsInstance(item.OutputHash, UInt160)
self.assertIsInstance(item.InputHash, UInt160)
self.assertEqual(item.OutputHash, self.wallet_1_script_hash)
self.assertIsInstance(item.TXHash, UInt256)
if count == 0:
# item is the last one
self.assertEqual(item.Vin, bytearray(b'\x81\xae\x0bPmK\xda`OT\x0f\xf2\x95\x9b\x07\x08I]N\x1dW\x9bp\xe8\xcd\x16\n \xfbu\xaf\x17\x00'))
count += 1
completed = wallet.LoadCompletedHolds()
self.assertEqual(len(completed), 1)
completed_hold = completed[0] # type:VINHold
self.assertTrue(completed_hold.IsComplete, True)
def test_2_print(self):
wallet = self.GetWallet1()
ShowCompletedHolds(wallet)
PrintHolds(wallet)
def test_3_make_withdrawl_request(self):
wallet = self.GetWallet1()
res = RequestWithdrawFrom(wallet, 'neo', self.withdraw_hash, self.wallet_1_addr, 100)
self.assertFalse(res)
res2 = RequestWithdrawFrom(wallet, 'neo', self.withdraw_hash, self.wallet_1_addr, 1, require_password=False)
self.assertIsInstance(res2, InvocationTransaction)
self.assertEqual(res2.Hash.ToString(), '828a161d718890c7de29527f5c8c705cba1abb17bc627f76681800e78a49e200')
def test_4_withdraw_one(self):
wallet = self.GetWallet1()
res = WithdrawOne(wallet, require_password=False)
self.assertIsInstance(res, ContractTransaction)
self.assertEqual(res.Hash.ToString(), '505e0d6cc4302fb119ec21edbb40bfc17fa7dd6083586390843c0a07bea15fc8')
def test_5_withdraw_all(self):
wallet = self.GetWallet1(recreate=True)
res = WithdrawAll(wallet, require_password=False)
self.assertTrue(res)
def test_6_cleanup_holds(self):
wallet = self.GetWallet1()
res = CleanupCompletedHolds(wallet, require_password=False)
self.assertIsInstance(res, InvocationTransaction)
self.assertEqual(res.Hash.ToString(), 'aa27a2331631e7594517fed5f6388e6f3e2567a7854b4d98901c818d9f20d03e')
def test_7_delete_holds(self):
wallet = self.GetWallet1(recreate=True)
DeleteHolds(wallet, index_to_delete=-1)
wallet.LoadHolds()
self.assertEqual(wallet._holds, [])
| 30.343023 | 149 | 0.707607 |
7f0021e7c37957c8301408a1ae409bb40051faf7 | 3,018 | py | Python | sdk/python/kfp/v2/compiler/compiler_utils_test.py | ConverJens/pipelines | a1d453af214ec9eebad73fb05845dd3499d60d00 | [
"Apache-2.0"
] | 2 | 2021-03-11T14:27:12.000Z | 2021-03-11T14:27:24.000Z | sdk/python/kfp/v2/compiler/compiler_utils_test.py | ConverJens/pipelines | a1d453af214ec9eebad73fb05845dd3499d60d00 | [
"Apache-2.0"
] | 6 | 2019-08-20T18:59:22.000Z | 2019-08-21T16:42:18.000Z | sdk/python/kfp/v2/compiler/compiler_utils_test.py | ConverJens/pipelines | a1d453af214ec9eebad73fb05845dd3499d60d00 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for kfp.v2.compiler.compiler_utils."""
import unittest
from kfp.v2.compiler import compiler_utils
from kfp.pipeline_spec import pipeline_spec_pb2
from google.protobuf import json_format
from google.protobuf import message
class CompilerUtilsTest(unittest.TestCase):
def assertProtoEquals(self, proto1: message.Message, proto2: message.Message):
"""Asserts the equality between two messages."""
self.assertDictEqual(
json_format.MessageToDict(proto1),
json_format.MessageToDict(proto2))
def test_build_runtime_config_spec(self):
expected_dict = {
'gcsOutputDirectory': 'gs://path',
'parameters': {
'input1': {
'stringValue': 'test'
}
}
}
expected_spec = pipeline_spec_pb2.PipelineJob.RuntimeConfig()
json_format.ParseDict(expected_dict, expected_spec)
runtime_config = compiler_utils.build_runtime_config_spec(
'gs://path', {'input1': 'test', 'input2': None})
self.assertEqual(expected_spec, runtime_config)
def test_validate_pipeline_name(self):
compiler_utils.validate_pipeline_name('my-pipeline')
compiler_utils.validate_pipeline_name('p' * 128)
with self.assertRaisesRegex(ValueError, 'Invalid pipeline name: '):
compiler_utils.validate_pipeline_name('my_pipeline')
with self.assertRaisesRegex(ValueError, 'Invalid pipeline name: '):
compiler_utils.validate_pipeline_name('My pipeline')
with self.assertRaisesRegex(ValueError, 'Invalid pipeline name: '):
compiler_utils.validate_pipeline_name('-my-pipeline')
with self.assertRaisesRegex(ValueError, 'Invalid pipeline name: '):
compiler_utils.validate_pipeline_name('p' * 129)
def test_refactor_v2_component_success(self):
test_v2_container_spec = compiler_utils.PipelineContainerSpec(
image='my/dummy-image',
command=['python', '-m', 'my_package.my_entrypoint'],
args=['arg1', 'arg2', '--function_name', 'test_func']
)
expected_container_spec = compiler_utils.PipelineContainerSpec(
image='my/dummy-image',
command=['python', '-m', 'kfp.container.entrypoint'],
        args=['--executor_input_str', '{{$}}', '--function_name', 'test_func']
)
compiler_utils.refactor_v2_container_spec(test_v2_container_spec)
self.assertProtoEquals(expected_container_spec, test_v2_container_spec)
if __name__ == '__main__':
unittest.main()
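# Hedged sketch (an addition, not part of the test module): the first test
# above implies that build_runtime_config_spec drops parameters whose value is
# None ('input2' never appears in the expected proto). Equivalent filtering in
# plain Python, stated as an assumption about the implementation:
#
#     params = {'input1': 'test', 'input2': None}
#     kept = {k: v for k, v in params.items() if v is not None}
#     assert kept == {'input1': 'test'}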
| 36.361446 | 80 | 0.725646 |
397e8739aff425fb3616fd5d22a4a31742305dee | 3,091 | py | Python | remind/models.py | jscpeterson/reminders | f1ad78daff6314a697a32a0a52d5ac16aa54eeca | ["FSFAP"] | null | null | null | remind/models.py | jscpeterson/reminders | f1ad78daff6314a697a32a0a52d5ac16aa54eeca | ["FSFAP"] | null | null | null | remind/models.py | jscpeterson/reminders | f1ad78daff6314a697a32a0a52d5ac16aa54eeca | ["FSFAP"] | null | null | null |
from django.db import models
from reminders.models import TimeStampedModel
from cases.models import Case, Motion
class Deadline(TimeStampedModel):
FFA = 0
SCHEDULING_CONFERENCE = 1
WITNESS_LIST = 2
REQUEST_PTI = 3
CONDUCT_PTI = 4
WITNESS_PTI = 5
SCIENTIFIC_EVIDENCE = 6
PRETRIAL_MOTION_FILING = 7
PRETRIAL_MOTION_RESPONSE = 8
PRETRIAL_MOTION_HEARING = 9
FINAL_WITNESS_LIST = 10
NEED_FOR_INTERPRETER = 11
PLEA_AGREEMENT = 12
CERTIFICATION_OF_READINESS = 13
PRETRIAL_CONFERENCE = 14
TRIAL = 15
TYPE_CHOICES = (
(FFA, 'FFA'),
(SCHEDULING_CONFERENCE, 'Scheduling Conference'),
(WITNESS_LIST, 'Initial Witness List'),
(REQUEST_PTI, 'PTIs Requested'),
(CONDUCT_PTI, 'PTIs Conducted'),
(WITNESS_PTI, 'Witness PTIs'),
(SCIENTIFIC_EVIDENCE, 'Scientific Evidence'),
(PRETRIAL_MOTION_FILING, 'Pretrial Motion Filing'),
(PRETRIAL_MOTION_RESPONSE, 'Pretrial Motion Response'),
(PRETRIAL_MOTION_HEARING, 'Pretrial Motion Hearing'),
(FINAL_WITNESS_LIST, 'Final Witness List'),
(NEED_FOR_INTERPRETER, 'Need for Interpreter'),
(PLEA_AGREEMENT, 'Plea Agreement'),
(CERTIFICATION_OF_READINESS, 'Certification of Readiness'),
(PRETRIAL_CONFERENCE, 'PTC/Docket Call'),
(TRIAL, 'Trial'),
)
ACTIVE = 0
COMPLETED = 1
EXPIRED = 2
STATUS_CHOICES = (
(ACTIVE, 'Active'),
(COMPLETED, 'Complete'),
(EXPIRED, 'Expired')
# TODO Add "Old" and "Inactive"
)
type = models.IntegerField(choices=TYPE_CHOICES)
status = models.IntegerField(default=ACTIVE, choices=STATUS_CHOICES)
case = models.ForeignKey(Case, on_delete=models.PROTECT)
motion = models.ForeignKey(Motion, on_delete=models.PROTECT, null=True, blank=True)
datetime = models.DateTimeField()
reminders_sent = models.IntegerField(default=0)
invalid_notice_sent = models.BooleanField(default=False)
invalid_judge_approved = models.BooleanField(default=False)
invalid_extension_filed = models.BooleanField(default=False)
def first_reminder_days(self):
return self.type
def deadline_name(self):
if self.type in [Deadline.PRETRIAL_MOTION_RESPONSE,]:
label = 'Response to {motion_title}'.format(
motion_title=self.motion.title,
)
elif self.type in [Deadline.PRETRIAL_MOTION_HEARING,]:
label = 'Hearing for {motion_title}'.format(
motion_title=self.motion.title
)
else:
label = '{deadline_simple_desc}'.format(
deadline_simple_desc=Deadline.TYPE_CHOICES[self.type][1],
)
return label
def defendant(self):
return self.case.defendant
def case_number(self):
return self.case.case_number
def judge(self):
return self.case.judge
def defense_attorney(self):
return self.case.defense_attorney
def __str__(self):
return self.TYPE_CHOICES[self.type][1]
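# Hedged usage sketch (an addition): how deadline_name() composes its label for
# a motion-bound deadline; the Motion/Deadline constructor arguments below are
# assumptions for illustration only.
#
#     motion = Motion(title='Motion to Suppress')
#     deadline = Deadline(type=Deadline.PRETRIAL_MOTION_RESPONSE, case=case,
#                         motion=motion, datetime=due_date)
#     deadline.deadline_name()  # -> 'Response to Motion to Suppress'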
| 32.197917 | 87 | 0.658687 |
efb1b3c431a17a275e54d9661c24c3508e266e62 | 440 | py | Python | gates/Sub.py | DrugoLebowski/nram-executor | 3abb49b3f28cc1457f246b158167f664eaf37a8e | ["MIT"] | 2 | 2018-03-13T19:55:25.000Z | 2020-10-24T10:02:52.000Z | gates/Sub.py | DrugoLebowski/nram-executor | 3abb49b3f28cc1457f246b158167f664eaf37a8e | ["MIT"] | null | null | null | gates/Sub.py | DrugoLebowski/nram-executor | 3abb49b3f28cc1457f246b158167f664eaf37a8e | ["MIT"] | null | null | null |
import numpy as np
from numpy import tensordot, roll, transpose, stack
from gates.Gate import Gate
class Sub(Gate):
    def __call__(self, M: np.ndarray, A: np.ndarray = None, B: np.ndarray = None) -> (np.ndarray, np.ndarray):
        # Stack rolled copies of B into a circulant tensor; contracting it with
        # A below yields the distribution of the modular difference (a - b).
        rows = [roll(B[:], shift=shift, axis=1)
                for shift in range(M.shape[1])]
B_prime = transpose(stack(rows, axis=1), axes=[0, 2, 1])
return M, tensordot(A, B_prime, axes=2)[None, ...]
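# Hedged worked example (an addition, not part of the original gate): with
# B_prime[., j, c] = B[., j - c], tensordot(A, B_prime, axes=2) computes
# P[c] = sum over a - b = c (mod n) of A[a] * B[b], i.e. the distribution of
# the modular difference. Assuming Gate() needs no constructor arguments:
#
#     M = np.zeros((1, 3))           # only the register width n = 3 matters
#     A = np.array([[0., 1., 0.]])   # a = 1 with certainty
#     B = np.array([[0., 0., 1.]])   # b = 2 with certainty
#     _, out = Sub()(M, A, B)        # all mass lands on (1 - 2) mod 3 == 2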
| 33.846154 | 100 | 0.611364 |
fbd3d0c2b0afe09da7bc6aeb29de36cbf84cf76a | 216,520 | py | Python | msgraph-cli-extensions/v1_0/applications_v1_0/azext_applications_v1_0/vendored_sdks/applications/aio/operations/_service_principals_operations.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | ["MIT"] | null | null | null | msgraph-cli-extensions/v1_0/applications_v1_0/azext_applications_v1_0/vendored_sdks/applications/aio/operations/_service_principals_operations.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | ["MIT"] | 22 | 2022-03-29T22:54:37.000Z | 2022-03-29T22:55:27.000Z | msgraph-cli-extensions/v1_0/applications_v1_0/azext_applications_v1_0/vendored_sdks/applications/aio/operations/_service_principals_operations.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | ["MIT"] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ServicePrincipalsOperations:
"""ServicePrincipalsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~applications.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_app_role_assigned_to(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum32"]]] = None,
select: Optional[List[Union[str, "models.Enum33"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfAppRoleAssignment0"]:
"""Get appRoleAssignedTo from servicePrincipals.
Get appRoleAssignedTo from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum32]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum33]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfAppRoleAssignment0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfAppRoleAssignment0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfAppRoleAssignment0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_app_role_assigned_to.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfAppRoleAssignment0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_app_role_assigned_to.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignedTo'} # type: ignore
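    # Hedged usage sketch (an addition, not generated code): the pager above is
    # consumed with `async for`; the client construction and the selected
    # property names are assumptions for illustration.
    #
    #     async for assignment in client.service_principals.list_app_role_assigned_to(
    #             service_principal_id=sp_id, select=["id", "appRoleId"]):
    #         print(assignment.id)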
async def create_app_role_assigned_to(
self,
service_principal_id: str,
body: "models.MicrosoftGraphAppRoleAssignment",
**kwargs
) -> "models.MicrosoftGraphAppRoleAssignment":
"""Create new navigation property to appRoleAssignedTo for servicePrincipals.
Create new navigation property to appRoleAssignedTo for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property.
:type body: ~applications.models.MicrosoftGraphAppRoleAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphAppRoleAssignment, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphAppRoleAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphAppRoleAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_app_role_assigned_to.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MicrosoftGraphAppRoleAssignment')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphAppRoleAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_app_role_assigned_to.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignedTo'} # type: ignore
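    # Hedged usage sketch (an addition): creating an assignment; the attribute
    # names on MicrosoftGraphAppRoleAssignment are assumptions for illustration.
    #
    #     body = models.MicrosoftGraphAppRoleAssignment(
    #         principal_id=principal_id, resource_id=sp_id, app_role_id=role_id)
    #     created = await client.service_principals.create_app_role_assigned_to(
    #         service_principal_id=sp_id, body=body)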
async def get_app_role_assigned_to(
self,
service_principal_id: str,
app_role_assignment_id: str,
select: Optional[List[Union[str, "models.Enum34"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> "models.MicrosoftGraphAppRoleAssignment":
"""Get appRoleAssignedTo from servicePrincipals.
Get appRoleAssignedTo from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param app_role_assignment_id: key: id of appRoleAssignment.
:type app_role_assignment_id: str
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum34]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphAppRoleAssignment, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphAppRoleAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphAppRoleAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.get_app_role_assigned_to.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'appRoleAssignment-id': self._serialize.url("app_role_assignment_id", app_role_assignment_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphAppRoleAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_app_role_assigned_to.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignedTo/{appRoleAssignment-id}'} # type: ignore
async def update_app_role_assigned_to(
self,
service_principal_id: str,
app_role_assignment_id: str,
body: "models.MicrosoftGraphAppRoleAssignment",
**kwargs
) -> None:
"""Update the navigation property appRoleAssignedTo in servicePrincipals.
Update the navigation property appRoleAssignedTo in servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param app_role_assignment_id: key: id of appRoleAssignment.
:type app_role_assignment_id: str
:param body: New navigation property values.
:type body: ~applications.models.MicrosoftGraphAppRoleAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_app_role_assigned_to.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'appRoleAssignment-id': self._serialize.url("app_role_assignment_id", app_role_assignment_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MicrosoftGraphAppRoleAssignment')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
update_app_role_assigned_to.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignedTo/{appRoleAssignment-id}'} # type: ignore
async def delete_app_role_assigned_to(
self,
service_principal_id: str,
app_role_assignment_id: str,
if_match: Optional[str] = None,
**kwargs
) -> None:
"""Delete navigation property appRoleAssignedTo for servicePrincipals.
Delete navigation property appRoleAssignedTo for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param app_role_assignment_id: key: id of appRoleAssignment.
:type app_role_assignment_id: str
:param if_match: ETag.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.delete_app_role_assigned_to.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'appRoleAssignment-id': self._serialize.url("app_role_assignment_id", app_role_assignment_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_app_role_assigned_to.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignedTo/{appRoleAssignment-id}'} # type: ignore
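    # Hedged usage sketch (an addition): deletes accept an optional ETag via
    # `if_match` for optimistic concurrency; passing "*" to force the delete is
    # a common OData convention, assumed to apply here.
    #
    #     await client.service_principals.delete_app_role_assigned_to(
    #         service_principal_id=sp_id,
    #         app_role_assignment_id=assignment_id, if_match="*")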
def list_app_role_assignments(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum35"]]] = None,
select: Optional[List[Union[str, "models.Enum36"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfAppRoleAssignment1"]:
"""Get appRoleAssignments from servicePrincipals.
Get appRoleAssignments from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum35]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum36]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfAppRoleAssignment1 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfAppRoleAssignment1]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfAppRoleAssignment1"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_app_role_assignments.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfAppRoleAssignment1', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_app_role_assignments.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignments'} # type: ignore
async def create_app_role_assignments(
self,
service_principal_id: str,
body: "models.MicrosoftGraphAppRoleAssignment",
**kwargs
) -> "models.MicrosoftGraphAppRoleAssignment":
"""Create new navigation property to appRoleAssignments for servicePrincipals.
Create new navigation property to appRoleAssignments for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property.
:type body: ~applications.models.MicrosoftGraphAppRoleAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphAppRoleAssignment, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphAppRoleAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphAppRoleAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_app_role_assignments.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MicrosoftGraphAppRoleAssignment')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphAppRoleAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_app_role_assignments.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignments'} # type: ignore
async def get_app_role_assignments(
self,
service_principal_id: str,
app_role_assignment_id: str,
select: Optional[List[Union[str, "models.Enum37"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> "models.MicrosoftGraphAppRoleAssignment":
"""Get appRoleAssignments from servicePrincipals.
Get appRoleAssignments from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param app_role_assignment_id: key: id of appRoleAssignment.
:type app_role_assignment_id: str
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum37]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphAppRoleAssignment, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphAppRoleAssignment
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphAppRoleAssignment"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.get_app_role_assignments.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'appRoleAssignment-id': self._serialize.url("app_role_assignment_id", app_role_assignment_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphAppRoleAssignment', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_app_role_assignments.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignments/{appRoleAssignment-id}'} # type: ignore
async def update_app_role_assignments(
self,
service_principal_id: str,
app_role_assignment_id: str,
body: "models.MicrosoftGraphAppRoleAssignment",
**kwargs
) -> None:
"""Update the navigation property appRoleAssignments in servicePrincipals.
Update the navigation property appRoleAssignments in servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param app_role_assignment_id: key: id of appRoleAssignment.
:type app_role_assignment_id: str
:param body: New navigation property values.
:type body: ~applications.models.MicrosoftGraphAppRoleAssignment
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_app_role_assignments.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'appRoleAssignment-id': self._serialize.url("app_role_assignment_id", app_role_assignment_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MicrosoftGraphAppRoleAssignment')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
update_app_role_assignments.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignments/{appRoleAssignment-id}'} # type: ignore
async def delete_app_role_assignments(
self,
service_principal_id: str,
app_role_assignment_id: str,
if_match: Optional[str] = None,
**kwargs
) -> None:
"""Delete navigation property appRoleAssignments for servicePrincipals.
Delete navigation property appRoleAssignments for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param app_role_assignment_id: key: id of appRoleAssignment.
:type app_role_assignment_id: str
:param if_match: ETag.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.delete_app_role_assignments.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'appRoleAssignment-id': self._serialize.url("app_role_assignment_id", app_role_assignment_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_app_role_assignments.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/appRoleAssignments/{appRoleAssignment-id}'} # type: ignore
def list_claims_mapping_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum38"]]] = None,
select: Optional[List[Union[str, "models.Enum39"]]] = None,
expand: Optional[List[Union[str, "models.Enum40"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfClaimsMappingPolicy"]:
"""Get claimsMappingPolicies from servicePrincipals.
Get claimsMappingPolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum38]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum39]
:param expand: Expand related entities.
:type expand: list[str or ~applications.models.Enum40]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfClaimsMappingPolicy or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfClaimsMappingPolicy]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfClaimsMappingPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_claims_mapping_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfClaimsMappingPolicy', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_claims_mapping_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/claimsMappingPolicies'} # type: ignore
def list_ref_claims_mapping_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum41"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfClaimsMappingPolicy"]:
"""Get ref of claimsMappingPolicies from servicePrincipals.
Get ref of claimsMappingPolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum41]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfLinksOfClaimsMappingPolicy or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfClaimsMappingPolicy]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfClaimsMappingPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_claims_mapping_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfClaimsMappingPolicy', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_claims_mapping_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/claimsMappingPolicies/$ref'} # type: ignore
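    # Hedged note (an addition): the $ref variant pages *links* to the related
    # claimsMappingPolicy objects rather than the full entities, hence the
    # collection-of-links model above. Client construction is an assumption.
    #
    #     async for link in client.service_principals.list_ref_claims_mapping_policies(
    #             service_principal_id=sp_id):
    #         print(link)  # each item is expected to be a URL to a policy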
async def create_ref_claims_mapping_policies(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to claimsMappingPolicies for servicePrincipals.
Create new navigation property ref to claimsMappingPolicies for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_claims_mapping_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_claims_mapping_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/claimsMappingPolicies/$ref'} # type: ignore
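    # Hedged usage sketch (an addition): Microsoft Graph $ref creation takes a
    # JSON body whose "@odata.id" points at the object to link; the URL below
    # is illustrative only.
    #
    #     await client.service_principals.create_ref_claims_mapping_policies(
    #         service_principal_id=sp_id,
    #         body={"@odata.id": "https://graph.microsoft.com/v1.0/policies/"
    #                            "claimsMappingPolicies/{id}"})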
def list_created_objects(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum42"]]] = None,
select: Optional[List[Union[str, "models.Enum43"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfDirectoryObject0"]:
"""Get createdObjects from servicePrincipals.
Get createdObjects from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum42]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum43]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfDirectoryObject0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfDirectoryObject0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfDirectoryObject0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_created_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfDirectoryObject0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_created_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/createdObjects'} # type: ignore
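    # Illustrative usage sketch (not generated code). Assumes an async client that
    # exposes these operations as ``client.service_principals``; the client
    # construction and attribute name are assumptions, not confirmed by this file.
    #
    #     async def print_created_objects(client, sp_id: str) -> None:
    #         # list_created_objects returns an AsyncItemPaged; iterate it with
    #         # ``async for`` -- page requests are issued lazily as needed.
    #         async for obj in client.service_principals.list_created_objects(
    #             sp_id, select=["id"]
    #         ):
    #             print(obj)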
def list_ref_created_objects(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum44"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfDirectoryObject0"]:
"""Get ref of createdObjects from servicePrincipals.
Get ref of createdObjects from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum44]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfLinksOfDirectoryObject0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfDirectoryObject0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfDirectoryObject0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_created_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfDirectoryObject0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_created_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/createdObjects/$ref'} # type: ignore
async def create_ref_created_objects(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to createdObjects for servicePrincipals.
Create new navigation property ref to createdObjects for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_created_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_created_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/createdObjects/$ref'} # type: ignore
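    # Illustrative usage sketch (not generated code). A $ref create takes a raw
    # dict body; per OData conventions the reference is expressed as an
    # ``@odata.id`` link. The URL below is an assumption shown only for shape.
    #
    #     body = {
    #         "@odata.id": "https://graph.microsoft.com/beta/directoryObjects/{id}"
    #     }
    #     await client.service_principals.create_ref_created_objects(sp_id, body)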
def list_endpoints(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum45"]]] = None,
select: Optional[List[Union[str, "models.Enum46"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfEndpoint"]:
"""Get endpoints from servicePrincipals.
Get endpoints from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum45]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum46]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfEndpoint or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfEndpoint]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfEndpoint"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_endpoints.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfEndpoint', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_endpoints.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/endpoints'} # type: ignore
async def create_endpoints(
self,
service_principal_id: str,
body: "models.MicrosoftGraphEndpoint",
**kwargs
) -> "models.MicrosoftGraphEndpoint":
"""Create new navigation property to endpoints for servicePrincipals.
Create new navigation property to endpoints for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property.
:type body: ~applications.models.MicrosoftGraphEndpoint
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphEndpoint, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphEndpoint
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphEndpoint"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_endpoints.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MicrosoftGraphEndpoint')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphEndpoint', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_endpoints.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/endpoints'} # type: ignore
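    # Illustrative usage sketch (not generated code). ``create_endpoints`` POSTs a
    # typed model; the keyword arguments below are assumptions about
    # MicrosoftGraphEndpoint's constructor, included only to indicate the shape.
    #
    #     endpoint = models.MicrosoftGraphEndpoint(
    #         capability="Conversations",    # assumed property name
    #         uri="https://example.invalid", # assumed property name
    #     )
    #     created = await client.service_principals.create_endpoints(sp_id, endpoint)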
async def get_endpoints(
self,
service_principal_id: str,
endpoint_id: str,
select: Optional[List[Union[str, "models.Enum47"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> "models.MicrosoftGraphEndpoint":
"""Get endpoints from servicePrincipals.
Get endpoints from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param endpoint_id: key: id of endpoint.
:type endpoint_id: str
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum47]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphEndpoint, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphEndpoint
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphEndpoint"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.get_endpoints.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'endpoint-id': self._serialize.url("endpoint_id", endpoint_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphEndpoint', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_endpoints.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/endpoints/{endpoint-id}'} # type: ignore
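    # Illustrative usage sketch (not generated code): fetching a single endpoint
    # and trimming the payload with $select. The client attribute name and the
    # selected property names are assumptions.
    #
    #     endpoint = await client.service_principals.get_endpoints(
    #         sp_id, endpoint_id, select=["id", "uri"]  # "uri" is an assumed property
    #     )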
async def update_endpoints(
self,
service_principal_id: str,
endpoint_id: str,
body: "models.MicrosoftGraphEndpoint",
**kwargs
) -> None:
"""Update the navigation property endpoints in servicePrincipals.
Update the navigation property endpoints in servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param endpoint_id: key: id of endpoint.
:type endpoint_id: str
:param body: New navigation property values.
:type body: ~applications.models.MicrosoftGraphEndpoint
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_endpoints.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'endpoint-id': self._serialize.url("endpoint_id", endpoint_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'MicrosoftGraphEndpoint')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
update_endpoints.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/endpoints/{endpoint-id}'} # type: ignore
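    # Illustrative usage sketch (not generated code). PATCH semantics: send only
    # the properties to change; the method returns None on HTTP 204. The ``uri``
    # constructor argument is an assumption about the model's shape.
    #
    #     patch = models.MicrosoftGraphEndpoint(uri="https://example.invalid/new")
    #     await client.service_principals.update_endpoints(sp_id, endpoint_id, patch)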
async def delete_endpoints(
self,
service_principal_id: str,
endpoint_id: str,
if_match: Optional[str] = None,
**kwargs
) -> None:
"""Delete navigation property endpoints for servicePrincipals.
Delete navigation property endpoints for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param endpoint_id: key: id of endpoint.
:type endpoint_id: str
        :param if_match: ETag of the entity; when supplied it is sent as an If-Match precondition.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.delete_endpoints.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
'endpoint-id': self._serialize.url("endpoint_id", endpoint_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_endpoints.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/endpoints/{endpoint-id}'} # type: ignore
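    # Illustrative usage sketch (not generated code). Passing the entity's ETag
    # via ``if_match`` makes the delete conditional on the entity being unchanged;
    # omit it to delete unconditionally.
    #
    #     await client.service_principals.delete_endpoints(
    #         sp_id, endpoint_id, if_match='W/"etag-value"'
    #     )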
def list_home_realm_discovery_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum48"]]] = None,
select: Optional[List[Union[str, "models.Enum49"]]] = None,
expand: Optional[List[Union[str, "models.Enum50"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfHomeRealmDiscoveryPolicy0"]:
"""Get homeRealmDiscoveryPolicies from servicePrincipals.
Get homeRealmDiscoveryPolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum48]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum49]
:param expand: Expand related entities.
:type expand: list[str or ~applications.models.Enum50]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfHomeRealmDiscoveryPolicy0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfHomeRealmDiscoveryPolicy0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfHomeRealmDiscoveryPolicy0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_home_realm_discovery_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfHomeRealmDiscoveryPolicy0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_home_realm_discovery_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/homeRealmDiscoveryPolicies'} # type: ignore
def list_ref_home_realm_discovery_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum51"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfHomeRealmDiscoveryPolicy0"]:
"""Get ref of homeRealmDiscoveryPolicies from servicePrincipals.
Get ref of homeRealmDiscoveryPolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum51]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfLinksOfHomeRealmDiscoveryPolicy0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfHomeRealmDiscoveryPolicy0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfHomeRealmDiscoveryPolicy0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_home_realm_discovery_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfHomeRealmDiscoveryPolicy0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_home_realm_discovery_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/homeRealmDiscoveryPolicies/$ref'} # type: ignore
async def create_ref_home_realm_discovery_policies(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to homeRealmDiscoveryPolicies for servicePrincipals.
Create new navigation property ref to homeRealmDiscoveryPolicies for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_home_realm_discovery_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_home_realm_discovery_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/homeRealmDiscoveryPolicies/$ref'} # type: ignore
def list_member_of(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum52"]]] = None,
select: Optional[List[Union[str, "models.Enum53"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfDirectoryObject1"]:
"""Get memberOf from servicePrincipals.
Get memberOf from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum52]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum53]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfDirectoryObject1 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfDirectoryObject1]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfDirectoryObject1"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_member_of.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfDirectoryObject1', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_member_of.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/memberOf'} # type: ignore
def list_ref_member_of(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum54"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfDirectoryObject1"]:
"""Get ref of memberOf from servicePrincipals.
Get ref of memberOf from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum54]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfLinksOfDirectoryObject1 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfDirectoryObject1]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfDirectoryObject1"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_member_of.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfDirectoryObject1', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_member_of.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/memberOf/$ref'} # type: ignore
async def create_ref_member_of(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to memberOf for servicePrincipals.
Create new navigation property ref to memberOf for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_member_of.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_member_of.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/memberOf/$ref'} # type: ignore
async def add_key(
self,
service_principal_id: str,
body: "models.PathsN3Fx9GServiceprincipalsServiceprincipalIdMicrosoftGraphAddkeyPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> "models.MicrosoftGraphKeyCredential":
"""Invoke action addKey.
Invoke action addKey.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.PathsN3Fx9GServiceprincipalsServiceprincipalIdMicrosoftGraphAddkeyPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphKeyCredential, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphKeyCredential
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphKeyCredential"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.add_key.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'PathsN3Fx9GServiceprincipalsServiceprincipalIdMicrosoftGraphAddkeyPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphKeyCredential', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
add_key.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.addKey'} # type: ignore
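    # Illustrative usage sketch (not generated code). The action body is the
    # generated Paths...Schema model; the ``key_credential`` and ``proof`` keyword
    # arguments are assumptions about its shape, shown only to indicate intent.
    #
    #     body = models.PathsN3Fx9GServiceprincipalsServiceprincipalIdMicrosoftGraphAddkeyPostRequestbodyContentApplicationJsonSchema(
    #         key_credential=models.MicrosoftGraphKeyCredential(),  # assumed field
    #         proof="<signed JWT>",                                 # assumed field
    #     )
    #     new_key = await client.service_principals.add_key(sp_id, body)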
async def add_password(
self,
service_principal_id: str,
body: "models.PathsIeboplServiceprincipalsServiceprincipalIdMicrosoftGraphAddpasswordPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> "models.MicrosoftGraphPasswordCredential":
"""Invoke action addPassword.
Invoke action addPassword.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.PathsIeboplServiceprincipalsServiceprincipalIdMicrosoftGraphAddpasswordPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphPasswordCredential, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphPasswordCredential
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphPasswordCredential"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.add_password.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'PathsIeboplServiceprincipalsServiceprincipalIdMicrosoftGraphAddpasswordPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphPasswordCredential', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
add_password.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.addPassword'} # type: ignore
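    # Illustrative usage sketch (not generated code). addPassword returns the new
    # password credential; the constructor keywords and the ``secret_text``
    # attribute below are assumptions about the generated models' shapes.
    #
    #     body = models.PathsIeboplServiceprincipalsServiceprincipalIdMicrosoftGraphAddpasswordPostRequestbodyContentApplicationJsonSchema(
    #         password_credential=models.MicrosoftGraphPasswordCredential()  # assumed field
    #     )
    #     cred = await client.service_principals.add_password(sp_id, body)
    #     print(cred.secret_text)  # assumed attribute; the secret is returned only once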
async def check_member_groups(
self,
service_principal_id: str,
body: "models.PathsO5Kx2YServiceprincipalsServiceprincipalIdMicrosoftGraphCheckmembergroupsPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> List[str]:
"""Invoke action checkMemberGroups.
Invoke action checkMemberGroups.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.PathsO5Kx2YServiceprincipalsServiceprincipalIdMicrosoftGraphCheckmembergroupsPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.check_member_groups.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'PathsO5Kx2YServiceprincipalsServiceprincipalIdMicrosoftGraphCheckmembergroupsPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_member_groups.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.checkMemberGroups'} # type: ignore
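    # Illustrative usage sketch (not generated code). The action returns the
    # subset of the supplied group ids that the service principal belongs to; the
    # ``group_ids`` keyword is an assumption about the request-body model.
    #
    #     body = models.PathsO5Kx2YServiceprincipalsServiceprincipalIdMicrosoftGraphCheckmembergroupsPostRequestbodyContentApplicationJsonSchema(
    #         group_ids=["<group-object-id>"]  # assumed field name
    #     )
    #     member_of = await client.service_principals.check_member_groups(sp_id, body)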
async def check_member_objects(
self,
service_principal_id: str,
body: "models.Paths1Ffhl47ServiceprincipalsServiceprincipalIdMicrosoftGraphCheckmemberobjectsPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> List[str]:
"""Invoke action checkMemberObjects.
Invoke action checkMemberObjects.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.Paths1Ffhl47ServiceprincipalsServiceprincipalIdMicrosoftGraphCheckmemberobjectsPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.check_member_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'Paths1Ffhl47ServiceprincipalsServiceprincipalIdMicrosoftGraphCheckmemberobjectsPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_member_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.checkMemberObjects'} # type: ignore
async def get_member_groups(
self,
service_principal_id: str,
body: "models.Paths1850388ServiceprincipalsServiceprincipalIdMicrosoftGraphGetmembergroupsPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> List[str]:
"""Invoke action getMemberGroups.
Invoke action getMemberGroups.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.Paths1850388ServiceprincipalsServiceprincipalIdMicrosoftGraphGetmembergroupsPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get_member_groups.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'Paths1850388ServiceprincipalsServiceprincipalIdMicrosoftGraphGetmembergroupsPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_member_groups.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.getMemberGroups'} # type: ignore
async def get_member_objects(
self,
service_principal_id: str,
body: "models.Paths1Md6PmhServiceprincipalsServiceprincipalIdMicrosoftGraphGetmemberobjectsPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> List[str]:
"""Invoke action getMemberObjects.
Invoke action getMemberObjects.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.Paths1Md6PmhServiceprincipalsServiceprincipalIdMicrosoftGraphGetmemberobjectsPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of str, or the result of cls(response)
:rtype: list[str]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List[str]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get_member_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'Paths1Md6PmhServiceprincipalsServiceprincipalIdMicrosoftGraphGetmemberobjectsPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[str]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_member_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.getMemberObjects'} # type: ignore
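    # Note: per Microsoft Graph semantics, getMemberGroups returns only group ids,
    # whereas getMemberObjects above also includes directory roles and
    # administrative units the principal is a transitive member of.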
async def remove_key(
self,
service_principal_id: str,
body: "models.Paths1UhuhlbServiceprincipalsServiceprincipalIdMicrosoftGraphRemovekeyPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> None:
"""Invoke action removeKey.
Invoke action removeKey.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.Paths1UhuhlbServiceprincipalsServiceprincipalIdMicrosoftGraphRemovekeyPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.remove_key.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'Paths1UhuhlbServiceprincipalsServiceprincipalIdMicrosoftGraphRemovekeyPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
remove_key.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.removeKey'} # type: ignore
async def remove_password(
self,
service_principal_id: str,
body: "models.Paths1Idoj4GServiceprincipalsServiceprincipalIdMicrosoftGraphRemovepasswordPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> None:
"""Invoke action removePassword.
Invoke action removePassword.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: Action parameters.
:type body: ~applications.models.Paths1Idoj4GServiceprincipalsServiceprincipalIdMicrosoftGraphRemovepasswordPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.remove_password.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'Paths1Idoj4GServiceprincipalsServiceprincipalIdMicrosoftGraphRemovepasswordPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
remove_password.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.removePassword'} # type: ignore
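    # removeKey and removePassword are fire-and-forget actions: the service replies
    # 204 No Content on success, so both coroutines return None. A hedged sketch
    # (the ``key_id`` field name is an assumption mirroring the Graph ``keyId``
    # action parameter):
    #
    #     body = models.Paths1Idoj4GServiceprincipalsServiceprincipalIdMicrosoftGraphRemovepasswordPostRequestbodyContentApplicationJsonSchema(
    #         key_id="<passwordCredential-keyId>")
    #     await client.remove_password("<servicePrincipal-id>", body)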
async def restore(
self,
service_principal_id: str,
**kwargs
) -> "models.MicrosoftGraphDirectoryObject":
"""Invoke action restore.
Invoke action restore.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MicrosoftGraphDirectoryObject, or the result of cls(response)
:rtype: ~applications.models.MicrosoftGraphDirectoryObject
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.MicrosoftGraphDirectoryObject"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.restore.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('MicrosoftGraphDirectoryObject', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
restore.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/microsoft.graph.restore'} # type: ignore
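    # restore takes no request body: it POSTs to the action URL and deserializes
    # the recovered directory object (sketch; the id is a placeholder):
    #
    #     restored = await client.restore("<deleted-servicePrincipal-id>")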
def list_oauth2_permission_grants(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum55"]]] = None,
select: Optional[List[Union[str, "models.Enum56"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfOAuth2PermissionGrant"]:
"""Get oauth2PermissionGrants from servicePrincipals.
Get oauth2PermissionGrants from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum55]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum56]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfOAuth2PermissionGrant or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfOAuth2PermissionGrant]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfOAuth2PermissionGrant"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_oauth2_permission_grants.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
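                # $top, $skip, $search, $filter and $count are client-wide OData
                # query options read from the shared configuration rather than
                # passed per call; only $orderby/$select/$expand are method arguments.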
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfOAuth2PermissionGrant', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_oauth2_permission_grants.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/oauth2PermissionGrants'} # type: ignore
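    # A minimal consumption sketch (``client`` is an instance of this operations
    # class; the id is a placeholder):
    #
    #     async for grant in client.list_oauth2_permission_grants("<servicePrincipal-id>"):
    #         print(grant.id)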
def list_ref_oauth2_permission_grants(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum57"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfOAuth2PermissionGrant"]:
"""Get ref of oauth2PermissionGrants from servicePrincipals.
Get ref of oauth2PermissionGrants from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum57]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfLinksOfOAuth2PermissionGrant or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfOAuth2PermissionGrant]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfOAuth2PermissionGrant"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_oauth2_permission_grants.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfOAuth2PermissionGrant', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_oauth2_permission_grants.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/oauth2PermissionGrants/$ref'} # type: ignore
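    # The $ref variants page over links to the related objects (each entry is in
    # effect an ``@odata.id`` reference) rather than the full entities, which is
    # why they accept only $orderby and no $select/$expand arguments.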
async def create_ref_oauth2_permission_grants(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to oauth2PermissionGrants for servicePrincipals.
Create new navigation property ref to oauth2PermissionGrants for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_oauth2_permission_grants.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_oauth2_permission_grants.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/oauth2PermissionGrants/$ref'} # type: ignore
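    # Creating a navigation-property reference posts a raw link dictionary and
    # expects 201 Created. A sketch (the URL is a placeholder):
    #
    #     ref = {"@odata.id": "https://graph.microsoft.com/v1.0/oauth2PermissionGrants/<id>"}
    #     await client.create_ref_oauth2_permission_grants("<servicePrincipal-id>", ref)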
def list_owned_objects(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum58"]]] = None,
select: Optional[List[Union[str, "models.Enum59"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfDirectoryObject2"]:
"""Get ownedObjects from servicePrincipals.
Get ownedObjects from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum58]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum59]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfDirectoryObject2 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfDirectoryObject2]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfDirectoryObject2"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_owned_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfDirectoryObject2', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_owned_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/ownedObjects'} # type: ignore
def list_ref_owned_objects(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum60"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfDirectoryObject2"]:
"""Get ref of ownedObjects from servicePrincipals.
Get ref of ownedObjects from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum60]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfLinksOfDirectoryObject2 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfDirectoryObject2]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfDirectoryObject2"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_owned_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfDirectoryObject2', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_owned_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/ownedObjects/$ref'} # type: ignore
async def create_ref_owned_objects(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to ownedObjects for servicePrincipals.
Create new navigation property ref to ownedObjects for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_owned_objects.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_owned_objects.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/ownedObjects/$ref'} # type: ignore
def list_owners(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum61"]]] = None,
select: Optional[List[Union[str, "models.Enum62"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfDirectoryObject3"]:
"""Get owners from servicePrincipals.
Get owners from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum61]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum62]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfDirectoryObject3 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfDirectoryObject3]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfDirectoryObject3"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_owners.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfDirectoryObject3', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_owners.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/owners'} # type: ignore
def list_ref_owners(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum63"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfDirectoryObject3"]:
"""Get ref of owners from servicePrincipals.
Get ref of owners from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum63]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfLinksOfDirectoryObject3 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfDirectoryObject3]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfDirectoryObject3"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_owners.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfDirectoryObject3', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_owners.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/owners/$ref'} # type: ignore
async def create_ref_owners(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to owners for servicePrincipals.
Create new navigation property ref to owners for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_owners.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_owners.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/owners/$ref'} # type: ignore
def list_token_issuance_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum64"]]] = None,
select: Optional[List[Union[str, "models.Enum65"]]] = None,
expand: Optional[List[Union[str, "models.Enum66"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfTokenIssuancePolicy0"]:
"""Get tokenIssuancePolicies from servicePrincipals.
Get tokenIssuancePolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum64]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum65]
:param expand: Expand related entities.
:type expand: list[str or ~applications.models.Enum66]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfTokenIssuancePolicy0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfTokenIssuancePolicy0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfTokenIssuancePolicy0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_token_issuance_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfTokenIssuancePolicy0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_token_issuance_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/tokenIssuancePolicies'} # type: ignore
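    # $orderby/$select/$expand accept the generated str-based enums or plain
    # strings with the same values (sketch; the selected property names are
    # assumptions about the Enum65 members):
    #
    #     async for policy in client.list_token_issuance_policies(
    #             "<servicePrincipal-id>", select=["id", "displayName"]):
    #         print(policy.display_name)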
def list_ref_token_issuance_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum67"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfTokenIssuancePolicy0"]:
"""Get ref of tokenIssuancePolicies from servicePrincipals.
Get ref of tokenIssuancePolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum67]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfLinksOfTokenIssuancePolicy0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfTokenIssuancePolicy0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfTokenIssuancePolicy0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_token_issuance_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfTokenIssuancePolicy0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_token_issuance_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/tokenIssuancePolicies/$ref'} # type: ignore
async def create_ref_token_issuance_policies(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to tokenIssuancePolicies for servicePrincipals.
Create new navigation property ref to tokenIssuancePolicies for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_token_issuance_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_token_issuance_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/tokenIssuancePolicies/$ref'} # type: ignore
def list_token_lifetime_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum68"]]] = None,
select: Optional[List[Union[str, "models.Enum69"]]] = None,
expand: Optional[List[Union[str, "models.Enum70"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfTokenLifetimePolicy0"]:
"""Get tokenLifetimePolicies from servicePrincipals.
Get tokenLifetimePolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum68]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum69]
:param expand: Expand related entities.
:type expand: list[str or ~applications.models.Enum70]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfTokenLifetimePolicy0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfTokenLifetimePolicy0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfTokenLifetimePolicy0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_token_lifetime_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfTokenLifetimePolicy0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_token_lifetime_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/tokenLifetimePolicies'} # type: ignore
def list_ref_token_lifetime_policies(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum71"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfTokenLifetimePolicy0"]:
"""Get ref of tokenLifetimePolicies from servicePrincipals.
Get ref of tokenLifetimePolicies from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum71]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CollectionOfLinksOfTokenLifetimePolicy0 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfTokenLifetimePolicy0]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfTokenLifetimePolicy0"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_token_lifetime_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfTokenLifetimePolicy0', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_token_lifetime_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/tokenLifetimePolicies/$ref'} # type: ignore
async def create_ref_token_lifetime_policies(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to tokenLifetimePolicies for servicePrincipals.
Create new navigation property ref to tokenLifetimePolicies for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_token_lifetime_policies.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_token_lifetime_policies.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/tokenLifetimePolicies/$ref'} # type: ignore
def list_transitive_member_of(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum72"]]] = None,
select: Optional[List[Union[str, "models.Enum73"]]] = None,
expand: Optional[List[str]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfDirectoryObject4"]:
"""Get transitiveMemberOf from servicePrincipals.
Get transitiveMemberOf from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum72]
:param select: Select properties to be returned.
:type select: list[str or ~applications.models.Enum73]
:param expand: Expand related entities.
:type expand: list[str]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfDirectoryObject4 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfDirectoryObject4]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfDirectoryObject4"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_transitive_member_of.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
if select is not None:
query_parameters['$select'] = self._serialize.query("select", select, '[str]', div=',')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfDirectoryObject4', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_transitive_member_of.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/transitiveMemberOf'} # type: ignore
def list_ref_transitive_member_of(
self,
service_principal_id: str,
orderby: Optional[List[Union[str, "models.Enum74"]]] = None,
**kwargs
) -> AsyncIterable["models.CollectionOfLinksOfDirectoryObject4"]:
"""Get ref of transitiveMemberOf from servicePrincipals.
Get ref of transitiveMemberOf from servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param orderby: Order items by property values.
:type orderby: list[str or ~applications.models.Enum74]
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either CollectionOfLinksOfDirectoryObject4 or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~applications.models.CollectionOfLinksOfDirectoryObject4]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CollectionOfLinksOfDirectoryObject4"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_ref_transitive_member_of.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
if self._config.top is not None:
query_parameters['$top'] = self._serialize.query("self._config.top", self._config.top, 'int', minimum=0)
if self._config.skip is not None:
query_parameters['$skip'] = self._serialize.query("self._config.skip", self._config.skip, 'int', minimum=0)
if self._config.search is not None:
query_parameters['$search'] = self._serialize.query("self._config.search", self._config.search, 'str')
if self._config.filter is not None:
query_parameters['$filter'] = self._serialize.query("self._config.filter", self._config.filter, 'str')
if self._config.count is not None:
query_parameters['$count'] = self._serialize.query("self._config.count", self._config.count, 'bool')
if orderby is not None:
query_parameters['$orderby'] = self._serialize.query("orderby", orderby, '[str]', div=',')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('CollectionOfLinksOfDirectoryObject4', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.odata_next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.OdataError, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_ref_transitive_member_of.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/transitiveMemberOf/$ref'} # type: ignore
async def create_ref_transitive_member_of(
self,
service_principal_id: str,
body: Dict[str, object],
**kwargs
) -> Dict[str, object]:
"""Create new navigation property ref to transitiveMemberOf for servicePrincipals.
Create new navigation property ref to transitiveMemberOf for servicePrincipals.
:param service_principal_id: key: id of servicePrincipal.
:type service_principal_id: str
:param body: New navigation property ref value.
:type body: dict[str, object]
:keyword callable cls: A custom type or function that will be passed the direct response
:return: dict mapping str to object, or the result of cls(response)
:rtype: dict[str, object]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Dict[str, object]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_ref_transitive_member_of.metadata['url'] # type: ignore
path_format_arguments = {
'servicePrincipal-id': self._serialize.url("service_principal_id", service_principal_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, '{object}')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('{object}', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_ref_transitive_member_of.metadata = {'url': '/servicePrincipals/{servicePrincipal-id}/transitiveMemberOf/$ref'} # type: ignore
async def delta(
self,
**kwargs
) -> List["models.MicrosoftGraphServicePrincipal"]:
"""Invoke function delta.
Invoke function delta.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of MicrosoftGraphServicePrincipal, or the result of cls(response)
:rtype: list[~applications.models.MicrosoftGraphServicePrincipal]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["models.MicrosoftGraphServicePrincipal"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
accept = "application/json"
# Construct URL
url = self.delta.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[MicrosoftGraphServicePrincipal]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
delta.metadata = {'url': '/servicePrincipals/microsoft.graph.delta()'} # type: ignore
async def get_available_extension_properties(
self,
body: "models.PathsGo2T4HServiceprincipalsMicrosoftGraphGetavailableextensionpropertiesPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> List["models.MicrosoftGraphExtensionProperty"]:
"""Invoke action getAvailableExtensionProperties.
Invoke action getAvailableExtensionProperties.
:param body: Action parameters.
:type body: ~applications.models.PathsGo2T4HServiceprincipalsMicrosoftGraphGetavailableextensionpropertiesPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of MicrosoftGraphExtensionProperty, or the result of cls(response)
:rtype: list[~applications.models.MicrosoftGraphExtensionProperty]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["models.MicrosoftGraphExtensionProperty"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get_available_extension_properties.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'PathsGo2T4HServiceprincipalsMicrosoftGraphGetavailableextensionpropertiesPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[MicrosoftGraphExtensionProperty]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_available_extension_properties.metadata = {'url': '/servicePrincipals/microsoft.graph.getAvailableExtensionProperties'} # type: ignore
async def get_by_ids(
self,
body: "models.Paths15YkyvsServiceprincipalsMicrosoftGraphGetbyidsPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> List["models.MicrosoftGraphDirectoryObject"]:
"""Invoke action getByIds.
Invoke action getByIds.
:param body: Action parameters.
:type body: ~applications.models.Paths15YkyvsServiceprincipalsMicrosoftGraphGetbyidsPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of MicrosoftGraphDirectoryObject, or the result of cls(response)
:rtype: list[~applications.models.MicrosoftGraphDirectoryObject]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["models.MicrosoftGraphDirectoryObject"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.get_by_ids.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'Paths15YkyvsServiceprincipalsMicrosoftGraphGetbyidsPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[MicrosoftGraphDirectoryObject]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_by_ids.metadata = {'url': '/servicePrincipals/microsoft.graph.getByIds'} # type: ignore
async def validate_properties(
self,
body: "models.PathsYq15M4ServiceprincipalsMicrosoftGraphValidatepropertiesPostRequestbodyContentApplicationJsonSchema",
**kwargs
) -> None:
"""Invoke action validateProperties.
Invoke action validateProperties.
:param body: Action parameters.
:type body: ~applications.models.PathsYq15M4ServiceprincipalsMicrosoftGraphValidatepropertiesPostRequestbodyContentApplicationJsonSchema
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.validate_properties.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(body, 'PathsYq15M4ServiceprincipalsMicrosoftGraphValidatepropertiesPostRequestbodyContentApplicationJsonSchema')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.OdataError, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
validate_properties.metadata = {'url': '/servicePrincipals/microsoft.graph.validateProperties'} # type: ignore
| 50.826291 | 175 | 0.656466 |
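# A minimal, hypothetical consumption sketch for the generated operations
# above. The `operations` object (an instance of the generated operations
# class, already wired to a client and credential) is assumed; only the
# `async for` pattern over AsyncItemPaged is taken from the code itself.
async def dump_transitive_member_of(operations, service_principal_id: str):
    # list_transitive_member_of returns an AsyncItemPaged; `async for` drives
    # the prepare_request/get_next machinery page by page, following each
    # response's odata_next_link.
    async for directory_object in operations.list_transitive_member_of(
            service_principal_id):
        print(directory_object)
# e.g. asyncio.run(dump_transitive_member_of(ops, "<service-principal-id>"))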
31f577c112ff35e98c9e574a562888c04ebef78a | 3,409 | py | Python | barbeque/staticfiles/middleware.py | moccu/barbeque | 6f18ef67162763bb9c33f73ad6af7f20e2d08b86 | ["BSD-3-Clause"] | 5 | 2015-04-15T08:56:14.000Z | 2020-04-10T00:02:41.000Z | barbeque/staticfiles/middleware.py | moccu/barbeque | 6f18ef67162763bb9c33f73ad6af7f20e2d08b86 | ["BSD-3-Clause"] | 22 | 2015-03-11T13:38:44.000Z | 2018-12-04T17:01:23.000Z | barbeque/staticfiles/middleware.py | moccu/barbeque | 6f18ef67162763bb9c33f73ad6af7f20e2d08b86 | ["BSD-3-Clause"] | 3 | 2015-10-01T13:20:08.000Z | 2018-04-18T09:04:14.000Z |
"""ServeStaticFileMiddleware facilitates serving static files on docker.
When serving static files with Docker, a static file is first served through
Django; this happens only the first time the file is requested, after which
nginx serves it from its cache.
The middleware also maps static files to their hashed names, making it
possible to reduce the deployment to just the hashed-name files (without
keeping the original duplicates).
"""
import re
from django.conf import settings
from django.http.response import Http404
from django.views.static import serve
from django.utils.module_loading import import_string
class ServeStaticFileMiddleware(object):
def __init__(self, get_response=None):
self.get_response = get_response
self.path_regex = re.compile(
r'^/{0}(.*)$'.format(settings.STATIC_URL.strip('/')))
self.manifest = self.load_staticfiles_manifest()
def __call__(self, request):
response = self.get_response(request)
return self.process_response(request, response)
def serve_response(self, request, file_path):
return serve(request, file_path, document_root=settings.STATIC_ROOT)
def load_staticfiles_manifest(self):
"""Staticfiles manifest maps original names to names with hash.
The method will reise if project storage does not implement load_manifest.
"""
storage_module = import_string(settings.STATICFILES_STORAGE)
storage = storage_module()
return storage.load_manifest()
def unhash_file_name(self, requested_path):
"""Returns file original name (without hash),
which is a key in staticfiles manifest
"""
result = re.search(r'(.+)(\.[0-9a-f]{12})(\.?)(\w+)?$', requested_path)
if result:
result_str = '{}{}{}'.format(
result.group(1) or '',
result.group(3) or '',
result.group(4) or ''
)
return result_str
else:
return requested_path
def find_requested_file(self, requested_path):
"""Returns path to existing file (file path with current hash)"""
        if not self.manifest:
return None
file_name = self.unhash_file_name(requested_path).strip('/')
try:
return self.manifest[file_name]
except KeyError:
return None
def process_response(self, request, response):
if not is_static_request(request, response):
return response
path = self.path_regex.match(request.path)
if not path:
return response
# Try to serve a file with original name from request
try:
return self.serve_response(request, path.group(1))
except Http404:
pass
# Map requested file to hash and try to serve file with hash
requested_path = self.find_requested_file(path.group(1))
if requested_path is None:
return response
try:
return self.serve_response(request, requested_path)
except Http404:
pass
return response
def is_static_request(request, response):
return all([
request.path.startswith(settings.STATIC_URL),
response.status_code in [301, 404],
])
| 33.752475 | 82 | 0.654444 |
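# A hypothetical Django settings sketch showing one way the middleware above
# could be wired up; the module paths and ordering here are assumptions, not
# requirements stated by the file.
MIDDLEWARE = [
    "barbeque.staticfiles.middleware.ServeStaticFileMiddleware",
    # ... the rest of the middleware stack ...
]
STATIC_URL = "/static/"
STATIC_ROOT = "/app/static"
# The configured storage must provide load_manifest(), since the middleware's
# __init__ calls it to build the original-name -> hashed-name mapping.
STATICFILES_STORAGE = "myproject.storage.ManifestStorageWithLoadManifest"  # hypothetical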
aab52ff5307fb4dee5350f205e019e6650f97996 | 5,298 | py | Python | tibiawikisql/models/spell.py | Galarzaa90/tibiawiki-sql | d1683320661ec3272666c846bdbeed851db1b26c | ["Apache-2.0"] | 19 | 2017-10-31T12:24:37.000Z | 2022-01-25T14:16:38.000Z | tibiawikisql/models/spell.py | Galarzaa90/tibiawiki-sql | d1683320661ec3272666c846bdbeed851db1b26c | ["Apache-2.0"] | 29 | 2017-11-01T21:01:57.000Z | 2021-08-17T23:47:51.000Z | tibiawikisql/models/spell.py | Galarzaa90/tibiawiki-sql | d1683320661ec3272666c846bdbeed851db1b26c | ["Apache-2.0"] | 7 | 2017-11-01T20:47:06.000Z | 2021-01-16T23:37:26.000Z |
# Copyright 2021 Allan Galarza
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tibiawikisql import schema
from tibiawikisql.models import NpcSpell, abc
from tibiawikisql.utils import clean_links, parse_boolean, parse_integer
class Spell(abc.Row, abc.Parseable, table=schema.Spell):
"""Represents a Spell.
Attributes
----------
article_id: :class:`int`
The id of the containing article.
title: :class:`str`
The title of the containing article.
timestamp: :class:`int`
The last time the containing article was edited.
name: :class:`str`
The name of the spell.
words: :class:`str`
The spell's invocation words.
effect: :class:`str`
The effects of casting the spell.
type: :class:`str`
The spell's type.
group_spell: :class:`str`
The spell's group.
group_secondary: :class:`str`
The spell's secondary group.
group_rune: :class:`str`
The group of the rune created by this spell.
element: :class:`str`
The element of the damage made by the spell.
mana: :class:`int`
        The mana cost of the spell.
soul: :class:`int`
The soul cost of the spell.
price: :class:`int`
The gold cost of the spell.
cooldown: :class:`int`
The spell's individual cooldown in seconds.
cooldown_group: :class:`int`
The spell's group cooldown in seconds. The time you have to wait before casting another spell in the same group.
cooldown_group_secondary: :class:`int`
The spell's secondary group cooldown.
level: :class:`int`
The level required to use the spell.
premium: :class:`bool`
Whether the spell is premium only or not.
promotion: :class:`bool`
Whether you need to be promoted to buy or cast this spell.
knight: :class:`bool`
Whether the spell can be used by knights or not.
paladin: :class:`bool`
Whether the spell can be used by paladins or not.
druid: :class:`bool`
Whether the spell can be used by druids or not.
sorcerer: :class:`bool`
Whether the spell can be used by sorcerers or not.
taught_by: list of :class:`NpcSpell`
NPCs that teach this spell.
status: :class:`str`
The status of this spell in the game.
version: :class:`str`
The client version where the spell was implemented.
image: :class:`bytes`
The spell's image in bytes.
"""
__slots__ = (
"article_id",
"title",
"timestamp",
"name",
"words",
"type",
"group_spell",
"group_secondary",
"group_rune",
"element",
"mana",
"soul",
"price",
"cooldown",
"cooldown_group",
"cooldown_group_secondary",
"level",
"premium",
"promotion",
"taught_by",
"knight",
"sorcerer",
"druid",
"paladin",
"image",
"version",
"effect",
"status",
)
_map = {
"name": ("name", str.strip),
"effect": ("effect", clean_links),
"words": ("words", str.strip),
"type": ("type", str.strip),
"subclass": ("group_spell", str.strip),
"secondarygroup": ("group_secondary", str.strip),
"runegroup": ("group_rune", str.strip),
"damagetype": ("element", str.strip),
"mana": ("mana", parse_integer),
"soul": ("soul", parse_integer),
"spellcost": ("price", parse_integer),
"cooldown": ("cooldown", parse_integer),
"cooldowngroup": ("cooldown_group", parse_integer),
"cooldowngroup2": ("cooldown_group_secondary", parse_integer),
"levelrequired": ("level", parse_integer),
"premium": ("premium", parse_boolean),
"promotion": ("promotion", lambda x: parse_boolean(x, False)),
"implemented": ("version", str.strip),
"status": ("status", str.lower),
}
_template = "Infobox_Spell"
def __init__(self, **kwargs):
super().__init__(**kwargs)
@classmethod
def from_article(cls, article):
spell = super().from_article(article)
if not spell:
return None
if "voc" in spell._raw_attributes:
for vocation in ["knight", "sorcerer", "druid", "paladin"]:
if vocation in spell._raw_attributes["voc"].lower():
setattr(spell, vocation, True)
return spell
@classmethod
def get_by_field(cls, c, field, value, use_like=False):
spell: cls = super().get_by_field(c, field, value, use_like)
if spell is None:
return None
spell.taught_by = NpcSpell.search(c, "spell_id", spell.article_id)
return spell
| 33.745223 | 120 | 0.606833 |
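# A hypothetical lookup sketch against a previously generated tibiawiki
# SQLite database; the database file name and the spell name are placeholders,
# while get_by_field and taught_by come from the Spell class above.
import sqlite3

conn = sqlite3.connect("tibiawiki.db")
spell = Spell.get_by_field(conn.cursor(), "name", "Light Healing")
if spell is not None:
    # taught_by was filled in by get_by_field via NpcSpell.search
    print(spell.words, spell.mana, len(spell.taught_by))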
9eb7109d3dcc98a8cd5bea3da453230886cf093d | 16,233 | py | Python | tests/xbeam/modal/rotating_beam/generate_bielawa_baromega2_1e3.py | AntonioWR/sharpy | c922be8d5a1831c4624b22f39264e2f417a03deb | ["BSD-3-Clause"] | null | null | null | tests/xbeam/modal/rotating_beam/generate_bielawa_baromega2_1e3.py | AntonioWR/sharpy | c922be8d5a1831c4624b22f39264e2f417a03deb | ["BSD-3-Clause"] | null | null | null | tests/xbeam/modal/rotating_beam/generate_bielawa_baromega2_1e3.py | AntonioWR/sharpy | c922be8d5a1831c4624b22f39264e2f417a03deb | ["BSD-3-Clause"] | null | null | null |
import h5py as h5
import numpy as np
import configparser
import os
import sharpy.utils.algebra as algebra
# Generate errors during execution
import sys
import sharpy.utils.cout_utils as cout
case_name = 'bielawa_baromega2_1e3'
route = os.path.dirname(os.path.realpath(__file__)) + '/'
num_elem = 40
num_node_elem = 3
length = 1
# linear_factor: scaling factor to make the non linear solver behave as a linear one
linear_factor=1
E=1e6*linear_factor
A=1e4
I=1e-4
ei = E*I
m_bar = 1*linear_factor
rot_speed=np.sqrt(1e3*ei/m_bar/length**4)
steps_per_revolution = 180
dt = 2.0*np.pi/rot_speed/steps_per_revolution
n_tstep = 1*steps_per_revolution+1
n_tstep = 90
def clean_test_files():
fem_file_name = route + '/' + case_name + '.fem.h5'
if os.path.isfile(fem_file_name):
os.remove(fem_file_name)
dyn_file_name = route + '/' + case_name + '.dyn.h5'
if os.path.isfile(dyn_file_name):
os.remove(dyn_file_name)
aero_file_name = route + '/' + case_name + '.aero.h5'
if os.path.isfile(aero_file_name):
os.remove(aero_file_name)
solver_file_name = route + '/' + case_name + '.sharpy'
if os.path.isfile(solver_file_name):
os.remove(solver_file_name)
def generate_fem_file(route, case_name, num_elem, num_node_elem=3):
global num_node
num_node = (num_node_elem - 1)*num_elem + 1
# import pdb; pdb.set_trace()
angle = 0*np.pi/180.0
x = (np.linspace(0, length, num_node))*np.cos(angle)
y = (np.linspace(0, length, num_node))*np.sin(angle)
z = np.zeros((num_node,))
structural_twist = np.zeros((num_elem, num_node_elem))
frame_of_reference_delta = np.zeros((num_elem, num_node_elem, 3))
for ielem in range(num_elem):
for inode in range(num_node_elem):
frame_of_reference_delta[ielem, inode, :] = [-np.sin(angle), np.cos(angle), 0]
scale = 1
x *= scale
y *= scale
z *= scale
conn = np.zeros((num_elem, num_node_elem), dtype=int)
for ielem in range(num_elem):
conn[ielem, :] = (np.ones((3,)) * ielem * (num_node_elem - 1)
+ [0, 2, 1])
# stiffness array
# import pdb; pdb.set_trace()
num_stiffness = 1
ea = E*A
# APPROXIMATION!!!
cout.cout_wrap("Assuming isotropic material", 2)
G = E / 2.0 / (1.0+0.3)
cout.cout_wrap("Using total cross-section area as shear area", 2)
ga = G*A
cout.cout_wrap("Assuming planar cross-sections", 2)
J = 2.0* I
gj = G*J
base_stiffness = np.diag([ea, ga, ga, gj, ei, ei])
stiffness = np.zeros((num_stiffness, 6, 6))
# import pdb; pdb.set_trace()
for i in range(num_stiffness):
stiffness[i, :, :] = base_stiffness
# element stiffness
elem_stiffness = np.zeros((num_elem,), dtype=int)
# mass array
num_mass = 1
base_mass = m_bar*np.diag([1.0, 1.0, 1.0, J/A, I/A, I/A])
# base_mass = m_bar*np.diag([1.0, 1.0, 1.0, 1.0,1.0,1.0])
mass = np.zeros((num_mass, 6, 6))
for i in range(num_mass):
mass[i, :, :] = base_mass
# element masses
elem_mass = np.zeros((num_elem,), dtype=int)
# bocos
boundary_conditions = np.zeros((num_node, 1), dtype=int)
boundary_conditions[0] = 1
boundary_conditions[-1] = -1
# beam number
beam_number = np.zeros((num_node, 1), dtype=int)
# new app forces scheme (only follower)
app_forces = np.zeros((num_node, 6))
# app_forces[0, :] = [0, 0, 3000000, 0, 0, 0]
# lumped masses input
n_lumped_mass = 1
lumped_mass_nodes = np.array([num_node - 1], dtype=int)
lumped_mass = np.zeros((n_lumped_mass, ))
lumped_mass[0] = 0.0
lumped_mass_inertia = np.zeros((n_lumped_mass, 3, 3))
lumped_mass_position = np.zeros((n_lumped_mass, 3))
#n_lumped_mass = 1
#lumped_mass_nodes = np.ones((num_node,), dtype=int)
#lumped_mass = np.zeros((n_lumped_mass, ))
#lumped_mass[0] = m_bar*length/num_elem/(num_node_elem-1)
#lumped_mass_inertia[0,:,:] = np.diag([J, I, I])
#lumped_mass_position = np.zeros((n_lumped_mass, 3))
with h5.File(route + '/' + case_name + '.fem.h5', 'a') as h5file:
# CHECKING
if(elem_stiffness.shape[0]!=num_elem):
sys.exit("ERROR: Element stiffness must be defined for each element")
if(elem_mass.shape[0]!=num_elem):
sys.exit("ERROR: Element mass must be defined for each element")
if(frame_of_reference_delta.shape[0]!=num_elem):
sys.exit("ERROR: The first dimension of FoR does not match the number of elements")
if(frame_of_reference_delta.shape[1]!=num_node_elem):
sys.exit("ERROR: The second dimension of FoR does not match the number of nodes element")
if(frame_of_reference_delta.shape[2]!=3):
sys.exit("ERROR: The third dimension of FoR must be 3")
        if(structural_twist.shape[0]!=num_elem):
            sys.exit("ERROR: The structural twist must be defined for each element")
if(boundary_conditions.shape[0]!=num_node):
sys.exit("ERROR: The boundary conditions must be defined for each node")
if(beam_number.shape[0]!=num_node):
sys.exit("ERROR: The beam number must be defined for each node")
if(app_forces.shape[0]!=num_node):
sys.exit("ERROR: The first dimension of the applied forces matrix does not match the number of nodes")
if(app_forces.shape[1]!=6):
sys.exit("ERROR: The second dimension of the applied forces matrix must be 6")
coordinates = h5file.create_dataset('coordinates', data = np.column_stack((x, y, z)))
conectivities = h5file.create_dataset('connectivities', data = conn)
num_nodes_elem_handle = h5file.create_dataset(
'num_node_elem', data = num_node_elem)
num_nodes_handle = h5file.create_dataset(
'num_node', data = num_node)
num_elem_handle = h5file.create_dataset(
'num_elem', data = num_elem)
stiffness_db_handle = h5file.create_dataset(
'stiffness_db', data = stiffness)
stiffness_handle = h5file.create_dataset(
'elem_stiffness', data = elem_stiffness)
mass_db_handle = h5file.create_dataset(
'mass_db', data = mass)
mass_handle = h5file.create_dataset(
'elem_mass', data = elem_mass)
frame_of_reference_delta_handle = h5file.create_dataset(
'frame_of_reference_delta', data=frame_of_reference_delta)
structural_twist_handle = h5file.create_dataset(
'structural_twist', data=structural_twist)
bocos_handle = h5file.create_dataset(
'boundary_conditions', data=boundary_conditions)
beam_handle = h5file.create_dataset(
'beam_number', data=beam_number)
app_forces_handle = h5file.create_dataset(
'app_forces', data=app_forces)
lumped_mass_nodes_handle = h5file.create_dataset(
'lumped_mass_nodes', data=lumped_mass_nodes)
lumped_mass_handle = h5file.create_dataset(
'lumped_mass', data=lumped_mass)
lumped_mass_inertia_handle = h5file.create_dataset(
'lumped_mass_inertia', data=lumped_mass_inertia)
lumped_mass_position_handle = h5file.create_dataset(
'lumped_mass_position', data=lumped_mass_position)
return num_node, coordinates
def generate_dyn_file():
global num_node
forced_for_vel = np.zeros((n_tstep, 6))
dynamic_forces_time = np.zeros((n_tstep, num_node,6))
for it in range(n_tstep):
# forced_for_vel[it, 3:6] = it/n_tstep*angular_velocity
forced_for_vel[it, 5] = rot_speed
# dynamic_forces_time[it,-1,2] = 100
with h5.File(route + '/' + case_name + '.dyn.h5', 'a') as h5file:
h5file.create_dataset(
'dynamic_forces', data=dynamic_forces_time)
h5file.create_dataset(
'for_vel', data=forced_for_vel)
h5file.create_dataset(
'num_steps', data=n_tstep)
def generate_aero_file():
global num_node
with h5.File(route + '/' + case_name + '.aero.h5', 'a') as h5file:
airfoils_group = h5file.create_group('airfoils')
# add the airfoils
airfoils_group.create_dataset("0", data = np.column_stack( (np.linspace( 0.0, 1.0, 10), np.zeros(10) )) )
# chord
chord_input = h5file.create_dataset('chord', data= np.ones((num_elem,num_node_elem),))
dim_attr = chord_input .attrs['units'] = 'm'
# twist
twist_input = h5file.create_dataset('twist', data=np.zeros((num_elem,num_node_elem),))
dim_attr = twist_input.attrs['units'] = 'rad'
# airfoil distribution
airfoil_distribution_input = h5file.create_dataset('airfoil_distribution', data=np.zeros((num_elem,num_node_elem),dtype=int))
surface_distribution_input = h5file.create_dataset('surface_distribution', data=np.zeros((num_elem,),dtype=int))
surface_m_input = h5file.create_dataset('surface_m', data = np.ones((1,),dtype=int))
m_distribution = 'uniform'
m_distribution_input = h5file.create_dataset('m_distribution', data=m_distribution.encode('ascii', 'ignore'))
aero_node = np.zeros((num_node,),dtype=bool)
aero_node[-3:] = np.ones((3,),dtype=bool)
aero_node_input = h5file.create_dataset('aero_node', data=aero_node)
elastic_axis_input = h5file.create_dataset('elastic_axis', data=0.5*np.ones((num_elem,num_node_elem),))
def generate_solver_file():
file_name = route + '/' + case_name + '.sharpy'
aux_settings = dict()
settings = dict()
settings['SHARPy'] = {'case': case_name,
'route': route,
'flow': ['BeamLoader', 'AerogridLoader', 'StaticCoupled', 'BeamPlot', 'AerogridPlot', 'DynamicPrescribedCoupled', 'Modal'],
'write_screen': 'off',
'write_log': 'on',
'log_folder': route + '/output/',
'log_file': case_name + '.log'}
# AUX DICTIONARIES
aux_settings['velocity_field_input'] = {'u_inf': 100.0,
'u_inf_direction': [0.0, -1.0, 0.0]}
# LOADERS
settings['BeamLoader'] = {'unsteady': 'on',
'orientation': algebra.euler2quat(np.array([0.0,0.0,0.0]))}
settings['AerogridLoader'] = {'unsteady': 'on',
'aligned_grid': 'on',
'mstar': 1,
'freestream_dir': ['0', '-1', '0'],
'wake_shape_generator': 'StraightWake',
'wake_shape_generator_input': {'u_inf': 100,
'u_inf_direction': np.array([0., -1., 0.]),
'dt': dt}}
# POSTPROCESS
settings['AerogridPlot'] = {'include_rbm': 'on',
'include_forward_motion': 'off',
'include_applied_forces': 'on',
'minus_m_star': 0,
'u_inf': 100.0,
'dt': dt}
settings['BeamPlot'] = {'include_rbm': 'on',
'include_applied_forces': 'on',
'include_forward_motion': 'on'}
settings['BeamLoads'] = {}
# STATIC COUPLED
settings['NonLinearStatic'] = {'print_info': 'on',
'max_iterations': 150,
'num_load_steps': 1,
'delta_curved': 1e-15,
'min_delta': 1e-8,
'gravity_on': 'off',
'gravity': 9.81}
settings['StaticUvlm'] = {'print_info': 'on',
'horseshoe': 'off',
'num_cores': 4,
'n_rollup': 0,
'rollup_dt': dt,
'rollup_aic_refresh': 1,
'rollup_tolerance': 1e-4,
'velocity_field_generator': 'SteadyVelocityField',
'velocity_field_input': aux_settings['velocity_field_input'],
'rho': 0.0}
settings['StaticCoupled'] = {'print_info': 'on',
'structural_solver': 'NonLinearStatic',
'structural_solver_settings': settings['NonLinearStatic'],
'aero_solver': 'StaticUvlm',
'aero_solver_settings': settings['StaticUvlm'],
'max_iter': 100,
'n_load_steps': 4,
'tolerance': 1e-8,
'relaxation_factor': 0}
# DYNAMIC PRESCRIBED COUPLED
settings['NonLinearDynamicPrescribedStep'] = {'print_info': 'on',
'max_iterations': 95000,
'delta_curved': 1e-9,
'min_delta': 1e-6,
'newmark_damp': 1e-3,
'gravity_on': 'off',
'gravity': 9.81,
'num_steps': n_tstep,
'dt': dt}
settings['StepUvlm'] = {'print_info': 'on',
'horseshoe': 'off',
'num_cores': 4,
'n_rollup': 0,
'convection_scheme': 2,
'rollup_dt': dt,
'rollup_aic_refresh': 1,
'rollup_tolerance': 1e-4,
'velocity_field_generator': 'SteadyVelocityField',
'velocity_field_input': aux_settings['velocity_field_input'],
'rho': 0.0,
'n_time_steps': n_tstep,
'dt': dt}
settings['DynamicPrescribedCoupled'] = {'structural_solver': 'NonLinearDynamicPrescribedStep',
'structural_solver_settings': settings['NonLinearDynamicPrescribedStep'],
'aero_solver': 'StepUvlm',
'aero_solver_settings': settings['StepUvlm'],
'fsi_substeps': 20000,
'fsi_tolerance': 1e-9,
'relaxation_factor': 0,
'minimum_steps': 1,
'relaxation_steps': 150,
'final_relaxation_factor': 0.0,
'n_time_steps': n_tstep,
'dt': dt,
'postprocessors': ['BeamPlot', 'AerogridPlot'],
'postprocessors_settings': {'BeamPlot': settings['BeamPlot'],
'AerogridPlot': settings['AerogridPlot']}}
settings['Modal'] = {'include_rbm': 'on',
'NumLambda': 10000,
'num_steps': 1,
'print_matrices': 'on'}
import configobj
config = configobj.ConfigObj()
config.filename = file_name
for k, v in settings.items():
config[k] = v
config.write()
# run everything
clean_test_files()
generate_fem_file(route, case_name, num_elem, num_node_elem)
generate_aero_file()
generate_dyn_file()
generate_solver_file()
cout.cout_wrap('Reference for validation: "Rotary wing structural dynamics and aeroelasticity", R.L. Bielawa. AIAA education series. Second edition', 1)
| 41.837629 | 152 | 0.547157 |
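# A standalone sanity check of the rotation-speed choice used above: the case
# name encodes bar_omega^2 = m_bar * Omega^2 * L^4 / (E*I) = 1e3, so rot_speed
# recovers Omega. The constants mirror the script (with linear_factor = 1).
import numpy as np

E, I, m_bar, length = 1e6, 1e-4, 1.0, 1.0
rot_speed = np.sqrt(1e3 * E * I / (m_bar * length**4))
assert abs(m_bar * rot_speed**2 * length**4 / (E * I) - 1e3) < 1e-6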
1aa30dfd34d6396648b15822b1c41c2ec03fccaf | 8,011 | py | Python | Pyto Mac/PyObjC/DiscRecordingUI/_metadata.py | cclauss/Pyto | 1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed | [
"MIT"
] | 4 | 2019-03-11T18:05:49.000Z | 2021-05-22T21:09:09.000Z | Pyto Mac/PyObjC/DiscRecordingUI/_metadata.py | cclauss/Pyto | 1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed | [
"MIT"
] | null | null | null | Pyto Mac/PyObjC/DiscRecordingUI/_metadata.py | cclauss/Pyto | 1c4ccc47e3a91e996bf6ec38c527d244de2cf7ed | [
"MIT"
] | 1 | 2019-03-18T18:53:36.000Z | 2019-03-18T18:53:36.000Z | # This file is generated by objective.metadata
#
# Last update: Sun Jul 1 17:39:54 2018
import objc, sys
if sys.maxsize > 2 ** 32:
def sel32or64(a, b): return b
else:
def sel32or64(a, b): return a
if sys.byteorder == 'little':
def littleOrBig(a, b): return a
else:
def littleOrBig(a, b): return b
misc = {
}
misc.update({'DRBurnSessionProgressDialogOptions': objc.createStructType('DRBurnSessionProgressDialogOptions', sel32or64(b'{DRBurnSessionProgressDialogOptions=LL^{__CFString=}}', b'{DRBurnSessionProgressDialogOptions=II^{__CFString=}}'), ['version', 'dialogOptionFlags', 'description']), 'DREraseSessionSetupDialogOptions': objc.createStructType('DREraseSessionSetupDialogOptions', sel32or64(b'{DREraseSessionSetupDialogOptions=LL}', b'{DREraseSessionSetupDialogOptions=II}'), ['version', 'dialogOptionFlags']), 'DREraseSessionProgressDialogOptions': objc.createStructType('DREraseSessionProgressDialogOptions', sel32or64(b'{DREraseSessionProgressDialogOptions=LL^{__CFString=}}', b'{DREraseSessionProgressDialogOptions=II^{__CFString=}}'), ['version', 'dialogOptionFlags', 'description']), 'DRBurnSessionSetupDialogOptions': objc.createStructType('DRBurnSessionSetupDialogOptions', sel32or64(b'{DRBurnSessionSetupDialogOptions=LL^{__CFString=}}', b'{DRBurnSessionSetupDialogOptions=II^{__CFString=}}'), ['version', 'dialogOptionFlags', 'defaultButtonTitle'])})
constants = '''$DRBurnIcon$DRBurnProgressPanelDidFinishNotification$DRBurnProgressPanelWillBeginNotification$DRBurnSetupPanelDefaultButtonDefaultTitle$DREraseIcon$DREraseProgressPanelDidFinishNotification$DREraseProgressPanelWillBeginNotification$DRSetupPanelDeviceSelectionChangedNotification$DRSetupPanelSelectedDeviceKey$'''
enums = '''$kBurnSessionProgressDialogDefaultOptions@0$kBurnSessionProgressDialogDisplayVerboseProgress@1$kBurnSessionProgressDialogOptionsCurrentVersion@1$kBurnSessionSetupDialogAllowTestBurns@2147483652$kBurnSessionSetupDialogDefaultOptions@0$kBurnSessionSetupDialogDontHandleReservations@2$kBurnSessionSetupDialogForceClosedDiscs@1$kBurnSessionSetupDialogOptionsCurrentVersion@1$kDRBurnProgressSetupCallbacksCurrentVersion@1$kDRBurnSessionCancel@0$kDRBurnSessionOK@1$kDRBurnSessionSetupCallbacksCurrentVersion@1$kDREraseProgressSetupCallbacksCurrentVersion@1$kDREraseSessionCancel@0$kDREraseSessionOK@1$kDREraseSessionSetupCallbacksCurrentVersion@1$kEraseSessionProgressDialogDefaultOptions@0$kEraseSessionProgressDialogOptionsCurrentVersion@1$kEraseSessionSetupDialogDefaultOptions@0$kEraseSessionSetupDialogDontHandleReservations@1$kEraseSessionSetupDialogOptionsCurrentVersion@1$'''
misc.update({})
functions={'DRBurnSessionSetBurn': (b'v^{__DRBurnSession=}^{__DRBurn=}',), 'DRBurnSessionCreate': (b'^{__DRBurnSession=}', '', {'retval': {'already_cfretained': True}}), 'DREraseSessionSetupDialog': (sel32or64(b'c^{__DREraseSession=}^{DREraseSessionSetupDialogOptions=LL}^{DREraseSessionSetupCallbacks=L^?^?^?}', b'c^{__DREraseSession=}^{DREraseSessionSetupDialogOptions=II}^{DREraseSessionSetupCallbacks=I^?^?^?}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'DRBurnSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'DREraseSessionCreate': (b'^{__DREraseSession=}', '', {'retval': {'already_cfretained': True}}), 'DREraseSessionGetTypeID': (sel32or64(b'L', b'Q'),), 'DRBurnSessionSetupDialog': (sel32or64(b'c^{__DRBurnSession=}^{DRBurnSessionSetupDialogOptions=LL^{__CFString=}}^{DRBurnSessionSetupCallbacks=L^?^?^?}', b'c^{__DRBurnSession=}^{DRBurnSessionSetupDialogOptions=II^{__CFString=}}^{DRBurnSessionSetupCallbacks=I^?^?^?}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'DRBurnSessionGetBurn': (b'^{__DRBurn=}^{__DRBurnSession=}',), 'DREraseSessionGetErase': (b'^{__DRErase=}^{__DREraseSession=}',), 'DREraseSessionBeginProgressDialog': (sel32or64(b'v^{__DREraseSession=}^{DREraseSessionProgressDialogOptions=LL^{__CFString=}}^{DREraseSessionProgressCallbacks=L^?^?^?}', b'v^{__DREraseSession=}^{DREraseSessionProgressDialogOptions=II^{__CFString=}}^{DREraseSessionProgressCallbacks=I^?^?^?}'), '', {'arguments': {1: {'type_modifier': 'n'}, 2: {'type_modifier': 'n'}}}), 'DREraseSessionSetErase': (b'v^{__DREraseSession=}^{__DRErase=}',), 'DRBurnSessionBeginProgressDialog': (sel32or64(b'v^{__DRBurnSession=}@^{DRBurnSessionProgressDialogOptions=LL^{__CFString=}}^{DRBurnSessionProgressCallbacks=L^?^?^?}', b'v^{__DRBurnSession=}@^{DRBurnSessionProgressDialogOptions=II^{__CFString=}}^{DRBurnSessionProgressCallbacks=I^?^?^?}'), '', {'arguments': {2: {'type_modifier': 'n'}, 3: {'type_modifier': 'n'}}})}
cftypes=[('DRBurnSessionRef', b'^{__DRBurnSession=}', None, None), ('DREraseSessionRef', b'^{__DREraseSession=}', None, None)]
r = objc.registerMetaDataForSelector
objc._updatingMetadata(True)
try:
r(b'DRBurnProgressPanel', b'setVerboseProgressStatus:', {'arguments': {2: {'type': b'Z'}}})
r(b'DRBurnProgressPanel', b'verboseProgressStatus', {'retval': {'type': b'Z'}})
r(b'DRBurnSetupPanel', b'setCanSelectAppendableMedia:', {'arguments': {2: {'type': b'Z'}}})
r(b'DRBurnSetupPanel', b'setCanSelectTestBurn:', {'arguments': {2: {'type': b'Z'}}})
r(b'DRSetupPanel', b'mediaStateChanged:', {'retval': {'type': b'Z'}})
r(b'NSObject', b'burnProgressPanel:burnDidFinish:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}})
r(b'NSObject', b'burnProgressPanelDidFinish:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'burnProgressPanelWillBegin:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'eraseProgressPanel:eraseDidFinish:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}})
r(b'NSObject', b'eraseProgressPanelDidFinish:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'eraseProgressPanelWillBegin:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'setupPanel:determineBestDeviceOfA:orB:', {'retval': {'type': b'@'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'@'}}})
r(b'NSObject', b'setupPanel:deviceContainsSuitableMedia:promptString:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}, 4: {'type': b'^@'}}})
r(b'NSObject', b'setupPanel:deviceCouldBeTarget:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}, 3: {'type': b'@'}}})
r(b'NSObject', b'setupPanelDeviceSelectionChanged:', {'retval': {'type': b'v'}, 'arguments': {2: {'type': b'@'}}})
r(b'NSObject', b'setupPanelShouldHandleMediaReservations:', {'retval': {'type': b'Z'}, 'arguments': {2: {'type': b'@'}}})
finally:
objc._updatingMetadata(False)
protocols={'DRBurnProgressPanelDelegateMethods': objc.informal_protocol('DRBurnProgressPanelDelegateMethods', [objc.selector(None, b'burnProgressPanelDidFinish:', b'v@:@', isRequired=False), objc.selector(None, b'burnProgressPanelWillBegin:', b'v@:@', isRequired=False), objc.selector(None, b'burnProgressPanel:burnDidFinish:', b'Z@:@@', isRequired=False)]), 'DRSetupPanelDelegate': objc.informal_protocol('DRSetupPanelDelegate', [objc.selector(None, b'setupPanel:deviceContainsSuitableMedia:promptString:', b'Z@:@@^@', isRequired=False), objc.selector(None, b'setupPanel:deviceCouldBeTarget:', b'Z@:@@', isRequired=False), objc.selector(None, b'setupPanelShouldHandleMediaReservations:', b'Z@:@', isRequired=False), objc.selector(None, b'setupPanelDeviceSelectionChanged:', b'v@:@', isRequired=False), objc.selector(None, b'setupPanel:determineBestDeviceOfA:orB:', b'@@:@@@', isRequired=False)]), 'DREraseProgressPanelDelegateMethods': objc.informal_protocol('DREraseProgressPanelDelegateMethods', [objc.selector(None, b'eraseProgressPanelWillBegin:', b'v@:@', isRequired=False), objc.selector(None, b'eraseProgressPanel:eraseDidFinish:', b'Z@:@@', isRequired=False), objc.selector(None, b'eraseProgressPanelDidFinish:', b'v@:@', isRequired=False)])}
expressions = {}
# END OF FILE
| 163.489796 | 1,973 | 0.739858 |
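# A standalone illustration of the sel32or64 pattern the generated metadata
# above relies on: Objective-C type encodings differ between 32- and 64-bit
# processes for values whose width tracks the pointer size (e.g. 'L' vs 'Q').
import sys

def sel32or64(a, b):
    return b if sys.maxsize > 2 ** 32 else a

print(sel32or64(b'L', b'Q'))  # b'Q' on a 64-bit Python, b'L' on 32-bit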
e1d8991d599b5dc94ae780af73d0eafaceac0003 | 2,433 | py | Python | tools/gcc-4.8.2-Ee500v2-eabispe/powerpc-eabispe/lib/mpc7400/altivec/abi-altivec/le/libstdc++.a-gdb.py | singhalshubh/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | [
"MIT"
] | 154 | 2015-08-14T03:28:16.000Z | 2022-03-19T00:06:38.000Z | tools/gcc-4.8.2-Ee500v2-eabispe/powerpc-eabispe/lib/mpc7400/altivec/abi-altivec/le/libstdc++.a-gdb.py | singhalshubh/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | [
"MIT"
] | 59 | 2015-08-30T23:17:33.000Z | 2019-06-12T09:20:57.000Z | tools/gcc-4.8.2-Ee500v2-eabispe/powerpc-eabispe/lib/mpc7400/altivec/abi-altivec/le/libstdc++.a-gdb.py | singhalshubh/echronos | c996f1d2c8af6c6536205eb319c1bf1d4d84569c | [
"MIT"
] | 44 | 2015-09-19T13:02:07.000Z | 2022-03-19T00:14:11.000Z | # -*- python -*-
# Copyright (C) 2009-2013 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path
pythondir = '/pkg/fs-DTgnu-/gcc-4.8.2-Ee500v2-eabispe/i686-linux/share/gcc-4.8.2/python'
libdir = '/pkg/fs-DTgnu-/gcc-4.8.2-Ee500v2-eabispe/i686-linux/powerpc-eabispe/lib/mpc7400/altivec/abi-altivec/le'
# This file might be loaded when there is no current objfile. This
# can happen if the user loads it manually. In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
# Update module path. We want to find the relative path from libdir
# to pythondir, and then we want to apply that relative path to the
# directory holding the objfile with which this file is associated.
# This preserves relocatability of the gcc tree.
# Do a simple normalization that removes duplicate separators.
pythondir = os.path.normpath (pythondir)
libdir = os.path.normpath (libdir)
prefix = os.path.commonprefix ([libdir, pythondir])
# In some bizarre configuration we might have found a match in the
# middle of a directory name.
if prefix[-1] != '/':
prefix = os.path.dirname (prefix) + '/'
# Strip off the prefix.
pythondir = pythondir[len (prefix):]
libdir = libdir[len (prefix):]
# Compute the ".."s needed to get from libdir to the prefix.
dotdots = ('..' + os.sep) * len (libdir.split (os.sep))
objfile = gdb.current_objfile ().filename
dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)
if not dir_ in sys.path:
sys.path.insert(0, dir_)
# Load the pretty-printers.
from libstdcxx.v6.printers import register_libstdcxx_printers
register_libstdcxx_printers (gdb.current_objfile ())
| 39.885246 | 113 | 0.721332 |
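# A standalone walk-through of the relocation logic above, with made-up paths.
# The idea: strip the common prefix of libdir and pythondir, climb out of the
# (relocated) libdir with "../" segments, then descend into pythondir relative
# to the directory containing the objfile.
import os

pythondir = os.path.normpath('/toolchain/share/gcc-4.8.2/python')
libdir = os.path.normpath('/toolchain/powerpc-eabispe/lib/le')
prefix = os.path.commonprefix([libdir, pythondir])
if prefix[-1] != '/':
    prefix = os.path.dirname(prefix) + '/'
pythondir = pythondir[len(prefix):]
libdir = libdir[len(prefix):]
dotdots = ('..' + os.sep) * len(libdir.split(os.sep))
objfile_dir = '/opt/cross/powerpc-eabispe/lib/le'  # where the library actually lives
print(os.path.normpath(os.path.join(objfile_dir, dotdots, pythondir)))
# -> /opt/cross/share/gcc-4.8.2/python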
03ac98798faabf1f1f24d400b430a29064e1e7e4 | 145,105 | py | Python | nova/db/sqlalchemy/api.py | bopopescu/trusted-nova | b440afb89f6f170c0831f5d6318a08ec41bc8c0a | [
"Apache-2.0"
] | null | null | null | nova/db/sqlalchemy/api.py | bopopescu/trusted-nova | b440afb89f6f170c0831f5d6318a08ec41bc8c0a | [
"Apache-2.0"
] | null | null | null | nova/db/sqlalchemy/api.py | bopopescu/trusted-nova | b440afb89f6f170c0831f5d6318a08ec41bc8c0a | [
"Apache-2.0"
] | 1 | 2020-07-24T10:43:58.000Z | 2020-07-24T10:43:58.000Z | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implementation of SQLAlchemy backend."""
import datetime
import functools
import re
import warnings
from nova import block_device
from nova import db
from nova import exception
from nova import flags
from nova import utils
from nova import log as logging
from nova.compute import aggregate_states
from nova.compute import vm_states
from nova.db.sqlalchemy import models
from nova.db.sqlalchemy.session import get_session
from sqlalchemy import and_
from sqlalchemy import or_
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import joinedload_all
from sqlalchemy.sql import func
from sqlalchemy.sql.expression import asc
from sqlalchemy.sql.expression import desc
from sqlalchemy.sql.expression import literal_column
FLAGS = flags.FLAGS
flags.DECLARE('reserved_host_disk_mb', 'nova.scheduler.host_manager')
flags.DECLARE('reserved_host_memory_mb', 'nova.scheduler.host_manager')
LOG = logging.getLogger(__name__)
def is_admin_context(context):
"""Indicates if the request context is an administrator."""
if not context:
warnings.warn(_('Use of empty request context is deprecated'),
DeprecationWarning)
raise Exception('die')
return context.is_admin
def is_user_context(context):
"""Indicates if the request context is a normal user."""
if not context:
return False
if context.is_admin:
return False
if not context.user_id or not context.project_id:
return False
return True
def authorize_project_context(context, project_id):
"""Ensures a request has permission to access the given project."""
if is_user_context(context):
if not context.project_id:
raise exception.NotAuthorized()
elif context.project_id != project_id:
raise exception.NotAuthorized()
def authorize_user_context(context, user_id):
"""Ensures a request has permission to access the given user."""
if is_user_context(context):
if not context.user_id:
raise exception.NotAuthorized()
elif context.user_id != user_id:
raise exception.NotAuthorized()
def require_admin_context(f):
"""Decorator to require admin request context.
The first argument to the wrapped function must be the context.
"""
def wrapper(*args, **kwargs):
if not is_admin_context(args[0]):
raise exception.AdminRequired()
return f(*args, **kwargs)
return wrapper
def require_context(f):
"""Decorator to require *any* user or admin context.
This does no authorization for user or project access matching, see
:py:func:`authorize_project_context` and
:py:func:`authorize_user_context`.
The first argument to the wrapped function must be the context.
"""
def wrapper(*args, **kwargs):
if not is_admin_context(args[0]) and not is_user_context(args[0]):
raise exception.NotAuthorized()
return f(*args, **kwargs)
return wrapper
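

# NOTE(editor): illustrative sketch only, not part of the original module.
# It shows how the context decorators above are applied; the helper name and
# body are hypothetical.
@require_admin_context
def _example_count_services_admin_only(context):
    """Hypothetical admin-only helper guarded by require_admin_context."""
    # Raises exception.AdminRequired unless context.is_admin is True.
    return get_session().query(models.Service).count()
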
def require_instance_exists(f):
"""Decorator to require the specified instance to exist.
    Requires the wrapped function to take context and instance_id as
    its first two arguments.
"""
def wrapper(context, instance_id, *args, **kwargs):
db.instance_get(context, instance_id)
return f(context, instance_id, *args, **kwargs)
wrapper.__name__ = f.__name__
return wrapper
def require_volume_exists(f):
"""Decorator to require the specified volume to exist.
    Requires the wrapped function to take context and volume_id as
    its first two arguments.
"""
def wrapper(context, volume_id, *args, **kwargs):
db.volume_get(context, volume_id)
return f(context, volume_id, *args, **kwargs)
wrapper.__name__ = f.__name__
return wrapper
def require_aggregate_exists(f):
"""Decorator to require the specified aggregate to exist.
    Requires the wrapped function to take context and aggregate_id as
    its first two arguments.
"""
@functools.wraps(f)
def wrapper(context, aggregate_id, *args, **kwargs):
db.aggregate_get(context, aggregate_id)
return f(context, aggregate_id, *args, **kwargs)
return wrapper
def model_query(context, *args, **kwargs):
"""Query helper that accounts for context's `read_deleted` field.
:param context: context to query under
:param session: if present, the session to use
:param read_deleted: if present, overrides context's read_deleted field.
:param project_only: if present and context is user-type, then restrict
query to match the context's project_id.
"""
session = kwargs.get('session') or get_session()
read_deleted = kwargs.get('read_deleted') or context.read_deleted
project_only = kwargs.get('project_only')
query = session.query(*args)
if read_deleted == 'no':
query = query.filter_by(deleted=False)
elif read_deleted == 'yes':
pass # omit the filter to include deleted and active
elif read_deleted == 'only':
query = query.filter_by(deleted=True)
else:
raise Exception(
_("Unrecognized read_deleted value '%s'") % read_deleted)
if project_only and is_user_context(context):
query = query.filter_by(project_id=context.project_id)
return query
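

# NOTE(editor): illustrative sketch, not part of the original API. It shows
# how model_query() composes with ordinary SQLAlchemy filtering; the helper
# name and the 'host' parameter are hypothetical.
def _example_services_on_host(context, host):
    """Hypothetical query: non-deleted services on a host, newest first."""
    return model_query(context, models.Service, read_deleted="no").\
                   filter_by(host=host).\
                   order_by(desc(models.Service.created_at)).\
                   all()
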
def exact_filter(query, model, filters, legal_keys):
"""Applies exact match filtering to a query.
Returns the updated query. Modifies filters argument to remove
filters consumed.
:param query: query to apply filters to
:param model: model object the query applies to, for IN-style
filtering
:param filters: dictionary of filters; values that are lists,
tuples, sets, or frozensets cause an 'IN' test to
be performed, while exact matching ('==' operator)
is used for other values
:param legal_keys: list of keys to apply exact filtering to
"""
filter_dict = {}
# Walk through all the keys
for key in legal_keys:
# Skip ones we're not filtering on
if key not in filters:
continue
# OK, filtering on this key; what value do we search for?
value = filters.pop(key)
if isinstance(value, (list, tuple, set, frozenset)):
# Looking for values in a list; apply to query directly
column_attr = getattr(model, key)
query = query.filter(column_attr.in_(value))
else:
# OK, simple exact match; save for later
filter_dict[key] = value
# Apply simple exact matches
if filter_dict:
query = query.filter_by(**filter_dict)
return query
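

# NOTE(editor): illustrative sketch, not part of the original API. It
# demonstrates that exact_filter() consumes the keys it handles (here
# 'project_id' and 'vm_state') and leaves the rest (here 'display_name')
# in the filters dict for a later regexp pass; all values are hypothetical.
def _example_exact_filter_usage(context):
    query = model_query(context, models.Instance)
    filters = {'project_id': 'p1',
               'vm_state': ['active', 'error'],
               'display_name': 'web.*'}
    query = exact_filter(query, models.Instance, filters,
                         ['project_id', 'vm_state'])
    # filters is now {'display_name': 'web.*'}
    return query, filters
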
###################
@require_admin_context
def service_destroy(context, service_id):
session = get_session()
with session.begin():
service_ref = service_get(context, service_id, session=session)
service_ref.delete(session=session)
if service_ref.topic == 'compute' and service_ref.compute_node:
for c in service_ref.compute_node:
c.delete(session=session)
@require_admin_context
def service_get(context, service_id, session=None):
result = model_query(context, models.Service, session=session).\
options(joinedload('compute_node')).\
filter_by(id=service_id).\
first()
if not result:
raise exception.ServiceNotFound(service_id=service_id)
return result
@require_admin_context
def service_get_all(context, disabled=None):
query = model_query(context, models.Service)
if disabled is not None:
query = query.filter_by(disabled=disabled)
return query.all()
@require_admin_context
def service_get_all_by_topic(context, topic):
return model_query(context, models.Service, read_deleted="no").\
filter_by(disabled=False).\
filter_by(topic=topic).\
all()
@require_admin_context
def service_get_by_host_and_topic(context, host, topic):
return model_query(context, models.Service, read_deleted="no").\
filter_by(disabled=False).\
filter_by(host=host).\
filter_by(topic=topic).\
first()
@require_admin_context
def service_get_all_by_host(context, host):
return model_query(context, models.Service, read_deleted="no").\
filter_by(host=host).\
all()
@require_admin_context
def service_get_all_compute_by_host(context, host):
result = model_query(context, models.Service, read_deleted="no").\
options(joinedload('compute_node')).\
filter_by(host=host).\
filter_by(topic="compute").\
all()
if not result:
raise exception.ComputeHostNotFound(host=host)
return result
@require_admin_context
def _service_get_all_topic_subquery(context, session, topic, subq, label):
sort_value = getattr(subq.c, label)
return model_query(context, models.Service,
func.coalesce(sort_value, 0),
session=session, read_deleted="no").\
filter_by(topic=topic).\
filter_by(disabled=False).\
outerjoin((subq, models.Service.host == subq.c.host)).\
order_by(sort_value).\
all()
@require_admin_context
def service_get_all_compute_sorted(context):
session = get_session()
with session.begin():
# NOTE(vish): The intended query is below
# SELECT services.*, COALESCE(inst_cores.instance_cores,
# 0)
# FROM services LEFT OUTER JOIN
# (SELECT host, SUM(instances.vcpus) AS instance_cores
# FROM instances GROUP BY host) AS inst_cores
# ON services.host = inst_cores.host
topic = 'compute'
label = 'instance_cores'
subq = model_query(context, models.Instance.host,
func.sum(models.Instance.vcpus).label(label),
session=session, read_deleted="no").\
group_by(models.Instance.host).\
subquery()
return _service_get_all_topic_subquery(context,
session,
topic,
subq,
label)
@require_admin_context
def service_get_all_volume_sorted(context):
session = get_session()
with session.begin():
topic = 'volume'
label = 'volume_gigabytes'
subq = model_query(context, models.Volume.host,
func.sum(models.Volume.size).label(label),
session=session, read_deleted="no").\
group_by(models.Volume.host).\
subquery()
return _service_get_all_topic_subquery(context,
session,
topic,
subq,
label)
@require_admin_context
def service_get_by_args(context, host, binary):
result = model_query(context, models.Service).\
filter_by(host=host).\
filter_by(binary=binary).\
first()
if not result:
raise exception.HostBinaryNotFound(host=host, binary=binary)
return result
@require_admin_context
def service_create(context, values):
service_ref = models.Service()
service_ref.update(values)
if not FLAGS.enable_new_services:
service_ref.disabled = True
service_ref.save()
return service_ref
@require_admin_context
def service_update(context, service_id, values):
session = get_session()
with session.begin():
service_ref = service_get(context, service_id, session=session)
service_ref.update(values)
service_ref.save(session=session)
###################
@require_admin_context
def compute_node_get(context, compute_id, session=None):
result = model_query(context, models.ComputeNode, session=session).\
filter_by(id=compute_id).\
first()
if not result:
raise exception.ComputeHostNotFound(host=compute_id)
return result
@require_admin_context
def compute_node_get_all(context, session=None):
return model_query(context, models.ComputeNode, session=session).\
options(joinedload('service')).\
all()
def _get_host_utilization(context, host, ram_mb, disk_gb):
"""Compute the current utilization of a given host."""
instances = instance_get_all_by_host(context, host)
vms = len(instances)
free_ram_mb = ram_mb - FLAGS.reserved_host_memory_mb
    free_disk_gb = disk_gb - (FLAGS.reserved_host_disk_mb / 1024)  # MB -> GB
work = 0
for instance in instances:
free_ram_mb -= instance.memory_mb
free_disk_gb -= instance.root_gb
free_disk_gb -= instance.ephemeral_gb
if instance.vm_state in [vm_states.BUILDING, vm_states.REBUILDING,
vm_states.MIGRATING, vm_states.RESIZING]:
work += 1
return dict(free_ram_mb=free_ram_mb,
free_disk_gb=free_disk_gb,
current_workload=work,
running_vms=vms)
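

# NOTE(editor): hypothetical worked example of the arithmetic above, assuming
# FLAGS.reserved_host_memory_mb=512 and FLAGS.reserved_host_disk_mb=0. A host
# with ram_mb=16384 and disk_gb=500 running one ACTIVE instance with
# memory_mb=2048, root_gb=10 and ephemeral_gb=10 yields:
#     free_ram_mb  = 16384 - 512 - 2048 = 13824
#     free_disk_gb = 500 - 0 - 10 - 10  = 480
#     current_workload = 0, running_vms = 1
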
def _adjust_compute_node_values_for_utilization(context, values, session):
service_ref = service_get(context, values['service_id'], session=session)
host = service_ref['host']
ram_mb = values['memory_mb']
disk_gb = values['local_gb']
values.update(_get_host_utilization(context, host, ram_mb, disk_gb))
@require_admin_context
def compute_node_create(context, values, session=None):
"""Creates a new ComputeNode and populates the capacity fields
with the most recent data."""
if not session:
session = get_session()
_adjust_compute_node_values_for_utilization(context, values, session)
with session.begin(subtransactions=True):
compute_node_ref = models.ComputeNode()
session.add(compute_node_ref)
compute_node_ref.update(values)
return compute_node_ref
@require_admin_context
def compute_node_update(context, compute_id, values, auto_adjust):
"""Creates a new ComputeNode and populates the capacity fields
with the most recent data."""
session = get_session()
if auto_adjust:
_adjust_compute_node_values_for_utilization(context, values, session)
with session.begin(subtransactions=True):
compute_ref = compute_node_get(context, compute_id, session=session)
compute_ref.update(values)
compute_ref.save(session=session)
def compute_node_get_by_host(context, host):
"""Get all capacity entries for the given host."""
session = get_session()
with session.begin():
node = session.query(models.ComputeNode).\
options(joinedload('service')).\
filter(models.Service.host == host).\
filter_by(deleted=False)
return node.first()
def compute_node_utilization_update(context, host, free_ram_mb_delta=0,
free_disk_gb_delta=0, work_delta=0, vm_delta=0):
"""Update a specific ComputeNode entry by a series of deltas.
Do this as a single atomic action and lock the row for the
duration of the operation. Requires that ComputeNode record exist."""
session = get_session()
compute_node = None
with session.begin(subtransactions=True):
compute_node = session.query(models.ComputeNode).\
options(joinedload('service')).\
filter(models.Service.host == host).\
filter_by(deleted=False).\
with_lockmode('update').\
first()
if compute_node is None:
raise exception.NotFound(_("No ComputeNode for %(host)s") %
locals())
        # Assigning expressions built from the table columns (rather than
        # plain Python values) makes SQLAlchemy emit an atomic
        # "UPDATE x = x + <delta>" statement.
table = models.ComputeNode.__table__
if free_ram_mb_delta != 0:
compute_node.free_ram_mb = table.c.free_ram_mb + free_ram_mb_delta
if free_disk_gb_delta != 0:
compute_node.free_disk_gb = (table.c.free_disk_gb +
free_disk_gb_delta)
if work_delta != 0:
compute_node.current_workload = (table.c.current_workload +
work_delta)
if vm_delta != 0:
compute_node.running_vms = table.c.running_vms + vm_delta
return compute_node
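

# NOTE(editor): illustrative sketch, not part of the original API. It shows
# the intended delta-style usage of compute_node_utilization_update() when an
# instance starts building on a host; the helper name is hypothetical.
def _example_claim_capacity_on_spawn(context, host, instance):
    """Hypothetical caller reserving capacity for a spawning instance."""
    return compute_node_utilization_update(
            context, host,
            free_ram_mb_delta=-instance['memory_mb'],
            free_disk_gb_delta=-(instance['root_gb'] +
                                 instance['ephemeral_gb']),
            work_delta=1,
            vm_delta=1)
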
def compute_node_utilization_set(context, host, free_ram_mb=None,
free_disk_gb=None, work=None, vms=None):
"""Like compute_node_utilization_update() modify a specific host
entry. But this function will set the metrics absolutely
(vs. a delta update).
"""
session = get_session()
compute_node = None
with session.begin(subtransactions=True):
compute_node = session.query(models.ComputeNode).\
options(joinedload('service')).\
filter(models.Service.host == host).\
filter_by(deleted=False).\
with_lockmode('update').\
first()
if compute_node is None:
raise exception.NotFound(_("No ComputeNode for %(host)s") %
locals())
        if free_ram_mb is not None:
            compute_node.free_ram_mb = free_ram_mb
        if free_disk_gb is not None:
            compute_node.free_disk_gb = free_disk_gb
        if work is not None:
            compute_node.current_workload = work
        if vms is not None:
            compute_node.running_vms = vms
return compute_node
###################
@require_admin_context
def certificate_get(context, certificate_id, session=None):
result = model_query(context, models.Certificate, session=session).\
filter_by(id=certificate_id).\
first()
if not result:
raise exception.CertificateNotFound(certificate_id=certificate_id)
return result
@require_admin_context
def certificate_create(context, values):
certificate_ref = models.Certificate()
for (key, value) in values.iteritems():
certificate_ref[key] = value
certificate_ref.save()
return certificate_ref
@require_admin_context
def certificate_get_all_by_project(context, project_id):
return model_query(context, models.Certificate, read_deleted="no").\
filter_by(project_id=project_id).\
all()
@require_admin_context
def certificate_get_all_by_user(context, user_id):
return model_query(context, models.Certificate, read_deleted="no").\
filter_by(user_id=user_id).\
all()
@require_admin_context
def certificate_get_all_by_user_and_project(context, user_id, project_id):
return model_query(context, models.Certificate, read_deleted="no").\
filter_by(user_id=user_id).\
filter_by(project_id=project_id).\
all()
###################
@require_context
def floating_ip_get(context, id):
result = model_query(context, models.FloatingIp, project_only=True).\
filter_by(id=id).\
first()
if not result:
raise exception.FloatingIpNotFound(id=id)
return result
@require_context
def floating_ip_get_pools(context):
session = get_session()
pools = []
for result in session.query(models.FloatingIp.pool).distinct():
pools.append({'name': result[0]})
return pools
@require_context
def floating_ip_allocate_address(context, project_id, pool):
authorize_project_context(context, project_id)
session = get_session()
with session.begin():
floating_ip_ref = model_query(context, models.FloatingIp,
session=session, read_deleted="no").\
filter_by(fixed_ip_id=None).\
filter_by(project_id=None).\
filter_by(pool=pool).\
with_lockmode('update').\
first()
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if not floating_ip_ref:
raise exception.NoMoreFloatingIps()
floating_ip_ref['project_id'] = project_id
session.add(floating_ip_ref)
return floating_ip_ref['address']
@require_context
def floating_ip_create(context, values):
floating_ip_ref = models.FloatingIp()
floating_ip_ref.update(values)
floating_ip_ref.save()
return floating_ip_ref['address']
@require_context
def floating_ip_count_by_project(context, project_id):
authorize_project_context(context, project_id)
# TODO(tr3buchet): why leave auto_assigned floating IPs out?
return model_query(context, models.FloatingIp, read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(auto_assigned=False).\
count()
@require_context
def floating_ip_fixed_ip_associate(context, floating_address,
fixed_address, host):
session = get_session()
with session.begin():
floating_ip_ref = floating_ip_get_by_address(context,
floating_address,
session=session)
fixed_ip_ref = fixed_ip_get_by_address(context,
fixed_address,
session=session)
floating_ip_ref.fixed_ip_id = fixed_ip_ref["id"]
floating_ip_ref.host = host
floating_ip_ref.save(session=session)
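

# NOTE(editor): illustrative sketch, not part of the original API. It chains
# the two calls above: grab a free floating IP from a pool, then bind it to a
# fixed address; the pool and host names are hypothetical.
def _example_allocate_and_associate(context, project_id, fixed_address):
    floating_address = floating_ip_allocate_address(context, project_id,
                                                    'nova')
    floating_ip_fixed_ip_associate(context, floating_address, fixed_address,
                                   'network-host-1')
    return floating_address
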
@require_context
def floating_ip_deallocate(context, address):
session = get_session()
with session.begin():
floating_ip_ref = floating_ip_get_by_address(context,
address,
session=session)
floating_ip_ref['project_id'] = None
floating_ip_ref['host'] = None
floating_ip_ref['auto_assigned'] = False
floating_ip_ref.save(session=session)
@require_context
def floating_ip_destroy(context, address):
session = get_session()
with session.begin():
floating_ip_ref = floating_ip_get_by_address(context,
address,
session=session)
floating_ip_ref.delete(session=session)
@require_context
def floating_ip_disassociate(context, address):
session = get_session()
with session.begin():
floating_ip_ref = floating_ip_get_by_address(context,
address,
session=session)
fixed_ip_ref = fixed_ip_get(context,
floating_ip_ref['fixed_ip_id'])
if fixed_ip_ref:
fixed_ip_address = fixed_ip_ref['address']
else:
fixed_ip_address = None
floating_ip_ref.fixed_ip_id = None
floating_ip_ref.host = None
floating_ip_ref.save(session=session)
return fixed_ip_address
@require_context
def floating_ip_set_auto_assigned(context, address):
session = get_session()
with session.begin():
floating_ip_ref = floating_ip_get_by_address(context,
address,
session=session)
floating_ip_ref.auto_assigned = True
floating_ip_ref.save(session=session)
def _floating_ip_get_all(context):
return model_query(context, models.FloatingIp, read_deleted="no")
@require_admin_context
def floating_ip_get_all(context):
floating_ip_refs = _floating_ip_get_all(context).all()
if not floating_ip_refs:
raise exception.NoFloatingIpsDefined()
return floating_ip_refs
@require_admin_context
def floating_ip_get_all_by_host(context, host):
floating_ip_refs = _floating_ip_get_all(context).\
filter_by(host=host).\
all()
if not floating_ip_refs:
raise exception.FloatingIpNotFoundForHost(host=host)
return floating_ip_refs
@require_context
def floating_ip_get_all_by_project(context, project_id):
authorize_project_context(context, project_id)
# TODO(tr3buchet): why do we not want auto_assigned floating IPs here?
return _floating_ip_get_all(context).\
filter_by(project_id=project_id).\
filter_by(auto_assigned=False).\
all()
@require_context
def floating_ip_get_by_address(context, address, session=None):
result = model_query(context, models.FloatingIp, session=session).\
filter_by(address=address).\
first()
if not result:
raise exception.FloatingIpNotFoundForAddress(address=address)
# If the floating IP has a project ID set, check to make sure
# the non-admin user has access.
if result.project_id and is_user_context(context):
authorize_project_context(context, result.project_id)
return result
@require_context
def floating_ip_get_by_fixed_address(context, fixed_address, session=None):
if not session:
session = get_session()
fixed_ip = fixed_ip_get_by_address(context, fixed_address, session)
fixed_ip_id = fixed_ip['id']
return model_query(context, models.FloatingIp, session=session).\
filter_by(fixed_ip_id=fixed_ip_id).\
all()
# NOTE(tr3buchet) please don't invent an exception here, empty list is fine
@require_context
def floating_ip_get_by_fixed_ip_id(context, fixed_ip_id, session=None):
if not session:
session = get_session()
return model_query(context, models.FloatingIp, session=session).\
filter_by(fixed_ip_id=fixed_ip_id).\
all()
@require_context
def floating_ip_update(context, address, values):
session = get_session()
with session.begin():
floating_ip_ref = floating_ip_get_by_address(context, address, session)
for (key, value) in values.iteritems():
floating_ip_ref[key] = value
floating_ip_ref.save(session=session)
@require_context
def _dnsdomain_get(context, session, fqdomain):
return model_query(context, models.DNSDomain,
session=session, read_deleted="no").\
filter_by(domain=fqdomain).\
with_lockmode('update').\
first()
@require_context
def dnsdomain_get(context, fqdomain):
session = get_session()
with session.begin():
return _dnsdomain_get(context, session, fqdomain)
@require_admin_context
def _dnsdomain_get_or_create(context, session, fqdomain):
domain_ref = _dnsdomain_get(context, session, fqdomain)
if not domain_ref:
dns_ref = models.DNSDomain()
dns_ref.update({'domain': fqdomain,
'availability_zone': None,
'project_id': None})
return dns_ref
return domain_ref
@require_admin_context
def dnsdomain_register_for_zone(context, fqdomain, zone):
session = get_session()
with session.begin():
domain_ref = _dnsdomain_get_or_create(context, session, fqdomain)
domain_ref.scope = 'private'
domain_ref.availability_zone = zone
domain_ref.save(session=session)
@require_admin_context
def dnsdomain_register_for_project(context, fqdomain, project):
session = get_session()
with session.begin():
domain_ref = _dnsdomain_get_or_create(context, session, fqdomain)
domain_ref.scope = 'public'
domain_ref.project_id = project
domain_ref.save(session=session)
@require_admin_context
def dnsdomain_unregister(context, fqdomain):
session = get_session()
with session.begin():
session.query(models.DNSDomain).\
filter_by(domain=fqdomain).\
delete()
@require_context
def dnsdomain_list(context):
session = get_session()
records = model_query(context, models.DNSDomain,
session=session, read_deleted="no").\
with_lockmode('update').all()
domains = []
for record in records:
domains.append(record.domain)
return domains
###################
@require_admin_context
def fixed_ip_associate(context, address, instance_id, network_id=None,
reserved=False):
"""Keyword arguments:
    reserved -- boolean; only a fixed IP whose ``reserved`` column matches
                this value will be associated
"""
session = get_session()
with session.begin():
network_or_none = or_(models.FixedIp.network_id == network_id,
models.FixedIp.network_id == None)
fixed_ip_ref = model_query(context, models.FixedIp, session=session,
read_deleted="no").\
filter(network_or_none).\
filter_by(reserved=reserved).\
filter_by(address=address).\
with_lockmode('update').\
first()
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if fixed_ip_ref is None:
raise exception.FixedIpNotFoundForNetwork(address=address,
network_id=network_id)
if fixed_ip_ref.instance_id:
raise exception.FixedIpAlreadyInUse(address=address)
if not fixed_ip_ref.network_id:
fixed_ip_ref.network_id = network_id
fixed_ip_ref.instance_id = instance_id
session.add(fixed_ip_ref)
return fixed_ip_ref['address']
@require_admin_context
def fixed_ip_associate_pool(context, network_id, instance_id=None, host=None):
session = get_session()
with session.begin():
network_or_none = or_(models.FixedIp.network_id == network_id,
models.FixedIp.network_id == None)
fixed_ip_ref = model_query(context, models.FixedIp, session=session,
read_deleted="no").\
filter(network_or_none).\
filter_by(reserved=False).\
filter_by(instance_id=None).\
filter_by(host=None).\
with_lockmode('update').\
first()
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if not fixed_ip_ref:
raise exception.NoMoreFixedIps()
if fixed_ip_ref['network_id'] is None:
            fixed_ip_ref['network_id'] = network_id
if instance_id:
fixed_ip_ref['instance_id'] = instance_id
if host:
fixed_ip_ref['host'] = host
session.add(fixed_ip_ref)
return fixed_ip_ref['address']
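

# NOTE(editor): illustrative sketch, not part of the original API. It shows
# leasing an address from a network's free pool for an instance and handling
# pool exhaustion; the helper name is hypothetical.
def _example_lease_fixed_ip(context, network_id, instance_id):
    try:
        return fixed_ip_associate_pool(context, network_id,
                                       instance_id=instance_id)
    except exception.NoMoreFixedIps:
        return None
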
@require_context
def fixed_ip_create(context, values):
fixed_ip_ref = models.FixedIp()
fixed_ip_ref.update(values)
fixed_ip_ref.save()
return fixed_ip_ref['address']
@require_context
def fixed_ip_bulk_create(context, ips):
session = get_session()
with session.begin():
for ip in ips:
model = models.FixedIp()
model.update(ip)
session.add(model)
@require_context
def fixed_ip_disassociate(context, address):
session = get_session()
with session.begin():
fixed_ip_ref = fixed_ip_get_by_address(context,
address,
session=session)
fixed_ip_ref['instance_id'] = None
fixed_ip_ref.save(session=session)
@require_admin_context
def fixed_ip_disassociate_all_by_timeout(context, host, time):
session = get_session()
# NOTE(vish): only update fixed ips that "belong" to this
# host; i.e. the network host or the instance
# host matches. Two queries necessary because
# join with update doesn't work.
host_filter = or_(and_(models.Instance.host == host,
models.Network.multi_host == True),
models.Network.host == host)
result = session.query(models.FixedIp.id).\
filter(models.FixedIp.deleted == False).\
filter(models.FixedIp.allocated == False).\
filter(models.FixedIp.updated_at < time).\
join((models.Network,
models.Network.id == models.FixedIp.network_id)).\
join((models.Instance,
models.Instance.id == models.FixedIp.instance_id)).\
filter(host_filter).\
all()
fixed_ip_ids = [fip[0] for fip in result]
if not fixed_ip_ids:
return 0
result = model_query(context, models.FixedIp, session=session).\
filter(models.FixedIp.id.in_(fixed_ip_ids)).\
update({'instance_id': None,
'leased': False,
'updated_at': utils.utcnow()},
synchronize_session='fetch')
return result
@require_context
def fixed_ip_get(context, id, session=None):
result = model_query(context, models.FixedIp, session=session).\
filter_by(id=id).\
first()
if not result:
raise exception.FixedIpNotFound(id=id)
# FIXME(sirp): shouldn't we just use project_only here to restrict the
# results?
if is_user_context(context) and result['instance_id'] is not None:
instance = instance_get(context, result['instance_id'], session)
authorize_project_context(context, instance.project_id)
return result
@require_admin_context
def fixed_ip_get_all(context, session=None):
result = model_query(context, models.FixedIp, session=session,
read_deleted="yes").\
all()
if not result:
raise exception.NoFixedIpsDefined()
return result
@require_context
def fixed_ip_get_by_address(context, address, session=None):
result = model_query(context, models.FixedIp, session=session,
read_deleted="yes").\
filter_by(address=address).\
first()
if not result:
raise exception.FixedIpNotFoundForAddress(address=address)
# NOTE(sirp): shouldn't we just use project_only here to restrict the
# results?
if is_user_context(context) and result['instance_id'] is not None:
instance = instance_get(context, result['instance_id'], session)
authorize_project_context(context, instance.project_id)
return result
@require_context
def fixed_ip_get_by_instance(context, instance_id):
result = model_query(context, models.FixedIp, read_deleted="no").\
filter_by(instance_id=instance_id).\
all()
if not result:
raise exception.FixedIpNotFoundForInstance(instance_id=instance_id)
return result
@require_context
def fixed_ip_get_by_network_host(context, network_id, host):
result = model_query(context, models.FixedIp, read_deleted="no").\
filter_by(network_id=network_id).\
filter_by(host=host).\
first()
if not result:
raise exception.FixedIpNotFoundForNetworkHost(network_id=network_id,
host=host)
return result
@require_context
def fixed_ips_by_virtual_interface(context, vif_id):
result = model_query(context, models.FixedIp, read_deleted="no").\
filter_by(virtual_interface_id=vif_id).\
all()
return result
@require_admin_context
def fixed_ip_get_network(context, address):
fixed_ip_ref = fixed_ip_get_by_address(context, address)
return fixed_ip_ref.network
@require_context
def fixed_ip_update(context, address, values):
session = get_session()
with session.begin():
fixed_ip_ref = fixed_ip_get_by_address(context,
address,
session=session)
fixed_ip_ref.update(values)
fixed_ip_ref.save(session=session)
###################
@require_context
def virtual_interface_create(context, values):
"""Create a new virtual interface record in the database.
:param values: = dict containing column values
"""
try:
vif_ref = models.VirtualInterface()
vif_ref.update(values)
vif_ref.save()
except IntegrityError:
raise exception.VirtualInterfaceCreateException()
return vif_ref
@require_context
def _virtual_interface_query(context, session=None):
return model_query(context, models.VirtualInterface, session=session,
read_deleted="yes")
@require_context
def virtual_interface_get(context, vif_id, session=None):
"""Gets a virtual interface from the table.
:param vif_id: = id of the virtual interface
"""
vif_ref = _virtual_interface_query(context, session=session).\
filter_by(id=vif_id).\
first()
return vif_ref
@require_context
def virtual_interface_get_by_address(context, address):
"""Gets a virtual interface from the table.
:param address: = the address of the interface you're looking to get
"""
vif_ref = _virtual_interface_query(context).\
filter_by(address=address).\
first()
return vif_ref
@require_context
def virtual_interface_get_by_uuid(context, vif_uuid):
"""Gets a virtual interface from the table.
:param vif_uuid: the uuid of the interface you're looking to get
"""
vif_ref = _virtual_interface_query(context).\
filter_by(uuid=vif_uuid).\
first()
return vif_ref
@require_context
@require_instance_exists
def virtual_interface_get_by_instance(context, instance_id):
"""Gets all virtual interfaces for instance.
:param instance_id: = id of the instance to retrieve vifs for
"""
vif_refs = _virtual_interface_query(context).\
filter_by(instance_id=instance_id).\
all()
return vif_refs
@require_context
def virtual_interface_get_by_instance_and_network(context, instance_id,
network_id):
"""Gets virtual interface for instance that's associated with network."""
vif_ref = _virtual_interface_query(context).\
filter_by(instance_id=instance_id).\
filter_by(network_id=network_id).\
first()
return vif_ref
@require_context
def virtual_interface_delete(context, vif_id):
"""Delete virtual interface record from the database.
:param vif_id: = id of vif to delete
"""
session = get_session()
vif_ref = virtual_interface_get(context, vif_id, session)
with session.begin():
session.delete(vif_ref)
@require_context
def virtual_interface_delete_by_instance(context, instance_id):
"""Delete virtual interface records that are associated
with the instance given by instance_id.
:param instance_id: = id of instance
"""
vif_refs = virtual_interface_get_by_instance(context, instance_id)
for vif_ref in vif_refs:
virtual_interface_delete(context, vif_ref['id'])
@require_context
def virtual_interface_get_all(context):
"""Get all vifs"""
vif_refs = _virtual_interface_query(context).all()
return vif_refs
###################
def _metadata_refs(metadata_dict, meta_class):
metadata_refs = []
if metadata_dict:
for k, v in metadata_dict.iteritems():
metadata_ref = meta_class()
metadata_ref['key'] = k
metadata_ref['value'] = v
metadata_refs.append(metadata_ref)
return metadata_refs
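

# NOTE(editor): illustrative sketch, not part of the original API. It shows
# the dict-to-rows conversion performed by _metadata_refs(); the keys and
# values are hypothetical.
def _example_metadata_refs():
    refs = _metadata_refs({'role': 'web', 'tier': '1'},
                          models.InstanceMetadata)
    # refs is a list of two InstanceMetadata rows, e.g. one with
    # ref['key'] == 'role' and ref['value'] == 'web'
    return refs
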
@require_context
def instance_create(context, values):
"""Create a new Instance record in the database.
context - request context object
values - dict containing column values.
"""
values = values.copy()
values['metadata'] = _metadata_refs(values.get('metadata'),
models.InstanceMetadata)
instance_ref = models.Instance()
if not values.get('uuid'):
values['uuid'] = str(utils.gen_uuid())
instance_ref.update(values)
session = get_session()
with session.begin():
instance_ref.save(session=session)
    # and create the info_cache table entry for the instance
instance_info_cache_create(context, {'instance_id': instance_ref['uuid']})
return instance_ref
@require_admin_context
def instance_data_get_for_project(context, project_id):
result = model_query(context,
func.count(models.Instance.id),
func.sum(models.Instance.vcpus),
func.sum(models.Instance.memory_mb),
read_deleted="no").\
filter_by(project_id=project_id).\
first()
# NOTE(vish): convert None to 0
return (result[0] or 0, result[1] or 0, result[2] or 0)
@require_context
def instance_destroy(context, instance_id):
session = get_session()
with session.begin():
if utils.is_uuid_like(instance_id):
instance_ref = instance_get_by_uuid(context, instance_id,
session=session)
instance_id = instance_ref['id']
else:
instance_ref = instance_get(context, instance_id,
session=session)
session.query(models.Instance).\
filter_by(id=instance_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.SecurityGroupInstanceAssociation).\
filter_by(instance_id=instance_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.InstanceMetadata).\
filter_by(instance_id=instance_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.BlockDeviceMapping).\
filter_by(instance_id=instance_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
instance_info_cache_delete(context, instance_ref['uuid'],
session=session)
return instance_ref
@require_context
def instance_get_by_uuid(context, uuid, session=None):
result = _build_instance_get(context, session=session).\
filter_by(uuid=uuid).\
first()
if not result:
raise exception.InstanceNotFound(instance_id=uuid)
return result
@require_context
def instance_get(context, instance_id, session=None):
result = _build_instance_get(context, session=session).\
filter_by(id=instance_id).\
first()
if not result:
raise exception.InstanceNotFound(instance_id=instance_id)
return result
@require_context
def _build_instance_get(context, session=None):
return model_query(context, models.Instance, session=session,
project_only=True).\
options(joinedload_all('security_groups.rules')).\
options(joinedload('info_cache')).\
options(joinedload('volumes')).\
options(joinedload('metadata')).\
options(joinedload('instance_type'))
@require_admin_context
def instance_get_all(context):
return model_query(context, models.Instance).\
options(joinedload('info_cache')).\
options(joinedload('security_groups')).\
options(joinedload('metadata')).\
options(joinedload('instance_type')).\
all()
@require_context
def instance_get_all_by_filters(context, filters, sort_key, sort_dir):
"""Return instances that match all filters. Deleted instances
will be returned by default, unless there's a filter that says
otherwise"""
def _regexp_filter_by_metadata(instance, meta):
inst_metadata = [{node['key']: node['value']}
for node in instance['metadata']]
if isinstance(meta, list):
for node in meta:
if node not in inst_metadata:
return False
elif isinstance(meta, dict):
for k, v in meta.iteritems():
if {k: v} not in inst_metadata:
return False
return True
def _regexp_filter_by_column(instance, filter_name, filter_re):
try:
v = getattr(instance, filter_name)
except AttributeError:
return True
if v and filter_re.match(str(v)):
return True
return False
sort_fn = {'desc': desc, 'asc': asc}
session = get_session()
query_prefix = session.query(models.Instance).\
options(joinedload('info_cache')).\
options(joinedload('security_groups')).\
options(joinedload('metadata')).\
options(joinedload('instance_type')).\
order_by(sort_fn[sort_dir](getattr(models.Instance, sort_key)))
# Make a copy of the filters dictionary to use going forward, as we'll
# be modifying it and we shouldn't affect the caller's use of it.
filters = filters.copy()
if 'changes-since' in filters:
changes_since = utils.normalize_time(filters['changes-since'])
query_prefix = query_prefix.\
filter(models.Instance.updated_at > changes_since)
if 'deleted' in filters:
# Instances can be soft or hard deleted and the query needs to
# include or exclude both
if filters.pop('deleted'):
deleted = or_(models.Instance.deleted == True,
models.Instance.vm_state == vm_states.SOFT_DELETE)
query_prefix = query_prefix.filter(deleted)
else:
query_prefix = query_prefix.\
filter_by(deleted=False).\
filter(models.Instance.vm_state != vm_states.SOFT_DELETE)
if not context.is_admin:
        # If we're not an admin context, add the appropriate ownership filter.
if context.project_id:
filters['project_id'] = context.project_id
else:
filters['user_id'] = context.user_id
# Filters for exact matches that we can do along with the SQL query...
# For other filters that don't match this, we will do regexp matching
exact_match_filter_names = ['project_id', 'user_id', 'image_ref',
'vm_state', 'instance_type_id', 'uuid']
# Filter the query
query_prefix = exact_filter(query_prefix, models.Instance,
filters, exact_match_filter_names)
instances = query_prefix.all()
if not instances:
return []
    # Now filter on everything else with regexp matching.
    # For filters not in the list above, we attempt to use the filter_name
    # as a column name on Instance.
regexp_filter_funcs = {}
for filter_name in filters.iterkeys():
filter_func = regexp_filter_funcs.get(filter_name, None)
filter_re = re.compile(str(filters[filter_name]))
if filter_func:
filter_l = lambda instance: filter_func(instance, filter_re)
elif filter_name == 'metadata':
filter_l = lambda instance: _regexp_filter_by_metadata(instance,
filters[filter_name])
else:
filter_l = lambda instance: _regexp_filter_by_column(instance,
filter_name, filter_re)
instances = filter(filter_l, instances)
if not instances:
break
return instances
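

# NOTE(editor): illustrative sketch, not part of the original API. It
# exercises instance_get_all_by_filters(): 'project_id' and 'vm_state' are
# pushed into the SQL query, while 'display_name' falls through to the
# regexp pass; the filter values are hypothetical.
def _example_list_active_web_instances(context):
    filters = {'project_id': context.project_id,
               'vm_state': vm_states.ACTIVE,
               'display_name': '^web-[0-9]+$'}
    return instance_get_all_by_filters(context, filters,
                                       'created_at', 'desc')
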
@require_context
def instance_get_active_by_window(context, begin, end=None, project_id=None):
"""Return instances that were active during window."""
session = get_session()
query = session.query(models.Instance)
query = query.filter(or_(models.Instance.terminated_at == None,
models.Instance.terminated_at > begin))
if end:
query = query.filter(models.Instance.launched_at < end)
if project_id:
query = query.filter_by(project_id=project_id)
return query.all()
@require_admin_context
def instance_get_active_by_window_joined(context, begin, end=None,
project_id=None):
"""Return instances and joins that were active during window."""
session = get_session()
query = session.query(models.Instance)
query = query.options(joinedload('info_cache')).\
options(joinedload('security_groups')).\
options(joinedload('metadata')).\
options(joinedload('instance_type')).\
filter(or_(models.Instance.terminated_at == None,
models.Instance.terminated_at > begin))
if end:
query = query.filter(models.Instance.launched_at < end)
if project_id:
query = query.filter_by(project_id=project_id)
return query.all()
@require_admin_context
def _instance_get_all_query(context, project_only=False):
return model_query(context, models.Instance, project_only=project_only).\
options(joinedload('info_cache')).\
options(joinedload('security_groups')).\
options(joinedload('metadata')).\
options(joinedload('instance_type'))
@require_admin_context
def instance_get_all_by_host(context, host):
return _instance_get_all_query(context).filter_by(host=host).all()
@require_context
def instance_get_all_by_project(context, project_id):
authorize_project_context(context, project_id)
return _instance_get_all_query(context).\
filter_by(project_id=project_id).\
all()
@require_context
def instance_get_all_by_reservation(context, reservation_id):
return _instance_get_all_query(context, project_only=True).\
filter_by(reservation_id=reservation_id).\
all()
# NOTE(jkoelker) This is only being left here for compat with floating
#                ips. Currently the network_api doesn't return floaters
#                in network_info. Once it starts returning the model,
#                this function and its call in compute/manager.py on
#                line 1829 can go away.
@require_context
def instance_get_floating_address(context, instance_id):
fixed_ips = fixed_ip_get_by_instance(context, instance_id)
if not fixed_ips:
return None
# NOTE(tr3buchet): this only gets the first fixed_ip
# won't find floating ips associated with other fixed_ips
floating_ips = floating_ip_get_by_fixed_address(context,
fixed_ips[0]['address'])
if not floating_ips:
return None
# NOTE(vish): this just returns the first floating ip
return floating_ips[0]['address']
@require_admin_context
def instance_get_all_hung_in_rebooting(context, reboot_window, session=None):
reboot_window = datetime.datetime.utcnow() - datetime.timedelta(
seconds=reboot_window)
if not session:
session = get_session()
results = session.query(models.Instance).\
filter(models.Instance.updated_at <= reboot_window).\
filter_by(task_state="rebooting").all()
return results
@require_context
def instance_test_and_set(context, instance_id, attr, ok_states,
new_state, session=None):
"""Atomically check if an instance is in a valid state, and if it is, set
the instance into a new state.
"""
if not session:
session = get_session()
with session.begin():
query = model_query(context, models.Instance, session=session,
project_only=True)
if utils.is_uuid_like(instance_id):
query = query.filter_by(uuid=instance_id)
else:
query = query.filter_by(id=instance_id)
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
instance = query.with_lockmode('update').first()
state = instance[attr]
if state not in ok_states:
raise exception.InstanceInvalidState(
attr=attr,
instance_uuid=instance['uuid'],
state=state,
method='instance_test_and_set')
instance[attr] = new_state
instance.save(session=session)
@require_context
def instance_update(context, instance_id, values):
session = get_session()
if utils.is_uuid_like(instance_id):
instance_ref = instance_get_by_uuid(context, instance_id,
session=session)
else:
instance_ref = instance_get(context, instance_id, session=session)
metadata = values.get('metadata')
if metadata is not None:
instance_metadata_update(context,
instance_ref['id'],
values.pop('metadata'),
delete=True)
with session.begin():
instance_ref.update(values)
instance_ref.save(session=session)
return instance_ref
def instance_add_security_group(context, instance_uuid, security_group_id):
"""Associate the given security group with the given instance"""
session = get_session()
with session.begin():
instance_ref = instance_get_by_uuid(context, instance_uuid,
session=session)
security_group_ref = security_group_get(context,
security_group_id,
session=session)
instance_ref.security_groups += [security_group_ref]
instance_ref.save(session=session)
@require_context
def instance_remove_security_group(context, instance_uuid, security_group_id):
"""Disassociate the given security group from the given instance"""
session = get_session()
instance_ref = instance_get_by_uuid(context, instance_uuid,
session=session)
session.query(models.SecurityGroupInstanceAssociation).\
filter_by(instance_id=instance_ref['id']).\
filter_by(security_group_id=security_group_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def instance_action_create(context, values):
"""Create an instance action from the values dictionary."""
action_ref = models.InstanceActions()
action_ref.update(values)
session = get_session()
with session.begin():
action_ref.save(session=session)
return action_ref
@require_admin_context
def instance_get_actions(context, instance_uuid):
"""Return the actions associated to the given instance id"""
session = get_session()
return session.query(models.InstanceActions).\
filter_by(instance_uuid=instance_uuid).\
all()
@require_context
def instance_get_id_to_uuid_mapping(context, ids):
session = get_session()
instances = session.query(models.Instance).\
filter(models.Instance.id.in_(ids)).\
all()
mapping = {}
for instance in instances:
mapping[instance['id']] = instance['uuid']
return mapping
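

# NOTE(editor): illustrative sketch, not part of the original API. It shows
# how the id-to-uuid mapping above is typically consumed; the helper name is
# hypothetical.
def _example_uuids_for_ids(context, ids):
    mapping = instance_get_id_to_uuid_mapping(context, ids)
    return [mapping[i] for i in ids if i in mapping]
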
###################
@require_context
def instance_info_cache_create(context, values):
"""Create a new instance cache record in the table.
:param context: = request context object
:param values: = dict containing column values
"""
info_cache = models.InstanceInfoCache()
info_cache.update(values)
session = get_session()
with session.begin():
info_cache.save(session=session)
return info_cache
@require_context
def instance_info_cache_get(context, instance_uuid, session=None):
"""Gets an instance info cache from the table.
:param instance_uuid: = uuid of the info cache's instance
:param session: = optional session object
"""
session = session or get_session()
info_cache = session.query(models.InstanceInfoCache).\
filter_by(instance_id=instance_uuid).\
first()
return info_cache
@require_context
def instance_info_cache_update(context, instance_uuid, values,
session=None):
"""Update an instance info cache record in the table.
:param instance_uuid: = uuid of info cache's instance
:param values: = dict containing column values to update
:param session: = optional session object
"""
session = session or get_session()
info_cache = instance_info_cache_get(context, instance_uuid,
session=session)
if info_cache:
info_cache.update(values)
info_cache.save(session=session)
else:
# NOTE(tr3buchet): just in case someone blows away an instance's
# cache entry
values['instance_id'] = instance_uuid
info_cache = instance_info_cache_create(context, values)
return info_cache
@require_context
def instance_info_cache_delete(context, instance_uuid, session=None):
"""Deletes an existing instance_info_cache record
:param instance_uuid: = uuid of the instance tied to the cache record
:param session: = optional session object
"""
values = {'deleted': True,
'deleted_at': utils.utcnow()}
instance_info_cache_update(context, instance_uuid, values, session)
###################
@require_context
def key_pair_create(context, values):
key_pair_ref = models.KeyPair()
key_pair_ref.update(values)
key_pair_ref.save()
return key_pair_ref
@require_context
def key_pair_destroy(context, user_id, name):
authorize_user_context(context, user_id)
session = get_session()
with session.begin():
key_pair_ref = key_pair_get(context, user_id, name, session=session)
key_pair_ref.delete(session=session)
@require_context
def key_pair_destroy_all_by_user(context, user_id):
authorize_user_context(context, user_id)
session = get_session()
with session.begin():
session.query(models.KeyPair).\
filter_by(user_id=user_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def key_pair_get(context, user_id, name, session=None):
authorize_user_context(context, user_id)
result = model_query(context, models.KeyPair, session=session).\
filter_by(user_id=user_id).\
filter_by(name=name).\
first()
if not result:
raise exception.KeypairNotFound(user_id=user_id, name=name)
return result
@require_context
def key_pair_get_all_by_user(context, user_id):
authorize_user_context(context, user_id)
return model_query(context, models.KeyPair, read_deleted="no").\
filter_by(user_id=user_id).\
all()
###################
@require_admin_context
def network_associate(context, project_id, force=False):
"""Associate a project with a network.
called by project_get_networks under certain conditions
and network manager add_network_to_project()
only associate if the project doesn't already have a network
or if force is True
    force solves a race condition where a fresh project has multiple instance
    builds simultaneously picked up by multiple network hosts, which then
    attempt to associate the project with multiple networks

    force should only be used as a direct consequence of a user request;
    automated requests should never use force
"""
session = get_session()
with session.begin():
def network_query(project_filter):
return model_query(context, models.Network, session=session,
read_deleted="no").\
filter_by(project_id=project_filter).\
with_lockmode('update').\
first()
if not force:
# find out if project has a network
network_ref = network_query(project_id)
if force or not network_ref:
# in force mode or project doesn't have a network so associate
# with a new network
# get new network
network_ref = network_query(None)
if not network_ref:
raise db.NoMoreNetworks()
# associate with network
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
network_ref['project_id'] = project_id
session.add(network_ref)
return network_ref
@require_admin_context
def network_count(context):
return model_query(context, models.Network).count()
@require_admin_context
def _network_ips_query(context, network_id):
return model_query(context, models.FixedIp, read_deleted="no").\
filter_by(network_id=network_id)
@require_admin_context
def network_count_reserved_ips(context, network_id):
return _network_ips_query(context, network_id).\
filter_by(reserved=True).\
count()
@require_admin_context
def network_create_safe(context, values):
if values.get('vlan'):
if model_query(context, models.Network, read_deleted="no")\
.filter_by(vlan=values['vlan'])\
.first():
raise exception.DuplicateVlan(vlan=values['vlan'])
network_ref = models.Network()
network_ref['uuid'] = str(utils.gen_uuid())
network_ref.update(values)
try:
network_ref.save()
return network_ref
except IntegrityError:
return None
@require_admin_context
def network_delete_safe(context, network_id):
session = get_session()
with session.begin():
result = session.query(models.FixedIp).\
filter_by(network_id=network_id).\
filter_by(deleted=False).\
filter_by(allocated=True).\
all()
if result:
raise exception.NetworkInUse(network_id=network_id)
network_ref = network_get(context, network_id=network_id,
session=session)
session.query(models.FixedIp).\
filter_by(network_id=network_id).\
filter_by(deleted=False).\
update({'deleted': True,
'updated_at': literal_column('updated_at'),
'deleted_at': utils.utcnow()})
session.delete(network_ref)
@require_admin_context
def network_disassociate(context, network_id):
network_update(context, network_id, {'project_id': None,
'host': None})
@require_context
def network_get(context, network_id, session=None):
result = model_query(context, models.Network, session=session,
project_only=True).\
filter_by(id=network_id).\
first()
if not result:
raise exception.NetworkNotFound(network_id=network_id)
return result
@require_admin_context
def network_get_all(context):
result = model_query(context, models.Network, read_deleted="no").all()
if not result:
raise exception.NoNetworksFound()
return result
@require_admin_context
def network_get_all_by_uuids(context, network_uuids, project_id=None):
project_or_none = or_(models.Network.project_id == project_id,
models.Network.project_id == None)
result = model_query(context, models.Network, read_deleted="no").\
filter(models.Network.uuid.in_(network_uuids)).\
filter(project_or_none).\
all()
if not result:
raise exception.NoNetworksFound()
    # check that a host is set on every network returned in the result
for network in result:
if network['host'] is None:
raise exception.NetworkHostNotSet(network_id=network['id'])
    # check that the result contains all of the networks we are looking for
for network_uuid in network_uuids:
found = False
for network in result:
if network['uuid'] == network_uuid:
found = True
break
if not found:
if project_id:
raise exception.NetworkNotFoundForProject(
network_uuid=network_uuid, project_id=context.project_id)
raise exception.NetworkNotFound(network_id=network_uuid)
return result
# NOTE(vish): pylint complains because of the long method name, but
# it fits with the names of the rest of the methods
# pylint: disable=C0103
@require_admin_context
def network_get_associated_fixed_ips(context, network_id, host=None):
# FIXME(sirp): since this returns fixed_ips, this would be better named
# fixed_ip_get_all_by_network.
# NOTE(vish): The ugly joins here are to solve a performance issue and
# should be removed once we can add and remove leases
# without regenerating the whole list
vif_and = and_(models.VirtualInterface.id ==
models.FixedIp.virtual_interface_id,
models.VirtualInterface.deleted == False)
inst_and = and_(models.Instance.id == models.FixedIp.instance_id,
models.Instance.deleted == False)
session = get_session()
query = session.query(models.FixedIp.address,
models.FixedIp.instance_id,
models.FixedIp.network_id,
models.FixedIp.virtual_interface_id,
models.VirtualInterface.address,
models.Instance.hostname,
models.Instance.updated_at,
models.Instance.created_at).\
filter(models.FixedIp.deleted == False).\
filter(models.FixedIp.network_id == network_id).\
filter(models.FixedIp.allocated == True).\
join((models.VirtualInterface, vif_and)).\
join((models.Instance, inst_and)).\
filter(models.FixedIp.instance_id != None).\
filter(models.FixedIp.virtual_interface_id != None)
if host:
query = query.filter(models.Instance.host == host)
result = query.all()
data = []
for datum in result:
cleaned = {}
cleaned['address'] = datum[0]
cleaned['instance_id'] = datum[1]
cleaned['network_id'] = datum[2]
cleaned['vif_id'] = datum[3]
cleaned['vif_address'] = datum[4]
cleaned['instance_hostname'] = datum[5]
cleaned['instance_updated'] = datum[6]
cleaned['instance_created'] = datum[7]
data.append(cleaned)
return data
@require_admin_context
def _network_get_query(context, session=None):
return model_query(context, models.Network, session=session,
read_deleted="no")
@require_admin_context
def network_get_by_bridge(context, bridge):
result = _network_get_query(context).filter_by(bridge=bridge).first()
if not result:
raise exception.NetworkNotFoundForBridge(bridge=bridge)
return result
@require_admin_context
def network_get_by_uuid(context, uuid):
result = _network_get_query(context).filter_by(uuid=uuid).first()
if not result:
raise exception.NetworkNotFoundForUUID(uuid=uuid)
return result
@require_admin_context
def network_get_by_cidr(context, cidr):
result = _network_get_query(context).\
filter(or_(models.Network.cidr == cidr,
models.Network.cidr_v6 == cidr)).\
first()
if not result:
raise exception.NetworkNotFoundForCidr(cidr=cidr)
return result
@require_admin_context
def network_get_by_instance(context, instance_id):
# note this uses fixed IP to get to instance
# only works for networks the instance has an IP from
result = _network_get_query(context).\
filter_by(instance_id=instance_id).\
first()
if not result:
raise exception.NetworkNotFoundForInstance(instance_id=instance_id)
return result
@require_admin_context
def network_get_all_by_instance(context, instance_id):
result = _network_get_query(context).\
filter_by(instance_id=instance_id).\
all()
if not result:
raise exception.NetworkNotFoundForInstance(instance_id=instance_id)
return result
@require_admin_context
def network_get_all_by_host(context, host):
session = get_session()
fixed_ip_query = model_query(context, models.FixedIp.network_id,
session=session).\
filter(models.FixedIp.host == host)
# NOTE(vish): return networks that have host set
# or that have a fixed ip with host set
host_filter = or_(models.Network.host == host,
models.Network.id.in_(fixed_ip_query.subquery()))
return _network_get_query(context, session=session).\
filter(host_filter).\
all()
@require_admin_context
def network_set_host(context, network_id, host_id):
session = get_session()
with session.begin():
network_ref = _network_get_query(context, session=session).\
filter_by(id=network_id).\
with_lockmode('update').\
first()
if not network_ref:
raise exception.NetworkNotFound(network_id=network_id)
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if not network_ref['host']:
network_ref['host'] = host_id
session.add(network_ref)
return network_ref['host']
@require_context
def network_update(context, network_id, values):
session = get_session()
with session.begin():
network_ref = network_get(context, network_id, session=session)
network_ref.update(values)
network_ref.save(session=session)
return network_ref
###################
def queue_get_for(context, topic, physical_node_id):
# FIXME(ja): this should be servername?
return "%s.%s" % (topic, physical_node_id)
###################
@require_admin_context
def iscsi_target_count_by_host(context, host):
return model_query(context, models.IscsiTarget).\
filter_by(host=host).\
count()
@require_admin_context
def iscsi_target_create_safe(context, values):
iscsi_target_ref = models.IscsiTarget()
for (key, value) in values.iteritems():
iscsi_target_ref[key] = value
try:
iscsi_target_ref.save()
return iscsi_target_ref
except IntegrityError:
return None
###################
@require_admin_context
def auth_token_destroy(context, token_id):
session = get_session()
with session.begin():
token_ref = auth_token_get(context, token_id, session=session)
token_ref.delete(session=session)
@require_admin_context
def auth_token_get(context, token_hash, session=None):
result = model_query(context, models.AuthToken, session=session).\
filter_by(token_hash=token_hash).\
first()
if not result:
raise exception.AuthTokenNotFound(token=token_hash)
return result
@require_admin_context
def auth_token_update(context, token_hash, values):
session = get_session()
with session.begin():
token_ref = auth_token_get(context, token_hash, session=session)
token_ref.update(values)
token_ref.save(session=session)
@require_admin_context
def auth_token_create(context, token):
tk = models.AuthToken()
tk.update(token)
tk.save()
return tk
###################
@require_context
def quota_get(context, project_id, resource, session=None):
result = model_query(context, models.Quota, session=session,
read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(resource=resource).\
first()
if not result:
raise exception.ProjectQuotaNotFound(project_id=project_id)
return result
@require_context
def quota_get_all_by_project(context, project_id):
authorize_project_context(context, project_id)
rows = model_query(context, models.Quota, read_deleted="no").\
filter_by(project_id=project_id).\
all()
result = {'project_id': project_id}
for row in rows:
result[row.resource] = row.hard_limit
return result
@require_admin_context
def quota_create(context, project_id, resource, limit):
# NOTE: Treat -1 as unlimited for consistency w/ flags
if limit == -1:
limit = None
quota_ref = models.Quota()
quota_ref.project_id = project_id
quota_ref.resource = resource
quota_ref.hard_limit = limit
quota_ref.save()
return quota_ref
@require_admin_context
def quota_update(context, project_id, resource, limit):
# NOTE: Treat -1 as unlimited for consistency w/ flags
if limit == -1:
limit = None
session = get_session()
with session.begin():
quota_ref = quota_get(context, project_id, resource, session=session)
quota_ref.hard_limit = limit
quota_ref.save(session=session)
@require_admin_context
def quota_destroy(context, project_id, resource):
session = get_session()
with session.begin():
quota_ref = quota_get(context, project_id, resource, session=session)
quota_ref.delete(session=session)
@require_admin_context
def quota_destroy_all_by_project(context, project_id):
session = get_session()
with session.begin():
quotas = model_query(context, models.Quota, session=session,
read_deleted="no").\
filter_by(project_id=project_id).\
all()
for quota_ref in quotas:
quota_ref.delete(session=session)
###################
@require_admin_context
def volume_allocate_iscsi_target(context, volume_id, host):
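    """Claim a free iSCSI target on `host` for the volume under a row lock
    and return its target number."""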
session = get_session()
with session.begin():
iscsi_target_ref = model_query(context, models.IscsiTarget,
session=session, read_deleted="no").\
filter_by(volume=None).\
filter_by(host=host).\
with_lockmode('update').\
first()
# NOTE(vish): if with_lockmode isn't supported, as in sqlite,
# then this has concurrency issues
if not iscsi_target_ref:
raise db.NoMoreTargets()
iscsi_target_ref.volume_id = volume_id
session.add(iscsi_target_ref)
return iscsi_target_ref.target_num
@require_admin_context
def volume_attached(context, volume_id, instance_id, mountpoint):
session = get_session()
with session.begin():
volume_ref = volume_get(context, volume_id, session=session)
volume_ref['status'] = 'in-use'
volume_ref['mountpoint'] = mountpoint
volume_ref['attach_status'] = 'attached'
volume_ref.instance = instance_get(context, instance_id,
session=session)
volume_ref.save(session=session)
@require_context
def volume_create(context, values):
values['volume_metadata'] = _metadata_refs(values.get('metadata'),
models.VolumeMetadata)
volume_ref = models.Volume()
volume_ref.update(values)
session = get_session()
with session.begin():
volume_ref.save(session=session)
return volume_ref
@require_admin_context
def volume_data_get_for_project(context, project_id):
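    """Return a (volume count, total volume size) pair for the project."""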
result = model_query(context,
func.count(models.Volume.id),
func.sum(models.Volume.size),
read_deleted="no").\
filter_by(project_id=project_id).\
first()
# NOTE(vish): convert None to 0
return (result[0] or 0, result[1] or 0)
@require_admin_context
def volume_destroy(context, volume_id):
session = get_session()
with session.begin():
session.query(models.Volume).\
filter_by(id=volume_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.IscsiTarget).\
filter_by(volume_id=volume_id).\
update({'volume_id': None})
session.query(models.VolumeMetadata).\
filter_by(volume_id=volume_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_admin_context
def volume_detached(context, volume_id):
session = get_session()
with session.begin():
volume_ref = volume_get(context, volume_id, session=session)
volume_ref['status'] = 'available'
volume_ref['mountpoint'] = None
volume_ref['attach_status'] = 'detached'
volume_ref.instance = None
volume_ref.save(session=session)
@require_context
def _volume_get_query(context, session=None, project_only=False):
return model_query(context, models.Volume, session=session,
project_only=project_only).\
options(joinedload('instance')).\
options(joinedload('volume_metadata')).\
options(joinedload('volume_type'))
@require_context
def volume_get(context, volume_id, session=None):
result = _volume_get_query(context, session=session, project_only=True).\
filter_by(id=volume_id).\
first()
if not result:
raise exception.VolumeNotFound(volume_id=volume_id)
return result
@require_admin_context
def volume_get_all(context):
return _volume_get_query(context).all()
@require_admin_context
def volume_get_all_by_host(context, host):
return _volume_get_query(context).filter_by(host=host).all()
@require_admin_context
def volume_get_all_by_instance(context, instance_id):
result = model_query(context, models.Volume, read_deleted="no").\
options(joinedload('volume_metadata')).\
options(joinedload('volume_type')).\
filter_by(instance_id=instance_id).\
all()
if not result:
raise exception.VolumeNotFoundForInstance(instance_id=instance_id)
return result
@require_context
def volume_get_all_by_project(context, project_id):
authorize_project_context(context, project_id)
return _volume_get_query(context).filter_by(project_id=project_id).all()
@require_admin_context
def volume_get_instance(context, volume_id):
result = _volume_get_query(context).filter_by(id=volume_id).first()
if not result:
raise exception.VolumeNotFound(volume_id=volume_id)
return result.instance
@require_admin_context
def volume_get_iscsi_target_num(context, volume_id):
result = model_query(context, models.IscsiTarget, read_deleted="yes").\
filter_by(volume_id=volume_id).\
first()
if not result:
raise exception.ISCSITargetNotFoundForVolume(volume_id=volume_id)
return result.target_num
@require_context
def volume_update(context, volume_id, values):
session = get_session()
metadata = values.get('metadata')
if metadata is not None:
volume_metadata_update(context,
volume_id,
values.pop('metadata'),
delete=True)
with session.begin():
volume_ref = volume_get(context, volume_id, session=session)
volume_ref.update(values)
volume_ref.save(session=session)
####################
def _volume_metadata_get_query(context, volume_id, session=None):
return model_query(context, models.VolumeMetadata,
session=session, read_deleted="no").\
filter_by(volume_id=volume_id)
@require_context
@require_volume_exists
def volume_metadata_get(context, volume_id):
rows = _volume_metadata_get_query(context, volume_id).all()
result = {}
for row in rows:
result[row['key']] = row['value']
return result
@require_context
@require_volume_exists
def volume_metadata_delete(context, volume_id, key):
_volume_metadata_get_query(context, volume_id).\
filter_by(key=key).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
@require_volume_exists
def volume_metadata_get_item(context, volume_id, key, session=None):
result = _volume_metadata_get_query(context, volume_id, session=session).\
filter_by(key=key).\
first()
if not result:
raise exception.VolumeMetadataNotFound(metadata_key=key,
volume_id=volume_id)
return result
@require_context
@require_volume_exists
def volume_metadata_update(context, volume_id, metadata, delete):
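    """Create or update the volume's metadata items; when `delete` is True,
    keys missing from `metadata` are soft-deleted first."""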
session = get_session()
# Set existing metadata to deleted if delete argument is True
if delete:
original_metadata = volume_metadata_get(context, volume_id)
for meta_key, meta_value in original_metadata.iteritems():
if meta_key not in metadata:
meta_ref = volume_metadata_get_item(context, volume_id,
meta_key, session)
meta_ref.update({'deleted': True})
meta_ref.save(session=session)
meta_ref = None
# Now update all existing items with new values, or create new meta objects
for meta_key, meta_value in metadata.iteritems():
# update the value whether it exists or not
item = {"value": meta_value}
try:
meta_ref = volume_metadata_get_item(context, volume_id,
meta_key, session)
        except exception.VolumeMetadataNotFound:
meta_ref = models.VolumeMetadata()
item.update({"key": meta_key, "volume_id": volume_id})
meta_ref.update(item)
meta_ref.save(session=session)
return metadata
###################
@require_context
def snapshot_create(context, values):
snapshot_ref = models.Snapshot()
snapshot_ref.update(values)
session = get_session()
with session.begin():
snapshot_ref.save(session=session)
return snapshot_ref
@require_admin_context
def snapshot_destroy(context, snapshot_id):
session = get_session()
with session.begin():
session.query(models.Snapshot).\
filter_by(id=snapshot_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def snapshot_get(context, snapshot_id, session=None):
result = model_query(context, models.Snapshot, session=session,
project_only=True).\
filter_by(id=snapshot_id).\
first()
if not result:
raise exception.SnapshotNotFound(snapshot_id=snapshot_id)
return result
@require_admin_context
def snapshot_get_all(context):
return model_query(context, models.Snapshot).all()
@require_context
def snapshot_get_all_for_volume(context, volume_id):
return model_query(context, models.Snapshot, read_deleted='no',
project_only=True).\
filter_by(volume_id=volume_id).all()
@require_context
def snapshot_get_all_by_project(context, project_id):
authorize_project_context(context, project_id)
return model_query(context, models.Snapshot).\
filter_by(project_id=project_id).\
all()
@require_context
def snapshot_update(context, snapshot_id, values):
session = get_session()
with session.begin():
snapshot_ref = snapshot_get(context, snapshot_id, session=session)
snapshot_ref.update(values)
snapshot_ref.save(session=session)
###################
def _block_device_mapping_get_query(context, session=None):
return model_query(context, models.BlockDeviceMapping, session=session,
read_deleted="no")
@require_context
def block_device_mapping_create(context, values):
bdm_ref = models.BlockDeviceMapping()
bdm_ref.update(values)
session = get_session()
with session.begin():
bdm_ref.save(session=session)
@require_context
def block_device_mapping_update(context, bdm_id, values):
session = get_session()
with session.begin():
_block_device_mapping_get_query(context, session=session).\
filter_by(id=bdm_id).\
update(values)
@require_context
def block_device_mapping_update_or_create(context, values):
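    """Update the mapping matching (instance_id, device_name), creating it if
    absent, and soft-delete other mappings that reuse the same swap or
    ephemeral virtual device name."""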
session = get_session()
with session.begin():
result = _block_device_mapping_get_query(context, session=session).\
filter_by(instance_id=values['instance_id']).\
filter_by(device_name=values['device_name']).\
first()
if not result:
bdm_ref = models.BlockDeviceMapping()
bdm_ref.update(values)
bdm_ref.save(session=session)
else:
result.update(values)
# NOTE(yamahata): same virtual device name can be specified multiple
# times. So delete the existing ones.
virtual_name = values['virtual_name']
if (virtual_name is not None and
block_device.is_swap_or_ephemeral(virtual_name)):
session.query(models.BlockDeviceMapping).\
filter_by(instance_id=values['instance_id']).\
filter_by(virtual_name=virtual_name).\
filter(models.BlockDeviceMapping.device_name !=
values['device_name']).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def block_device_mapping_get_all_by_instance(context, instance_id):
return _block_device_mapping_get_query(context).\
filter_by(instance_id=instance_id).\
all()
@require_context
def block_device_mapping_destroy(context, bdm_id):
session = get_session()
with session.begin():
session.query(models.BlockDeviceMapping).\
filter_by(id=bdm_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def block_device_mapping_destroy_by_instance_and_volume(context, instance_id,
volume_id):
session = get_session()
with session.begin():
_block_device_mapping_get_query(context, session=session).\
filter_by(instance_id=instance_id).\
filter_by(volume_id=volume_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
###################
def _security_group_get_query(context, session=None, read_deleted=None,
project_only=False):
return model_query(context, models.SecurityGroup, session=session,
read_deleted=read_deleted, project_only=project_only).\
options(joinedload_all('rules'))
@require_context
def security_group_get_all(context):
return _security_group_get_query(context).all()
@require_context
def security_group_get(context, security_group_id, session=None):
result = _security_group_get_query(context, session=session,
project_only=True).\
filter_by(id=security_group_id).\
options(joinedload_all('instances')).\
first()
if not result:
raise exception.SecurityGroupNotFound(
security_group_id=security_group_id)
return result
@require_context
def security_group_get_by_name(context, project_id, group_name):
result = _security_group_get_query(context, read_deleted="no").\
filter_by(project_id=project_id).\
filter_by(name=group_name).\
options(joinedload_all('instances')).\
first()
if not result:
raise exception.SecurityGroupNotFoundForProject(
project_id=project_id, security_group_id=group_name)
return result
@require_context
def security_group_get_by_project(context, project_id):
return _security_group_get_query(context, read_deleted="no").\
filter_by(project_id=project_id).\
all()
@require_context
def security_group_get_by_instance(context, instance_id):
return _security_group_get_query(context, read_deleted="no").\
join(models.SecurityGroup.instances).\
filter_by(id=instance_id).\
all()
@require_context
def security_group_exists(context, project_id, group_name):
try:
group = security_group_get_by_name(context, project_id, group_name)
return group is not None
except exception.NotFound:
return False
@require_context
def security_group_in_use(context, group_id):
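    """Return True if any non-deleted instance is still associated with the
    security group."""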
session = get_session()
with session.begin():
# Are there any instances that haven't been deleted
# that include this group?
inst_assoc = session.query(models.SecurityGroupInstanceAssociation).\
filter_by(security_group_id=group_id).\
filter_by(deleted=False).\
all()
for ia in inst_assoc:
num_instances = session.query(models.Instance).\
filter_by(deleted=False).\
filter_by(id=ia.instance_id).\
count()
if num_instances:
return True
return False
@require_context
def security_group_create(context, values):
security_group_ref = models.SecurityGroup()
    # FIXME(devcamcar): Unless 'rules' is touched here, it fails with a lazy
    # load exception once save() is called. This will get cleaned up in the
    # next orm pass.
security_group_ref.rules
security_group_ref.update(values)
security_group_ref.save()
return security_group_ref
@require_context
def security_group_destroy(context, security_group_id):
session = get_session()
with session.begin():
session.query(models.SecurityGroup).\
filter_by(id=security_group_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.SecurityGroupInstanceAssociation).\
filter_by(security_group_id=security_group_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.SecurityGroupIngressRule).\
filter_by(group_id=security_group_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def security_group_count_by_project(context, project_id):
authorize_project_context(context, project_id)
return model_query(context, models.SecurityGroup, read_deleted="no").\
filter_by(project_id=project_id).\
count()
###################
def _security_group_rule_get_query(context, session=None):
return model_query(context, models.SecurityGroupIngressRule,
session=session)
@require_context
def security_group_rule_get(context, security_group_rule_id, session=None):
result = _security_group_rule_get_query(context, session=session).\
filter_by(id=security_group_rule_id).\
first()
if not result:
raise exception.SecurityGroupNotFoundForRule(
rule_id=security_group_rule_id)
return result
@require_context
def security_group_rule_get_by_security_group(context, security_group_id,
session=None):
return _security_group_rule_get_query(context, session=session).\
filter_by(parent_group_id=security_group_id).\
options(joinedload_all('grantee_group.instances')).\
all()
@require_context
def security_group_rule_get_by_security_group_grantee(context,
security_group_id,
session=None):
return _security_group_rule_get_query(context, session=session).\
filter_by(group_id=security_group_id).\
all()
@require_context
def security_group_rule_create(context, values):
security_group_rule_ref = models.SecurityGroupIngressRule()
security_group_rule_ref.update(values)
security_group_rule_ref.save()
return security_group_rule_ref
@require_context
def security_group_rule_destroy(context, security_group_rule_id):
session = get_session()
with session.begin():
security_group_rule = security_group_rule_get(context,
security_group_rule_id,
session=session)
security_group_rule.delete(session=session)
@require_context
def security_group_rule_count_by_group(context, security_group_id):
return model_query(context, models.SecurityGroupIngressRule,
read_deleted="no").\
filter_by(parent_group_id=security_group_id).\
count()
#
###################
@require_admin_context
def provider_fw_rule_create(context, rule):
fw_rule_ref = models.ProviderFirewallRule()
fw_rule_ref.update(rule)
fw_rule_ref.save()
return fw_rule_ref
@require_admin_context
def provider_fw_rule_get_all(context):
return model_query(context, models.ProviderFirewallRule).all()
@require_admin_context
def provider_fw_rule_destroy(context, rule_id):
session = get_session()
with session.begin():
session.query(models.ProviderFirewallRule).\
filter_by(id=rule_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
###################
@require_admin_context
def user_get(context, id, session=None):
result = model_query(context, models.User, session=session).\
filter_by(id=id).\
first()
if not result:
raise exception.UserNotFound(user_id=id)
return result
@require_admin_context
def user_get_by_access_key(context, access_key, session=None):
result = model_query(context, models.User, session=session).\
filter_by(access_key=access_key).\
first()
if not result:
raise exception.AccessKeyNotFound(access_key=access_key)
return result
@require_admin_context
def user_create(context, values):
user_ref = models.User()
user_ref.update(values)
user_ref.save()
return user_ref
@require_admin_context
def user_delete(context, id):
session = get_session()
with session.begin():
session.query(models.UserProjectAssociation).\
filter_by(user_id=id).\
delete()
session.query(models.UserRoleAssociation).\
filter_by(user_id=id).\
delete()
session.query(models.UserProjectRoleAssociation).\
filter_by(user_id=id).\
delete()
user_ref = user_get(context, id, session=session)
session.delete(user_ref)
def user_get_all(context):
return model_query(context, models.User).all()
def user_get_roles(context, user_id):
session = get_session()
with session.begin():
user_ref = user_get(context, user_id, session=session)
return [role.role for role in user_ref['roles']]
def user_get_roles_for_project(context, user_id, project_id):
session = get_session()
with session.begin():
res = session.query(models.UserProjectRoleAssociation).\
filter_by(user_id=user_id).\
filter_by(project_id=project_id).\
all()
return [association.role for association in res]
def user_remove_project_role(context, user_id, project_id, role):
session = get_session()
with session.begin():
session.query(models.UserProjectRoleAssociation).\
filter_by(user_id=user_id).\
filter_by(project_id=project_id).\
filter_by(role=role).\
delete()
def user_remove_role(context, user_id, role):
session = get_session()
with session.begin():
res = session.query(models.UserRoleAssociation).\
filter_by(user_id=user_id).\
filter_by(role=role).\
all()
        for role_ref in res:
            session.delete(role_ref)
def user_add_role(context, user_id, role):
session = get_session()
with session.begin():
user_ref = user_get(context, user_id, session=session)
models.UserRoleAssociation(user=user_ref, role=role).\
save(session=session)
def user_add_project_role(context, user_id, project_id, role):
session = get_session()
with session.begin():
user_ref = user_get(context, user_id, session=session)
project_ref = project_get(context, project_id, session=session)
models.UserProjectRoleAssociation(user_id=user_ref['id'],
project_id=project_ref['id'],
role=role).save(session=session)
def user_update(context, user_id, values):
session = get_session()
with session.begin():
user_ref = user_get(context, user_id, session=session)
user_ref.update(values)
user_ref.save(session=session)
#
###################
def project_create(context, values):
project_ref = models.Project()
project_ref.update(values)
project_ref.save()
return project_ref
def project_add_member(context, project_id, user_id):
session = get_session()
with session.begin():
project_ref = project_get(context, project_id, session=session)
user_ref = user_get(context, user_id, session=session)
project_ref.members += [user_ref]
project_ref.save(session=session)
def project_get(context, id, session=None):
result = model_query(context, models.Project, session=session,
read_deleted="no").\
filter_by(id=id).\
options(joinedload_all('members')).\
first()
if not result:
raise exception.ProjectNotFound(project_id=id)
return result
def project_get_all(context):
return model_query(context, models.Project).\
options(joinedload_all('members')).\
all()
def project_get_by_user(context, user_id):
user = model_query(context, models.User).\
filter_by(id=user_id).\
options(joinedload_all('projects')).\
first()
if not user:
raise exception.UserNotFound(user_id=user_id)
return user.projects
def project_remove_member(context, project_id, user_id):
session = get_session()
project = project_get(context, project_id, session=session)
user = user_get(context, user_id, session=session)
if user in project.members:
project.members.remove(user)
project.save(session=session)
def project_update(context, project_id, values):
session = get_session()
with session.begin():
project_ref = project_get(context, project_id, session=session)
project_ref.update(values)
project_ref.save(session=session)
def project_delete(context, id):
session = get_session()
with session.begin():
session.query(models.UserProjectAssociation).\
filter_by(project_id=id).\
delete()
session.query(models.UserProjectRoleAssociation).\
filter_by(project_id=id).\
delete()
project_ref = project_get(context, id, session=session)
session.delete(project_ref)
@require_context
def project_get_networks(context, project_id, associate=True):
    # NOTE(tr3buchet): as before, this function will associate a project
    # with a network if it doesn't have one and associate is True
result = model_query(context, models.Network, read_deleted="no").\
filter_by(project_id=project_id).\
all()
if not result:
if not associate:
return []
return [network_associate(context, project_id)]
return result
###################
@require_admin_context
def migration_create(context, values):
migration = models.Migration()
migration.update(values)
migration.save()
return migration
@require_admin_context
def migration_update(context, id, values):
session = get_session()
with session.begin():
migration = migration_get(context, id, session=session)
migration.update(values)
migration.save(session=session)
return migration
@require_admin_context
def migration_get(context, id, session=None):
result = model_query(context, models.Migration, session=session,
read_deleted="yes").\
filter_by(id=id).\
first()
if not result:
raise exception.MigrationNotFound(migration_id=id)
return result
@require_admin_context
def migration_get_by_instance_and_status(context, instance_uuid, status):
result = model_query(context, models.Migration, read_deleted="yes").\
filter_by(instance_uuid=instance_uuid).\
filter_by(status=status).\
first()
if not result:
raise exception.MigrationNotFoundByStatus(instance_id=instance_uuid,
status=status)
return result
@require_admin_context
def migration_get_all_unconfirmed(context, confirm_window, session=None):
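    """Return FINISHED migrations not updated within the last
    `confirm_window` seconds, i.e. resizes still awaiting confirmation."""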
confirm_window = datetime.datetime.utcnow() - datetime.timedelta(
seconds=confirm_window)
return model_query(context, models.Migration, session=session,
read_deleted="yes").\
filter(models.Migration.updated_at <= confirm_window).\
filter_by(status="FINISHED").\
all()
##################
def console_pool_create(context, values):
pool = models.ConsolePool()
pool.update(values)
pool.save()
return pool
def console_pool_get(context, pool_id):
result = model_query(context, models.ConsolePool, read_deleted="no").\
filter_by(id=pool_id).\
first()
if not result:
raise exception.ConsolePoolNotFound(pool_id=pool_id)
return result
def console_pool_get_by_host_type(context, compute_host, host,
console_type):
result = model_query(context, models.ConsolePool, read_deleted="no").\
filter_by(host=host).\
filter_by(console_type=console_type).\
filter_by(compute_host=compute_host).\
options(joinedload('consoles')).\
first()
if not result:
raise exception.ConsolePoolNotFoundForHostType(
host=host, console_type=console_type,
compute_host=compute_host)
return result
def console_pool_get_all_by_host_type(context, host, console_type):
return model_query(context, models.ConsolePool, read_deleted="no").\
filter_by(host=host).\
filter_by(console_type=console_type).\
options(joinedload('consoles')).\
all()
def console_create(context, values):
console = models.Console()
console.update(values)
console.save()
return console
def console_delete(context, console_id):
session = get_session()
with session.begin():
# NOTE(mdragon): consoles are meant to be transient.
session.query(models.Console).\
filter_by(id=console_id).\
delete()
def console_get_by_pool_instance(context, pool_id, instance_id):
result = model_query(context, models.Console, read_deleted="yes").\
filter_by(pool_id=pool_id).\
filter_by(instance_id=instance_id).\
options(joinedload('pool')).\
first()
if not result:
raise exception.ConsoleNotFoundInPoolForInstance(
pool_id=pool_id, instance_id=instance_id)
return result
def console_get_all_by_instance(context, instance_id):
return model_query(context, models.Console, read_deleted="yes").\
filter_by(instance_id=instance_id).\
all()
def console_get(context, console_id, instance_id=None):
query = model_query(context, models.Console, read_deleted="yes").\
filter_by(id=console_id).\
options(joinedload('pool'))
if instance_id is not None:
query = query.filter_by(instance_id=instance_id)
result = query.first()
if not result:
if instance_id:
raise exception.ConsoleNotFoundForInstance(
console_id=console_id, instance_id=instance_id)
else:
raise exception.ConsoleNotFound(console_id=console_id)
return result
##################
@require_admin_context
def instance_type_create(context, values):
"""Create a new instance type. In order to pass in extra specs,
    the values dict should contain an 'extra_specs' key/value pair:
{'extra_specs' : {'k1': 'v1', 'k2': 'v2', ...}}
"""
session = get_session()
with session.begin():
try:
instance_type_get_by_name(context, values['name'], session)
raise exception.InstanceTypeExists(name=values['name'])
except exception.InstanceTypeNotFoundByName:
pass
try:
instance_type_get_by_flavor_id(context, values['flavorid'],
session)
raise exception.InstanceTypeExists(name=values['name'])
except exception.FlavorNotFound:
pass
try:
specs = values.get('extra_specs')
specs_refs = []
if specs:
for k, v in specs.iteritems():
specs_ref = models.InstanceTypeExtraSpecs()
specs_ref['key'] = k
specs_ref['value'] = v
specs_refs.append(specs_ref)
values['extra_specs'] = specs_refs
instance_type_ref = models.InstanceTypes()
instance_type_ref.update(values)
instance_type_ref.save(session=session)
        except Exception as e:
raise exception.DBError(e)
return _dict_with_extra_specs(instance_type_ref)
def _dict_with_extra_specs(inst_type_query):
"""Takes an instance, volume, or instance type query returned
by sqlalchemy and returns it as a dictionary, converting the
extra_specs entry from a list of dicts:
'extra_specs' : [{'key': 'k1', 'value': 'v1', ...}, ...]
to a single dict:
'extra_specs' : {'k1': 'v1'}
"""
inst_type_dict = dict(inst_type_query)
extra_specs = dict([(x['key'], x['value'])
for x in inst_type_query['extra_specs']])
inst_type_dict['extra_specs'] = extra_specs
return inst_type_dict
def _instance_type_get_query(context, session=None, read_deleted=None):
return model_query(context, models.InstanceTypes, session=session,
read_deleted=read_deleted).\
options(joinedload('extra_specs'))
@require_context
def instance_type_get_all(context, inactive=False, filters=None):
"""
Returns all instance types.
"""
filters = filters or {}
read_deleted = "yes" if inactive else "no"
query = _instance_type_get_query(context, read_deleted=read_deleted)
if 'min_memory_mb' in filters:
query = query.filter(
models.InstanceTypes.memory_mb >= filters['min_memory_mb'])
if 'min_root_gb' in filters:
query = query.filter(
models.InstanceTypes.root_gb >= filters['min_root_gb'])
inst_types = query.order_by("name").all()
return [_dict_with_extra_specs(i) for i in inst_types]
@require_context
def instance_type_get(context, id, session=None):
"""Returns a dict describing specific instance_type"""
result = _instance_type_get_query(context, session=session).\
filter_by(id=id).\
first()
if not result:
raise exception.InstanceTypeNotFound(instance_type_id=id)
return _dict_with_extra_specs(result)
@require_context
def instance_type_get_by_name(context, name, session=None):
"""Returns a dict describing specific instance_type"""
result = _instance_type_get_query(context, session=session).\
filter_by(name=name).\
first()
if not result:
raise exception.InstanceTypeNotFoundByName(instance_type_name=name)
return _dict_with_extra_specs(result)
@require_context
def instance_type_get_by_flavor_id(context, flavor_id, session=None):
"""Returns a dict describing specific flavor_id"""
result = _instance_type_get_query(context, session=session).\
filter_by(flavorid=flavor_id).\
first()
if not result:
raise exception.FlavorNotFound(flavor_id=flavor_id)
return _dict_with_extra_specs(result)
@require_admin_context
def instance_type_destroy(context, name):
"""Marks specific instance_type as deleted"""
session = get_session()
with session.begin():
instance_type_ref = instance_type_get_by_name(context, name,
session=session)
instance_type_id = instance_type_ref['id']
session.query(models.InstanceTypes).\
filter_by(id=instance_type_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.InstanceTypeExtraSpecs).\
filter_by(instance_type_id=instance_type_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
####################
@require_admin_context
def cell_create(context, values):
cell = models.Cell()
cell.update(values)
cell.save()
return cell
def _cell_get_by_id_query(context, cell_id, session=None):
return model_query(context, models.Cell, session=session).\
filter_by(id=cell_id)
@require_admin_context
def cell_update(context, cell_id, values):
cell = cell_get(context, cell_id)
cell.update(values)
cell.save()
return cell
@require_admin_context
def cell_delete(context, cell_id):
session = get_session()
with session.begin():
_cell_get_by_id_query(context, cell_id, session=session).\
delete()
@require_admin_context
def cell_get(context, cell_id):
result = _cell_get_by_id_query(context, cell_id).first()
if not result:
raise exception.CellNotFound(cell_id=cell_id)
return result
@require_admin_context
def cell_get_all(context):
return model_query(context, models.Cell, read_deleted="no").all()
####################
def _instance_metadata_get_query(context, instance_id, session=None):
return model_query(context, models.InstanceMetadata, session=session,
read_deleted="no").\
filter_by(instance_id=instance_id)
@require_context
@require_instance_exists
def instance_metadata_get(context, instance_id):
rows = _instance_metadata_get_query(context, instance_id).all()
result = {}
for row in rows:
result[row['key']] = row['value']
return result
@require_context
@require_instance_exists
def instance_metadata_delete(context, instance_id, key):
_instance_metadata_get_query(context, instance_id).\
filter_by(key=key).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
@require_instance_exists
def instance_metadata_get_item(context, instance_id, key, session=None):
result = _instance_metadata_get_query(
context, instance_id, session=session).\
filter_by(key=key).\
first()
if not result:
raise exception.InstanceMetadataNotFound(metadata_key=key,
instance_id=instance_id)
return result
@require_context
@require_instance_exists
def instance_metadata_update(context, instance_id, metadata, delete):
session = get_session()
# Set existing metadata to deleted if delete argument is True
if delete:
original_metadata = instance_metadata_get(context, instance_id)
for meta_key, meta_value in original_metadata.iteritems():
if meta_key not in metadata:
meta_ref = instance_metadata_get_item(context, instance_id,
meta_key, session)
meta_ref.update({'deleted': True})
meta_ref.save(session=session)
meta_ref = None
# Now update all existing items with new values, or create new meta objects
for meta_key, meta_value in metadata.iteritems():
# update the value whether it exists or not
item = {"value": meta_value}
try:
meta_ref = instance_metadata_get_item(context, instance_id,
meta_key, session)
        except exception.InstanceMetadataNotFound:
meta_ref = models.InstanceMetadata()
item.update({"key": meta_key, "instance_id": instance_id})
meta_ref.update(item)
meta_ref.save(session=session)
return metadata
####################
@require_admin_context
def agent_build_create(context, values):
agent_build_ref = models.AgentBuild()
agent_build_ref.update(values)
agent_build_ref.save()
return agent_build_ref
@require_admin_context
def agent_build_get_by_triple(context, hypervisor, os, architecture,
session=None):
return model_query(context, models.AgentBuild, session=session,
read_deleted="no").\
filter_by(hypervisor=hypervisor).\
filter_by(os=os).\
filter_by(architecture=architecture).\
first()
@require_admin_context
def agent_build_get_all(context):
return model_query(context, models.AgentBuild, read_deleted="no").\
all()
@require_admin_context
def agent_build_destroy(context, agent_build_id):
session = get_session()
with session.begin():
model_query(context, models.AgentBuild, session=session,
read_deleted="yes").\
filter_by(id=agent_build_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_admin_context
def agent_build_update(context, agent_build_id, values):
session = get_session()
with session.begin():
agent_build_ref = model_query(context, models.AgentBuild,
session=session, read_deleted="yes").\
filter_by(id=agent_build_id).\
first()
agent_build_ref.update(values)
agent_build_ref.save(session=session)
####################
@require_context
def bw_usage_get_by_macs(context, macs, start_period):
return model_query(context, models.BandwidthUsage, read_deleted="yes").\
filter(models.BandwidthUsage.mac.in_(macs)).\
filter_by(start_period=start_period).\
all()
@require_context
def bw_usage_update(context,
mac,
start_period,
bw_in, bw_out,
session=None):
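    """Create or update the bandwidth usage row keyed by (mac, start_period),
    refreshing its last_refreshed timestamp."""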
if not session:
session = get_session()
with session.begin():
bwusage = model_query(context, models.BandwidthUsage,
session=session, read_deleted="yes").\
filter_by(start_period=start_period).\
filter_by(mac=mac).\
first()
if not bwusage:
bwusage = models.BandwidthUsage()
bwusage.start_period = start_period
bwusage.mac = mac
bwusage.last_refreshed = utils.utcnow()
bwusage.bw_in = bw_in
bwusage.bw_out = bw_out
bwusage.save(session=session)
####################
def _instance_type_extra_specs_get_query(context, instance_type_id,
session=None):
return model_query(context, models.InstanceTypeExtraSpecs,
session=session, read_deleted="no").\
filter_by(instance_type_id=instance_type_id)
@require_context
def instance_type_extra_specs_get(context, instance_type_id):
rows = _instance_type_extra_specs_get_query(
context, instance_type_id).\
all()
result = {}
for row in rows:
result[row['key']] = row['value']
return result
@require_context
def instance_type_extra_specs_delete(context, instance_type_id, key):
_instance_type_extra_specs_get_query(
context, instance_type_id).\
filter_by(key=key).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def instance_type_extra_specs_get_item(context, instance_type_id, key,
session=None):
result = _instance_type_extra_specs_get_query(
context, instance_type_id, session=session).\
filter_by(key=key).\
first()
if not result:
raise exception.InstanceTypeExtraSpecsNotFound(
extra_specs_key=key, instance_type_id=instance_type_id)
return result
@require_context
def instance_type_extra_specs_update_or_create(context, instance_type_id,
specs):
session = get_session()
spec_ref = None
for key, value in specs.iteritems():
try:
spec_ref = instance_type_extra_specs_get_item(
context, instance_type_id, key, session)
        except exception.InstanceTypeExtraSpecsNotFound:
spec_ref = models.InstanceTypeExtraSpecs()
spec_ref.update({"key": key, "value": value,
"instance_type_id": instance_type_id,
"deleted": 0})
spec_ref.save(session=session)
return specs
##################
@require_admin_context
def volume_type_create(context, values):
"""Create a new instance type. In order to pass in extra specs,
the values dict should contain a 'extra_specs' key/value pair:
{'extra_specs' : {'k1': 'v1', 'k2': 'v2', ...}}
"""
session = get_session()
with session.begin():
try:
volume_type_get_by_name(context, values['name'], session)
raise exception.VolumeTypeExists(name=values['name'])
except exception.VolumeTypeNotFoundByName:
pass
try:
            values['extra_specs'] = _metadata_refs(values.get('extra_specs'),
                                                   models.VolumeTypeExtraSpecs)
volume_type_ref = models.VolumeTypes()
volume_type_ref.update(values)
volume_type_ref.save()
        except Exception as e:
raise exception.DBError(e)
return volume_type_ref
@require_context
def volume_type_get_all(context, inactive=False, filters=None):
"""
Returns a dict describing all volume_types with name as key.
"""
filters = filters or {}
read_deleted = "yes" if inactive else "no"
rows = model_query(context, models.VolumeTypes,
read_deleted=read_deleted).\
options(joinedload('extra_specs')).\
order_by("name").\
all()
    # TODO(sirp): this pattern of converting rows to a result with extra_specs
    # is repeated quite a bit, might be worth creating a method for it
result = {}
for row in rows:
result[row['name']] = _dict_with_extra_specs(row)
return result
@require_context
def volume_type_get(context, id, session=None):
"""Returns a dict describing specific volume_type"""
result = model_query(context, models.VolumeTypes, session=session).\
options(joinedload('extra_specs')).\
filter_by(id=id).\
first()
if not result:
raise exception.VolumeTypeNotFound(volume_type=id)
return _dict_with_extra_specs(result)
@require_context
def volume_type_get_by_name(context, name, session=None):
"""Returns a dict describing specific volume_type"""
result = model_query(context, models.VolumeTypes, session=session).\
options(joinedload('extra_specs')).\
filter_by(name=name).\
first()
    if not result:
        raise exception.VolumeTypeNotFoundByName(volume_type_name=name)
    return _dict_with_extra_specs(result)
@require_admin_context
def volume_type_destroy(context, name):
session = get_session()
with session.begin():
volume_type_ref = volume_type_get_by_name(context, name,
session=session)
volume_type_id = volume_type_ref['id']
session.query(models.VolumeTypes).\
filter_by(id=volume_type_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
session.query(models.VolumeTypeExtraSpecs).\
filter_by(volume_type_id=volume_type_id).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
####################
def _volume_type_extra_specs_query(context, volume_type_id, session=None):
return model_query(context, models.VolumeTypeExtraSpecs, session=session,
read_deleted="no").\
filter_by(volume_type_id=volume_type_id)
@require_context
def volume_type_extra_specs_get(context, volume_type_id):
rows = _volume_type_extra_specs_query(context, volume_type_id).\
all()
result = {}
for row in rows:
result[row['key']] = row['value']
return result
@require_context
def volume_type_extra_specs_delete(context, volume_type_id, key):
_volume_type_extra_specs_query(context, volume_type_id).\
filter_by(key=key).\
update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
@require_context
def volume_type_extra_specs_get_item(context, volume_type_id, key,
session=None):
result = _volume_type_extra_specs_query(
context, volume_type_id, session=session).\
filter_by(key=key).\
first()
if not result:
raise exception.VolumeTypeExtraSpecsNotFound(
extra_specs_key=key, volume_type_id=volume_type_id)
return result
@require_context
def volume_type_extra_specs_update_or_create(context, volume_type_id,
specs):
session = get_session()
spec_ref = None
for key, value in specs.iteritems():
try:
spec_ref = volume_type_extra_specs_get_item(
context, volume_type_id, key, session)
        except exception.VolumeTypeExtraSpecsNotFound:
spec_ref = models.VolumeTypeExtraSpecs()
spec_ref.update({"key": key, "value": value,
"volume_type_id": volume_type_id,
"deleted": 0})
spec_ref.save(session=session)
return specs
####################
def s3_image_get(context, image_id):
"""Find local s3 image represented by the provided id"""
result = model_query(context, models.S3Image, read_deleted="yes").\
filter_by(id=image_id).\
first()
if not result:
raise exception.ImageNotFound(image_id=image_id)
return result
def s3_image_get_by_uuid(context, image_uuid):
"""Find local s3 image represented by the provided uuid"""
result = model_query(context, models.S3Image, read_deleted="yes").\
filter_by(uuid=image_uuid).\
first()
if not result:
raise exception.ImageNotFound(image_id=image_uuid)
return result
def s3_image_create(context, image_uuid):
"""Create local s3 image represented by provided uuid"""
try:
s3_image_ref = models.S3Image()
s3_image_ref.update({'uuid': image_uuid})
s3_image_ref.save()
    except Exception as e:
raise exception.DBError(e)
return s3_image_ref
####################
@require_admin_context
def sm_backend_conf_create(context, values):
backend_conf = models.SMBackendConf()
backend_conf.update(values)
backend_conf.save()
return backend_conf
@require_admin_context
def sm_backend_conf_update(context, sm_backend_id, values):
session = get_session()
with session.begin():
backend_conf = model_query(context, models.SMBackendConf,
session=session,
read_deleted="yes").\
filter_by(id=sm_backend_id).\
first()
if not backend_conf:
raise exception.NotFound(
_("No backend config with id %(sm_backend_id)s") % locals())
backend_conf.update(values)
backend_conf.save(session=session)
return backend_conf
@require_admin_context
def sm_backend_conf_delete(context, sm_backend_id):
# FIXME(sirp): for consistency, shouldn't this just mark as deleted with
# `purge` actually deleting the record?
session = get_session()
with session.begin():
model_query(context, models.SMBackendConf, session=session,
read_deleted="yes").\
filter_by(id=sm_backend_id).\
delete()
@require_admin_context
def sm_backend_conf_get(context, sm_backend_id):
result = model_query(context, models.SMBackendConf, read_deleted="yes").\
filter_by(id=sm_backend_id).\
first()
if not result:
raise exception.NotFound(_("No backend config with id "
"%(sm_backend_id)s") % locals())
return result
@require_admin_context
def sm_backend_conf_get_by_sr(context, sr_uuid):
return model_query(context, models.SMBackendConf, read_deleted="yes").\
filter_by(sr_uuid=sr_uuid).\
first()
@require_admin_context
def sm_backend_conf_get_all(context):
return model_query(context, models.SMBackendConf, read_deleted="yes").\
all()
####################
def _sm_flavor_get_query(context, sm_flavor_label, session=None):
return model_query(context, models.SMFlavors, session=session,
read_deleted="yes").\
filter_by(label=sm_flavor_label)
@require_admin_context
def sm_flavor_create(context, values):
sm_flavor = models.SMFlavors()
sm_flavor.update(values)
sm_flavor.save()
return sm_flavor
@require_admin_context
def sm_flavor_update(context, sm_flavor_label, values):
sm_flavor = sm_flavor_get(context, sm_flavor_label)
sm_flavor.update(values)
sm_flavor.save()
return sm_flavor
@require_admin_context
def sm_flavor_delete(context, sm_flavor_label):
session = get_session()
with session.begin():
_sm_flavor_get_query(context, sm_flavor_label).delete()
@require_admin_context
def sm_flavor_get(context, sm_flavor_label):
result = _sm_flavor_get_query(context, sm_flavor_label).first()
if not result:
raise exception.NotFound(
_("No sm_flavor called %(sm_flavor)s") % locals())
return result
@require_admin_context
def sm_flavor_get_all(context):
return model_query(context, models.SMFlavors, read_deleted="yes").all()
###############################
def _sm_volume_get_query(context, volume_id, session=None):
return model_query(context, models.SMVolume, session=session,
read_deleted="yes").\
filter_by(id=volume_id)
def sm_volume_create(context, values):
sm_volume = models.SMVolume()
sm_volume.update(values)
sm_volume.save()
return sm_volume
def sm_volume_update(context, volume_id, values):
sm_volume = sm_volume_get(context, volume_id)
sm_volume.update(values)
sm_volume.save()
return sm_volume
def sm_volume_delete(context, volume_id):
session = get_session()
with session.begin():
_sm_volume_get_query(context, volume_id, session=session).delete()
def sm_volume_get(context, volume_id):
result = _sm_volume_get_query(context, volume_id).first()
if not result:
raise exception.NotFound(
_("No sm_volume with id %(volume_id)s") % locals())
return result
def sm_volume_get_all(context):
return model_query(context, models.SMVolume, read_deleted="yes").all()
################
def _aggregate_get_query(context, model_class, id_field, id,
session=None, read_deleted='yes'):
return model_query(context, model_class, session=session,
read_deleted=read_deleted).filter(id_field == id)
@require_admin_context
def aggregate_create(context, values, metadata=None):
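    """Create an aggregate; a soft-deleted aggregate with the same name is
    revived instead, while an active duplicate raises AggregateNameExists."""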
session = get_session()
aggregate = _aggregate_get_query(context,
models.Aggregate,
models.Aggregate.name,
values['name'],
session=session,
read_deleted='yes').first()
values.setdefault('operational_state', aggregate_states.CREATED)
if not aggregate:
aggregate = models.Aggregate()
aggregate.update(values)
aggregate.save(session=session)
elif aggregate.deleted:
values['deleted'] = False
values['deleted_at'] = None
aggregate.update(values)
aggregate.save(session=session)
else:
raise exception.AggregateNameExists(aggregate_name=values['name'])
if metadata:
aggregate_metadata_add(context, aggregate.id, metadata)
return aggregate
@require_admin_context
def aggregate_get(context, aggregate_id, read_deleted='no'):
aggregate = _aggregate_get_query(context,
models.Aggregate,
models.Aggregate.id, aggregate_id,
read_deleted=read_deleted).first()
if not aggregate:
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
return aggregate
@require_admin_context
def aggregate_get_by_host(context, host, read_deleted='no'):
aggregate_host = _aggregate_get_query(context,
models.AggregateHost,
models.AggregateHost.host,
host,
read_deleted='no').first()
if not aggregate_host:
raise exception.AggregateHostNotFound(host=host)
return aggregate_get(context, aggregate_host.aggregate_id, read_deleted)
@require_admin_context
def aggregate_update(context, aggregate_id, values):
session = get_session()
aggregate = _aggregate_get_query(context,
models.Aggregate,
models.Aggregate.id, aggregate_id,
session=session,
read_deleted='no').first()
if aggregate:
metadata = values.get('metadata')
if metadata is not None:
aggregate_metadata_add(context,
aggregate_id,
values.pop('metadata'),
set_delete=True)
with session.begin():
aggregate.update(values)
aggregate.save(session=session)
values['metadata'] = metadata
return aggregate
else:
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
@require_admin_context
def aggregate_delete(context, aggregate_id):
query = _aggregate_get_query(context,
models.Aggregate,
models.Aggregate.id, aggregate_id,
read_deleted='no')
if query.first():
query.update({'deleted': True,
'deleted_at': utils.utcnow(),
'operational_state': aggregate_states.DISMISSED,
'updated_at': literal_column('updated_at')})
else:
raise exception.AggregateNotFound(aggregate_id=aggregate_id)
@require_admin_context
def aggregate_get_all(context, read_deleted='yes'):
return model_query(context,
models.Aggregate,
read_deleted=read_deleted).all()
@require_admin_context
@require_aggregate_exists
def aggregate_metadata_get(context, aggregate_id, read_deleted='no'):
rows = model_query(context,
models.AggregateMetadata,
read_deleted=read_deleted).\
filter_by(aggregate_id=aggregate_id).all()
return dict([(r['key'], r['value']) for r in rows])
@require_admin_context
@require_aggregate_exists
def aggregate_metadata_delete(context, aggregate_id, key):
query = _aggregate_get_query(context,
models.AggregateMetadata,
models.AggregateMetadata.aggregate_id,
aggregate_id, read_deleted='no').\
filter_by(key=key)
if query.first():
query.update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
else:
raise exception.AggregateMetadataNotFound(aggregate_id=aggregate_id,
metadata_key=key)
@require_admin_context
@require_aggregate_exists
def aggregate_metadata_get_item(context, aggregate_id, key,
session=None, read_deleted='yes'):
result = _aggregate_get_query(context,
models.AggregateMetadata,
models.AggregateMetadata.aggregate_id,
aggregate_id, session=session,
read_deleted=read_deleted).\
filter_by(key=key).first()
if not result:
raise exception.AggregateMetadataNotFound(metadata_key=key,
aggregate_id=aggregate_id)
return result
@require_admin_context
@require_aggregate_exists
def aggregate_metadata_add(context, aggregate_id, metadata, set_delete=False):
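    """Add or update the aggregate's metadata items; with set_delete, keys
    absent from `metadata` are soft-deleted first."""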
session = get_session()
if set_delete:
original_metadata = aggregate_metadata_get(context, aggregate_id)
for meta_key, meta_value in original_metadata.iteritems():
if meta_key not in metadata:
meta_ref = aggregate_metadata_get_item(context, aggregate_id,
meta_key, session)
meta_ref.update({'deleted': True})
meta_ref.save(session=session)
meta_ref = None
for meta_key, meta_value in metadata.iteritems():
item = {"value": meta_value}
try:
meta_ref = aggregate_metadata_get_item(context, aggregate_id,
meta_key, session)
if meta_ref.deleted:
item.update({'deleted': False, 'deleted_at': None})
except exception.AggregateMetadataNotFound:
meta_ref = models.AggregateMetadata()
item.update({"key": meta_key, "aggregate_id": aggregate_id})
meta_ref.update(item)
meta_ref.save(session=session)
return metadata
@require_admin_context
@require_aggregate_exists
def aggregate_host_get_all(context, aggregate_id, read_deleted='yes'):
rows = model_query(context,
models.AggregateHost,
read_deleted=read_deleted).\
filter_by(aggregate_id=aggregate_id).all()
return [r.host for r in rows]
@require_admin_context
@require_aggregate_exists
def aggregate_host_delete(context, aggregate_id, host):
query = _aggregate_get_query(context,
models.AggregateHost,
models.AggregateHost.aggregate_id,
aggregate_id,
read_deleted='no').filter_by(host=host)
if query.first():
query.update({'deleted': True,
'deleted_at': utils.utcnow(),
'updated_at': literal_column('updated_at')})
else:
raise exception.AggregateHostNotFound(aggregate_id=aggregate_id,
host=host)
@require_admin_context
@require_aggregate_exists
def aggregate_host_add(context, aggregate_id, host):
session = get_session()
host_ref = _aggregate_get_query(context,
models.AggregateHost,
models.AggregateHost.aggregate_id,
aggregate_id,
session=session,
read_deleted='yes').\
filter_by(host=host).first()
if not host_ref:
try:
host_ref = models.AggregateHost()
values = {"host": host, "aggregate_id": aggregate_id, }
host_ref.update(values)
host_ref.save(session=session)
except exception.DBError:
raise exception.AggregateHostConflict(host=host)
elif host_ref.deleted:
host_ref.update({'deleted': False, 'deleted_at': None})
host_ref.save(session=session)
else:
raise exception.AggregateHostExists(host=host,
aggregate_id=aggregate_id)
return host_ref
################
def instance_fault_create(context, values):
"""Create a new InstanceFault."""
fault_ref = models.InstanceFault()
fault_ref.update(values)
fault_ref.save()
return dict(fault_ref.iteritems())
def instance_fault_get_by_instance_uuids(context, instance_uuids):
"""Get all instance faults for the provided instance_uuids."""
rows = model_query(context, models.InstanceFault, read_deleted='no').\
filter(models.InstanceFault.instance_uuid.in_(
instance_uuids)).\
order_by(desc("created_at")).\
all()
output = {}
for instance_uuid in instance_uuids:
output[instance_uuid] = []
for row in rows:
data = dict(row.iteritems())
output[row['instance_uuid']].append(data)
return output
# --- accounts/migrations/0001_initial.py (from shakori999/Django_CRM) ---
# Generated by Django 3.2.9 on 2021-11-10 05:24
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Customer',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=30, null=True)),
('address', models.CharField(max_length=500, null=True)),
('wallet', models.IntegerField(null=True)),
('gifts', models.IntegerField(null=True)),
('phone', models.CharField(max_length=20, null=True)),
('email', models.CharField(max_length=20, null=True)),
('profile_pic', models.ImageField(blank=True, default='logo.png', null=True, upload_to='')),
('date_created', models.DateTimeField(auto_now_add=True, null=True)),
('user', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Order',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('phone', models.IntegerField()),
('price', models.IntegerField()),
('gifts', models.BooleanField(default=False)),
('location', models.TextField()),
('note', models.CharField(max_length=1000, null=True)),
('date_created', models.DateTimeField(auto_now_add=True, null=True)),
('platform', models.CharField(choices=[('FB', 'Facebook'), ('IN', 'Instgram')], max_length=20, null=True)),
('type', models.CharField(choices=[('Books', 'Books'), ('Clothes', 'Clothes'), ('Makeup', 'Makeup')], max_length=20, null=True)),
('status', models.CharField(choices=[('At Store', 'At Store'), ('In Stock', 'In Stock'), ('Shipping', 'Shipping'), ('Deliverd', 'Deliverd'), ('Rejected', 'Rejected'), ('Problem', 'Problem')], max_length=20, null=True)),
('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.customer')),
],
),
]
# --- rq/utils.py (from Chyroc/rq) ---
# -*- coding: utf-8 -*-
"""
Miscellaneous helper functions.
The formatter for ANSI colored console output is heavily based on Pygments
terminal colorizing code, originally by Georg Brandl.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import calendar
import datetime
import importlib
import logging
import numbers
import sys
try:
from collections.abc import Iterable
except ImportError:
from collections import Iterable
from .compat import as_text, is_python_version, string_types
from .exceptions import TimeoutFormatError
class _Colorizer(object):
def __init__(self):
esc = "\x1b["
self.codes = {}
self.codes[""] = ""
self.codes["reset"] = esc + "39;49;00m"
self.codes["bold"] = esc + "01m"
self.codes["faint"] = esc + "02m"
self.codes["standout"] = esc + "03m"
self.codes["underline"] = esc + "04m"
self.codes["blink"] = esc + "05m"
self.codes["overline"] = esc + "06m"
dark_colors = ["black", "darkred", "darkgreen", "brown", "darkblue",
"purple", "teal", "lightgray"]
light_colors = ["darkgray", "red", "green", "yellow", "blue",
"fuchsia", "turquoise", "white"]
x = 30
for d, l in zip(dark_colors, light_colors):
self.codes[d] = esc + "%im" % x
self.codes[l] = esc + "%i;01m" % x
x += 1
del d, l, x
self.codes["darkteal"] = self.codes["turquoise"]
self.codes["darkyellow"] = self.codes["brown"]
self.codes["fuscia"] = self.codes["fuchsia"]
self.codes["white"] = self.codes["bold"]
if hasattr(sys.stdout, "isatty"):
self.notty = not sys.stdout.isatty()
else:
self.notty = True
def reset_color(self):
return self.codes["reset"]
def colorize(self, color_key, text):
if self.notty:
return text
else:
return self.codes[color_key] + text + self.codes["reset"]
def ansiformat(self, attr, text):
"""
Format ``text`` with a color and/or some attributes::
color normal color
*color* bold color
_color_ underlined color
+color+ blinking color
"""
result = []
if attr[:1] == attr[-1:] == '+':
result.append(self.codes['blink'])
attr = attr[1:-1]
if attr[:1] == attr[-1:] == '*':
result.append(self.codes['bold'])
attr = attr[1:-1]
if attr[:1] == attr[-1:] == '_':
result.append(self.codes['underline'])
attr = attr[1:-1]
result.append(self.codes[attr])
result.append(text)
result.append(self.codes['reset'])
return ''.join(result)
colorizer = _Colorizer()
def make_colorizer(color):
"""Creates a function that colorizes text with the given color.
For example:
green = make_colorizer('darkgreen')
red = make_colorizer('red')
Then, you can use:
print "It's either " + green('OK') + ' or ' + red('Oops')
"""
def inner(text):
return colorizer.colorize(color, text)
return inner
class ColorizingStreamHandler(logging.StreamHandler):
levels = {
logging.WARNING: make_colorizer('darkyellow'),
logging.ERROR: make_colorizer('darkred'),
logging.CRITICAL: make_colorizer('darkred'),
}
def __init__(self, exclude=None, *args, **kwargs):
self.exclude = exclude
if is_python_version((2, 6)):
logging.StreamHandler.__init__(self, *args, **kwargs)
else:
super(ColorizingStreamHandler, self).__init__(*args, **kwargs)
@property
def is_tty(self):
isatty = getattr(self.stream, 'isatty', None)
return isatty and isatty()
def format(self, record):
message = logging.StreamHandler.format(self, record)
if self.is_tty:
colorize = self.levels.get(record.levelno, lambda x: x)
# Don't colorize any traceback
parts = message.split('\n', 1)
parts[0] = " ".join([parts[0].split(" ", 1)[0], colorize(parts[0].split(" ", 1)[1])])
message = '\n'.join(parts)
return message
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func")."""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
def utcnow():
return datetime.datetime.utcnow()
_TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ'
def utcformat(dt):
return dt.strftime(as_text(_TIMESTAMP_FORMAT))
def utcparse(string):
try:
return datetime.datetime.strptime(string, _TIMESTAMP_FORMAT)
except ValueError:
        # This catches any jobs that remain with the old datetime format
return datetime.datetime.strptime(string, '%Y-%m-%dT%H:%M:%SZ')
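# Illustrative behaviour (not part of the original module):
#   utcparse('2016-01-01T00:00:00.000000Z') and the legacy form
#   utcparse('2016-01-01T00:00:00Z') both -> datetime.datetime(2016, 1, 1, 0, 0)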
def first(iterable, default=None, key=None):
"""
Return first element of `iterable` that evaluates true, else return None
(or an optional default value).
>>> first([0, False, None, [], (), 42])
42
>>> first([0, False, None, [], ()]) is None
True
>>> first([0, False, None, [], ()], default='ohai')
'ohai'
>>> import re
>>> m = first(re.match(regex, 'abc') for regex in ['b.*', 'a(.*)'])
>>> m.group(1)
'bc'
The optional `key` argument specifies a one-argument predicate function
like that used for `filter()`. The `key` argument, if supplied, must be
in keyword form. For example:
>>> first([1, 1, 3, 4, 5], key=lambda x: x % 2 == 0)
4
"""
if key is None:
for el in iterable:
if el:
return el
else:
for el in iterable:
if key(el):
return el
return default
def is_nonstring_iterable(obj):
"""Returns whether the obj is an iterable, but not a string"""
return isinstance(obj, Iterable) and not isinstance(obj, string_types)
def ensure_list(obj):
"""
When passed an iterable of objects, does nothing, otherwise, it returns
a list with just that object in it.
"""
return obj if is_nonstring_iterable(obj) else [obj]
def current_timestamp():
"""Returns current UTC timestamp"""
return calendar.timegm(datetime.datetime.utcnow().utctimetuple())
def enum(name, *sequential, **named):
values = dict(zip(sequential, range(len(sequential))), **named)
# NOTE: Yes, we *really* want to cast using str() here.
# On Python 2 type() requires a byte string (which is str() on Python 2).
# On Python 3 it does not matter, so we'll use str(), which acts as
# a no-op.
return type(str(name), (), values)
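# Example (illustrative): sequential names count from 0, named ones override.
#   Status = enum('Status', 'QUEUED', 'STARTED', FINISHED=5)
#   Status.QUEUED == 0, Status.STARTED == 1, Status.FINISHED == 5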
def backend_class(holder, default_name, override=None):
"""Get a backend class using its default attribute name or an override"""
if override is None:
return getattr(holder, default_name)
elif isinstance(override, string_types):
return import_attribute(override)
else:
return override
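# Example (illustrative; `queue` and the dotted path are hypothetical names):
#   job_cls = backend_class(queue, 'job_class', override='myapp.jobs.MyJob')
#   # -> queue.job_class when override is None, the imported class for a string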
def parse_timeout(timeout):
"""Transfer all kinds of timeout format to an integer representing seconds"""
if not isinstance(timeout, numbers.Integral) and timeout is not None:
try:
timeout = int(timeout)
except ValueError:
digit, unit = timeout[:-1], (timeout[-1:]).lower()
unit_second = {'d': 86400, 'h': 3600, 'm': 60, 's': 1}
try:
timeout = int(digit) * unit_second[unit]
except (ValueError, KeyError):
raise TimeoutFormatError('Timeout must be an integer or a string representing an integer, or '
'a string with format: digits + unit, unit can be "d", "h", "m", "s", '
'such as "1h", "23m".')
return timeout
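# Illustrative behaviour (follows directly from the code above):
#   parse_timeout(30)   -> 30          parse_timeout('2m') -> 120
#   parse_timeout('30') -> 30          parse_timeout('1h') -> 3600
#   parse_timeout(None) -> None        parse_timeout('1d') -> 86400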
| 29.557196 | 112 | 0.585019 |
e8cb0b31e0caba4312fcf1badf4c4292eba4263b | 163 | py | Python | backend/server/MyAPI/admin.py | boorishboy/cross-validation-web-app | eee4d452bf2654a2070fb0f5499600302fe004a8 | [
"MIT"
] | null | null | null | backend/server/MyAPI/admin.py | boorishboy/cross-validation-web-app | eee4d452bf2654a2070fb0f5499600302fe004a8 | [
"MIT"
] | null | null | null | backend/server/MyAPI/admin.py | boorishboy/cross-validation-web-app | eee4d452bf2654a2070fb0f5499600302fe004a8 | [
"MIT"
] | null | null | null | from django.contrib import admin
from .models import Parameters, Results
# Register your models here.
admin.site.register(Parameters)
admin.site.register(Results)
| 27.166667 | 39 | 0.822086 |
46849fe7c1b65216084bf0fa5acd1b63a56a1466 | 539 | py | Python | examples/estimating_the_flat_value.py | kamilcieslik/test_house_price_lib | 98a9c9ada05b7cac1e9b835cc15031619cfa8e13 | [
"MIT"
] | null | null | null | examples/estimating_the_flat_value.py | kamilcieslik/test_house_price_lib | 98a9c9ada05b7cac1e9b835cc15031619cfa8e13 | [
"MIT"
] | null | null | null | examples/estimating_the_flat_value.py | kamilcieslik/test_house_price_lib | 98a9c9ada05b7cac1e9b835cc15031619cfa8e13 | [
"MIT"
] | null | null | null | import calculator.util
from calculator.prices_calculator import PricesCalculator
address = calculator.util.Address("", 49.95153, 18.609122)
calculator = PricesCalculator("AIzaSyBEmx5P3vl4ox4OU6nPgwTbU9k-_0Zm6Lo")
calculator.selected_address = address
calculator_result \
= calculator.calculate_house_price("blok", "pierwotny", "cegła", 1990, 25, False,
False, False, True, True, False, False)
print(str(calculator_result.house_price))
print(str(calculator_result.nearest_reference_city.name))
| 38.5 | 85 | 0.751391 |
9d2e34c267ea1efa4ee597f2a411bab411e2475f | 779 | py | Python | ITP2/ITP2_11_D.py | yu8ikmnbgt6y/MyAOJ | 474b21a2a0c25e1c1f3d6d66d2a2ea52aecaa39b | [
"Unlicense"
] | 1 | 2020-01-08T16:33:46.000Z | 2020-01-08T16:33:46.000Z | ITP2/ITP2_11_D.py | yu8ikmnbgt6y/MyAOJ | 474b21a2a0c25e1c1f3d6d66d2a2ea52aecaa39b | [
"Unlicense"
] | null | null | null | ITP2/ITP2_11_D.py | yu8ikmnbgt6y/MyAOJ | 474b21a2a0c25e1c1f3d6d66d2a2ea52aecaa39b | [
"Unlicense"
] | null | null | null | import sys
import io
import time
import pprint
input_txt = """
5 3
"""
sys.stdin = io.StringIO(input_txt);input()
#sys.stdin = open('in.test')
start = time.time()
# copy the below part and paste to the submission form.
# ---------function------------
from itertools import combinations
def calc_int(arr):
ret = 0
for i in arr:
ret += 1 << i
return ret
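# e.g. calc_int([0, 2]) == 0b101 == 5: each index in `arr` sets one bit, so the
# integer encodes the subset and sorting by it orders the subsets as required.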
n, k = map(int, input().split())
subsets = []
for sub in combinations(range(n), k):
subsets.append((calc_int(sub), sub))
subsets.sort()
for sub in subsets:
print('{}: {}'.format(sub[0], ' '.join(map(str, sub[1])))) if len(sub[1]) != 0 else print(f'{sub[0]}:')
# -----------------------------
print("elapsed:", time.time() - start)
sys.stdin = sys.__stdin__ | 21.638889 | 108 | 0.563543 |
671bc984defc53f38b0f62e450772f089861ba2a | 1,670 | py | Python | requirement_cmp.py | WeLikeCode/python2-final | c5cdaab9463f03e56add2424fc38f9219c185da1 | [
"CC0-1.0"
] | null | null | null | requirement_cmp.py | WeLikeCode/python2-final | c5cdaab9463f03e56add2424fc38f9219c185da1 | [
"CC0-1.0"
] | null | null | null | requirement_cmp.py | WeLikeCode/python2-final | c5cdaab9463f03e56add2424fc38f9219c185da1 | [
"CC0-1.0"
] | null | null | null | import argparse
import sys
import pkg_resources
from pkg_resources import DistributionNotFound, VersionConflict
if __name__ == "__main__":
parser = argparse.ArgumentParser()
    parser.add_argument('input', nargs='+', help="File name or path - only give 2")
args = parser.parse_args()
assert len(args.input) == 2
print(repr(args))
files = dict()
for f_in_name in args.input:
with open(f_in_name) as f_in:
files[f_in_name] = [x.replace("\n",'').replace("\r",'') for x in f_in.readlines()]
has_version = lambda ac_line : [ x for x in [ '~', '>', '<', '=' ] if x in ac_line ]
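    # e.g. has_version('requests>=2.0') -> ['>', '='];  has_version('six') -> []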
return_exit_code = 0
found_pkgs = 0
for a_line in files[args.input[0]]:
if len(has_version(a_line)) > 0 :
## needs a certain version or not
tmp_version_signs = has_version(a_line)
if len(tmp_version_signs) == 1 and tmp_version_signs[0] == '=': tmp_version_signs.append("=")
pkg_name, pkg_version = a_line.split(''.join(tmp_version_signs))
else:
pkg_name = a_line
pkg_version = -1
not_found = True
for b_line in files[args.input[1]]:
if pkg_name in b_line:
not_found = False
found_pkgs+=1
break
if not_found:
return_exit_code = 1
break
if found_pkgs != len(files[args.input[1]]):
return_exit_code = 1
print("Comparison - Exit code {}".format(return_exit_code))
sys.exit(return_exit_code)
#[ True for y in o_lines for x in ['=', '~', '>', '<'] if x in y]
| 29.821429 | 105 | 0.568263 |
dbc17b2f62d645c2c14d356813ac9c2a2b5bc5d0 | 6,688 | py | Python | pip/commands/list.py | graingert/pip | f61fb6a178eb46df811da3c729075c29f24a7080 | [
"MIT"
] | null | null | null | pip/commands/list.py | graingert/pip | f61fb6a178eb46df811da3c729075c29f24a7080 | [
"MIT"
] | null | null | null | pip/commands/list.py | graingert/pip | f61fb6a178eb46df811da3c729075c29f24a7080 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
import logging
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound
from pip.index import PackageFinder
from pip.req import InstallRequirement
from pip.utils import get_installed_distributions, dist_is_editable
from pip.cmdoptions import make_option_group, index_group
logger = logging.getLogger(__name__)
class ListCommand(Command):
"""
List installed packages, including editables.
Packages are listed in a case-insensitive sorted order.
"""
name = 'list'
usage = """
%prog [options]"""
summary = 'List installed packages.'
def __init__(self, *args, **kw):
super(ListCommand, self).__init__(*args, **kw)
cmd_opts = self.cmd_opts
cmd_opts.add_option(
'-o', '--outdated',
action='store_true',
default=False,
help='List outdated packages (excluding editables)')
cmd_opts.add_option(
'-u', '--uptodate',
action='store_true',
default=False,
help='List uptodate packages (excluding editables)')
cmd_opts.add_option(
'-e', '--editable',
action='store_true',
default=False,
help='List editable projects.')
cmd_opts.add_option(
'-l', '--local',
action='store_true',
default=False,
help=('If in a virtualenv that has global access, do not list '
'globally-installed packages.'),
)
self.cmd_opts.add_option(
'--user',
dest='user',
action='store_true',
default=False,
help='Only output packages installed in user-site.')
cmd_opts.add_option(
'--pre',
action='store_true',
default=False,
help=("Include pre-release and development versions. By default, "
"pip only finds stable versions."),
)
index_opts = make_option_group(index_group, self.parser)
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
def _build_package_finder(self, options, index_urls, session):
"""
Create a package finder appropriate to this list command.
"""
return PackageFinder(
find_links=options.find_links,
index_urls=index_urls,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
trusted_hosts=options.trusted_hosts,
process_dependency_links=options.process_dependency_links,
session=session,
)
def run(self, options, args):
if options.outdated:
self.run_outdated(options)
elif options.uptodate:
self.run_uptodate(options)
elif options.editable:
self.run_editables(options)
else:
self.run_listing(options)
def run_outdated(self, options):
for dist, version, typ in self.find_packages_latest_versions(options):
if version > dist.parsed_version:
logger.info(
'%s (Current: %s Latest: %s [%s])',
dist.project_name, dist.version, version, typ,
)
def find_packages_latest_versions(self, options):
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.info('Ignoring indexes: %s', ','.join(index_urls))
index_urls = []
dependency_links = []
for dist in get_installed_distributions(local_only=options.local,
user_only=options.user):
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
)
with self._build_session(options) as session:
finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)
installed_packages = get_installed_distributions(
local_only=options.local,
user_only=options.user,
include_editables=False,
)
for dist in installed_packages:
req = InstallRequirement.from_line(
dist.key, None, isolated=options.isolated_mode,
)
typ = 'unknown'
try:
link = finder.find_requirement(req, True)
# If link is None, means installed version is most
# up-to-date
if link is None:
continue
except DistributionNotFound:
continue
else:
remote_version = finder._link_package_versions(
link, req.name
).version
if link.is_wheel:
typ = 'wheel'
else:
typ = 'sdist'
yield dist, remote_version, typ
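                # (illustrative) each yielded triple is
                # (installed_dist, latest_remote_version, 'wheel'|'sdist'|'unknown');
                # run_outdated() and run_uptodate() compare the two versions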
def run_listing(self, options):
installed_packages = get_installed_distributions(
local_only=options.local,
user_only=options.user,
)
self.output_package_listing(installed_packages)
def run_editables(self, options):
installed_packages = get_installed_distributions(
local_only=options.local,
user_only=options.user,
editables_only=True,
)
self.output_package_listing(installed_packages)
def output_package_listing(self, installed_packages):
installed_packages = sorted(
installed_packages,
key=lambda dist: dist.project_name.lower(),
)
for dist in installed_packages:
if dist_is_editable(dist):
line = '%s (%s, %s)' % (
dist.project_name,
dist.version,
dist.location,
)
else:
line = '%s (%s)' % (dist.project_name, dist.version)
logger.info(line)
def run_uptodate(self, options):
uptodate = []
for dist, version, typ in self.find_packages_latest_versions(options):
if dist.parsed_version == version:
uptodate.append(dist)
self.output_package_listing(uptodate)
| 34.833333 | 78 | 0.570873 |
49238b352b6018cd38d9806c6a3c6a3029cd07cd | 4,308 | py | Python | dashboard_api/widget_data/views.py | data61/Openboard | aaf7ef49e05c0771094efc6be811c6ae88055252 | [
"Apache-2.0"
] | 2 | 2017-08-29T23:05:51.000Z | 2019-04-02T21:11:35.000Z | dashboard_api/widget_data/views.py | data61/Openboard | aaf7ef49e05c0771094efc6be811c6ae88055252 | [
"Apache-2.0"
] | 1 | 2019-04-02T21:11:26.000Z | 2019-04-03T15:12:57.000Z | dashboard_api/widget_data/views.py | data61/Openboard | aaf7ef49e05c0771094efc6be811c6ae88055252 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015,2016,2017 CSIRO
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from decimal import Decimal
from django.http import HttpResponse, HttpResponseNotFound, HttpResponseForbidden
from django.conf import settings
from widget_def.models import TileDefinition
from widget_def.view_utils import json_list, get_view_from_request, redirect_for_external_view
from widget_def.view_utils import OpenboardAPIView, OpenboardAPIException
from widget_data.api import *
# views.
class GetWidgetDataView(OpenboardAPIView):
lookup_view = True
lookup_widget = True
nocache = True
def api_method(self, request):
return api_get_widget_data(self.widget, self.view)
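# Hypothetical URLconf sketch (assumption: OpenboardAPIView is a Django
# class-based view with the standard as_view() entry point; the pattern and
# names are illustrative only):
#   url(r'^widgets/(?P<widget_url>[^/]+)/$', GetWidgetDataView.as_view())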
class GraphViewBase(OpenboardAPIView):
lookup_view = True
nocache = True
def check_request(self, request):
form = request.GET.get("form", "terse")
if form == "terse":
self.verbose = False
elif form == "verbose":
self.verbose = True
else:
raise OpenboardAPIException(HttpResponseNotFound("<p><b>Unknown form requested.</b></p>"))
class GetGraphDataView(GraphViewBase):
lookup_widget = True
def api_method(self, request):
return api_get_graph_data(self.widget,
view=self.view,
verbose=self.verbose)
class GetSingleGraphDataView(GraphViewBase):
    def check_request(self, request):
        # fixed: resolve via self.view/self.kwargs and test the attribute set above
        self.graph = get_graph(self.view, self.kwargs.get("widget_url"), self.kwargs.get("tile_url"))
        if not self.graph:
            raise OpenboardAPIException(HttpResponseNotFound("<p><b>This graph does not exist.</b></p>"))
        super(GetSingleGraphDataView, self).check_request(request)
    def api_method(self, request):
        # self.widget is never looked up on this view; pass the graph resolved in
        # check_request instead (assumed to be what api_get_single_graph_data expects)
        return api_get_single_graph_data(self.graph,
                                         view=self.view,
                                         verbose=self.verbose)
class GetRawDataView(OpenboardAPIView):
lookup_view = True
lookup_widget = True
nocache = True
def api_method(self, request):
return api_get_raw_data(self.widget, request, self.kwargs.get("rds_url"), view=self.view)
class MapDataViewBase(OpenboardAPIView):
lookup_view = True
nocache = True
def check_request(self, request):
if not self.window or self.window.view_override:
view_window = self.view.geo_window
if not view_window and not self.window:
raise OpenboardAPIException("No geowindow defined for this request")
elif view_window:
self.window = view_window
def api_method(self, request):
return api_geo_dataset(request, self.dataset, self.window)
class GetWidgetMapDataView(MapDataViewBase):
lookup_widget = True
    def check_request(self, request):
        tile_url = self.kwargs.get("tile_url")
        geo_dataset_url = self.kwargs.get("geo_dataset_url")
        try:
            tile = TileDefinition.objects.get(widget=self.widget, url=tile_url, tile_type=TileDefinition.MAP)
        except TileDefinition.DoesNotExist:
            raise OpenboardAPIException(HttpResponseNotFound("Map tile %s does not exist" % tile_url))
        try:
            self.dataset = tile.geo_datasets.get(url=geo_dataset_url)
        except GeoDataset.DoesNotExist:
            raise OpenboardAPIException(HttpResponseNotFound("Map layer %s does not exist" % geo_dataset_url))
        self.window = tile.geo_window
        super(GetWidgetMapDataView, self).check_request(request)
class GetMapDataView(MapDataViewBase):
    def check_request(self, request):
        geo_dataset_url = self.kwargs.get("geo_dataset_url")
        self.dataset = get_declared_geodataset(geo_dataset_url, self.view)
        if self.dataset is None:
            raise OpenboardAPIException(HttpResponseNotFound("Map layer %s does not exist" % geo_dataset_url))
        self.window = None  # no tile-level window; fall back to the view's geo_window
        super(GetMapDataView, self).check_request(request)
| 41.028571 | 123 | 0.697075 |
2282b0c3de971ebe15d3f1b8878f987600d48229 | 1,052 | py | Python | tools/clean_webconsole.py | ajesse11x/emscripten | baefce05a0c9b11b443195016c08025a386ba063 | [
"MIT"
] | 1 | 2020-03-07T09:20:03.000Z | 2020-03-07T09:20:03.000Z | tools/clean_webconsole.py | ajesse11x/emscripten | baefce05a0c9b11b443195016c08025a386ba063 | [
"MIT"
] | 1 | 2017-11-08T13:48:46.000Z | 2018-03-07T11:35:49.000Z | tools/clean_webconsole.py | ajesse11x/emscripten | baefce05a0c9b11b443195016c08025a386ba063 | [
"MIT"
] | 1 | 2020-02-17T01:24:51.000Z | 2020-02-17T01:24:51.000Z | """Removes timestamp and line info from a webgl log
"""
from __future__ import print_function
import os
import re
import sys
__rootpath__ = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
def path_from_root(*pathelems):
return os.path.join(__rootpath__, *pathelems)
def nice(x):
return '0x' + ('0' * (len(x) - 6)) + x[2:].upper()
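# e.g. nice('0x8b31') -> '0x8B31': upper-cases the hex digits (longer literals
# are additionally zero-padded), so both sides of the lookup normalise the same way.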
repdata = open(path_from_root('system', 'include', 'GL', 'gl.h')).readlines() + ['\n'] + \
open(path_from_root('system', 'include', 'GL', 'glext.h')).readlines()
reps = {}
for rep in repdata:
rep = rep.replace('\t', ' ').replace('\n', '')
parts = [part for part in rep.split(' ') if part != '']
if len(parts) == 3 and parts[0] == '#define':
reps[nice(parts[2])] = '%s (%s)' % (parts[1], parts[2])
lines = sys.stdin.read().split('\n')
for line in lines:
if line.startswith('['):
line = line[15:]
line = line.split(' @ ')[0]
  line = re.sub(r'(0x[\dabcdef]+)', lambda hexx: reps[nice(hexx.group(0))] if nice(hexx.group(0)) in reps else nice(hexx.group(0)), line)
print(line)
| 28.432432 | 136 | 0.621673 |
228e9f2e7bac71670ee29aaf7f8a044b4801f268 | 107 | py | Python | env/Lib/site-packages/plotly/offline/_plotlyjs_version.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 7 | 2022-01-16T12:28:16.000Z | 2022-03-04T15:31:45.000Z | env/Lib/site-packages/plotly/offline/_plotlyjs_version.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | 14 | 2021-10-20T23:33:47.000Z | 2021-12-21T04:50:37.000Z | env/Lib/site-packages/plotly/offline/_plotlyjs_version.py | andresgreen-byte/Laboratorio-1--Inversion-de-Capital | 8a4707301d19c3826c31026c4077930bcd6a8182 | [
"MIT"
] | null | null | null | # DO NOT EDIT
# This file is generated by the updatebundle setup.py command
__plotlyjs_version__ = "2.8.3"
| 26.75 | 61 | 0.766355 |
2fa1b74248c0282bec504ca6327f48475f010552 | 2,972 | py | Python | chat-bot/storage.py | NepStark/mee6 | 4a7ba6b900277dd4e55c1e2a119d49ae9f98814b | [
"MIT"
] | 43 | 2018-02-17T21:53:51.000Z | 2022-03-28T08:40:14.000Z | chat-bot/storage.py | NepStark/mee6 | 4a7ba6b900277dd4e55c1e2a119d49ae9f98814b | [
"MIT"
] | 16 | 2018-02-18T01:09:32.000Z | 2021-07-01T00:36:29.000Z | chat-bot/storage.py | NepStark/mee6 | 4a7ba6b900277dd4e55c1e2a119d49ae9f98814b | [
"MIT"
] | 42 | 2018-03-16T01:36:49.000Z | 2022-02-22T07:35:45.000Z | import asyncio
import aioredis
from utils import parse_redis_url
class Storage():
"""Adds a prefix to Redis"""
def __init__(self, namespace, redis):
self.namespace = namespace
self.redis = redis
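    # Usage sketch (illustrative): two Storage objects can share one Redis
    # connection without key collisions, since every key gets the prefix.
    #   levels = Storage('levels.', redis)
    #   await levels.set('guild:1', '42')   # stored under 'levels.guild:1'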
async def set(self, key, value, expire=0):
key = self.namespace + key
return await self.redis.set(
key,
value,
expire=expire
)
async def get(self, key):
key = self.namespace + key
return await self.redis.get(key)
async def smembers(self, key):
key = self.namespace + key
return await self.redis.smembers(key)
async def srem(self, key, value):
key = self.namespace + key
return await self.redis.srem(key, value)
async def sadd(self, key, member, *members):
key = self.namespace + key
return await self.redis.sadd(key, member, *members)
async def delete(self, key, *keys):
key = self.namespace + key
return await self.redis.delete(key, *keys)
async def sort(self, key, *get_patterns, by=None, offset=None, count=None,
asc=None, alpha=False, store=None):
key = self.namespace + key
if by:
by = self.namespace + by
        # forward the caller's arguments (previously the defaults were hard-coded)
        return await self.redis.sort(key, *get_patterns, by=by, offset=offset,
                                     count=count, asc=asc, alpha=alpha,
                                     store=store)
async def ttl(self, key):
key = self.namespace + key
return await self.redis.ttl(key)
async def expire(self, key, timeout):
key = self.namespace + key
return await self.redis.expire(key, timeout)
async def incr(self, key):
key = self.namespace + key
return await self.redis.incr(key)
async def incrby(self, key, amount):
key = self.namespace + key
return await self.redis.incrby(key, amount)
async def setnx(self, key, value):
key = self.namespace + key
return await self.redis.setnx(key, value)
async def lpush(self, key, value, *values):
key = self.namespace + key
return await self.redis.lpush(key, value, *values)
async def lpop(self, key, *values):
key = self.namespace + key
return await self.redis.lpop(key, *values)
async def lrange(self, key, start, stop):
key = self.namespace + key
return await self.redis.lrange(key, start, stop)
async def lrem(self, key, count, value):
key = self.namespace + key
return await self.redis.lrem(key, count, value)
async def lset(self, key, index, value):
key = self.namespace + key
return await self.redis.lset(key, index, value)
    async def ltrim(self, key, start, stop):
        # namespace the key like every other wrapper in this class
        key = self.namespace + key
        return await self.redis.ltrim(key, start, stop)
async def rpush(self, key, value, *values):
key = self.namespace + key
return await self.redis.rpush(key, value, *values)
| 31.617021 | 78 | 0.597577 |
2b4807f27bbb85edc4db2a496c42239b2d594bda | 277 | py | Python | plugins/sign-in/__init__.py | fz6m/tomon-naixue | dfbdd69836f26d160cece34e204f9fb2ed731607 | [
"MIT"
] | 3 | 2020-08-23T17:43:09.000Z | 2020-08-31T04:43:42.000Z | plugins/sign-in/__init__.py | fz6m/tomon-naixue | dfbdd69836f26d160cece34e204f9fb2ed731607 | [
"MIT"
] | null | null | null | plugins/sign-in/__init__.py | fz6m/tomon-naixue | dfbdd69836f26d160cece34e204f9fb2ed731607 | [
"MIT"
] | null | null | null |
from aiotomon import get_bot
from .main import mainProgram
bot = get_bot()
@bot.on_message
async def _(ctx):
await mainProgram(ctx.content, bot, ctx.author.id,
ctx.channel_id, ctx.author.name,
ctx.guild_id, ctx.author.id)
| 19.785714 | 55 | 0.624549 |
9ebcfd924b92c378c6769ffb6ddcf7df835b071e | 9,347 | py | Python | app.py | drakylar/pcf_FORK_CHECK_GITLAB | 4e5e6934168857c319a56f7e474ea80705b0cdd3 | [
"MIT"
] | 2 | 2021-05-08T22:40:31.000Z | 2021-05-09T19:16:28.000Z | app.py | drakylar/pcf_FORK_CHECK_GITLAB | 4e5e6934168857c319a56f7e474ea80705b0cdd3 | [
"MIT"
] | null | null | null | app.py | drakylar/pcf_FORK_CHECK_GITLAB | 4e5e6934168857c319a56f7e474ea80705b0cdd3 | [
"MIT"
] | 3 | 2021-08-12T06:40:57.000Z | 2021-12-19T11:23:03.000Z | from flask import Flask, session, render_template, request
from flask import jsonify
from flask_apscheduler import APScheduler
from flask_session import Session
from flask_compress import Compress
from datetime import timedelta
from system.config_load import config_dict
from xml.sax.saxutils import escape
import json
import time
import logging
import urllib.parse
from os import remove
import glob
import secure
from functools import wraps
from system.db import Database
from flask_wtf.csrf import CSRFProtect, CSRFError
from system.forms import *
from os import environ, path
from shutil import copyfile
from routes.ui import routes as main_routes
from routes.api import api_bp
from flask_caching import Cache
global db
csrf = CSRFProtect()
# disable csrf-protection for http sniffer
csrf.exempt("routes.ui.tools.http_sniffer_capture_page")
# disable csrf-protection for interactive search fields
csrf.exempt("routes.ui.project.filter_host_port_form")
config = config_dict()
compress = Compress()
db = Database(config)
app = Flask(__name__,
static_folder=None,
template_folder='templates')
app.config['DATABASE'] = db
app.config['SESSION_PERMANENT'] = True
app.config['SESSION_TYPE'] = 'filesystem'
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(hours=5)
app.config['SECRET_KEY'] = config['main']['secret']
# enable jsonify pretty output
app.config['JSONIFY_PRETTYPRINT_REGULAR'] = True
app.register_blueprint(api_bp)
app.register_blueprint(main_routes)
# disable CSRF for API
csrf.exempt(api_bp)
sess = Session()
sess.init_app(app)
csrf.init_app(app)
compress.init_app(app)
# secure headers
hsts = secure.StrictTransportSecurity().preload().max_age(2592000)
secure_headers = secure.Secure(hsts=hsts)
@app.after_request
def set_secure_headers(response):
secure_headers.framework.flask(response)
return response
cache = Cache(config={'CACHE_TYPE': 'simple', "CACHE_DEFAULT_TIMEOUT": 300})
cache.init_app(app)
if config['logs']['logging'] == '1':
# output to log file
logging.basicConfig(handlers=[
logging.FileHandler(config['logs']['log_file']),
logging.StreamHandler()
]
)
def backup_database():
# if timer is fast anough
if environ['backup_status'] == '0':
environ['backup_status'] = '1'
file_dates = [int(file.split('backup_')[-1].split('.sqlite3')[0]) for
file in
glob.glob(config['backup']['db_backup_folder'] +
"backup_*.sqlite3")]
file_dates.sort()
while len(file_dates) >= int(config['backup']['db_backup_amount']):
# delete old file
old_date = file_dates[0]
old_backup_path = path.join(config['backup']['db_backup_folder'],
'backup_{}.sqlite3'.format(old_date))
remove(old_backup_path)
file_dates = [int(file.split('backup_')[-1].split('.sqlite3')[0])
for file in
glob.glob(config['backup']['db_backup_folder'] +
"backup_*.sqlite3")]
curr_time = int(time.time())
new_backup_path = path.join(config['backup']['db_backup_folder'],
'backup_{}.sqlite3'.format(curr_time))
copyfile(config['database']['path'], new_backup_path)
environ['backup_status'] = '0'
if config['backup']['db_backup'] == '1' and (not ('backup_loaded' in environ)):
# fix of double loading scheduler
environ['backup_loaded'] = '1'
environ['backup_status'] = '0'
hours = int(config['backup']['db_backup_hours'])
if config['database']['type'] == 'sqlite3':
scheduler = APScheduler()
scheduler.init_app(app)
scheduler.add_job(func=backup_database, trigger='interval',
id='backup_database',
weeks=int(config['backup']['db_backup_weeks']),
days=int(config['backup']['db_backup_days']),
hours=int(config['backup']['db_backup_hours']),
minutes=int(config['backup']['db_backup_minutes']),
seconds=int(config['backup']['db_backup_seconds']))
scheduler.start()
def ok_user_and_password(username, password):
return username == config['security']['basic_login'] and \
password == config['security']['basic_password']
def authenticate():
message = {'message': "Authenticate."}
resp = jsonify(message)
resp.status_code = 401
resp.headers['WWW-Authenticate'] = 'Basic realm="Main"'
return resp
def requires_authorization(f):
@wraps(f)
def decorated(*args, **kwargs):
if config['security']['basic_auth'] == '0':
return f(*args, **kwargs)
auth = request.authorization
if not auth or not ok_user_and_password(auth.username, auth.password):
return authenticate()
return f(*args, **kwargs)
return decorated
@app.errorhandler(404)
def page_not_found(e):
# note that we set the 404 status explicitly
return render_template('404.html'), 404
@app.errorhandler(405)
def page_not_found(e):
return render_template('405.html'), 405
@app.errorhandler(500)
def page_exception(e):
return render_template('500.html'), 500
def redirect(redirect_path):
response = jsonify()
response.status_code = 302
response.headers['location'] = redirect_path
response.autocorrect_location_header = False
return response
@app.errorhandler(CSRFError)
def handle_csrf_error(e):
return render_template('csrf.html', reason=e.description), 400
def check_session(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
url = request.path
if 'id' not in session:
return redirect(
'/logout?redirect={}'.format(urllib.parse.quote_plus(url)))
current_user = db.select_user_by_id(session['id'])
if not current_user:
return redirect('/logout')
kwargs['current_user'] = current_user[0]
return fn(*args, **kwargs)
return decorated_view
def check_team_access(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
team_id = kwargs['team_id']
user_teams = db.select_user_teams(session['id'])
current_team = {}
for found_team in user_teams:
if found_team['id'] == str(team_id):
current_team = found_team
if not current_team:
return redirect('/create_team')
kwargs['current_team'] = current_team
return fn(*args, **kwargs)
return decorated_view
def send_log_data(fn):
@wraps(fn)
def decorated_view(*args, **kwargs):
current_team = {}
current_project = {}
if 'current_team' in kwargs:
current_team = kwargs['current_team']
if 'current_project' in kwargs:
current_project = kwargs['current_project']
db.config_update(kwargs['current_user'],
current_team=current_team,
current_project=current_project)
return fn(*args, **kwargs)
return decorated_view
# init some global variables
@app.context_processor
def inject_stage_and_region():
return dict(db=db,
escape=lambda x: escape(str(x)),
json_unpack=json.loads,
json_pack=json.dumps,
format_date=lambda unix_time,
str_format: datetime.datetime.fromtimestamp(
int(unix_time)).strftime(str_format),
urlencode=urllib.parse.quote,
time=time.time,
open=open,
len=len,
is_valid_uuid=is_valid_uuid,
str=str,
debug=(config['main']['debug'] == '1'),
external_js=int(config['speedup']['external_js']),
external_css=int(config['speedup']['external_css']),
external_img=int(config['speedup']['external_img']),
one_file_js=int(config['speedup']['one_file_js']),
one_file_css=int(config['speedup']['one_file_css']),
date_format_template=config['design']['date_format_template'],
list_dict_value=lambda list_dict, key_name: [x[key_name] for x in list_dict],
list=list,
search_dict_list=lambda list_obj, key_name, key_val: key_val in [x[key_name] for x in list_obj],
list_crossing=lambda list1, list2: list(set(list1) & set(list2))
)
if __name__ == '__main__':
if config['ssl']['ssl'] == '1':
import ssl
context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
context.load_cert_chain(config['ssl']['cert'], config['ssl']['priv_key'])
app.run(
ssl_context=context,
host=config['network']['host'],
port=config['network']['port'],
debug=(config['main']['debug'] == '1'),
threaded=True)
else:
app.run(
host=config['network']['host'],
port=config['network']['port'],
debug=(config['main']['debug'] == '1'),
threaded=True)
else:
application = app | 30.545752 | 112 | 0.619878 |
17216f94822fc5113c3363c9d376c7677edcee0b | 6,682 | py | Python | tests/standalone/test_admin.py | Hornobster/django-cropduster | c8ee31111c125d534160fe06eb6a9f84fd4d8417 | [
"BSD-2-Clause",
"MIT"
] | 43 | 2015-01-27T11:12:37.000Z | 2021-03-14T01:51:34.000Z | tests/standalone/test_admin.py | Hornobster/django-cropduster | c8ee31111c125d534160fe06eb6a9f84fd4d8417 | [
"BSD-2-Clause",
"MIT"
] | 37 | 2015-01-05T16:25:39.000Z | 2021-12-08T18:15:40.000Z | tests/standalone/test_admin.py | Hornobster/django-cropduster | c8ee31111c125d534160fe06eb6a9f84fd4d8417 | [
"BSD-2-Clause",
"MIT"
] | 5 | 2015-05-27T21:01:50.000Z | 2021-04-29T07:41:33.000Z | from __future__ import absolute_import
import contextlib
import re
import time
from unittest import SkipTest
import os
import django
from django.core.files.storage import default_storage
from django.test import override_settings
import PIL.Image
from selenosis import AdminSelenosisTestCase
from cropduster.models import Image, Thumb
from tests.helpers import CropdusterTestCaseMediaMixin
from .models import Article
class TestStandaloneAdmin(CropdusterTestCaseMediaMixin, AdminSelenosisTestCase):
root_urlconf = 'tests.urls'
@property
def available_apps(self):
apps = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
'django.contrib.admin',
'generic_plus',
'cropduster',
'cropduster.standalone',
'tests',
'tests.standalone',
'ckeditor',
'selenosis',
]
if self.has_grappelli:
apps.insert(0, 'grappelli')
return apps
def _pre_setup(self):
super(TestStandaloneAdmin, self)._pre_setup()
self.ckeditor_override = override_settings(
CKEDITOR_UPLOAD_PATH="%s/files/" % self.temp_media_root)
self.ckeditor_override.enable()
def _post_teardown(self):
super(TestStandaloneAdmin, self)._post_teardown()
self.ckeditor_override.disable()
def setUp(self):
if django.VERSION >= (2, 1):
raise SkipTest("django-ckeditor not compatible with this version of Django")
super(TestStandaloneAdmin, self).setUp()
self.is_s3 = os.environ.get('S3') == '1'
@contextlib.contextmanager
def switch_to_ckeditor_iframe(self):
with self.visible_selector('.cke_editor_cropduster_content_dialog iframe') as iframe:
time.sleep(1)
self.selenium.switch_to.frame(iframe)
yield iframe
self.selenium.switch_to.parent_frame()
@contextlib.contextmanager
def open_cropduster_ckeditor_dialog(self):
with self.clickable_selector('.cke_button__cropduster_icon') as el:
el.click()
with self.switch_to_ckeditor_iframe():
time.sleep(1)
with self.visible_selector('#id_image'):
yield
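    # The two context managers above nest: open_cropduster_ckeditor_dialog()
    # clicks the toolbar button, switches Selenium into the dialog iframe, and
    # yields with the upload form visible (see test_basic_usage below).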
def toggle_caption_checkbox(self):
caption_checkbox_xpath = '//input[following-sibling::label[text()="Captioned image"]]'
with self.clickable_xpath(caption_checkbox_xpath) as checkbox:
checkbox.click()
time.sleep(0.2)
def cropduster_ckeditor_ok(self):
with self.clickable_selector('.cke_dialog_ui_button_ok') as ok:
ok.click()
time.sleep(2 if self.is_s3 else 0.2)
def test_basic_usage(self):
self.load_admin(Article)
with self.open_cropduster_ckeditor_dialog():
with self.visible_selector('#id_image') as el:
el.send_keys(os.path.join(self.TEST_IMG_DIR, 'img.png'))
with self.clickable_selector('#upload-button') as el:
el.click()
self.wait_until_visible_selector('#id_size-width')
self.toggle_caption_checkbox()
self.cropduster_ckeditor_ok()
if self.is_s3:
time.sleep(5)
content_html = self.selenium.execute_script('return $("#id_content").val()')
img_src_matches = re.search(r' src="([^"]+)"', content_html)
self.assertIsNotNone(img_src_matches, "Image not found in content: %s" % content_html)
image_url = img_src_matches.group(1)
image_hash = re.search(r'img/([0-9a-f]+)\.png', image_url).group(1)
try:
image = Image.objects.get(image='ckeditor/img/original.png')
except Image.DoesNotExist:
raise AssertionError("Image not found in database")
try:
thumb = Thumb.objects.get(name=image_hash, image=image)
except Thumb.DoesNotExist:
raise AssertionError("Thumb not found in database")
self.assertEqual(
list(Thumb.objects.all()), [thumb],
"Exactly one Thumb object should have been created")
self.assertHTMLEqual(
content_html,
u"""
<figure>
<img alt="" width="672" height="798" src="%s" />
<figcaption class="caption">Caption</figcaption>
</figure>
<p> </p>
""" % image_url)
def test_dialog_change_width(self):
"""
Test that changing the width in the cropduster CKEDITOR dialog produces
an image and html with the correct dimensions
"""
self.load_admin(Article)
with self.open_cropduster_ckeditor_dialog():
with self.visible_selector('#id_image') as el:
el.send_keys(os.path.join(self.TEST_IMG_DIR, 'img.png'))
with self.clickable_selector('#upload-button') as el:
el.click()
time.sleep(1)
with self.clickable_selector('#id_size-width') as el:
el.send_keys(300)
self.toggle_caption_checkbox()
self.cropduster_ckeditor_ok()
if self.is_s3:
time.sleep(5)
content_html = self.selenium.execute_script('return $("#id_content").val()')
img_src_matches = re.search(r' src="([^"]+)"', content_html)
self.assertIsNotNone(img_src_matches, "Image not found in content: %s" % content_html)
image_url = img_src_matches.group(1)
image_hash = re.search(r'img/([0-9a-f]+)\.png', image_url).group(1)
try:
image = Image.objects.get(image='ckeditor/img/original.png')
except Image.DoesNotExist:
raise AssertionError("Image not found in database")
try:
thumb = Thumb.objects.get(name=image_hash, image=image)
except Thumb.DoesNotExist:
raise AssertionError("Thumb not found in database")
self.assertEqual(
list(Thumb.objects.all()), [thumb],
"Exactly one Thumb object should have been created")
with default_storage.open("ckeditor/img/%s.png" % image_hash, mode='rb') as f:
self.assertEqual(PIL.Image.open(f).size, (300, 356))
self.assertHTMLEqual(
content_html,
u"""
<figure>
<img alt="" width="300" height="356" src="%s" />
<figcaption class="caption">Caption</figcaption>
</figure>
<p> </p>
""" % image_url)
| 34.266667 | 94 | 0.614487 |
5aaeed6cd417ae2b5b014398a4299ba0a12405f5 | 1,669 | py | Python | Python/klampt/control/blocks/estimators.py | mass2010chromium/Klampt | 4a50ac10daf636e4f2d7acb635db2292fc2c72b6 | [
"BSD-3-Clause"
] | null | null | null | Python/klampt/control/blocks/estimators.py | mass2010chromium/Klampt | 4a50ac10daf636e4f2d7acb635db2292fc2c72b6 | [
"BSD-3-Clause"
] | null | null | null | Python/klampt/control/blocks/estimators.py | mass2010chromium/Klampt | 4a50ac10daf636e4f2d7acb635db2292fc2c72b6 | [
"BSD-3-Clause"
] | null | null | null | from klampt.math import vectorops
from .core import Block
class Differentiator(Block):
"""Computes the derivative of some input using finite differences.
"""
def __init__(self,robot=None):
self.robot = robot
self.xlast = None
Block.__init__(self,['dt','x'],['dx'])
def __getstate__(self):
return {'last':self.xlast}
def __setstate__(self,state):
self.xlast = state['last']
def advance(self,dt,x):
if len(x)==0: return None
if self.xlast==None:
dx = [0]*len(x)
else:
if self.robot==None:
                dx = vectorops.div(vectorops.sub(x,self.xlast),dt)  # plain finite difference (self.robot is None here)
else:
assert(len(self.xlast)==len(x))
dx = vectorops.div(self.robot.interpolate_deriv(self.xlast,x),dt)
self.xlast = x
return dx
def signal(self,type,*args):
if type=='reset':
self.xlast=None
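# Usage sketch (illustrative; in practice the Block framework calls advance()):
#   diff = Differentiator()
#   dx = diff.advance(0.01, x)   # ~ (x - x_prev) / 0.01 after the first call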
class Integrator(Block):
"""Computes the integral of some input using the
trapezoidal rule.
"""
def __init__(self):
self.integral = None
Block.__init__(self,['dt','x'],'Ix')
def __getstate__(self):
return self.integral
def __setstate__(self,state):
self.integral = state
def advance(self,dt,x):
if len(x)==0: return None
if self.integral is None:
self.integral = vectorops.mul(x,dt)
else:
self.integral = vectorops.madd(self.integral,x,dt)
result = vectorops.madd(self.integral,x,-0.5*dt)
return result
def signal(self,type,*inputs):
if type=='reset':
self.integral=None
| 29.280702 | 81 | 0.575195 |
6ee56bf6bef054dc56cf73b7cbd4c8d4ef013044 | 2,231 | py | Python | tensorflow/python/autograph/converters/call_trees_test.py | khodges42/tensorflow | cb011e1dd8d79757fea01be39e19cb1155681e7e | [
"Apache-2.0"
] | 3 | 2019-02-04T10:10:19.000Z | 2019-12-29T08:09:37.000Z | tensorflow/python/autograph/converters/call_trees_test.py | khodges42/tensorflow | cb011e1dd8d79757fea01be39e19cb1155681e7e | [
"Apache-2.0"
] | null | null | null | tensorflow/python/autograph/converters/call_trees_test.py | khodges42/tensorflow | cb011e1dd8d79757fea01be39e19cb1155681e7e | [
"Apache-2.0"
] | 6 | 2018-11-29T20:52:00.000Z | 2021-02-19T22:43:32.000Z | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for call_trees module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.autograph.converters import call_trees
from tensorflow.python.autograph.core import converter_testing
from tensorflow.python.platform import test
class CallTreesTest(converter_testing.TestCase):
def test_normal_function(self):
def test_fn(f):
return f() + 3
with self.converted(test_fn, call_trees, {}) as result:
self.assertEquals(
result.test_fn(None),
converter_testing.RESULT_OF_MOCK_CONVERTED_CALL + 3)
self.assertListEqual(self.dynamic_calls, [()])
def test_class_method(self):
class TestClass(object):
def test_method(self, a):
return self.other_method(a) + 1
tc = TestClass()
with self.converted(TestClass.test_method, call_trees, {}) as result:
self.assertEquals(converter_testing.RESULT_OF_MOCK_CONVERTED_CALL + 1,
result.test_method(tc, 1))
self.assertListEqual(self.dynamic_calls, [(1,)])
def test_object_method(self):
class TestClass(object):
def test_method(self, a):
return self.other_method(a) + 1
tc = TestClass()
with self.converted(tc.test_method, call_trees, {}) as result:
self.assertEquals(converter_testing.RESULT_OF_MOCK_CONVERTED_CALL + 1,
result.test_method(tc, 1))
self.assertListEqual(self.dynamic_calls, [(1,)])
if __name__ == '__main__':
test.main()
| 32.808824 | 80 | 0.694307 |
b99815894b07785318be64ec2d0493ab881f97bc | 3,857 | py | Python | src/market_watcher/market_watcher_cli.py | mcf-long-short/ibkr-options-volatility-trading | 2942456b1023618ac3f9741405029d9d3b08be13 | [
"MIT"
] | 28 | 2021-06-05T23:55:50.000Z | 2022-03-30T01:09:22.000Z | src/market_watcher/market_watcher_cli.py | sarangab/ibkr-options-volatility-trading | 2942456b1023618ac3f9741405029d9d3b08be13 | [
"MIT"
] | 1 | 2021-06-05T10:45:14.000Z | 2021-06-05T10:45:14.000Z | src/market_watcher/market_watcher_cli.py | sarangab/ibkr-options-volatility-trading | 2942456b1023618ac3f9741405029d9d3b08be13 | [
"MIT"
] | 8 | 2021-06-08T06:42:02.000Z | 2021-11-15T18:35:38.000Z | import click
from click.utils import echo
from market_watcher.version import VERSION
from market_watcher.config import context
from market_watcher.common import get_terget_stocks
from market_watcher.common import get_email_config, get_slack_config
from market_watcher.common import MarketWatcherEngine
from market_watcher.notifier import EmailNotifier, SlackNotifier
@click.group()
def cli():
"""MarketWatcher cli commands."""
echo(
"""
______ _ _ _ _ _
| ___ \ | | _ | || || | _ | |
| | _ | | ____ ____| | _ ____| |_| || || | ____| |_ ____| | _ ____ ____
| || || |/ _ |/ ___) | / ) _ ) _) ||_|| |/ _ | _)/ ___) || \ / _ )/ ___)
| || || ( ( | | | | |< ( (/ /| |_| |___| ( ( | | |_( (___| | | ( (/ /| |
|_||_||_|\_||_|_| |_| \_)____)\___)______|\_||_|\___)____)_| |_|\____)_|
    MarketWatcher tool for finding investment opportunities on Interactive Brokers
    for volatility trading on the equity market using long and short options strategies.
""" # noqa
)
echo(f"version: v{VERSION}")
echo("\n\n\n")
@cli.command()
def test_slack():
"""Sends dummy messages to test if Slack app has been configured properly."""
try:
echo("Testing Slack options-trading bot...")
config = get_slack_config()
slack_notifier = SlackNotifier(config)
echo("Sending message to #options-long-straddle channel...")
slack_notifier.send_message(config["long url"], "MarketWatcher: Test long!")
echo("Sending message to #options-short-straddle channel...")
slack_notifier.send_message(config["short url"], "MarketWatcher: Test short!")
except Exception as e:
echo("Slack testing failed!")
echo(e)
@cli.command()
@click.option(
"--notifier",
default="all",
help="Available options: email, slack, all",
)
def config(notifier):
"""Lists congiguration for slack and email notifiers."""
if notifier == "all" or notifier == "slack":
config = get_slack_config()
for env in config:
echo(f"{env}: {config[env]}")
if notifier == "all" or notifier == "email":
config = get_email_config()
for env in config:
echo(f"{env}: {config[env]}")
@cli.command()
@click.option(
"--stocks",
default=r"src/market_watcher/research/target_stocks.yaml",
    help="Yaml file containing target stocks for long and short straddle option strategy.",
)
def start(stocks):
"""Starts the MarketWatcher."""
echo("Starting MarketWatcher...")
try:
context.running = True
echo("MarketWatcher started.")
echo(f"Reading target stocks from file: {stocks}")
target_stocks = get_terget_stocks(stocks)
notifiers = []
if context.state["email"]:
echo("Instantiating email notifier...")
notifiers.append(EmailNotifier(get_email_config()))
if context.state["slack"]:
echo("Instantiating slack notifier...")
notifiers.append(SlackNotifier(get_slack_config()))
echo("Instantiating MarketWatcher and running the engine.")
market_watcher_engine = MarketWatcherEngine(
target_stocks=target_stocks, notifiers=notifiers
)
market_watcher_engine.search_for_intestment_opportunities()
except ValueError as e:
echo(e)
@cli.command()
def stop():
"""Stops the MarketWatcher."""
echo("Stopping MarketWatcher...")
try:
context.running = False
echo("MarketWatcher stopped.")
except ValueError as e:
echo(e)
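# Invocation sketch (illustrative; assumes this module is exposed as a console
# script or run via `python -m`):
#   $ market-watcher config --notifier slack
#   $ market-watcher start --stocks src/market_watcher/research/target_stocks.yaml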
| 32.686441 | 109 | 0.581799 |
f15910541218771e6961f7a3486cc1de53fa5acf | 5,737 | py | Python | py-tk-balls/final_bounce.py | bjocallaghan/misc-junk | dbcc3fea55b230f6d37a81c3ad6c4040d519f754 | [
"MIT"
] | null | null | null | py-tk-balls/final_bounce.py | bjocallaghan/misc-junk | dbcc3fea55b230f6d37a81c3ad6c4040d519f754 | [
"MIT"
] | null | null | null | py-tk-balls/final_bounce.py | bjocallaghan/misc-junk | dbcc3fea55b230f6d37a81c3ad6c4040d519f754 | [
"MIT"
] | null | null | null | import Tkinter
from Tkinter import Tk, Canvas, Entry, Frame, Label
from time import sleep, time
from random import randint, random
import math
NUM_BALLS = 1
MIN_RADIUS = 2
MAX_RADIUS = 15
CANVAS_WIDTH = 1000
CANVAS_HEIGHT = 600
TARGET_FPS = 60.0
G_CONSTANT = -500.0
class Ball:
def __init__(self, time_step, **kwargs):
# location
self.x = kwargs.get('x', randint(MAX_RADIUS, CANVAS_WIDTH-MAX_RADIUS))
self.y = kwargs.get('y', randint(MAX_RADIUS, CANVAS_HEIGHT-MAX_RADIUS))
# delta/speed
self.dx = kwargs.get('dx', randint(-500, 500))
self.dy = kwargs.get('dy', randint(-200, 200))
# non-changing ball attributes
self.radius = kwargs.get('radius', randint(MIN_RADIUS, MAX_RADIUS))
self.time_step = time_step
# energy calculations
energy = abs(self.y * G_CONSTANT) + .5 * self.dy**2
self.max_speed = math.sqrt(2 * energy)
self.temp_max_speed = random() * self.max_speed # start: rand fraction
# initial coloration
self.hue = get_random_hue_values()
self.color = rgb_to_color(self.hue)
def update(self):
self.x += (self.dx * self.time_step)
if not (self.radius <= self.x <= CANVAS_WIDTH-self.radius):
self.dx *= -1
self.dy += G_CONSTANT * self.time_step
self.y += (self.dy * self.time_step)
if (self.y-self.radius) < 0:
self.y = self.radius
self.temp_max_speed *= .8
if self.temp_max_speed < .2 * self.max_speed:
self.temp_max_speed = self.max_speed
self.dy = self.temp_max_speed
hue_fraction = math.sqrt(1 - (abs(self.dy) / self.max_speed))
self.color = rgb_to_color([hue_fraction*x for x in self.hue])
def __str__(self):
return "<Ball at %i, %i>" % (self.x, self.y)
class VisualBall(Ball):
def __init__(self, canvas, time_step, **kwargs):
Ball.__init__(self, time_step, **kwargs)
self.canvas_handle = canvas.create_oval(tuple(self.bbox()),
fill=self.color)
def bbox(self):
return [int(n) for n in [self.x - self.radius,
(CANVAS_HEIGHT - self.y + self.radius),
self.x + self.radius,
(CANVAS_HEIGHT - self.y - self.radius)]]
def update(self):
Ball.update(self)
# tk-note: setter operations on canvas objects; non-intuitive
canvas.coords(self.canvas_handle, tuple(self.bbox()))
canvas.itemconfig(self.canvas_handle, fill=self.color)
class TimeStepCanvas(Canvas):
def __init__(self, parent, time_step, drop_limit, **kwargs):
Canvas.__init__(self, parent, **kwargs)
self.time_step = time_step
self.next_start = time() + self.time_step
self.consec_dropped = 0
self.drop_limit = drop_limit
def update(self):
if self.next_start > time() or self.consec_dropped >= self.drop_limit:
Canvas.update(self)
self.consec_dropped = 0
wasDrawn = True
else:
self.consec_dropped += 1
wasDrawn = False
waitTime = self.next_start - time()
if waitTime > 0:
sleep(waitTime)
self.next_start += self.time_step
return wasDrawn
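    # (note) a frame is actually drawn only while the step's deadline has not yet
    # passed, or once drop_limit consecutive frames were skipped; either way the
    # remainder of the time step is slept off, keeping wall-clock pacing fixed.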
class FPS_Canvas(TimeStepCanvas):
def __init__(self, parent, target_fps, drop_limit=10, **kwargs):
self.frame = Frame(parent)
TimeStepCanvas.__init__(self, self.frame, 1.0/target_fps,
drop_limit, **kwargs)
self.fps_readout = Entry(self.frame)
self.target_fps = target_fps
self.framesCounted = 0
self.framesInLastSecond = []
self.lastFiveMeasurements = []
def pack(self):
self.frame.pack()
TimeStepCanvas.pack(self)
self.fps_readout.pack(side=Tkinter.RIGHT)
Label(self.frame, text="FPS:").pack(side=Tkinter.RIGHT)
def update_fps_readout(self, value):
self.fps_readout.delete(0, Tkinter.END)
self.fps_readout.insert(0, '%.1f' % value)
def update(self):
if TimeStepCanvas.update(self):
self.framesInLastSecond.append(True)
else:
self.framesInLastSecond.append(False)
self.framesCounted += 1
if self.framesCounted == self.target_fps:
fps_measurement = len([x for x in self.framesInLastSecond if x])
self.lastFiveMeasurements.append(fps_measurement)
if len(self.lastFiveMeasurements) == 6:
self.lastFiveMeasurements = self.lastFiveMeasurements[1:]
avg = (float(sum(self.lastFiveMeasurements)) /
len(self.lastFiveMeasurements))
self.update_fps_readout(avg)
self.framesCounted = 0
self.framesInLastSecond = []
def get_random_hue_values():
rand_rgb_values = (randint(1, 255), randint(1, 255), randint(1, 255))
return [int(255*float(x)/max(rand_rgb_values)) for x in rand_rgb_values]
def rgb_to_color(rgb_values):
return '#%02x%02x%02x' % tuple(rgb_values)
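# e.g. rgb_to_color([255, 128, 0]) -> '#ff8000'; get_random_hue_values() rescales
# a random RGB triple so its brightest channel is 255 (a full-brightness hue).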
if __name__ == '__main__':
# GUI stuff
window = Tk()
window.title("%d Bouncing Balls" % NUM_BALLS)
canvas = FPS_Canvas(window, TARGET_FPS, bg="white",
width=CANVAS_WIDTH, height=CANVAS_HEIGHT)
canvas.pack()
# balls
balls = []
for i in range(NUM_BALLS):
balls.append(VisualBall(canvas, 1.0/TARGET_FPS))
# animation loop
while True:
for ball in balls:
ball.update()
canvas.update()
window.mainloop() # not strictly necessary due to infinite loop
| 33.946746 | 79 | 0.605194 |
1590c00f1dbbd491961a1ecc78fd2a3a867e4929 | 2,885 | py | Python | Assignments/CV Assignment 2/Code/q3_harris_corner_detection.py | kshitijsriv/Computer-Vision-Spring-19 | 7e84726d51d30ecf8a22e890aced5a48e3b5b7e5 | [
"MIT"
] | null | null | null | Assignments/CV Assignment 2/Code/q3_harris_corner_detection.py | kshitijsriv/Computer-Vision-Spring-19 | 7e84726d51d30ecf8a22e890aced5a48e3b5b7e5 | [
"MIT"
] | null | null | null | Assignments/CV Assignment 2/Code/q3_harris_corner_detection.py | kshitijsriv/Computer-Vision-Spring-19 | 7e84726d51d30ecf8a22e890aced5a48e3b5b7e5 | [
"MIT"
] | null | null | null | import numpy as np
import cv2
import Assignment_1.pad_image as pad
from Assignment_1.gaussian_filter import apply_filter as convolve
import matplotlib.pyplot as plt
import copy
sobel_v = [
[-1, 0, 1],
[-2, 0, 2],
[-1, 0, 1]
]
sobel_h = [
[-1, -2, -1],
[0, 0, 0],
[1, 2, 1]
]
threshold = 0.08
def get_gradient(img):
sobel_vimg = convolve(img, sobel_v, 3)
sobel_himg = convolve(img, sobel_h, 3)
print("SHAPE", sobel_himg.shape, sobel_himg.shape)
return sobel_vimg, sobel_himg
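# Note: sobel_v responds to vertical edges (the x-gradient) and sobel_h to
# horizontal edges (the y-gradient); the caller below unpacks the returned pair
# as (sobel_y, sobel_x), naming by edge orientation rather than derivative axis.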
# ref: https://stackoverflow.com/questions/3862225/implementing-a-harris-corner-detector
def harris_corner_detection(ix, iy):
ix2 = ix * ix
iy2 = iy * iy
ixy = ix * iy
ix2 = cv2.GaussianBlur(ix2, (7, 7), 1.5)
iy2 = cv2.GaussianBlur(iy2, (7, 7), 1.5)
ixy = cv2.GaussianBlur(ixy, (7, 7), 1.5)
c, l = ix.shape
width = ix.shape[1]
height = ix.shape[0]
result = np.zeros((height, width))
r = copy.deepcopy(result)
mx = 0
neighborhood = 3
for i in range(height):
for j in range(width):
m = np.array([
[ix2[i, j], ixy[i, j]],
[ixy[i, j], iy2[i, j]]
], dtype=np.float64)
r[i, j] = np.linalg.det(m) - threshold * (np.power(np.trace(m), 2))
if r[i, j] > mx:
mx = r[i, j]
for i in range(height - 1):
for j in range(width - 1):
            # non-maximum suppression over the 3x3 neighbourhood; the second slice
            # originally used `neighborhood` where `j` was intended, and `<` could
            # never hold for the centre pixel itself, so `<=` is used instead
            window = np.array(r[(i - int(neighborhood / 2)):(i + int(neighborhood / 2) + 1),
                              (j - int(neighborhood / 2)):(j + int(neighborhood / 2) + 1)])
            if np.all(window <= r[i, j]) and r[i, j] > 0.01 * mx:
result[i, j] = 1
pr, pc = np.where(result == 1)
return np.array(list(zip(pr, pc)))
if __name__ == '__main__':
input_image = cv2.imread('corner_detection/chess.png')
gray = cv2.cvtColor(input_image, cv2.COLOR_BGR2GRAY)
print(gray.shape)
# ROTATE
# ref: https://www.tutorialkart.com/opencv/python/opencv-python-rotate-image/
# M = cv2.getRotationMatrix2D((int(gray.shape[1]/2), int(gray.shape[0]/2)), 90, 1.0)
# gray = cv2.warpAffine(gray, M, (gray.shape[1], gray.shape[0]))
# COMPRESS
# ref: https://stackoverflow.com/questions/4195453/how-to-resize-an-image-with-opencv2-0-and-python2-6
# gray = cv2.resize(gray, (0, 0), fx=0.5, fy=0.5)
# input_image = cv2.resize(input_image, (0, 0), fx=0.5, fy=0.5)
sobel_y, sobel_x = get_gradient(gray)
cv2.imwrite('sobel_x.png', sobel_x)
cv2.imwrite('sobel_y.png', sobel_y)
# corner_points = harris_corner_detection(sobel_x, sobel_y)
#
# for point in corner_points:
# input_image[point[0], point[1]] = [0, 0, 255]
#
# cv2.imshow('result', input_image)
# cv2.imwrite('harris_compressed_0.08.png', input_image)
cv2.waitKey(0)
cv2.destroyAllWindows()
| 30.052083 | 106 | 0.587175 |
85d2a2d89b369aeb820e9c02155cf99b21736482 | 6,040 | py | Python | extractors/ClassroomDetector.py | mjgm97/opengamedata-core | 10bbaaf8ffc43855298ecb8ad0940acc59475cc1 | ["MIT"] | 1 | 2021-01-19T06:50:41.000Z | 2021-01-19T06:50:41.000Z | extractors/ClassroomDetector.py | mjgm97/opengamedata-core | 10bbaaf8ffc43855298ecb8ad0940acc59475cc1 | ["MIT"] | 2 | 2020-11-30T04:20:24.000Z | 2021-02-03T18:44:34.000Z | extractors/ClassroomDetector.py | opengamedata/opengamedata-backend | b6b2aa6e7c80d1bf50f71ffba917672f80f7f948 | ["MIT"] | 1 | 2021-12-17T15:53:16.000Z | 2021-12-17T15:53:16.000Z |
import csv
import datetime
import time
from operator import itemgetter
from collections import defaultdict
class ClassroomDetector:
def __init__(self, data_table, dt_format = "%Y-%m-%d %H:%M:%S",
class_size_lower = 10, class_size_upper = 45, class_window = 600,
duration_lower = 180, duration_upper = 4200, hexads = 4, afk_duration = 60):
self.data_table = data_table
self.dt_format = dt_format
self.class_size_lower = class_size_lower
self.class_size_upper = class_size_upper
self.class_window = class_window
self.duration_lower = duration_lower
self.duration_upper = duration_upper
self.hexads = hexads
self.afk_duration = afk_duration
def process_main(self, _data, _dt_fmt=None):
'''
Shell function for handling various classroom identification operations.
'''
header = {v: k for k, v in enumerate(_data[0])}
prepro_data = self.preprocess(_data=_data, _header=header)
ip_session_dict = defaultdict(list)
last_timestamp = prepro_data[1][header['server_time']]
ids_to_exclude = []
for n, i in enumerate(prepro_data):
# if a session id changes, we need to record the new start time and old end time
if i[header['session_id']] != prepro_data[n-1][header['session_id']]:
old_key = prepro_data[n-1][header['remote_addr']] + "_" + prepro_data[n-1][header['session_id']]
ip_session_dict[old_key].append(prepro_data[n-1][header['server_time']])
new_key = i[header['remote_addr']] + "_" + i[header['session_id']]
ip_session_dict[new_key].append(i[header['server_time']])
# while we're within a session, if there's a long gap of inactivity, we need to exclude that session
elif i[header['server_time']] - last_timestamp > self.afk_duration:
ids_to_exclude.append(i[header['remote_addr']] + "_" + i[header['session_id']])
last_timestamp = i[header['server_time']]
        # drop the artificial key produced by the header row, then the AFK sessions
        ip_session_dict.pop('remote_addr_session_id')
for i in ids_to_exclude:
try:
ip_session_dict.pop(i)
            except KeyError:
                pass  # already removed or never recorded
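        # Pairwise scan: for each session, record every other session whose start
        # (overlaps_bystart) or end (overlaps_byend) falls inside this session's
        # [start, end] window; large groups of concurrent sessions suggest a class.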
overlaps_bystart = defaultdict(list)
overlaps_byend = defaultdict(list)
for key in ip_session_dict:
session_start = int(ip_session_dict[key][0])
session_end = int(ip_session_dict[key][-1])
for key2 in ip_session_dict:
# overlap = False
if key2 != key:
if session_start <= int(ip_session_dict[key2][0]) <= session_end:
overlaps_bystart[key].append([key2,
session_start,
session_end,
ip_session_dict[key2][0],
ip_session_dict[key2][-1]])
# overlap = True
if session_start <= int(ip_session_dict[key2][-1]) <= session_end:
overlaps_byend[key].append(key2)
# overlap = True
return overlaps_bystart
def preprocess(self, _data, _header, _dt_fmt=None):
'''
        Shortens remote_addr to the first `hexads` octets of the session IP.
        Converts server_time to a UNIX timestamp.
'''
_dt_fmt = _dt_fmt if _dt_fmt is not None else self.dt_format
header_list = _data[0]
for i in _data[1:]:
if self.hexads != 4:
ip_shortened = i[_header['remote_addr']].split(".")[:(4 - self.hexads) * -1]
i[_header['remote_addr']] = ".".join(ip_shortened)
i[_header['server_time']] = int(
time.mktime(datetime.datetime.strptime(i[_header['server_time']], _dt_fmt).timetuple()))
# sorting the file in preparation for iterating through it
_data = sorted(_data[1:], key=itemgetter(_header['remote_addr'], _header['session_id'], _header['server_time']))
        _data = [x for x in _data if int(x[_header['event_custom']]) <= 16]  # selecting only student actions
_data = [header_list] + _data # adding the header row back in
return _data
if __name__ == '__main__':
'''
    various config settings:
    file_input is the input raw .csv file
    delim_type is the delimiter of the raw file; usually this is \t
    dt_format is the datetime format of the "server_time" column
    class_size is the bounds on the number of unique session IDs that are considered to be a class
    class_window is the max duration between any two unique session starts to be considered a class
    duration is the bounds on the duration of a given game session
    hexads is the number of hexads of the IP to keep when grouping sessions
    afk_duration is the maximum time between two user-generated actions before a session counts as AFK
'''
#file_input = "D:\\Field Day Lab\\Raw_LAKELAND_2019Dec_iter1.csv"
file_input = "D:\\Luke Swanson\\D_Documents\\!work\\FieldDay\\LAKELAND_20200301_to_20200331_a9720c1_raw.csv"
#file_input = "D:\\Field Day Lab\\LAKELAND_20200401_to_20200430\\LAKELAND_20200401_to_20200430_a9720c1_raw.csv"
file_output = file_input[:-4] + "_classroom_subset_new_code.csv"
delim_type = "\t"
with open(file_input) as f:
in_data = list(csv.reader(f,delimiter=delim_type))
class_detect = ClassroomDetector(in_data)
overlaps_bystart = class_detect.process_main(in_data)
writer = csv.writer(open("Lakeland IP Blocks By Timestamp.csv","w",newline=""))
for key in overlaps_bystart:
if len(overlaps_bystart[key]) > 10:
writer.writerow(key)
for val in overlaps_bystart[key]:
writer.writerow(val)
            writer.writerow([])
| 47.1875 | 120 | 0.616225 |
df708255378b8948231fac11a51f6f28c5edd6c0 | 6,526 | py | Python | billy/web/public/templatetags/customtags.py | paultag/billy | 70f4c55d760552829a86b30baa6d6eac3f6dc47f | ["BSD-3-Clause"] | null | null | null | billy/web/public/templatetags/customtags.py | paultag/billy | 70f4c55d760552829a86b30baa6d6eac3f6dc47f | ["BSD-3-Clause"] | null | null | null | billy/web/public/templatetags/customtags.py | paultag/billy | 70f4c55d760552829a86b30baa6d6eac3f6dc47f | ["BSD-3-Clause"] | null | null | null |
from decimal import Decimal
import re
import json
import urllib
from django import template
from django.utils.html import strip_tags
import pytz
from billy.core import settings
from billy.web.public.forms import get_region_select_form
from billy.web.public.views.utils import templatename
from billy.web.public.views.favorites import is_favorite
register = template.Library()
@register.inclusion_tag(templatename('region_select_form'))
def region_select_form(abbr=None):
return {'form': get_region_select_form({'abbr': abbr})}
@register.inclusion_tag(templatename('sources'))
def sources(obj):
return {'sources': obj['sources']}
@register.filter
def sources_urlize(url):
'''Django's urlize built-in template tag does a lot of other things,
like linking domain-only links, but it won't hyperlink ftp links,
so this is a more liberal replacement for source links.
'''
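    # e.g. sources_urlize('ftp://example.com/a')
    #   -> '<a href="ftp://example.com/a" rel="nofollow">ftp://example.com/a</a>'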
return '<a href="%s" rel="nofollow">%s</a>' % (url, url)
@register.filter
def plusfield(obj, key):
return obj.get('+' + key)
@register.filter
def party_noun(party, count=1):
try:
details = settings.PARTY_DETAILS[party]
if count == 1:
# singular
return details['noun']
else:
# try to get special plural, or add s to singular
try:
return details['plural_noun']
except KeyError:
return details['noun'] + 's'
except KeyError:
# if there's a KeyError just return the adjective with or without
# pluralization
if count == 1:
return party
else:
return party + 's'
@register.filter
def trunc(string):
if len(string) > 75:
return "%s [...]" % string[:75]
else:
return string
@register.filter
def underscore_field(obj, key):
return obj['_' + key]
@register.filter
def decimal_format(value, TWOPLACES=Decimal(10) ** -2):
    'Format a decimal.Decimal-like value to 2 decimal places.'
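    # e.g. decimal_format(1) -> Decimal('1.00'); decimal_format('3.14159') -> Decimal('3.14')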
if not isinstance(value, Decimal):
value = Decimal(str(value))
return value.quantize(TWOPLACES)
@register.tag
def striptags(parser, token):
nodelist = parser.parse(('end_striptags',))
parser.delete_first_token()
return StrippedTagsNode(nodelist)
@register.filter
def is_dev(luser):
return len(luser.groups.filter(name='developer')) == 1
class StrippedTagsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
output = strip_tags(self.nodelist.render(context))
return output
@register.tag
def squish_whitespace(parser, token):
nodelist = parser.parse(('end_squish_whitespace',))
parser.delete_first_token()
return SquishedWhitespaceNode(nodelist)
class SquishedWhitespaceNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
        output = re.sub(u'\s+', ' ', self.nodelist.render(context))
        output = re.sub(u'\n\s+', '', output)
return output
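# Template usage of the squish_whitespace tag (sketch):
#   {% squish_whitespace %} <p>  text with   runs of spaces </p> {% end_squish_whitespace %}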
def favorite(context, obj_id, obj_type, abbr=None, _is_favorite=None,
params=None):
'''Check whether the object with the given type and id is currently
    favorited by the user. The test of whether the user is authenticated
    currently happens in the template.
    abbr can be specified in the invocation, since it won't be in the
request context on the user's favorites page.
Same for _is_favorite, which needs to be True.
Same for params, which needs to be passed as a url-encoded string from
the user homepage.
'''
request = context['request']
extra_spec = {}
# We need to allow the abbr to be passed in from the user favorites page,
# to come from the request context in the case of a search results page,
# and to default to 'all' for the all bills search.
abbr = abbr or context.get('abbr', 'all')
# use request.GET for params if not present
if not params:
_params = {}
params = [
(k, [unicode(v).encode('utf-8') for v in vv])
for (k, vv) in dict(request.GET).items()]
for k, v in params:
if len(v) == 1:
_params[k] = v.pop()
elif len(v) > 1:
_params[k] = v
params = urllib.urlencode(_params, doseq=True)
# If the requested page is a search results page with a query string,
# create an extra spec to help determine whether the search is
# currently favorited.
if request.GET and obj_type == "search":
search_text = request.GET.get('search_text')
if search_text:
extra_spec['search_text'] = search_text
extra_spec['search_params'] = params
if _is_favorite is None:
_is_favorite = is_favorite(obj_id, obj_type, request.user,
extra_spec=extra_spec)
else:
_is_favorite = (_is_favorite == 'is_favorite')
return dict(extra_spec,
obj_type=obj_type, obj_id=obj_id,
is_favorite=_is_favorite, request=request,
abbr=abbr or context['abbr'],
params=params)
register.inclusion_tag(
templatename('_favorite'), takes_context=True)(favorite)
register.inclusion_tag(
templatename('_favorite_short'),
takes_context=True, name='favorite_short')(favorite)
@register.inclusion_tag(templatename('_notification_preference'))
def notification_preference(obj_type, profile):
'''Display two radio buttons for turning notifications on or off.
    The default is to have alerts_on = True.
'''
default_alert_value = True
if not profile:
alerts_on = True
else:
notifications = profile.get('notifications', {})
alerts_on = notifications.get(obj_type, default_alert_value)
return dict(alerts_on=alerts_on, obj_type=obj_type)
@register.filter
def json_encode(data):
return json.dumps(data)
@register.filter
def event_time(event):
tz = pytz.timezone(event['timezone'])
localized = tz.localize(event['when'])
display_time = (localized + localized.utcoffset())
hours, minutes = display_time.hour, display_time.minute
# If the event's time is midnight, there was probably no
# exact time listed on the site, so don't display likely bogus time.
if (hours, minutes) == (0, 0):
return display_time.strftime('%A, %B %d, %Y')
return display_time.strftime('%A, %B %d, %Y, %I:%M %p %Z')
| 29.133929 | 77 | 0.658903 |
600af9a1bde01f78d2b4e6b1151402fa6b37336b | 23,676 | py | Python | venv/lib/python3.8/site-packages/azure/mgmt/synapse/operations/_ip_firewall_rules_operations.py | amcclead7336/Enterprise_Data_Science_Final | ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28 | ["Unlicense", "MIT"] | null | null | null | venv/lib/python3.8/site-packages/azure/mgmt/synapse/operations/_ip_firewall_rules_operations.py | amcclead7336/Enterprise_Data_Science_Final | ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28 | ["Unlicense", "MIT"] | 1 | 2019-08-15T14:27:17.000Z | 2019-08-15T14:28:07.000Z | venv/lib/python3.8/site-packages/azure/mgmt/synapse/operations/_ip_firewall_rules_operations.py | amcclead7336/Enterprise_Data_Science_Final | ccdc0aa08d4726bf82d71c11a1cc0c63eb301a28 | ["Unlicense", "MIT"] | 2 | 2020-08-05T03:06:48.000Z | 2020-08-05T16:08:21.000Z |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
from .. import models
class IpFirewallRulesOperations(object):
"""IpFirewallRulesOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version to use for this operation. Constant value: "2019-06-01-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2019-06-01-preview"
self.config = config
def list_by_workspace(
self, resource_group_name, workspace_name, custom_headers=None, raw=False, **operation_config):
"""Returns a list of firewall rules.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace
:type workspace_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of IpFirewallRuleInfo
:rtype:
~azure.mgmt.synapse.models.IpFirewallRuleInfoPaged[~azure.mgmt.synapse.models.IpFirewallRuleInfo]
:raises:
:class:`ErrorContractException<azure.mgmt.synapse.models.ErrorContractException>`
"""
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_by_workspace.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def internal_paging(next_link=None):
request = prepare_request(next_link)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorContractException(self._deserialize, response)
return response
# Deserialize response
header_dict = None
if raw:
header_dict = {}
deserialized = models.IpFirewallRuleInfoPaged(internal_paging, self._deserialize.dependencies, header_dict)
return deserialized
list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/firewallRules'}
def _create_or_update_initial(
self, resource_group_name, workspace_name, rule_name, end_ip_address=None, start_ip_address=None, custom_headers=None, raw=False, **operation_config):
ip_firewall_rule_info = models.IpFirewallRuleInfo(end_ip_address=end_ip_address, start_ip_address=start_ip_address)
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(ip_firewall_rule_info, 'IpFirewallRuleInfo')
# Construct and send request
request = self._client.put(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 201]:
raise models.ErrorContractException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IpFirewallRuleInfo', response)
if response.status_code == 201:
deserialized = self._deserialize('IpFirewallRuleInfo', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, workspace_name, rule_name, end_ip_address=None, start_ip_address=None, custom_headers=None, raw=False, polling=True, **operation_config):
"""Creates or updates a firewall rule.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace
:type workspace_name: str
:param rule_name: The IP firewall rule name
:type rule_name: str
:param end_ip_address: The end IP address of the firewall rule. Must
be IPv4 format. Must be greater than or equal to startIpAddress
:type end_ip_address: str
:param start_ip_address: The start IP address of the firewall rule.
Must be IPv4 format
:type start_ip_address: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns IpFirewallRuleInfo or
ClientRawResponse<IpFirewallRuleInfo> if raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.synapse.models.IpFirewallRuleInfo]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.synapse.models.IpFirewallRuleInfo]]
:raises:
:class:`ErrorContractException<azure.mgmt.synapse.models.ErrorContractException>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
rule_name=rule_name,
end_ip_address=end_ip_address,
start_ip_address=start_ip_address,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('IpFirewallRuleInfo', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/firewallRules/{ruleName}'}
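    # Illustrative call pattern (client and argument values are placeholders,
    # not defined in this module):
    #   poller = client.ip_firewall_rules.create_or_update(
    #       "my-rg", "my-workspace", "allow-range",
    #       end_ip_address="10.0.0.255", start_ip_address="10.0.0.0")
    #   rule = poller.result()  # blocks until the long-running operation finishes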
def _delete_initial(
self, resource_group_name, workspace_name, rule_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
raise models.ErrorContractException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('object', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def delete(
self, resource_group_name, workspace_name, rule_name, custom_headers=None, raw=False, polling=True, **operation_config):
"""Deletes a firewall rule.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace
:type workspace_name: str
:param rule_name: The IP firewall rule name
:type rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns object or
ClientRawResponse<object> if raw==True
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[object] or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[object]]
:raises:
:class:`ErrorContractException<azure.mgmt.synapse.models.ErrorContractException>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
rule_name=rule_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('object', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/firewallRules/{ruleName}'}
def get(
self, resource_group_name, workspace_name, rule_name, custom_headers=None, raw=False, **operation_config):
"""Get a firewall rule.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace
:type workspace_name: str
:param rule_name: The IP firewall rule name
:type rule_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: IpFirewallRuleInfo or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.synapse.models.IpFirewallRuleInfo or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorContractException<azure.mgmt.synapse.models.ErrorContractException>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
raise models.ErrorContractException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IpFirewallRuleInfo', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/firewallRules/{ruleName}'}
def _replace_all_initial(
self, resource_group_name, workspace_name, ip_firewall_rules=None, custom_headers=None, raw=False, **operation_config):
request = models.ReplaceAllIpFirewallRulesRequest(ip_firewall_rules=ip_firewall_rules)
# Construct URL
url = self.replace_all.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str', min_length=1)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(request, 'ReplaceAllIpFirewallRulesRequest')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200, 202]:
raise models.ErrorContractException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ReplaceAllFirewallRulesOperationResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def replace_all(
self, resource_group_name, workspace_name, ip_firewall_rules=None, custom_headers=None, raw=False, polling=True, **operation_config):
"""Replaces firewall rules.
:param resource_group_name: The name of the resource group. The name
is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace
:type workspace_name: str
:param ip_firewall_rules: IP firewall rule properties
:type ip_firewall_rules: dict[str,
~azure.mgmt.synapse.models.IpFirewallRuleProperties]
:param dict custom_headers: headers that will be added to the request
:param bool raw: The poller return type is ClientRawResponse, the
direct response alongside the deserialized response
:param polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:return: An instance of LROPoller that returns
ReplaceAllFirewallRulesOperationResponse or
ClientRawResponse<ReplaceAllFirewallRulesOperationResponse> if
raw==True
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.synapse.models.ReplaceAllFirewallRulesOperationResponse]
or
~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.synapse.models.ReplaceAllFirewallRulesOperationResponse]]
:raises:
:class:`ErrorContractException<azure.mgmt.synapse.models.ErrorContractException>`
"""
raw_result = self._replace_all_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
ip_firewall_rules=ip_firewall_rules,
custom_headers=custom_headers,
raw=True,
**operation_config
)
def get_long_running_output(response):
deserialized = self._deserialize('ReplaceAllFirewallRulesOperationResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
lro_delay = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **operation_config)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
replace_all.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/replaceAllIpFirewallRules'}
| 48.219959 | 189 | 0.681069 |
ee21728ebee724e1c90f2c54846b8f889e090988 | 752 | py | Python | org/bobink/my_eval/tokan.py | bobink/my_python_eval | 9cc67f24600cc642b14f18b99eb1350e77cf87c2 | ["Apache-2.0"] | null | null | null | org/bobink/my_eval/tokan.py | bobink/my_python_eval | 9cc67f24600cc642b14f18b99eb1350e77cf87c2 | ["Apache-2.0"] | null | null | null | org/bobink/my_eval/tokan.py | bobink/my_python_eval | 9cc67f24600cc642b14f18b99eb1350e77cf87c2 | ["Apache-2.0"] | null | null | null |
from enum import Enum
class TokanType(Enum):
PLUS = 1
MINUS = 2
TIMES = 3
DIV = 4
LEFT_PARENTHESIS = 5
RIGHT_PARENTHESIS = 6
VALUE = 7
class Tokan:
def __init__(self, __type: TokanType, __value: int):
self.__type = __type
self.__value = __value
def get_type(self) -> TokanType:
return self.__type
def get_value(self) -> int:
return self.__value
def __eq__(self, other):
if not isinstance(other, Tokan):
# don't attempt to compare against unrelated types
return NotImplemented
return self.__type == other.__type and self.__value == other.__value
def __repr__(self):
return str(self.__type) + ' ' + str(self.__value)
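# Usage sketch (illustrative): tokens compare by both type and value.
#   Tokan(TokanType.VALUE, 3) == Tokan(TokanType.VALUE, 3)  # True
#   Tokan(TokanType.PLUS, 0) == Tokan(TokanType.MINUS, 0)   # False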
| 22.117647 | 76 | 0.615691 |
988a5fefd6be7bf6bb01c2bba8c82bd65612b547 | 6,367 | py | Python | catkin_ws/src/arg_nctu/kaku/duckietown_kaku/src/ros_smach_server.py | dimension4TW/Duckie-Servant-Robot | 0dbeddf353e5407bc395dc36817775f8ce991de1 | ["CC-BY-2.0"] | 12 | 2016-04-14T12:21:46.000Z | 2021-06-18T07:51:40.000Z | catkin_ws/src/arg_nctu/kaku/duckietown_kaku/src/ros_smach_server.py | dimension4TW/Duckie-Servant-Robot | 0dbeddf353e5407bc395dc36817775f8ce991de1 | ["CC-BY-2.0"] | 4 | 2017-03-12T15:59:10.000Z | 2017-05-21T16:14:23.000Z | catkin_ws/src/arg_nctu/kaku/duckietown_kaku/src/ros_smach_server.py | dimension4TW/Duckie-Servant-Robot | 0dbeddf353e5407bc395dc36817775f8ce991de1 | ["CC-BY-2.0"] | 113 | 2016-05-03T06:11:42.000Z | 2019-06-01T14:37:38.000Z |
#!/usr/bin/env python
import rospy
import smach
import smach_ros
from geometry_msgs.msg import Twist, Point, PoseArray, Pose
from gazebo_msgs.srv import GetModelState
from duckietown_kaku.msg import path_followingAction, path_followingGoal, gripper_modeAction, gripper_modeGoal, gripper_grabAction, gripper_grabGoal
from std_msgs.msg import Bool, Float32
class decide_next_object(smach.State):
def __init__(self):
smach.State.__init__(self, outcomes=['finished'])
self.index = 0
print "================== now object ===================",self.index
def execute(self, userdata):
rospy.loginfo('DECIDE NEXT OBJECT')
self.index += 1
print "===================== index is %d =================" %self.index
return 'finished'
def way_point1():
waypoints = PoseArray()
# waypoint_name = ["coke_can", "coke_can_0", "coke_can_1", "coke_can_2"]
waypoint_name = ["my_cylinder", "my_cylinder_0", "my_cylinder_1", "my_cylinder_2", "Stop Sign"]
for i in range(5):
new_points = Pose()
rospy.wait_for_service('/gazebo/get_model_state')
try:
get_model_state = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
model_state = get_model_state(waypoint_name[i],"")
new_points.position.x = model_state.pose.position.x
new_points.position.y = model_state.pose.position.y
waypoints.poses.append(new_points)
# print new_points
# waypoints.poses.position.x.append(model_state.pose.position.x)
# waypoints.poses.position.y.append(model_state.pose.position.y)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
# print waypoints.poses
return waypoints
def way_point2():
waypoints = PoseArray()
# waypoint_name = ["coke_can", "coke_can_0", "coke_can_1", "coke_can_2"]
waypoint_name = ["my_cylinder_3", "my_cylinder_4", "my_cylinder_5"]
for i in range(3):
new_points = Pose()
rospy.wait_for_service('/gazebo/get_model_state')
try:
get_model_state = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
model_state = get_model_state(waypoint_name[i],"")
new_points.position.x = model_state.pose.position.x
new_points.position.y = model_state.pose.position.y
waypoints.poses.append(new_points)
# print new_points
# waypoints.poses.position.x.append(model_state.pose.position.x)
# waypoints.poses.position.y.append(model_state.pose.position.y)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
# print waypoints.poses
return waypoints
def stop_point_get():
stop_point = PoseArray()
new_points = Pose()
rospy.wait_for_service('/gazebo/get_model_state')
try:
get_model_state = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
model_state = get_model_state("Stop Sign","")
new_points.position.x = model_state.pose.position.x
new_points.position.y = model_state.pose.position.y
stop_point.poses.append(new_points)
except rospy.ServiceException, e:
print "Service call failed: %s"%e
return stop_point
def object_point_get():
object_point = Point()
rospy.wait_for_service('/gazebo/get_model_state')
try:
get_model_state = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
model_state = get_model_state("my_cylinder_2","")
object_point.x = model_state.pose.position.x
object_point.y = model_state.pose.position.y
except rospy.ServiceException, e:
print "Service call failed: %s"%e
return object_point
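# NOTE: way_point1/way_point2/stop_point_get/object_point_get all repeat the same
# /gazebo/get_model_state lookup; a shared helper could replace them (sketch,
# illustrative only -- name and behavior assumed, not part of the original file):
# def model_pose_array(names):
#     out = PoseArray()
#     for name in names:
#         rospy.wait_for_service('/gazebo/get_model_state')
#         try:
#             get_model_state = rospy.ServiceProxy('/gazebo/get_model_state', GetModelState)
#             state = get_model_state(name, "")
#             p = Pose()
#             p.position.x = state.pose.position.x
#             p.position.y = state.pose.position.y
#             out.poses.append(p)
#         except rospy.ServiceException, e:
#             print "Service call failed: %s" % e
#     return out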
def main():
rospy.init_node("ros_smach_server")
waypoints1 = way_point1()
waypoints2 = way_point2()
stop_points = stop_point_get()
object_point = waypoints1.poses[0:3]
object_index = decide_next_object()
# object_point = object_point_get()
# print waypoints.poses
sm = smach.StateMachine(outcomes=['succeeded','aborted','preempted'])
with sm:
# smach.StateMachine.add('PATH_FOLLOWING', smach_ros.SimpleActionState("path_following_action", path_followingAction, goal=path_followingGoal(waypoints=waypoints)),{'succeeded':'GRIPPER_MODE'})
sm_sub_grab = smach.StateMachine(outcomes=['succeeded','aborted','preempted'])
with sm_sub_grab:
smach.StateMachine.add('APPROACH_OBJECT',smach_ros.SimpleActionState("gripper_mode_action", gripper_modeAction, goal=gripper_modeGoal(object_point=object_point[object_index.index].position)),{'succeeded':'GRASP_OBJECT'})
smach.StateMachine.add('GRASP_OBJECT',smach_ros.SimpleActionState("gripper_grab_action", gripper_grabAction, goal=gripper_grabGoal(grasping_state=True)))#,{'succeeded':'succeeded'})
smach.StateMachine.add('GRIPPER_MODE', sm_sub_grab,transitions={'succeeded':'GO_TO_DESTINATION'})
sm_sub_go_destination = smach.StateMachine(outcomes=['succeeded','aborted','preempted'])
with sm_sub_go_destination:
smach.StateMachine.add('GO_TO_STOP_SIGN', smach_ros.SimpleActionState("path_following_action", path_followingAction, goal=path_followingGoal(waypoints=waypoints1)),{'succeeded':'DROP_OBJECT'})
smach.StateMachine.add('DROP_OBJECT',smach_ros.SimpleActionState("gripper_grab_action", gripper_grabAction, goal=gripper_grabGoal(grasping_state=False)))#,{'succeeded':'succeeded'})
smach.StateMachine.add('GO_TO_DESTINATION', sm_sub_go_destination,transitions={'succeeded':'RESTART_AND_REDO'})
sm_sub_init = smach.StateMachine(outcomes=['succeeded','aborted','preempted'])
with sm_sub_init:
smach.StateMachine.add('DECIDE_NEXT_OBJECT', decide_next_object(),{'finished':'GO_TO_CHECKPOINT'})
smach.StateMachine.add('GO_TO_CHECKPOINT',smach_ros.SimpleActionState("path_following_action", path_followingAction, goal=path_followingGoal(waypoints=waypoints2)))#,{'succeeded':'succeeded'})
smach.StateMachine.add('RESTART_AND_REDO', sm_sub_init,transitions={'succeeded':'GRIPPER_MODE'})
sis = smach_ros.IntrospectionServer('smach_server', sm, '/SM_ROOT')
sis.start()
sm.execute()
rospy.on_shutdown(onShutdown)
rospy.spin()
sis.stop()
def onShutdown():
rospy.loginfo("[ros_smach_server] Shutdown.")
if __name__=="__main__":
main()
| 44.215278 | 223 | 0.710853 |
6b6759df6215a2aa900edd22fb3483fd8678d4d6 | 3,224 | py | Python | delfin/tests/unit/drivers/hitachi/hus/test_hus.py | guankecheng/netapp | 0852bd9f14d47d8f92beceb91bbd8158994f8ab1 | ["Apache-2.0"] | null | null | null | delfin/tests/unit/drivers/hitachi/hus/test_hus.py | guankecheng/netapp | 0852bd9f14d47d8f92beceb91bbd8158994f8ab1 | ["Apache-2.0"] | null | null | null | delfin/tests/unit/drivers/hitachi/hus/test_hus.py | guankecheng/netapp | 0852bd9f14d47d8f92beceb91bbd8158994f8ab1 | ["Apache-2.0"] | 2 | 2021-05-23T16:46:31.000Z | 2021-05-26T23:51:09.000Z |
# Copyright 2020 The SODA Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import TestCase, mock
from delfin.tests.unit.drivers.hitachi.hus import test_constans
from delfin import context
from delfin.drivers.hitachi.hus_110.hitachi_hus import HitachiHUSDriver
from delfin.drivers.utils.cli_client import NaviClient
class Request:
def __init__(self):
self.environ = {'delfin.context': context.RequestContext()}
class TestHUSStorageDriver(TestCase):
NaviClient.exec = mock.Mock(
side_effect=[test_constans.STORAGE_NAME_INFO])
hus_client = HitachiHUSDriver(**test_constans.ACCESS_INFO)
def test_get_storage(self):
NaviClient.exec = mock.Mock(
side_effect=[test_constans.STORAGE_INFO,
test_constans.DISK_INFO,
test_constans.POOL_INFO,
test_constans.POOL_DETAIL_INFO,
test_constans.RAID_GROUP_INFO])
data = self.hus_client.get_storage(context)
self.assertEqual(data['name'], 'HUS110_91110206')
def test_list_storage_pools(self):
NaviClient.exec = mock.Mock(
side_effect=[test_constans.POOL_INFO,
test_constans.POOL_DETAIL_INFO,
test_constans.RAID_GROUP_INFO,
test_constans.RAID_GROUP_DETAIL_INFO])
data = self.hus_client.list_storage_pools(context)
self.assertEqual(data[0]['name'], '2')
def test_list_volumes(self):
NaviClient.exec = mock.Mock(
side_effect=[
test_constans.VOLUMES_INFO,
test_constans.POOL_INFO,
test_constans.POOL_DETAIL_INFO])
data = self.hus_client.list_volumes(context)
self.assertEqual(data[0]['name'], '1')
def test_list_controllers(self):
NaviClient.exec = mock.Mock(
side_effect=[
test_constans.STATUS_INFO,
test_constans.STORAGE_INFO])
data = self.hus_client.list_controllers(context)
self.assertEqual(data[0]['name'], 'controller0')
def test_list_ports(self):
NaviClient.exec = mock.Mock(
side_effect=[
test_constans.ISCSI_PORT_INFO,
test_constans.PORT_INFO,
test_constans.STATUS_INFO,
test_constans.WWN_INFO])
data = self.hus_client.list_ports(context)
self.assertEqual(data[0]['name'], '0A')
def test_list_disks(self):
NaviClient.exec = mock.Mock(
side_effect=[test_constans.DISK_INFO])
data = self.hus_client.list_disks(context)
self.assertEqual(data[0]['name'], '0')
| 38.380952 | 74 | 0.656017 |
30cd067bd51166d69fdf65a137a172c994588a4b | 13,973 | py | Python | services/director-v2/src/simcore_service_director_v2/api/routes/computations.py | Surfict/osparc-simcore | 1e0b89574ec17ecb089674f9e5daa83d624430c8 | ["MIT"] | null | null | null | services/director-v2/src/simcore_service_director_v2/api/routes/computations.py | Surfict/osparc-simcore | 1e0b89574ec17ecb089674f9e5daa83d624430c8 | ["MIT"] | 16 | 2021-10-04T20:31:52.000Z | 2022-03-14T04:31:25.000Z | services/director-v2/src/simcore_service_director_v2/api/routes/computations.py | Surfict/osparc-simcore | 1e0b89574ec17ecb089674f9e5daa83d624430c8 | ["MIT"] | null | null | null |
# pylint: disable=too-many-arguments
import logging
from typing import Any, List
import networkx as nx
from fastapi import APIRouter, Depends, HTTPException
from models_library.projects import ProjectAtDB, ProjectID
from models_library.projects_state import RunningState
from starlette import status
from starlette.requests import Request
from tenacity import (
before_sleep_log,
retry,
retry_if_result,
stop_after_delay,
wait_random,
)
from ...core.errors import PipelineNotFoundError, ProjectNotFoundError, SchedulerError
from ...models.domains.comp_pipelines import CompPipelineAtDB
from ...models.domains.comp_tasks import CompTaskAtDB
from ...models.schemas.comp_tasks import (
ComputationTaskCreate,
ComputationTaskDelete,
ComputationTaskOut,
ComputationTaskStop,
)
from ...models.schemas.constants import UserID
from ...modules.comp_scheduler.base_scheduler import BaseCompScheduler
from ...modules.db.repositories.comp_pipelines import CompPipelinesRepository
from ...modules.db.repositories.comp_tasks import CompTasksRepository
from ...modules.db.repositories.projects import ProjectsRepository
from ...modules.director_v0 import DirectorV0Client
from ...utils.async_utils import run_sequentially_in_context
from ...utils.computations import (
get_pipeline_state_from_task_states,
is_pipeline_running,
is_pipeline_stopped,
)
from ...utils.dags import (
compute_pipeline_details,
create_complete_dag,
create_complete_dag_from_tasks,
create_minimal_computational_graph_based_on_selection,
find_computational_node_cycles,
)
from ..dependencies.database import get_repository
from ..dependencies.director_v0 import get_director_v0_client
from ..dependencies.scheduler import get_scheduler
router = APIRouter()
log = logging.getLogger(__file__)
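# seconds that delete_pipeline waits for a running pipeline to stop before giving up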
PIPELINE_ABORT_TIMEOUT_S = 10
@router.post(
"",
summary="Create and optionally start a new computation",
response_model=ComputationTaskOut,
status_code=status.HTTP_201_CREATED,
)
# NOTE: in case of a burst of calls to that endpoint, we might end up in a weird state.
@run_sequentially_in_context(target_args=["job.project_id"])
async def create_computation(
job: ComputationTaskCreate,
request: Request,
project_repo: ProjectsRepository = Depends(get_repository(ProjectsRepository)),
computation_pipelines: CompPipelinesRepository = Depends(
get_repository(CompPipelinesRepository)
),
computation_tasks: CompTasksRepository = Depends(
get_repository(CompTasksRepository)
),
director_client: DirectorV0Client = Depends(get_director_v0_client),
scheduler: BaseCompScheduler = Depends(get_scheduler),
) -> ComputationTaskOut:
log.debug(
"User %s is creating a new computation from project %s",
job.user_id,
job.project_id,
)
try:
# get the project
project: ProjectAtDB = await project_repo.get_project(job.project_id)
# FIXME: this could not be valid anymore if the user deletes the project in between right?
# check if current state allow to modify the computation
comp_tasks: List[CompTaskAtDB] = await computation_tasks.get_comp_tasks(
job.project_id
)
pipeline_state = get_pipeline_state_from_task_states(comp_tasks)
if is_pipeline_running(pipeline_state):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
                detail=f"Project {job.project_id} already started, current state is {pipeline_state}",
)
# create the complete DAG graph
complete_dag = create_complete_dag(project.workbench)
# find the minimal viable graph to be run
computational_dag = await create_minimal_computational_graph_based_on_selection(
complete_dag=complete_dag,
selected_nodes=job.subgraph or [],
force_restart=job.force_restart or False,
)
# ok so put the tasks in the db
await computation_pipelines.upsert_pipeline(
job.user_id, project.uuid, computational_dag, job.start_pipeline or False
)
inserted_comp_tasks = await computation_tasks.upsert_tasks_from_project(
project,
director_client,
list(computational_dag.nodes()) if job.start_pipeline else [],
)
if job.start_pipeline:
if not computational_dag.nodes():
# 2 options here: either we have cycles in the graph or it's really done
list_of_cycles = find_computational_node_cycles(complete_dag)
if list_of_cycles:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail=f"Project {job.project_id} contains cycles with computational services which are currently not supported! Please remove them.",
)
# there is nothing else to be run here, so we are done
raise HTTPException(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
detail=f"Project {job.project_id} has no computational services, or contains cycles",
)
await scheduler.run_new_pipeline(
job.user_id,
job.project_id,
job.cluster_id
or request.app.state.settings.DASK_SCHEDULER.DASK_DEFAULT_CLUSTER_ID,
)
return ComputationTaskOut(
id=job.project_id,
state=RunningState.PUBLISHED
if job.start_pipeline
else RunningState.NOT_STARTED,
pipeline_details=await compute_pipeline_details(
complete_dag, computational_dag, inserted_comp_tasks
),
url=f"{request.url}/{job.project_id}",
stop_url=f"{request.url}/{job.project_id}:stop"
if job.start_pipeline
else None,
)
except ProjectNotFoundError as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@router.get(
"/{project_id}",
summary="Returns a computation pipeline state",
response_model=ComputationTaskOut,
status_code=status.HTTP_202_ACCEPTED,
)
async def get_computation(
user_id: UserID,
project_id: ProjectID,
request: Request,
project_repo: ProjectsRepository = Depends(get_repository(ProjectsRepository)),
computation_pipelines: CompPipelinesRepository = Depends(
get_repository(CompPipelinesRepository)
),
computation_tasks: CompTasksRepository = Depends(
get_repository(CompTasksRepository)
),
) -> ComputationTaskOut:
log.debug("User %s getting computation status for project %s", user_id, project_id)
try:
# check that project actually exists
await project_repo.get_project(project_id)
# NOTE: Here it is assumed the project exists in comp_tasks/comp_pipeline
# get the project pipeline
pipeline_at_db: CompPipelineAtDB = await computation_pipelines.get_pipeline(
project_id
)
pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph()
# get the project task states
all_comp_tasks: List[CompTaskAtDB] = await computation_tasks.get_all_tasks(
project_id
)
# create the complete DAG graph
complete_dag = create_complete_dag_from_tasks(all_comp_tasks)
# filter the tasks by the effective pipeline
filtered_tasks = [
t for t in all_comp_tasks if str(t.node_id) in list(pipeline_dag.nodes())
]
pipeline_state = get_pipeline_state_from_task_states(filtered_tasks)
log.debug(
"Computational task status by user %s for project %s is %s",
user_id,
project_id,
pipeline_state,
)
task_out = ComputationTaskOut(
id=project_id,
state=pipeline_state,
pipeline_details=await compute_pipeline_details(
complete_dag, pipeline_dag, all_comp_tasks
),
url=f"{request.url.remove_query_params('user_id')}",
stop_url=f"{request.url.remove_query_params('user_id')}:stop"
if is_pipeline_running(pipeline_state)
else None,
)
return task_out
except (ProjectNotFoundError, PipelineNotFoundError) as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@router.post(
"/{project_id}:stop",
summary="Stops a computation pipeline",
response_model=ComputationTaskOut,
status_code=status.HTTP_202_ACCEPTED,
)
async def stop_computation_project(
comp_task_stop: ComputationTaskStop,
project_id: ProjectID,
request: Request,
project_repo: ProjectsRepository = Depends(get_repository(ProjectsRepository)),
computation_pipelines: CompPipelinesRepository = Depends(
get_repository(CompPipelinesRepository)
),
computation_tasks: CompTasksRepository = Depends(
get_repository(CompTasksRepository)
),
scheduler: BaseCompScheduler = Depends(get_scheduler),
) -> ComputationTaskOut:
log.debug(
"User %s stopping computation for project %s",
comp_task_stop.user_id,
project_id,
)
try:
# check the project exists
await project_repo.get_project(project_id)
# get the project pipeline
pipeline_at_db: CompPipelineAtDB = await computation_pipelines.get_pipeline(
project_id
)
pipeline_dag: nx.DiGraph = pipeline_at_db.get_graph()
# get the project task states
tasks: List[CompTaskAtDB] = await computation_tasks.get_all_tasks(project_id)
# create the complete DAG graph
complete_dag = create_complete_dag_from_tasks(tasks)
# filter the tasks by the effective pipeline
filtered_tasks = [
t for t in tasks if str(t.node_id) in list(pipeline_dag.nodes())
]
pipeline_state = get_pipeline_state_from_task_states(filtered_tasks)
if is_pipeline_running(pipeline_state):
await scheduler.stop_pipeline(comp_task_stop.user_id, project_id)
return ComputationTaskOut(
id=project_id,
state=pipeline_state,
pipeline_details=await compute_pipeline_details(
complete_dag, pipeline_dag, tasks
),
url=f"{str(request.url).rstrip(':stop')}",
)
except ProjectNotFoundError as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
except SchedulerError as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
@router.delete(
"/{project_id}",
summary="Deletes a computation pipeline",
response_model=None,
status_code=status.HTTP_204_NO_CONTENT,
)
async def delete_pipeline(
comp_task_stop: ComputationTaskDelete,
project_id: ProjectID,
project_repo: ProjectsRepository = Depends(get_repository(ProjectsRepository)),
computation_pipelines: CompPipelinesRepository = Depends(
get_repository(CompPipelinesRepository)
),
computation_tasks: CompTasksRepository = Depends(
get_repository(CompTasksRepository)
),
scheduler: BaseCompScheduler = Depends(get_scheduler),
) -> None:
try:
# get the project
project: ProjectAtDB = await project_repo.get_project(project_id)
# check if current state allow to stop the computation
comp_tasks: List[CompTaskAtDB] = await computation_tasks.get_comp_tasks(
project_id
)
pipeline_state = get_pipeline_state_from_task_states(comp_tasks)
if is_pipeline_running(pipeline_state):
if not comp_task_stop.force:
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
                    detail=f"Project {project_id} is currently running and cannot be deleted, current state is {pipeline_state}",
)
# abort the pipeline first
try:
await scheduler.stop_pipeline(comp_task_stop.user_id, project_id)
except SchedulerError as e:
log.warning(
"Project %s could not be stopped properly.\n reason: %s",
project_id,
e,
)
def return_last_value(retry_state: Any) -> Any:
"""return the result of the last call attempt"""
return retry_state.outcome.result()
@retry(
stop=stop_after_delay(PIPELINE_ABORT_TIMEOUT_S),
wait=wait_random(0, 2),
retry_error_callback=return_last_value,
retry=retry_if_result(lambda result: result is False),
reraise=False,
before_sleep=before_sleep_log(log, logging.INFO),
)
async def check_pipeline_stopped() -> bool:
comp_tasks: List[CompTaskAtDB] = await computation_tasks.get_comp_tasks(
project_id
)
pipeline_state = get_pipeline_state_from_task_states(
comp_tasks,
)
return is_pipeline_stopped(pipeline_state)
# wait for the pipeline to be stopped
if not await check_pipeline_stopped():
log.error(
"pipeline %s could not be stopped properly after %ss",
project_id,
PIPELINE_ABORT_TIMEOUT_S,
)
# delete the pipeline now
await computation_tasks.delete_tasks_from_project(project)
await computation_pipelines.delete_pipeline(project_id)
except ProjectNotFoundError as e:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e)) from e
| 38.493113 | 158 | 0.674444 |
0db6e90fadcb7d95b4650be7a5dff8a69cdcc7eb | 970 | py | Python | kubernetes/test/test_v1beta2_replica_set.py | Prahladk09/python-1 | 2dfb3035535e4be52ba549f1ff47acbe573b73f6 | ["Apache-2.0"] | 1 | 2020-04-13T09:54:21.000Z | 2020-04-13T09:54:21.000Z | kubernetes/test/test_v1beta2_replica_set.py | Prahladk09/python-1 | 2dfb3035535e4be52ba549f1ff47acbe573b73f6 | ["Apache-2.0"] | 1 | 2019-08-15T14:27:17.000Z | 2019-08-15T14:28:07.000Z | kubernetes/test/test_v1beta2_replica_set.py | Prahladk09/python-1 | 2dfb3035535e4be52ba549f1ff47acbe573b73f6 | ["Apache-2.0"] | 2 | 2020-08-05T03:06:48.000Z | 2020-08-05T16:08:21.000Z |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta2_replica_set import V1beta2ReplicaSet
class TestV1beta2ReplicaSet(unittest.TestCase):
""" V1beta2ReplicaSet unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1beta2ReplicaSet(self):
"""
Test V1beta2ReplicaSet
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1beta2_replica_set.V1beta2ReplicaSet()
pass
if __name__ == '__main__':
unittest.main()
| 21.555556 | 105 | 0.71134 |
0deefcbb57509edb6b8038c0c18d2d01cd8ebefc | 9,419 | py | Python | docs/conf.py | zopefoundation/zope.filerepresentation | 961701bcab86a96a9a82b8bf42600d680841e753 | ["ZPL-2.1"] | 2 | 2019-03-18T16:32:16.000Z | 2019-10-23T22:32:39.000Z | docs/conf.py | zopefoundation/zope.filerepresentation | 961701bcab86a96a9a82b8bf42600d680841e753 | ["ZPL-2.1"] | 5 | 2016-03-24T15:22:32.000Z | 2020-03-31T12:39:45.000Z | docs/conf.py | zopefoundation/zope.filerepresentation | 961701bcab86a96a9a82b8bf42600d680841e753 | ["ZPL-2.1"] | 1 | 2015-04-03T08:50:07.000Z | 2015-04-03T08:50:07.000Z |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# zope.filerepresentation documentation build configuration file, created by
# sphinx-quickstart on Wed Feb 25 11:38:35 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
import os
import sys
import pkg_resources
sys.path.append(os.path.abspath('../src'))
rqmt = pkg_resources.require('zope.filerepresentation')[0]
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'repoze.sphinx.autointerface',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'zope.filerepresentation'
copyright = '2016, Zope Foundation and Contributors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
# The short X.Y version.
version = '%s.%s' % tuple(map(int, rqmt.version.split('.')[:2]))
# The full version, including alpha/beta/rc tags.
release = rqmt.version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'zopefilerepresentationdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'zopefilerepresentation.tex', 'zope.filerepresentation Documentation',
'Zope Foundation and Contributors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'zopefilerepresentation', 'zope.filerepresentation Documentation',
['Zope Foundation and Contributors'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'zopefilerepresentation', 'zope.filerepresentation Documentation',
'Zope Foundation and Contributors', 'zopefilerepresentation',
'Interfaces for file-system and file-system-like representations of objects, such as file-system synchronization, FTP, PUT, and WebDAV.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'https://docs.python.org/': None,
'https://zopeschema.readthedocs.io/en/latest/': None,
'https://zopeinterface.readthedocs.io/en/latest/': None,
}
extlinks = {'issue': ('https://github.com/zopefoundation/zope.filerepresentation/issues/%s',
'issue #'),
'pr': ('https://github.com/zopefoundation/zope.filerepresentation/pull/%s',
'pull request #')}
autodoc_default_flags = ['members', 'show-inheritance']
autoclass_content = 'both'
| 32.933566 | 140 | 0.719291 |
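A note on the version/release lines above: "version" is just "release" truncated to its first two numeric components. An illustrative check of that split (not part of conf.py):

release = "1.2.3"                                   # stands in for rqmt.version
version = "%s.%s" % tuple(map(int, release.split(".")[:2]))
assert version == "1.2"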
946a31ed6ea2efa5e706e9e1345633524236d482 | 61 | py | Python | backend/app/common.py | Polsaker/mateapp | 8dfce3b642e8b7a68e74f22864aad8cee5b65239 | ["MIT"] | null | null | null | backend/app/common.py | Polsaker/mateapp | 8dfce3b642e8b7a68e74f22864aad8cee5b65239 | ["MIT"] | null | null | null | backend/app/common.py | Polsaker/mateapp | 8dfce3b642e8b7a68e74f22864aad8cee5b65239 | ["MIT"] | null | null | null |
from flask_jwt_extended import JWTManager
JWT = JWTManager()
| 20.333333 | 41 | 0.836066 |
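common.py only creates the unbound extension object; an application is expected to bind it later. A minimal wiring sketch (illustrative; the secret key and app layout are assumptions, not taken from this repository):

from flask import Flask
from backend.app.common import JWT  # the JWTManager() instance defined above

app = Flask(__name__)
app.config["JWT_SECRET_KEY"] = "change-me"  # assumed to come from real config
JWT.init_app(app)                           # bind the extension to the app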
8ca088913fdcc7bd4dd281421fa534429f5a55f3 | 115 | py | Python | inference/example_webserver.py | zhoujinhai/MeshCNN | d76259079b39bc5c66113bda54b52da4fd64de11 | ["MIT"] | null | null | null | inference/example_webserver.py | zhoujinhai/MeshCNN | d76259079b39bc5c66113bda54b52da4fd64de11 | ["MIT"] | null | null | null | inference/example_webserver.py | zhoujinhai/MeshCNN | d76259079b39bc5c66113bda54b52da4fd64de11 | ["MIT"] | null | null | null |
import webserver
if __name__ == '__main__':
server = webserver.MeshWebServer()
server.run(port=8000)
| 19.166667 | 39 | 0.678261 |
de20fb84d220e309fe57cf8a9f881c27b45bcf07 | 2,373 | py | Python | huaweicloud-sdk-sdrs/huaweicloudsdksdrs/v1/model/list_protected_instances_project_tags_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-sdrs/huaweicloudsdksdrs/v1/model/list_protected_instances_project_tags_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-sdrs/huaweicloudsdksdrs/v1/model/list_protected_instances_project_tags_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z |
# coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListProtectedInstancesProjectTagsRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
}
attribute_map = {
}
def __init__(self):
"""ListProtectedInstancesProjectTagsRequest - a model defined in huaweicloud sdk"""
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListProtectedInstancesProjectTagsRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 27.593023 | 91 | 0.54488 |
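The generated model above has empty openapi_types/attribute_map, so its serialization walk degenerates to an empty dict. A quick illustrative usage (assumes the module above is importable):

req = ListProtectedInstancesProjectTagsRequest()
print(req.to_dict())  # {} - no attributes are declared in openapi_types
print(req == ListProtectedInstancesProjectTagsRequest())  # True - __eq__ compares __dict__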
7298c3a28b0ca3d6d2fe8c6a8806e0c9efbd0735 | 112 | py | Python | gym_ucv/__init__.py | mkisantal/gym-ucv-control | 979ecbf0942ce81fab8aa7a179b154194bd023f0 | ["MIT"] | null | null | null | gym_ucv/__init__.py | mkisantal/gym-ucv-control | 979ecbf0942ce81fab8aa7a179b154194bd023f0 | ["MIT"] | null | null | null | gym_ucv/__init__.py | mkisantal/gym-ucv-control | 979ecbf0942ce81fab8aa7a179b154194bd023f0 | ["MIT"] | null | null | null |
from gym.envs.registration import register
register(
id='ucv-v0',
entry_point='gym_ucv.envs:UcvEnv',
)
| 16 | 42 | 0.714286 |
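Registering the entry point makes the environment constructible by id through gym's registry. An illustrative usage sketch (assumes gym is installed and the gym_ucv.envs.UcvEnv backend is functional):

import gym
import gym_ucv  # importing the package runs register() above

env = gym.make("ucv-v0")
obs = env.reset()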
b836d2a335ae0f4146e9577ff6a90561c9e5feec | 439 | py | Python | src/Cleaner.py | arnavkohli/Reddgram | d91c914d1c575405a162c62385303827d675e2f5 | ["MIT"] | 1 | 2019-08-24T15:54:34.000Z | 2019-08-24T15:54:34.000Z | src/Cleaner.py | arnavkohli/Reddgram | d91c914d1c575405a162c62385303827d675e2f5 | ["MIT"] | 5 | 2021-03-19T02:57:31.000Z | 2022-03-11T23:57:28.000Z | src/Cleaner.py | arnavkohli/Reddgram | d91c914d1c575405a162c62385303827d675e2f5 | ["MIT"] | null | null | null |
import os
class Cleaner:
@staticmethod
def clean(path=os.getcwd()):
"""
        Removes files that have no extension or that contain 'html'
        in their name; directories and all other files are kept.
"""
l = os.listdir(path)
to_delete = []
for item in l:
if ('.' in item and 'html' not in item) or os.path.isdir(path + '/{}'.format(item)):
pass
else:
to_delete.append(item)
for d in to_delete:
print ('Deleting: {}'.format(d))
            os.remove(os.path.join(path, d))  # join with path so files outside the cwd are removed correctly
        print ('Cleaner finished.')
| 18.291667 | 87 | 0.610478 |
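Usage sketch for the cleaner above (illustrative; the directory shown is hypothetical - point it at a scratch folder, since matching files are deleted):

from src.Cleaner import Cleaner

Cleaner.clean(path="/tmp/scratch")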
f9a79a7210f2e47a43d2a6f96df0e67da9dc36c7 | 1,354 | py | Python | scripts/conservation/reformat.py | dbmi-bgm/cgap-annotation-server | 05d022f254b5e3057abf13aa9c8bdae5eb8b6e3a | ["MIT"] | 1 | 2021-05-27T14:27:47.000Z | 2021-05-27T14:27:47.000Z | scripts/conservation/reformat.py | dbmi-bgm/cgap-annotation-server | 05d022f254b5e3057abf13aa9c8bdae5eb8b6e3a | ["MIT"] | 8 | 2020-02-11T20:06:10.000Z | 2020-09-28T20:03:17.000Z | scripts/conservation/reformat.py | dbmi-bgm/cgap-annotation-server | 05d022f254b5e3057abf13aa9c8bdae5eb8b6e3a | ["MIT"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# reformat.py
# made by Daniel Minseok Kwon
# 2020-01-28 04:36:09
#########################
import sys
import os
import time
SVRNAME = os.uname()[1]
if "MBI" in SVRNAME.upper():
sys_path = "/Users/pcaso/bin/python_lib"
elif SVRNAME == "T7":
sys_path = "/ms1/bin/python_lib"
else:
sys_path = "/home/mk446/bin/python_lib"
sys.path.append(sys_path)
def reformat(tsi):
out = path + "tmp2/" + tsi.split('/')[-1].replace('.tsi.gz', '') + '.bed'
f = open(out, 'w')
for line in file_util.gzopen(tsi):
if tsi.endswith('.gz'):
line = line.decode('UTF-8')
arr = line.split('\t')
arr[-1] = arr[-1].strip()
if line[0] == '#':
spos = "SPOS"
epos = "EPOS"
else:
spos = str(int(arr[1]) - 1)
epos = arr[1]
cont = [arr[0], spos, epos, arr[3].replace('|','\t')]
f.write('\t'.join(cont) + '\n')
f.close()
print("Saved", out)
time.sleep(120)
proc_util.run_cmd('tabixgzbed ' + out)
# /home/mk446/mutanno/SRC/scripts/conservation/reformat.py
if __name__ == "__main__":
import file_util
import proc_util
tsi = sys.argv[1]
path = "/home/mk446/mutanno/DATASOURCE/CONSERVATION/"
# tsi = path + "conservation_scores.hg38.chrM.tsi"
reformat(tsi)
| 25.54717 | 77 | 0.554653 |
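The spos/epos arithmetic above is the standard conversion from 1-based coordinates to BED, whose intervals are 0-based and half-open: a 1-based position P becomes the pair (P-1, P). A tiny illustrative check:

pos = 12345                # 1-based coordinate from the .tsi file
spos, epos = pos - 1, pos  # BED start (0-based) and end (exclusive)
assert (spos, epos) == (12344, 12345)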
4fe090f35eac4024d828ca6877472d8fe54fb7fe | 31,747 | py | Python | odebase.py | zed-ee/grader-simulator | e8a7f14008bad1aac2363dd46afa313c6d49ac03 | ["MIT"] | 1 | 2019-07-18T23:19:49.000Z | 2019-07-18T23:19:49.000Z | odebase.py | zed-ee/grader-simulator | e8a7f14008bad1aac2363dd46afa313c6d49ac03 | ["MIT"] | null | null | null | odebase.py | zed-ee/grader-simulator | e8a7f14008bad1aac2363dd46afa313c6d49ac03 | ["MIT"] | null | null | null |
import sys, random, time
from math import degrees, sin, cos, pi, sqrt, pow, atan
from direct.showbase import PythonUtil as PU
#import ode as ode
#import direct.directbase.DirectStart
from pandac.PandaModules import PerspectiveLens
from pandac.PandaModules import TransparencyAttrib,GeomVertexReader,GeomVertexFormat,GeomVertexData,Geom,GeomVertexWriter,GeomTriangles,GeomNode
from pandac.PandaModules import Vec3,Vec4,Point3
from pandac.PandaModules import OdeWorld, OdeSimpleSpace, OdeJointGroup, OdeSpace, OdeBallJoint, OdeHinge2Joint, OdeQuadTreeSpace, OdeHashSpace
from pandac.PandaModules import OdeBody, OdeMass, OdeSphereGeom, OdeBoxGeom, OdePlaneGeom, OdeCylinderGeom, OdeCappedCylinderGeom, OdeTriMeshGeom, OdeTriMeshData
from pandac.PandaModules import BitMask32, Quat, Mat4
from pandac.PandaModules import PandaSystem
#from random import randint, random
import random
def getLength(v1,v2):
return sqrt(pow(v1[0]-v2[0],2)+pow(v1[1]-v2[1],2)+pow(v1[2]-v2[2],2))
def getCenter(v1,v2):
return [ (v1[0]+v2[0])/2, (v1[1]+v2[1])/2, (v1[2]+v2[2])/2]
class ODEBallJoint(OdeBallJoint):
def __init__(self, world, model=None, renderparent=None, scale=None):
OdeBallJoint.__init__(self, world)
if model != None:
self.np = model.copyTo(renderparent)
self.np.setP(90)
self.np.flattenStrong()
self.scale = scale
else:
self.np = None
self.bodies = 0
def destroy(self):
self.detach()
OdeBallJoint.destroy(self)
if self.np != None:
self.np.removeNode()
def attach(self, b1, b2):
OdeBallJoint.attach(self, b1,b2)
self.body1 = b1
if b2 == None:
self.bodies = 1
else:
self.bodies = 2
self.body2 = b2
def Render(self):
if self.np==None or self.bodies == 0:
return
#v = self.getAnchor()
if self.bodies == 1:
v = self.getAnchor2()
else:
#v = self.getBody(1).getPosition()
v = self.body2.getPosition()
#vb = self.getBody(0).getPosition()
vb = self.body1.getPosition()
#if self.bodies == 2:
# print v, vb
v = Vec3(v[0],v[1],v[2])
vb = Vec3(vb[0],vb[1],vb[2])
c = (vb+v) / 2
vd = vb-v
l = vd.length()
#self.np.setScale(self.scale[0], self.scale[1], self.scale[2] * l)
self.np.setScale(self.scale[0], self.scale[1] *l, self.scale[0])
self.np.setPos(c[0],c[1],c[2])
self.np.lookAt(vb[0],vb[1],vb[2])
##class ODEBallJointOld(OdeBallJoint):
## def __init__(self, world, model=None, renderparent=None, scale=None):
## OdeBallJoint.__init__(self, world)
## if model != None:
## self.np = renderparent.attachNewNode("dummy")
## np = model.copyTo(self.np)
## np.setP(90)
## self.np2 = np
##
## self.scale = scale
## else:
## self.np = None
##
## def destroy(self):
## self.detach()
## OdeBallJoint.destroy(self)
## if self.np != None:
## self.np.removeNode()
##
## def Render(self):
## if self.np==None:
## return
## #v = self.getAnchor()
## v = self.getAnchor2()
## vb = self.getBody(0).getPosition()
## v = Vec3(v[0],v[1],v[2])
## vb = Vec3(vb[0],vb[1],vb[2])
## c = (vb+v) / 2
## vd = vb-v
## l = vd.length()
## #self.np.setScale(self.scale[0], self.scale[1], self.scale[2] * l)
## #self.np.setScale(self.scale[0], self.scale[1] *l, self.scale[2])
## self.np2.setScale(self.scale[0], self.scale[1], self.scale[2] * l)
## self.np.setPos(c[0],c[1],c[2])
## self.np.lookAt(vb[0],vb[1],vb[2])
class ODEobjbase:
def storeProps(self, realObj, mass, surfaceId, collideBits, categoryBits):
self.realObj=realObj
self.mass=mass
self.surfaceId = surfaceId
self.geom.getSpace().setSurfaceType(self.geom, surfaceId)
self.geom.setCollideBits(BitMask32(collideBits))
self.geom.setCategoryBits(BitMask32(categoryBits))
def isDynamic(self):
return hasattr(self,'body')
def delRealobj(self):
if self.realObj != None:
self.realObj.removeNode()
self.realObj = None
def destroy(self):
if hasattr(self,'body'):
#self.body.destroy()
del self.body
self.geom.getSpace().remove(self.geom)
del self.geom
if hasattr(self,'visualizer'):
self.visualizer.removeNode()
self.delRealobj()
def getOBB(self,collObj):
''' get the Oriented Bounding Box '''
# save object's parent and transformation
parent=collObj.getParent()
trans=collObj.getTransform()
# ODE need everything in world's coordinate space,
# so bring the object directly under render, but keep the transformation
collObj.wrtReparentTo(render)
# get the tight bounds before any rotation
collObj.setHpr(0,0,0)
bounds=collObj.getTightBounds()
print `bounds`
offset=collObj.getBounds().getCenter()-collObj.getPos()
# bring object to it's parent and restore it's transformation
collObj.reparentTo(parent)
collObj.setTransform(trans)
# (max - min) bounds
box=bounds[1]-bounds[0]
# print bounds[0], bounds[1]
return [box[0],box[1],box[2]], [offset[0],offset[1],offset[2]]
class ODEbox(ODEobjbase):
def __init__(self, world, space, realObj=None, collObj=None,
density=0, surfaceId=0, collideBits=0, categoryBits=0):
if realObj==None:
obj=collObj
else:
obj=realObj
if collObj==None:
collObj=realObj
boundingBox, offset=self.getOBB(collObj)
#print boundingBox, offset
## if offset==Vec3(0):
## fCentered=True
## else:
## realGeom= OdeBoxGeom(None, lengths=boundingBox)
## self.geom = Ode.GeomTransform(space)
## self.geom.setGeom(realGeom)
## realGeom.setPosition(offset)
## nonCenteredO=1
## print 'NON-CENTERED ORIGIN'
##
## if density: # create body if the object is dynamic, otherwise don't
## self.body = ode.Body(world)
## M = ode.Mass()
## M.setBox(density, *boundingBox)
## if nonCenteredO:
## M.translate(offset)
self.geom = OdeBoxGeom(space, *boundingBox)
if density > 0: # create body if the object is dynamic, otherwise don't
self.body = OdeBody(world)
M = OdeMass()
M.setBox(density, *boundingBox)
self.body.setMass(M)
self.geom.setBody(self.body)
#self.geom.setOffsetPosition(*offset)
#print offset
mass = M.getMagnitude()
else:
mass = 0
# synchronize ODE geom's transformation according to the real object's
self.geom.setPosition(obj.getPos(render))
self.geom.setQuaternion(obj.getQuat(render))
# store object's properties
self.storeProps(realObj, mass, surfaceId, collideBits, categoryBits)
class ODEcylinder(ODEobjbase):
def __init__(self, world, space, realObj=None, collObj=None,
density=0, direction=3, radius=1, length=1, surfaceId=0, collideBits=0, categoryBits=0):
if realObj==None:
obj=collObj
else:
obj=realObj
if collObj==None:
collObj=realObj
self.geom = OdeCylinderGeom(space, radius, length)
if density > 0: # create body if the object is dynamic, otherwise don't
self.body = OdeBody(world)
M = OdeMass()
M.setCylinder(density, direction, radius, length)
self.body.setMass(M)
self.geom.setBody(self.body)
#self.geom.setOffsetPosition(*offset)
#print offset
mass = M.getMagnitude()
else:
mass = 0
# synchronize ODE geom's transformation according to the real object's
self.geom.setPosition(obj.getPos(render))
self.geom.setQuaternion(obj.getQuat(render))
# store object's properties
self.storeProps(realObj, mass, surfaceId, collideBits, categoryBits)
class ODEcylinder2(ODEobjbase):
def __init__(self, world, space, realObj=None, collObj=None,
density=0, direction=3, radius=1, length=1, surfaceId=0, collideBits=0, categoryBits=0):
if realObj==None:
obj=collObj
else:
obj=realObj
if collObj==None:
collObj=realObj
self.geom = OdeSphereGeom(space, radius)
if density > 0: # create body if the object is dynamic, otherwise don't
self.body = OdeBody(world)
M = OdeMass()
M.setCylinder(density, direction, radius, length)
self.body.setMass(M)
self.geom.setBody(self.body)
#self.geom.setOffsetPosition(*offset)
#print offset
mass = M.getMagnitude()
else:
mass = 0
# synchronize ODE geom's transformation according to the real object's
self.geom.setPosition(obj.getPos(render))
self.geom.setQuaternion(obj.getQuat(render))
# store object's properties
self.storeProps(realObj, mass, surfaceId, collideBits, categoryBits)
class ODECappedCylinder(ODEobjbase):
def __init__(self, world, space, realObj=None, collObj=None,
density=0, direction=3,radius=1, length=1, surfaceId=0, collideBits=0, categoryBits=0):
if realObj==None:
obj=collObj
else:
obj=realObj
if collObj==None:
collObj=realObj
self.geom = OdeCappedCylinderGeom(space, radius, length)
if density > 0: # create body if the object is dynamic, otherwise don't
self.body = OdeBody(world)
M = OdeMass()
M.setCapsule(density, direction, radius, length)
self.body.setMass(M)
self.geom.setBody(self.body)
#self.geom.setOffsetPosition(*offset)
#print offset
mass = M.getMagnitude()
else:
mass = 0
# synchronize ODE geom's transformation according to the real object's
self.geom.setPosition(obj.getPos(render))
self.geom.setQuaternion(obj.getQuat(render))
# store object's properties
self.storeProps(realObj, mass, surfaceId, collideBits, categoryBits)
class ODEsphere(ODEobjbase):
def __init__(self, world, space, realObj=None, collObj=None,
density=0, surfaceId=0, collideBits=0, categoryBits=0):
if realObj==None:
obj=collObj
else:
obj=realObj
if collObj==None:
collObj=realObj
boundingBox, offset=self.getOBB(collObj)
r = boundingBox[0]/2
self.geom = OdeSphereGeom(space, r)
if density > 0: # create body if the object is dynamic, otherwise don't
self.body = OdeBody(world)
M = OdeMass()
M.setSphere(density, r)
self.body.setMass(M)
self.geom.setBody(self.body)
mass = M.getMagnitude()
else:
mass = 0
# synchronize ODE geom's transformation according to the real object's
self.geom.setPosition(obj.getPos(render))
self.geom.setQuaternion(obj.getQuat(render))
# store object's properties
self.storeProps(realObj, mass, surfaceId, collideBits, categoryBits)
class ODEtrimesh(ODEobjbase):
def __init__(self, world, space, realObj=None, collObj=None,
mass=0, surfaceId=0, collideBits=0, categoryBits=0):
if realObj==None:
obj=collObj
else:
obj=realObj
if collObj==None:
collObj=realObj
modelTrimesh = OdeTriMeshData(obj, True)
self.geom = OdeTriMeshGeom(space, modelTrimesh)
if mass > 0: # create body if the object is dynamic, otherwise don't
self.body = OdeBody(world)
M = OdeMass()
boundingBox, offset=self.getOBB(collObj)
#print boundingBox
size = max(0.01,max(boundingBox[0],boundingBox[1],boundingBox[2])/2)
M.setSphereTotal(mass, size)
self.body.setMass(M)
#self.body.setGravityMode(1)
#print M
self.geom.setBody(self.body)
# synchronize ODE geom's transformation according to the real object's
self.geom.setPosition(obj.getPos(render))
self.geom.setQuaternion(obj.getQuat(render))
# store object's properties
self.storeProps(realObj, mass, surfaceId, collideBits, categoryBits)
# make a rectangular room using boxes
def MakeRoomBoxes(minpos, maxpos, thickness):
# six pieces
xmid = (maxpos[0] + minpos[0]) / 2
ymid = (maxpos[1] + minpos[1]) / 2
zmid = (maxpos[2] + minpos[2]) / 2
xl = (maxpos[0] - minpos[0])
yl = (maxpos[1] - minpos[1])
zl = (maxpos[2] - minpos[2])
return [
[maxpos[0]+thickness/2,ymid,zmid,thickness,yl,zl],
[minpos[0]-thickness/2,ymid,zmid,thickness,yl,zl],
[xmid, ymid, maxpos[2]+thickness/2, xl, yl, thickness],
[xmid, ymid, minpos[2]-thickness/2, xl, yl, thickness],
[xmid, maxpos[1]+thickness/2, zmid, xl, thickness, zl],
[xmid, minpos[1]-thickness/2, zmid, xl, thickness, zl],
]
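# Worked example (illustrative, not used by the module): for a unit room with
# minpos=(0,0,0), maxpos=(1,1,1) and thickness=0.2, the first entry above is
# [1.1, 0.5, 0.5, 0.2, 1, 1] - a 0.2 x 1 x 1 slab centred just outside the
# +x face; the other five entries tile the remaining faces the same way.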
def MakeRoom(node,box,odeworld,collideBits,categoryBits,minpos,maxpos,thickness,sides=6):
boxes = MakeRoomBoxes(minpos,maxpos,thickness)
room = node.attachNewNode("roomwalls")
objlist=[]
for i in range(sides):
b = boxes[i]
x,y,z,sx,sy,sz = b
bNP = box.copyTo(room)
bNP.setPos(x,y,z)
bNP.setScale(sx,sy,sz)
b_ode = ODEbox(odeworld.world,odeworld.space,bNP, None, 0, 0, collideBits,categoryBits)
b_ode.delRealobj()
odeworld.AddObject(b_ode)
objlist.append(b_ode)
return room,objlist
class ODEWorld_Simple():
SIMPLESPACE = 1
HASHSPACE = 2
def __init__(self, spacetype=SIMPLESPACE):
major = PandaSystem.getMajorVersion()
minor = PandaSystem.getMinorVersion()
self.supportEvent = (major == 1 and minor > 5) or (major > 2)
if self.supportEvent:
self.collisionMap = {}
self.InitODE(spacetype)
self.listener = set()
# debugging
#if self.supportEvent:
# self.space.setCollisionEvent("ode-collision")
# base.accept("ode-collision", self.onCollision)
self.count = 0
self.totaltime1 = 0.0
self.totaltime2 = 0.0
#################################################
# this functions are obsoleted, only for 1.5.4
def setNotifier(self, object):
self.listener.add(object)
def removeNotifier(self, object):
self.listener.remove(object)
def notify(self, collisions):
for obj in self.listener:
obj.odeEvent(collisions)
# this function are obsoleted, only for 1.5.4
#################################################
# this function is for 1.6
def setCollisionNotifier(self, odeobject, func):
if self.supportEvent:
if len(self.collisionMap) == 0:
self.space.setCollisionEvent("ode-collision")
base.accept("ode-collision", self.onCollision)
id = int(str(odeobject.geom).split(" ")[-1].rstrip(")"), 16)
#print id
self.collisionMap[id] = (odeobject, func)
def EnableODETask(self, task=2):
if task == 2:
#taskMgr.doMethodLater(0.5, simulationTask, "Physics Simulation", extraArgs=[self], appendTask=True)
taskMgr.doMethodLater(0.5, self.simulationTask2, "ODESimulation")
elif task == 3:
taskMgr.doMethodLater(0.5, self.simulationTask3, "ODESimulation")
elif task == 4: # debug
#self.stepSize = 1.0 / 40.0
taskMgr.doMethodLater(0.5, self.simulationTask4, "ODESimulation")
elif task == 5: # debug
taskMgr.doMethodLater(0.5, self.simulationTask5, "ODESimulation")
else:
taskMgr.remove("ODESimulation")
def InitODE(self, spacetype):
world = OdeWorld()
self.world = world
# Create a space and add a contactgroup to it to add the contact joints
if spacetype == self.SIMPLESPACE:
space = OdeSimpleSpace()
elif spacetype == self.HASHSPACE:
space = OdeHashSpace()
self.InitODEwithSpace(space)
def InitODEwithSpace(self, space):
space.setAutoCollideWorld(self.world)
contactgroup = OdeJointGroup()
space.setAutoCollideJointGroup(contactgroup)
self.space = space
self.contactgroup = contactgroup
#self.objectlist = {}
self.objectlist = set()
# Create an accumulator to track the time since the sim
# has been running
# This stepSize makes the simulation run at 90 frames per second
self.deltaTimeAccumulator = 0.0
#self.stepSize = 1.0 / 200.0
#self.stepSize = 1.0 / 90.0
self.stepSize = 1.0 / 50.0
#s = self.world.getQuickStepNumIterations()
#self.world.setQuickStepNumIterations(1000)
#print s
def AddObject(self, odeobject):
#self.objectlist[odeobject] = odeobject
self.objectlist.add(odeobject)
def RemoveObject(self, odeobject):
#del self.objectlist[odeobject]
if self.supportEvent:
if hasattr(odeobject, "geom") and odeobject.geom in self.collisionMap:
del self.collisionMap[odeobject.geom]
if len(self.collisionMap) == 0:
self.space.setCollisionEvent("")
self.objectlist.remove(odeobject)
def DestroyAllObjects(self):
if self.supportEvent:
self.collisionMap.clear()
self.space.setCollisionEvent("")
for odeobject in self.objectlist:
odeobject.destroy()
self.objectlist.clear()
# The task for our simulation
def simulationTask1(self, task):
#self.space.autoCollide() # Setup the contact joints
# Step the simulation and set the new positions
self.world.quickStep(globalClock.getDt())
cc = self.space.autoCollide() # Setup the contact joints
if not self.supportEvent:
# this is obsoleted, only for 1.5.4
collisions = []
if cc > 0:
for i in range(cc):
p = Vec3(self.space.getContactData(i*3+0),self.space.getContactData(i*3+1),self.space.getContactData(i*3+2))
collisions.append(p)
#for b_ode in room.balls:
for b_ode in self.objectlist:
if isinstance(b_ode, ODEobjbase):
if b_ode.isDynamic():
np = b_ode.realObj
if np != None:
#body = b_ode.body
body = b_ode.geom
np.setPosQuat(render, body.getPosition(), Quat(body.getQuaternion()))
self.contactgroup.empty() # Clear the contact joints
if not self.supportEvent:
self.notify(collisions)
return task.cont
# The task for our simulation
def simulationTask2(self, task):
# Set the force on the body to push it off the ridge
# Add the deltaTime for the task to the accumulator
self.deltaTimeAccumulator +=globalClock.getDt()
if self.deltaTimeAccumulator < self.stepSize:
return task.cont
self.space.autoCollide() # Setup the contact joints
collisions=[]
while self.deltaTimeAccumulator > self.stepSize:
# Remove a stepSize from the accumulator until
# the accumulated time is less than the stepsize
self.deltaTimeAccumulator -= self.stepSize
# Step the simulation
self.world.quickStep(self.stepSize)
cc=self.space.autoCollide() # Setup the contact joints
if not self.supportEvent:
# this is obsoleted, only for 1.5.4
if cc > 0:
for i in range(cc):
p = Vec3(self.space.getContactData(i*3+0),self.space.getContactData(i*3+1),self.space.getContactData(i*3+2))
collisions.append(p)
break
#cc = self.space.autoCollide() # Setup the contact joints
for b_ode in self.objectlist:
if isinstance(b_ode, ODEobjbase):
if b_ode.isDynamic():
np = b_ode.realObj
if np != None:
#body = b_ode.body
body = b_ode.geom
np.setPosQuat(body.getPosition(), Quat(body.getQuaternion()))
if b_ode.mass > 0 and hasattr(b_ode, "motionfriction"):
v = b_ode.body.getLinearVel()
ma = -b_ode.motionfriction * b_ode.mass
b_ode.body.addForce(ma*v[0],ma*v[1],ma*v[2])
v = b_ode.body.getAngularVel()
ma = (1-b_ode.angularfriction)
b_ode.body.setAngularVel(ma*v[0],ma*v[1],ma*v[2])
else:
# a joint ?
b_ode.Render()
#for contact in self.contactgroup:
# print contact
self.contactgroup.empty() # Clear the contact joints
if not self.supportEvent:
self.notify(collisions)
return task.cont
# The task for our simulation
def simulationTask2Save(self, task):
# Set the force on the body to push it off the ridge
# Add the deltaTime for the task to the accumulator
self.deltaTimeAccumulator +=globalClock.getDt()
if self.deltaTimeAccumulator < self.stepSize:
return task.cont
self.space.autoCollide() # Setup the contact joints
collisions=[]
while self.deltaTimeAccumulator > self.stepSize:
# Remove a stepSize from the accumulator until
# the accumulated time is less than the stepsize
self.deltaTimeAccumulator -= self.stepSize
# Step the simulation
self.world.quickStep(self.stepSize)
cc=self.space.autoCollide() # Setup the contact joints
if not self.supportEvent:
# this is obsoleted, only for 1.5.4
if cc > 0:
for i in range(cc):
p = Vec3(self.space.getContactData(i*3+0),self.space.getContactData(i*3+1),self.space.getContactData(i*3+2))
collisions.append(p)
#cc = self.space.autoCollide() # Setup the contact joints
for b_ode in self.objectlist:
if isinstance(b_ode, ODEobjbase):
if b_ode.isDynamic():
np = b_ode.realObj
if np != None:
#body = b_ode.body
body = b_ode.geom
np.setPosQuat(render, body.getPosition(), Quat(body.getQuaternion()))
if b_ode.mass > 0 and hasattr(b_ode, "motionfriction"):
v = b_ode.body.getLinearVel()
ma = -b_ode.motionfriction * b_ode.mass
b_ode.body.addForce(ma*v[0],ma*v[1],ma*v[2])
v = b_ode.body.getAngularVel()
ma = (1-b_ode.angularfriction)
b_ode.body.setAngularVel(ma*v[0],ma*v[1],ma*v[2])
else:
# a joint ?
b_ode.Render()
#for contact in self.contactgroup:
# print contact
self.contactgroup.empty() # Clear the contact joints
if not self.supportEvent:
self.notify(collisions)
return task.cont
def simulationTask3(self,task):
iterations = 5
#We limit the maximum time not to receive explosion of physic system if application stuck
dt=globalClock.getDt()
if dt>0.02: dt=0.02
dt=dt / iterations * 3
#Some iterations for the more stable simulation
for i in xrange(iterations):
self.world.quickStep(dt)
cc=self.space.autoCollide()
#Sync the box with the bodies
for b_ode in self.objectlist:
if isinstance(b_ode, ODEobjbase):
if b_ode.isDynamic():
np = b_ode.realObj
if np != None:
#body = b_ode.body
body = b_ode.geom
np.setPosQuat(render, body.getPosition(), Quat(body.getQuaternion()))
if b_ode.mass > 0 and hasattr(b_ode, "motionfriction"):
v = b_ode.body.getLinearVel()
ma = -b_ode.motionfriction * b_ode.mass
b_ode.body.addForce(ma*v[0],ma*v[1],ma*v[2])
v = b_ode.body.getAngularVel()
ma = (1-b_ode.angularfriction)
b_ode.body.setAngularVel(ma*v[0],ma*v[1],ma*v[2])
else:
# a joint ?
b_ode.Render()
self.contactgroup.empty()
return task.cont
# Setup collision event
def onCollision(self, entry):
geom1 = entry.getGeom1()
geom2 = entry.getGeom2()
id1 = int(str(geom1).split(" ")[-1].rstrip(")"), 16)
id2 = int(str(geom2).split(" ")[-1].rstrip(")"), 16)
if id1 in self.collisionMap:
id = id1
geomn = geom2
elif id2 in self.collisionMap:
id = id2
geomn = geom1
else:
return
odeobject, func = self.collisionMap[id]
func(odeobject, geomn, entry)
#points = entry.getContactPoints()
#body1 = entry.getBody1()
#body2 = entry.getBody2()
# The debug task for performance test
def simulationTask4(self, task):
# Set the force on the body to push it off the ridge
# Add the deltaTime for the task to the accumulator
self.deltaTimeAccumulator +=globalClock.getDt()
if self.deltaTimeAccumulator < self.stepSize:
return task.cont
#self.space.autoCollide() # Setup the contact joints
collisions=[]
while self.deltaTimeAccumulator > self.stepSize:
# Remove a stepSize from the accumulator until
# the accumulated time is less than the stepsize
self.deltaTimeAccumulator -= self.stepSize
# Step the simulation
t1 = time.time()
#for i in range(100):
self.world.quickStep(self.stepSize)
t12 = time.time()
t2 = time.time()
cc = self.space.autoCollide() # Setup the contact joints
t22 = time.time()
self.count += 1
self.totaltime1 += t12 - t1
self.totaltime2 += t22 - t2
#print t2,t1
#cc=self.space.autoCollide() # Setup the contact joints
if self.count > 200:
print "quickStep %f %0.3f" % (self.totaltime1, self.totaltime1 * 1000 / self.count)
print "autocollide %f %0.3f" % (self.totaltime2, self.totaltime2 * 1000 / self.count)
#print "cc %0.1f" % (self.totaltime2 / self.count)
self.count = 0
self.totaltime1 = 0.0
self.totaltime2 = 0.0
#if not self.supportEvent:
# this is obsoleted, only for 1.5.4
# if cc > 0:
# for i in range(cc):
# p = Vec3(self.space.getContactData(i*3+0),self.space.getContactData(i*3+1),self.space.getContactData(i*3+2))
# collisions.append(p)
#cc = self.space.autoCollide() # Setup the contact joints
if True:
for b_ode in self.objectlist:
if isinstance(b_ode, ODEobjbase):
if b_ode.isDynamic():
np = b_ode.realObj
if np != None:
#body = b_ode.body
body = b_ode.geom
#np.setPosQuat(render, body.getPosition(), Quat(body.getQuaternion()))
np.setPosQuat(body.getPosition(), Quat(body.getQuaternion()))
if False and b_ode.mass > 0 and hasattr(b_ode, "motionfriction"):
v = b_ode.body.getLinearVel()
ma = -b_ode.motionfriction * b_ode.mass
b_ode.body.addForce(ma*v[0],ma*v[1],ma*v[2])
v = b_ode.body.getAngularVel()
ma = (1-b_ode.angularfriction)
b_ode.body.setAngularVel(ma*v[0],ma*v[1],ma*v[2])
else:
# a joint ?
b_ode.Render()
#for contact in self.contactgroup:
# print contact
self.contactgroup.empty() # Clear the contact joints
if not self.supportEvent:
self.notify(collisions)
return task.cont
# The task for our simulation
def simulationTask5(self, task):
# Set the force on the body to push it off the ridge
# Add the deltaTime for the task to the accumulator
self.deltaTimeAccumulator +=globalClock.getDt()
if self.deltaTimeAccumulator < self.stepSize:
return task.cont
collisions=[]
while self.deltaTimeAccumulator > self.stepSize:
# Remove a stepSize from the accumulator until
# the accumulated time is less than the stepsize
self.deltaTimeAccumulator -= self.stepSize
# Step the simulation
self.space.autoCollide() # Setup the contact joints
self.world.quickStep(self.stepSize)
for b_ode in self.objectlist:
if isinstance(b_ode, ODEobjbase):
if b_ode.isDynamic():
np = b_ode.realObj
if np != None:
#body = b_ode.body
body = b_ode.geom
np.setPosQuat(body.getPosition(), Quat(body.getQuaternion()))
#if b_ode.mass > 0 and hasattr(b_ode, "motionfriction"):
# v = b_ode.body.getLinearVel()
# ma = -b_ode.motionfriction * b_ode.mass
# b_ode.body.addForce(ma*v[0],ma*v[1],ma*v[2])
# v = b_ode.body.getAngularVel()
# ma = (1-b_ode.angularfriction)
# b_ode.body.setAngularVel(ma*v[0],ma*v[1],ma*v[2])
else:
# a joint ?
b_ode.Render()
#for contact in self.contactgroup:
# print contact
self.contactgroup.empty() # Clear the contact joints
if not self.supportEvent:
self.notify(collisions)
return task.cont
class ODEWorld_AutoHash(ODEWorld_Simple):
def __init__(self):
ODEWorld_Simple.__init__(self, ODEWorld_Simple.HASHSPACE)
| 39.388337 | 161 | 0.565313 |
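simulationTask2 above is the classic fixed-timestep accumulator: frame time is banked and the physics world is stepped in constant stepSize slices, so simulation speed stays independent of the render framerate. A minimal standalone sketch of the pattern (illustrative, not Panda3D-specific):

accumulator = 0.0
STEP = 1.0 / 50.0  # same step size as ODEWorld_Simple uses

def on_frame(dt, step_physics):
    # bank the frame time, then consume it in fixed slices
    global accumulator
    accumulator += dt
    while accumulator >= STEP:
        accumulator -= STEP
        step_physics(STEP)  # e.g. world.quickStep(STEP)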
3df32f16208c9eb761c41976b7f9102e323ebbe5 | 3,331 | py | Python | events/settings.py | asmuratbek/events | bcc045f75a4c46d8a1a444279911f6f73662b9a1 | ["MIT"] | null | null | null | events/settings.py | asmuratbek/events | bcc045f75a4c46d8a1a444279911f6f73662b9a1 | ["MIT"] | null | null | null | events/settings.py | asmuratbek/events | bcc045f75a4c46d8a1a444279911f6f73662b9a1 | ["MIT"] | null | null | null |
"""
Django settings for events project.
Generated by 'django-admin startproject' using Django 2.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'i$4b!(q+lpv7g8wz5x^nb0-x1^6fi+rhu!0kikxtj$d1m!zbkq'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'registration',
'rest_framework',
'markdown',
'factory',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'events.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'events.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'postgres',
'USER': 'postgres',
'HOST': '127.0.0.1',
'PORT': 5432,
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'ru-RU'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
try:
from events.settings_local import *
except ImportError:
pass
| 25.821705 | 91 | 0.68628 |
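The trailing try/except implements the usual local-override pattern: any name defined in events/settings_local.py replaces the defaults above, and the file may simply be absent. An illustrative override file (hypothetical contents, not in the repository):

# events/settings_local.py
DEBUG = False
ALLOWED_HOSTS = ["example.com"]  # assumed production host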
33ccb256c4c1a3a5673f595a8edb0b66c2288df2 | 1,266 | py | Python | dockstream/utils/execute_external/Omega.py | niladell/DockStream | 75f06d24a95699cdc06fe1ea021e213e1d9fa5b3 | ["Apache-2.0"] | 34 | 2021-08-05T06:28:30.000Z | 2022-03-17T02:42:49.000Z | dockstream/utils/execute_external/Omega.py | niladell/DockStream | 75f06d24a95699cdc06fe1ea021e213e1d9fa5b3 | ["Apache-2.0"] | 9 | 2021-08-31T10:35:51.000Z | 2022-02-03T08:57:58.000Z | dockstream/utils/execute_external/Omega.py | niladell/DockStream | 75f06d24a95699cdc06fe1ea021e213e1d9fa5b3 | ["Apache-2.0"] | 10 | 2021-08-12T02:32:11.000Z | 2022-01-19T11:51:33.000Z |
from dockstream.utils.enums.Omega_enums import OmegaExecutablesEnum
from dockstream.utils.execute_external.execute import ExecutorBase
_OE = OmegaExecutablesEnum()
class OmegaExecutor(ExecutorBase):
"""For the execution of the "OMEGA"."""
def __init__(self, prefix_execution=None, binary_location=None):
super().__init__(prefix_execution=prefix_execution, binary_location=binary_location)
def execute(self, command: str, arguments: list, check=True, location=None):
# check, whether a proper executable is provided
if command not in [_OE.OMEGA]:
raise ValueError("Command must be a valid parameter in the internal OMEGA dictionary.")
return super().execute(command=command,
arguments=arguments,
check=check,
location=location)
def is_available(self):
try:
result = self.execute(command=_OE.OMEGA,
arguments=[_OE.HELP_SIMPLE],
check=False)
if _OE.OMEGA_HELP_IDENTIFICATION_STRING in result.stdout:
return True
return False
except Exception as e:
            return False
| 38.363636 | 161 | 0.617694 |
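Typical use of the executor above probes availability before launching jobs. An illustrative sketch (the conda environment name is an assumption):

executor = OmegaExecutor(prefix_execution="conda activate DockStream")
if executor.is_available():
    print("OMEGA binary found and responding")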
491ce8bfe3ba13ff7f971d39b6b3ff347062f6c9 | 430 | py | Python | hackerrank/hkr_concatenate.py | FelixTheC/hackerrank_exercises | 24eedbedebd122c53fd2cb6018cc3535d0d4c6a0 | ["MIT"] | null | null | null | hackerrank/hkr_concatenate.py | FelixTheC/hackerrank_exercises | 24eedbedebd122c53fd2cb6018cc3535d0d4c6a0 | ["MIT"] | null | null | null | hackerrank/hkr_concatenate.py | FelixTheC/hackerrank_exercises | 24eedbedebd122c53fd2cb6018cc3535d0d4c6a0 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@created: 01.12.19
@author: felix
"""
import numpy as np
if __name__ == '__main__':
lists_n = []
lists_m = []
n, m, p = input().split(' ')
for i in range(int(n)):
lists_n.append([int(i) for i in input() if i != ' '])
for i in range(int(m)):
lists_m.append([int(i) for i in input() if i != ' '])
print(np.concatenate((lists_n, lists_m)))
| 22.631579 | 61 | 0.544186 |
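np.concatenate joins the two lists of rows along axis 0 (the default). A small worked example of the shape of the output (illustrative input):

import numpy as np
a = [[1, 2], [3, 4]]
b = [[5, 6]]
print(np.concatenate((a, b)))
# [[1 2]
#  [3 4]
#  [5 6]]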
50e3647699b45179a4bdd0e0ac45cbc61e294cfb | 49,136 | py | Python | env/lib/python3.6/site-packages/scipy/ndimage/filters.py | anthowen/duplify | 846d01c1b21230937fdf0281b0cf8c0b08a8c24e | [
"MIT"
] | 69 | 2019-02-18T12:07:35.000Z | 2022-03-12T10:38:32.000Z | venv/lib/python3.7/site-packages/scipy/ndimage/filters.py | John1001Song/Big-Data-Robo-Adviser | 9444dce96954c546333d5aecc92a06c3bfd19aa5 | [
"MIT"
] | 12 | 2018-12-06T22:06:49.000Z | 2022-02-25T17:40:44.000Z | venv/lib/python3.7/site-packages/scipy/ndimage/filters.py | John1001Song/Big-Data-Robo-Adviser | 9444dce96954c546333d5aecc92a06c3bfd19aa5 | [
"MIT"
] | 28 | 2019-03-22T01:07:13.000Z | 2022-02-21T16:38:27.000Z | # Copyright (C) 2003-2005 Peter J. Verveer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import division, print_function, absolute_import
import warnings
import math
import numpy
from . import _ni_support
from . import _nd_image
from . import _ni_docstrings
from scipy.misc import doccer
from scipy._lib._version import NumpyVersion
__all__ = ['correlate1d', 'convolve1d', 'gaussian_filter1d', 'gaussian_filter',
'prewitt', 'sobel', 'generic_laplace', 'laplace',
'gaussian_laplace', 'generic_gradient_magnitude',
'gaussian_gradient_magnitude', 'correlate', 'convolve',
'uniform_filter1d', 'uniform_filter', 'minimum_filter1d',
'maximum_filter1d', 'minimum_filter', 'maximum_filter',
'rank_filter', 'median_filter', 'percentile_filter',
'generic_filter1d', 'generic_filter']
@_ni_docstrings.docfiller
def correlate1d(input, weights, axis=-1, output=None, mode="reflect",
cval=0.0, origin=0):
"""Calculate a one-dimensional correlation along the given axis.
The lines of the array along the given axis are correlated with the
given weights.
Parameters
----------
%(input)s
weights : array
One-dimensional sequence of numbers.
%(axis)s
%(output)s
%(mode)s
%(cval)s
%(origin)s
Examples
--------
>>> from scipy.ndimage import correlate1d
>>> correlate1d([2, 8, 0, 4, 1, 9, 9, 0], weights=[1, 3])
array([ 8, 26, 8, 12, 7, 28, 36, 9])
"""
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
output = _ni_support._get_output(output, input)
weights = numpy.asarray(weights, dtype=numpy.float64)
if weights.ndim != 1 or weights.shape[0] < 1:
raise RuntimeError('no filter weights given')
if not weights.flags.contiguous:
weights = weights.copy()
axis = _ni_support._check_axis(axis, input.ndim)
if (len(weights) // 2 + origin < 0) or (len(weights) // 2 +
origin > len(weights)):
raise ValueError('invalid origin')
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.correlate1d(input, weights, axis, output, mode, cval,
origin)
return output
@_ni_docstrings.docfiller
def convolve1d(input, weights, axis=-1, output=None, mode="reflect",
cval=0.0, origin=0):
"""Calculate a one-dimensional convolution along the given axis.
The lines of the array along the given axis are convolved with the
given weights.
Parameters
----------
%(input)s
weights : ndarray
One-dimensional sequence of numbers.
%(axis)s
%(output)s
%(mode)s
%(cval)s
%(origin)s
Returns
-------
convolve1d : ndarray
Convolved array with same shape as input
Examples
--------
>>> from scipy.ndimage import convolve1d
>>> convolve1d([2, 8, 0, 4, 1, 9, 9, 0], weights=[1, 3])
array([14, 24, 4, 13, 12, 36, 27, 0])
"""
weights = weights[::-1]
origin = -origin
if not len(weights) & 1:
origin -= 1
return correlate1d(input, weights, axis, output, mode, cval, origin)
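# Note (illustrative, not part of scipy): convolution is correlation with a
# flipped kernel, hence the weights[::-1] and negated origin above. For an
# even-length kernel the flip also shifts the implied centre by one sample,
# which the extra "origin -= 1" compensates for.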
def _gaussian_kernel1d(sigma, order, radius):
"""
Computes a 1D Gaussian convolution kernel.
"""
if order < 0:
raise ValueError('order must be non-negative')
p = numpy.polynomial.Polynomial([0, 0, -0.5 / (sigma * sigma)])
x = numpy.arange(-radius, radius + 1)
phi_x = numpy.exp(p(x), dtype=numpy.double)
phi_x /= phi_x.sum()
if order > 0:
q = numpy.polynomial.Polynomial([1])
p_deriv = p.deriv()
for _ in range(order):
# f(x) = q(x) * phi(x) = q(x) * exp(p(x))
# f'(x) = (q'(x) + q(x) * p'(x)) * phi(x)
q = q.deriv() + q * p_deriv
phi_x *= q(x)
return phi_x
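# Worked example (illustrative, not part of scipy): with order=1 the loop
# runs once, turning q(x) = 1 into q(x) = p'(x) = -x / sigma**2, so the
# returned kernel is proportional to -x/sigma**2 * exp(-x**2/(2*sigma**2)),
# i.e. a first-derivative-of-Gaussian kernel (up to discrete normalization).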
@_ni_docstrings.docfiller
def gaussian_filter1d(input, sigma, axis=-1, order=0, output=None,
mode="reflect", cval=0.0, truncate=4.0):
"""One-dimensional Gaussian filter.
Parameters
----------
%(input)s
sigma : scalar
standard deviation for Gaussian kernel
%(axis)s
order : int, optional
An order of 0 corresponds to convolution with a Gaussian
kernel. A positive order corresponds to convolution with
that derivative of a Gaussian.
%(output)s
%(mode)s
%(cval)s
truncate : float, optional
Truncate the filter at this many standard deviations.
Default is 4.0.
Returns
-------
gaussian_filter1d : ndarray
Examples
--------
>>> from scipy.ndimage import gaussian_filter1d
>>> gaussian_filter1d([1.0, 2.0, 3.0, 4.0, 5.0], 1)
array([ 1.42704095, 2.06782203, 3. , 3.93217797, 4.57295905])
>>> gaussian_filter1d([1.0, 2.0, 3.0, 4.0, 5.0], 4)
array([ 2.91948343, 2.95023502, 3. , 3.04976498, 3.08051657])
>>> import matplotlib.pyplot as plt
>>> np.random.seed(280490)
>>> x = np.random.randn(101).cumsum()
>>> y3 = gaussian_filter1d(x, 3)
>>> y6 = gaussian_filter1d(x, 6)
>>> plt.plot(x, 'k', label='original data')
>>> plt.plot(y3, '--', label='filtered, sigma=3')
>>> plt.plot(y6, ':', label='filtered, sigma=6')
>>> plt.legend()
>>> plt.grid()
>>> plt.show()
"""
sd = float(sigma)
# make the radius of the filter equal to truncate standard deviations
lw = int(truncate * sd + 0.5)
# Since we are calling correlate, not convolve, revert the kernel
weights = _gaussian_kernel1d(sigma, order, lw)[::-1]
return correlate1d(input, weights, axis, output, mode, cval, 0)
@_ni_docstrings.docfiller
def gaussian_filter(input, sigma, order=0, output=None,
mode="reflect", cval=0.0, truncate=4.0):
"""Multidimensional Gaussian filter.
Parameters
----------
%(input)s
sigma : scalar or sequence of scalars
Standard deviation for Gaussian kernel. The standard
deviations of the Gaussian filter are given for each axis as a
sequence, or as a single number, in which case it is equal for
all axes.
order : int or sequence of ints, optional
The order of the filter along each axis is given as a sequence
of integers, or as a single number. An order of 0 corresponds
to convolution with a Gaussian kernel. A positive order
corresponds to convolution with that derivative of a Gaussian.
%(output)s
%(mode_multiple)s
%(cval)s
truncate : float
Truncate the filter at this many standard deviations.
Default is 4.0.
Returns
-------
gaussian_filter : ndarray
Returned array of same shape as `input`.
Notes
-----
The multidimensional filter is implemented as a sequence of
one-dimensional convolution filters. The intermediate arrays are
stored in the same data type as the output. Therefore, for output
types with a limited precision, the results may be imprecise
because intermediate results may be stored with insufficient
precision.
Examples
--------
>>> from scipy.ndimage import gaussian_filter
>>> a = np.arange(50, step=2).reshape((5,5))
>>> a
array([[ 0, 2, 4, 6, 8],
[10, 12, 14, 16, 18],
[20, 22, 24, 26, 28],
[30, 32, 34, 36, 38],
[40, 42, 44, 46, 48]])
>>> gaussian_filter(a, sigma=1)
array([[ 4, 6, 8, 9, 11],
[10, 12, 14, 15, 17],
[20, 22, 24, 25, 27],
[29, 31, 33, 34, 36],
[35, 37, 39, 40, 42]])
>>> from scipy import misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = gaussian_filter(ascent, sigma=5)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
input = numpy.asarray(input)
output = _ni_support._get_output(output, input)
orders = _ni_support._normalize_sequence(order, input.ndim)
sigmas = _ni_support._normalize_sequence(sigma, input.ndim)
modes = _ni_support._normalize_sequence(mode, input.ndim)
axes = list(range(input.ndim))
axes = [(axes[ii], sigmas[ii], orders[ii], modes[ii])
for ii in range(len(axes)) if sigmas[ii] > 1e-15]
if len(axes) > 0:
for axis, sigma, order, mode in axes:
gaussian_filter1d(input, sigma, axis, order, output,
mode, cval, truncate)
input = output
else:
output[...] = input[...]
return output
@_ni_docstrings.docfiller
def prewitt(input, axis=-1, output=None, mode="reflect", cval=0.0):
"""Calculate a Prewitt filter.
Parameters
----------
%(input)s
%(axis)s
%(output)s
%(mode_multiple)s
%(cval)s
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.prewitt(ascent)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
input = numpy.asarray(input)
axis = _ni_support._check_axis(axis, input.ndim)
output = _ni_support._get_output(output, input)
modes = _ni_support._normalize_sequence(mode, input.ndim)
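    # Differentiate with [-1, 0, 1] along `axis`, then smooth with
    # [1, 1, 1] along every other axis: the separable Prewitt operator.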
correlate1d(input, [-1, 0, 1], axis, output, modes[axis], cval, 0)
axes = [ii for ii in range(input.ndim) if ii != axis]
for ii in axes:
correlate1d(output, [1, 1, 1], ii, output, modes[ii], cval, 0,)
return output
@_ni_docstrings.docfiller
def sobel(input, axis=-1, output=None, mode="reflect", cval=0.0):
"""Calculate a Sobel filter.
Parameters
----------
%(input)s
%(axis)s
%(output)s
%(mode_multiple)s
%(cval)s
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.sobel(ascent)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
input = numpy.asarray(input)
axis = _ni_support._check_axis(axis, input.ndim)
output = _ni_support._get_output(output, input)
modes = _ni_support._normalize_sequence(mode, input.ndim)
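    # Same construction as `prewitt`, but smoothing with [1, 2, 1]
    # yields the separable Sobel operator.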
correlate1d(input, [-1, 0, 1], axis, output, modes[axis], cval, 0)
axes = [ii for ii in range(input.ndim) if ii != axis]
for ii in axes:
correlate1d(output, [1, 2, 1], ii, output, modes[ii], cval, 0)
return output
@_ni_docstrings.docfiller
def generic_laplace(input, derivative2, output=None, mode="reflect",
cval=0.0,
extra_arguments=(),
extra_keywords=None):
"""
N-dimensional Laplace filter using a provided second derivative function.
Parameters
----------
%(input)s
derivative2 : callable
Callable with the following signature::
derivative2(input, axis, output, mode, cval,
*extra_arguments, **extra_keywords)
See `extra_arguments`, `extra_keywords` below.
%(output)s
%(mode_multiple)s
%(cval)s
%(extra_keywords)s
%(extra_arguments)s
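    Examples
    --------
    A sketch showing how `laplace` can be expressed through this function
    (illustrative only; `laplace` uses the same second difference):
    >>> from scipy import ndimage
    >>> def d2(input, axis, output, mode, cval):
    ...     return ndimage.correlate1d(input, [1, -2, 1], axis, output,
    ...                                mode, cval, 0)
    >>> a = np.arange(25, dtype=float).reshape(5, 5)
    >>> np.allclose(ndimage.generic_laplace(a, d2), ndimage.laplace(a))
    True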
"""
if extra_keywords is None:
extra_keywords = {}
input = numpy.asarray(input)
output = _ni_support._get_output(output, input)
axes = list(range(input.ndim))
if len(axes) > 0:
modes = _ni_support._normalize_sequence(mode, len(axes))
derivative2(input, axes[0], output, modes[0], cval,
*extra_arguments, **extra_keywords)
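        # Later passes receive `output.dtype` as the output argument, so
        # `derivative2` allocates a fresh array and the running sum held
        # in `output` is not clobbered.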
for ii in range(1, len(axes)):
tmp = derivative2(input, axes[ii], output.dtype, modes[ii], cval,
*extra_arguments, **extra_keywords)
output += tmp
else:
output[...] = input[...]
return output
@_ni_docstrings.docfiller
def laplace(input, output=None, mode="reflect", cval=0.0):
"""N-dimensional Laplace filter based on approximate second derivatives.
Parameters
----------
%(input)s
%(output)s
%(mode_multiple)s
%(cval)s
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.laplace(ascent)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
def derivative2(input, axis, output, mode, cval):
return correlate1d(input, [1, -2, 1], axis, output, mode, cval, 0)
return generic_laplace(input, derivative2, output, mode, cval)
@_ni_docstrings.docfiller
def gaussian_laplace(input, sigma, output=None, mode="reflect",
cval=0.0, **kwargs):
"""Multidimensional Laplace filter using gaussian second derivatives.
Parameters
----------
%(input)s
sigma : scalar or sequence of scalars
The standard deviations of the Gaussian filter are given for
each axis as a sequence, or as a single number, in which case
it is equal for all axes.
%(output)s
%(mode_multiple)s
%(cval)s
Extra keyword arguments will be passed to gaussian_filter().
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> ascent = misc.ascent()
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> result = ndimage.gaussian_laplace(ascent, sigma=1)
>>> ax1.imshow(result)
>>> result = ndimage.gaussian_laplace(ascent, sigma=3)
>>> ax2.imshow(result)
>>> plt.show()
"""
input = numpy.asarray(input)
def derivative2(input, axis, output, mode, cval, sigma, **kwargs):
order = [0] * input.ndim
order[axis] = 2
return gaussian_filter(input, sigma, order, output, mode, cval,
**kwargs)
return generic_laplace(input, derivative2, output, mode, cval,
extra_arguments=(sigma,),
extra_keywords=kwargs)
@_ni_docstrings.docfiller
def generic_gradient_magnitude(input, derivative, output=None,
mode="reflect", cval=0.0,
extra_arguments=(), extra_keywords=None):
"""Gradient magnitude using a provided gradient function.
Parameters
----------
%(input)s
derivative : callable
Callable with the following signature::
derivative(input, axis, output, mode, cval,
*extra_arguments, **extra_keywords)
See `extra_arguments`, `extra_keywords` below.
`derivative` can assume that `input` and `output` are ndarrays.
Note that the output from `derivative` is modified inplace;
be careful to copy important inputs before returning them.
%(output)s
%(mode_multiple)s
%(cval)s
%(extra_keywords)s
%(extra_arguments)s
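    Examples
    --------
    A sketch of a Sobel-based gradient magnitude (illustrative only):
    >>> from scipy import ndimage
    >>> a = np.arange(25, dtype=float).reshape(5, 5)
    >>> mag = ndimage.generic_gradient_magnitude(a, ndimage.sobel)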
"""
if extra_keywords is None:
extra_keywords = {}
input = numpy.asarray(input)
output = _ni_support._get_output(output, input)
axes = list(range(input.ndim))
if len(axes) > 0:
modes = _ni_support._normalize_sequence(mode, len(axes))
derivative(input, axes[0], output, modes[0], cval,
*extra_arguments, **extra_keywords)
numpy.multiply(output, output, output)
for ii in range(1, len(axes)):
tmp = derivative(input, axes[ii], output.dtype, modes[ii], cval,
*extra_arguments, **extra_keywords)
numpy.multiply(tmp, tmp, tmp)
output += tmp
# This allows the sqrt to work with a different default casting
numpy.sqrt(output, output, casting='unsafe')
else:
output[...] = input[...]
return output
@_ni_docstrings.docfiller
def gaussian_gradient_magnitude(input, sigma, output=None,
mode="reflect", cval=0.0, **kwargs):
"""Multidimensional gradient magnitude using Gaussian derivatives.
Parameters
----------
%(input)s
sigma : scalar or sequence of scalars
The standard deviations of the Gaussian filter are given for
each axis as a sequence, or as a single number, in which case
it is equal for all axes..
%(output)s
%(mode_multiple)s
%(cval)s
Extra keyword arguments will be passed to gaussian_filter().
Returns
-------
gaussian_gradient_magnitude : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.gaussian_gradient_magnitude(ascent, sigma=5)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
input = numpy.asarray(input)
def derivative(input, axis, output, mode, cval, sigma, **kwargs):
order = [0] * input.ndim
order[axis] = 1
return gaussian_filter(input, sigma, order, output, mode,
cval, **kwargs)
return generic_gradient_magnitude(input, derivative, output, mode,
cval, extra_arguments=(sigma,),
extra_keywords=kwargs)
def _correlate_or_convolve(input, weights, output, mode, cval, origin,
convolution):
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
origins = _ni_support._normalize_sequence(origin, input.ndim)
weights = numpy.asarray(weights, dtype=numpy.float64)
wshape = [ii for ii in weights.shape if ii > 0]
if len(wshape) != input.ndim:
raise RuntimeError('filter weights array has incorrect shape.')
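    # Convolution is correlation with a mirrored kernel: flip the weights
    # and negate the origins (even-sized axes need one extra shift).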
if convolution:
weights = weights[tuple([slice(None, None, -1)] * weights.ndim)]
for ii in range(len(origins)):
origins[ii] = -origins[ii]
if not weights.shape[ii] & 1:
origins[ii] -= 1
for origin, lenw in zip(origins, wshape):
if (lenw // 2 + origin < 0) or (lenw // 2 + origin > lenw):
raise ValueError('invalid origin')
if not weights.flags.contiguous:
weights = weights.copy()
output = _ni_support._get_output(output, input)
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.correlate(input, weights, output, mode, cval, origins)
return output
@_ni_docstrings.docfiller
def correlate(input, weights, output=None, mode='reflect', cval=0.0,
origin=0):
"""
Multi-dimensional correlation.
The array is correlated with the given kernel.
Parameters
----------
%(input)s
weights : ndarray
array of weights, same number of dimensions as input
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
See Also
--------
convolve : Convolve an image with a kernel.
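    Examples
    --------
    A minimal 1-D illustration (values chosen to be easy to check by
    hand; the default ``mode='reflect'`` extends the edges):
    >>> from scipy.ndimage import correlate
    >>> correlate(np.array([1, 2, 3, 4]), np.array([1, 1]))
    array([2, 3, 5, 7])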
"""
return _correlate_or_convolve(input, weights, output, mode, cval,
origin, False)
@_ni_docstrings.docfiller
def convolve(input, weights, output=None, mode='reflect', cval=0.0,
origin=0):
"""
Multidimensional convolution.
The array is convolved with the given kernel.
Parameters
----------
%(input)s
weights : array_like
Array of weights, same number of dimensions as input
%(output)s
%(mode_multiple)s
cval : scalar, optional
Value to fill past edges of input if `mode` is 'constant'. Default
is 0.0
%(origin_multiple)s
Returns
-------
result : ndarray
The result of convolution of `input` with `weights`.
See Also
--------
correlate : Correlate an image with a kernel.
Notes
-----
Each value in result is :math:`C_i = \\sum_j{I_{i+k-j} W_j}`, where
W is the `weights` kernel,
j is the n-D spatial index over :math:`W`,
I is the `input` and k is the coordinate of the center of
W, specified by `origin` in the input parameters.
Examples
--------
Perhaps the simplest case to understand is ``mode='constant', cval=0.0``,
    because in this case borders (i.e., where the `weights` kernel, centered
    on any one value, extends beyond an edge of `input`) are treated as zeros.
>>> a = np.array([[1, 2, 0, 0],
... [5, 3, 0, 4],
... [0, 0, 0, 7],
... [9, 3, 0, 0]])
>>> k = np.array([[1,1,1],[1,1,0],[1,0,0]])
>>> from scipy import ndimage
>>> ndimage.convolve(a, k, mode='constant', cval=0.0)
array([[11, 10, 7, 4],
[10, 3, 11, 11],
[15, 12, 14, 7],
[12, 3, 7, 0]])
Setting ``cval=1.0`` is equivalent to padding the outer edge of `input`
with 1.0's (and then extracting only the original region of the result).
>>> ndimage.convolve(a, k, mode='constant', cval=1.0)
array([[13, 11, 8, 7],
[11, 3, 11, 14],
[16, 12, 14, 10],
[15, 6, 10, 5]])
With ``mode='reflect'`` (the default), outer values are reflected at the
edge of `input` to fill in missing values.
>>> b = np.array([[2, 0, 0],
... [1, 0, 0],
... [0, 0, 0]])
>>> k = np.array([[0,1,0], [0,1,0], [0,1,0]])
>>> ndimage.convolve(b, k, mode='reflect')
array([[5, 0, 0],
[3, 0, 0],
[1, 0, 0]])
This includes diagonally at the corners.
>>> k = np.array([[1,0,0],[0,1,0],[0,0,1]])
>>> ndimage.convolve(b, k)
array([[4, 2, 0],
[3, 2, 0],
[1, 1, 0]])
    With ``mode='nearest'``, the single value nearest to an edge of
    `input` is repeated as many times as needed to match the overlapping
    `weights`.
>>> c = np.array([[2, 0, 1],
... [1, 0, 0],
... [0, 0, 0]])
>>> k = np.array([[0, 1, 0],
... [0, 1, 0],
... [0, 1, 0],
... [0, 1, 0],
... [0, 1, 0]])
>>> ndimage.convolve(c, k, mode='nearest')
array([[7, 0, 3],
[5, 0, 2],
[3, 0, 1]])
"""
return _correlate_or_convolve(input, weights, output, mode, cval,
origin, True)
@_ni_docstrings.docfiller
def uniform_filter1d(input, size, axis=-1, output=None,
mode="reflect", cval=0.0, origin=0):
"""Calculate a one-dimensional uniform filter along the given axis.
The lines of the array along the given axis are filtered with a
uniform filter of given size.
Parameters
----------
%(input)s
size : int
length of uniform filter
%(axis)s
%(output)s
%(mode)s
%(cval)s
%(origin)s
Examples
--------
>>> from scipy.ndimage import uniform_filter1d
>>> uniform_filter1d([2, 8, 0, 4, 1, 9, 9, 0], size=3)
array([4, 3, 4, 1, 4, 6, 6, 3])
"""
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
axis = _ni_support._check_axis(axis, input.ndim)
if size < 1:
raise RuntimeError('incorrect filter size')
output = _ni_support._get_output(output, input)
if (size // 2 + origin < 0) or (size // 2 + origin >= size):
raise ValueError('invalid origin')
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.uniform_filter1d(input, size, axis, output, mode, cval,
origin)
return output
@_ni_docstrings.docfiller
def uniform_filter(input, size=3, output=None, mode="reflect",
cval=0.0, origin=0):
"""Multi-dimensional uniform filter.
Parameters
----------
%(input)s
size : int or sequence of ints, optional
The sizes of the uniform filter are given for each axis as a
sequence, or as a single number, in which case the size is
equal for all axes.
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
uniform_filter : ndarray
Filtered array. Has the same shape as `input`.
Notes
-----
The multi-dimensional filter is implemented as a sequence of
one-dimensional uniform filters. The intermediate arrays are stored
in the same data type as the output. Therefore, for output types
with a limited precision, the results may be imprecise because
intermediate results may be stored with insufficient precision.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.uniform_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
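    The separable implementation described in the Notes can be checked
    numerically (an illustrative check on a small float array):
    >>> from scipy.ndimage import uniform_filter1d
    >>> b = np.arange(25, dtype=float).reshape(5, 5)
    >>> c = uniform_filter1d(uniform_filter1d(b, 3, axis=0), 3, axis=1)
    >>> np.allclose(ndimage.uniform_filter(b, 3), c)
    True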
"""
input = numpy.asarray(input)
output = _ni_support._get_output(output, input)
sizes = _ni_support._normalize_sequence(size, input.ndim)
origins = _ni_support._normalize_sequence(origin, input.ndim)
modes = _ni_support._normalize_sequence(mode, input.ndim)
axes = list(range(input.ndim))
axes = [(axes[ii], sizes[ii], origins[ii], modes[ii])
for ii in range(len(axes)) if sizes[ii] > 1]
if len(axes) > 0:
for axis, size, origin, mode in axes:
uniform_filter1d(input, int(size), axis, output, mode,
cval, origin)
input = output
else:
output[...] = input[...]
return output
@_ni_docstrings.docfiller
def minimum_filter1d(input, size, axis=-1, output=None,
mode="reflect", cval=0.0, origin=0):
"""Calculate a one-dimensional minimum filter along the given axis.
The lines of the array along the given axis are filtered with a
minimum filter of given size.
Parameters
----------
%(input)s
size : int
length along which to calculate 1D minimum
%(axis)s
%(output)s
%(mode)s
%(cval)s
%(origin)s
Notes
-----
This function implements the MINLIST algorithm [1]_, as described by
Richard Harter [2]_, and has a guaranteed O(n) performance, `n` being
the `input` length, regardless of filter size.
References
----------
.. [1] http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.42.2777
.. [2] http://www.richardhartersworld.com/cri/2001/slidingmin.html
Examples
--------
>>> from scipy.ndimage import minimum_filter1d
>>> minimum_filter1d([2, 8, 0, 4, 1, 9, 9, 0], size=3)
array([2, 0, 0, 0, 1, 1, 0, 0])
"""
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
axis = _ni_support._check_axis(axis, input.ndim)
if size < 1:
raise RuntimeError('incorrect filter size')
output = _ni_support._get_output(output, input)
if (size // 2 + origin < 0) or (size // 2 + origin >= size):
raise ValueError('invalid origin')
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.min_or_max_filter1d(input, size, axis, output, mode, cval,
origin, 1)
return output
@_ni_docstrings.docfiller
def maximum_filter1d(input, size, axis=-1, output=None,
mode="reflect", cval=0.0, origin=0):
"""Calculate a one-dimensional maximum filter along the given axis.
The lines of the array along the given axis are filtered with a
maximum filter of given size.
Parameters
----------
%(input)s
size : int
Length along which to calculate the 1-D maximum.
%(axis)s
%(output)s
%(mode)s
%(cval)s
%(origin)s
Returns
-------
    maximum1d : ndarray
        Maximum-filtered array with same shape as `input`.
Notes
-----
This function implements the MAXLIST algorithm [1]_, as described by
Richard Harter [2]_, and has a guaranteed O(n) performance, `n` being
the `input` length, regardless of filter size.
References
----------
.. [1] http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.42.2777
.. [2] http://www.richardhartersworld.com/cri/2001/slidingmin.html
Examples
--------
>>> from scipy.ndimage import maximum_filter1d
>>> maximum_filter1d([2, 8, 0, 4, 1, 9, 9, 0], size=3)
array([8, 8, 8, 4, 9, 9, 9, 9])
"""
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
axis = _ni_support._check_axis(axis, input.ndim)
if size < 1:
raise RuntimeError('incorrect filter size')
output = _ni_support._get_output(output, input)
if (size // 2 + origin < 0) or (size // 2 + origin >= size):
raise ValueError('invalid origin')
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.min_or_max_filter1d(input, size, axis, output, mode, cval,
origin, 0)
return output
def _min_or_max_filter(input, size, footprint, structure, output, mode,
cval, origin, minimum):
if (size is not None) and (footprint is not None):
warnings.warn("ignoring size because footprint is set", UserWarning, stacklevel=3)
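    # Only a plain size or a completely filled footprint can use the fast
    # separable 1-D filters; a partial footprint or a structuring element
    # (grey-scale erosion/dilation) requires the generic n-D kernel.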
if structure is None:
if footprint is None:
if size is None:
raise RuntimeError("no footprint provided")
separable = True
else:
footprint = numpy.asarray(footprint, dtype=bool)
if not footprint.any():
raise ValueError("All-zero footprint is not supported.")
if footprint.all():
size = footprint.shape
footprint = None
separable = True
else:
separable = False
else:
structure = numpy.asarray(structure, dtype=numpy.float64)
separable = False
if footprint is None:
footprint = numpy.ones(structure.shape, bool)
else:
footprint = numpy.asarray(footprint, dtype=bool)
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
output = _ni_support._get_output(output, input)
origins = _ni_support._normalize_sequence(origin, input.ndim)
if separable:
sizes = _ni_support._normalize_sequence(size, input.ndim)
modes = _ni_support._normalize_sequence(mode, input.ndim)
axes = list(range(input.ndim))
axes = [(axes[ii], sizes[ii], origins[ii], modes[ii])
for ii in range(len(axes)) if sizes[ii] > 1]
if minimum:
filter_ = minimum_filter1d
else:
filter_ = maximum_filter1d
if len(axes) > 0:
for axis, size, origin, mode in axes:
filter_(input, int(size), axis, output, mode, cval, origin)
input = output
else:
output[...] = input[...]
else:
fshape = [ii for ii in footprint.shape if ii > 0]
if len(fshape) != input.ndim:
raise RuntimeError('footprint array has incorrect shape.')
for origin, lenf in zip(origins, fshape):
if (lenf // 2 + origin < 0) or (lenf // 2 + origin >= lenf):
raise ValueError('invalid origin')
if not footprint.flags.contiguous:
footprint = footprint.copy()
if structure is not None:
if len(structure.shape) != input.ndim:
raise RuntimeError('structure array has incorrect shape')
if not structure.flags.contiguous:
structure = structure.copy()
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.min_or_max_filter(input, footprint, structure, output,
mode, cval, origins, minimum)
return output
@_ni_docstrings.docfiller
def minimum_filter(input, size=None, footprint=None, output=None,
mode="reflect", cval=0.0, origin=0):
"""Calculate a multi-dimensional minimum filter.
Parameters
----------
%(input)s
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
minimum_filter : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.minimum_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
return _min_or_max_filter(input, size, footprint, None, output, mode,
cval, origin, 1)
@_ni_docstrings.docfiller
def maximum_filter(input, size=None, footprint=None, output=None,
mode="reflect", cval=0.0, origin=0):
"""Calculate a multi-dimensional maximum filter.
Parameters
----------
%(input)s
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
maximum_filter : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.maximum_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
return _min_or_max_filter(input, size, footprint, None, output, mode,
cval, origin, 0)
@_ni_docstrings.docfiller
def _rank_filter(input, rank, size=None, footprint=None, output=None,
mode="reflect", cval=0.0, origin=0, operation='rank'):
if (size is not None) and (footprint is not None):
warnings.warn("ignoring size because footprint is set", UserWarning, stacklevel=3)
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
origins = _ni_support._normalize_sequence(origin, input.ndim)
if footprint is None:
if size is None:
raise RuntimeError("no footprint or filter size provided")
sizes = _ni_support._normalize_sequence(size, input.ndim)
footprint = numpy.ones(sizes, dtype=bool)
else:
footprint = numpy.asarray(footprint, dtype=bool)
fshape = [ii for ii in footprint.shape if ii > 0]
if len(fshape) != input.ndim:
raise RuntimeError('filter footprint array has incorrect shape.')
for origin, lenf in zip(origins, fshape):
if (lenf // 2 + origin < 0) or (lenf // 2 + origin >= lenf):
raise ValueError('invalid origin')
if not footprint.flags.contiguous:
footprint = footprint.copy()
filter_size = numpy.where(footprint, 1, 0).sum()
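    # Translate the operation into a rank within the sorted footprint
    # values: the median is the middle element, percentiles scale with
    # the footprint size, and negative ranks count back from the largest.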
if operation == 'median':
rank = filter_size // 2
elif operation == 'percentile':
percentile = rank
if percentile < 0.0:
percentile += 100.0
if percentile < 0 or percentile > 100:
raise RuntimeError('invalid percentile')
if percentile == 100.0:
rank = filter_size - 1
else:
rank = int(float(filter_size) * percentile / 100.0)
if rank < 0:
rank += filter_size
if rank < 0 or rank >= filter_size:
raise RuntimeError('rank not within filter footprint size')
if rank == 0:
return minimum_filter(input, None, footprint, output, mode, cval,
origins)
elif rank == filter_size - 1:
return maximum_filter(input, None, footprint, output, mode, cval,
origins)
else:
output = _ni_support._get_output(output, input)
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.rank_filter(input, rank, footprint, output, mode, cval,
origins)
return output
@_ni_docstrings.docfiller
def rank_filter(input, rank, size=None, footprint=None, output=None,
mode="reflect", cval=0.0, origin=0):
"""Calculate a multi-dimensional rank filter.
Parameters
----------
%(input)s
rank : int
        The rank parameter may be less than zero, i.e., rank = -1
        indicates the largest element.
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
rank_filter : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.rank_filter(ascent, rank=42, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
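    A 1-D check of the negative-rank convention (illustrative values;
    ``rank=-1`` selects the footprint maximum):
    >>> ndimage.rank_filter([2, 8, 0, 4, 1, 9], rank=-1, size=3)
    array([8, 8, 8, 4, 9, 9])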
"""
return _rank_filter(input, rank, size, footprint, output, mode, cval,
origin, 'rank')
@_ni_docstrings.docfiller
def median_filter(input, size=None, footprint=None, output=None,
mode="reflect", cval=0.0, origin=0):
"""
Calculate a multidimensional median filter.
Parameters
----------
%(input)s
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
median_filter : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.median_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
return _rank_filter(input, 0, size, footprint, output, mode, cval,
origin, 'median')
@_ni_docstrings.docfiller
def percentile_filter(input, percentile, size=None, footprint=None,
output=None, mode="reflect", cval=0.0, origin=0):
"""Calculate a multi-dimensional percentile filter.
Parameters
----------
%(input)s
percentile : scalar
        The percentile parameter may be less than zero, i.e.,
        percentile = -20 equals percentile = 80.
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
percentile_filter : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.percentile_filter(ascent, percentile=20, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
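    The negative-percentile convention can be checked directly
    (illustrative; -20 maps to the 80th percentile):
    >>> lo = ndimage.percentile_filter(ascent, percentile=-20, size=20)
    >>> hi = ndimage.percentile_filter(ascent, percentile=80, size=20)
    >>> (lo == hi).all()
    True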
"""
return _rank_filter(input, percentile, size, footprint, output, mode,
cval, origin, 'percentile')
@_ni_docstrings.docfiller
def generic_filter1d(input, function, filter_size, axis=-1,
output=None, mode="reflect", cval=0.0, origin=0,
extra_arguments=(), extra_keywords=None):
"""Calculate a one-dimensional filter along the given axis.
`generic_filter1d` iterates over the lines of the array, calling the
    given function at each line. The arguments passed to the function are
    the input line and the output line. The input and output lines are 1D
double arrays. The input line is extended appropriately according
to the filter size and origin. The output line must be modified
in-place with the result.
Parameters
----------
%(input)s
function : {callable, scipy.LowLevelCallable}
Function to apply along given axis.
filter_size : scalar
Length of the filter.
%(axis)s
%(output)s
%(mode)s
%(cval)s
%(origin)s
%(extra_arguments)s
%(extra_keywords)s
Notes
-----
This function also accepts low-level callback functions with one of
the following signatures and wrapped in `scipy.LowLevelCallable`:
.. code:: c
int function(double *input_line, npy_intp input_length,
double *output_line, npy_intp output_length,
void *user_data)
int function(double *input_line, intptr_t input_length,
double *output_line, intptr_t output_length,
void *user_data)
The calling function iterates over the lines of the input and output
arrays, calling the callback function at each line. The current line
is extended according to the border conditions set by the calling
function, and the result is copied into the array that is passed
through ``input_line``. The length of the input line (after extension)
is passed through ``input_length``. The callback function should apply
the filter and store the result in the array passed through
``output_line``. The length of the output line is passed through
``output_length``. ``user_data`` is the data pointer provided
to `scipy.LowLevelCallable` as-is.
The callback function must return an integer error status that is zero
if something went wrong and one otherwise. If an error occurs, you should
    normally set the Python error status with an informative message
before returning, otherwise a default error message is set by the
calling function.
In addition, some other low-level function pointer specifications
are accepted, but these are for backward compatibility only and should
not be used in new code.
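    Examples
    --------
    A minimal pure-Python callback implementing a length-3 moving average
    (a sketch; the low-level callbacks above do the same thing faster):
    >>> from scipy import ndimage
    >>> def mean3(in_line, out_line):
    ...     out_line[...] = (in_line[:-2] + in_line[1:-1] + in_line[2:]) / 3
    >>> x = np.array([2., 8., 0., 4., 1.])
    >>> out = ndimage.generic_filter1d(x, mean3, filter_size=3)
    >>> np.allclose(out, ndimage.uniform_filter1d(x, size=3))
    True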
"""
if extra_keywords is None:
extra_keywords = {}
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
output = _ni_support._get_output(output, input)
if filter_size < 1:
raise RuntimeError('invalid filter size')
axis = _ni_support._check_axis(axis, input.ndim)
if (filter_size // 2 + origin < 0) or (filter_size // 2 + origin >=
filter_size):
raise ValueError('invalid origin')
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.generic_filter1d(input, function, filter_size, axis, output,
mode, cval, origin, extra_arguments,
extra_keywords)
return output
@_ni_docstrings.docfiller
def generic_filter(input, function, size=None, footprint=None,
output=None, mode="reflect", cval=0.0, origin=0,
extra_arguments=(), extra_keywords=None):
"""Calculate a multi-dimensional filter using the given function.
At each element the provided function is called. The input values
within the filter footprint at that element are passed to the function
as a 1D array of double values.
Parameters
----------
%(input)s
function : {callable, scipy.LowLevelCallable}
Function to apply at each element.
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
%(extra_arguments)s
%(extra_keywords)s
Notes
-----
This function also accepts low-level callback functions with one of
the following signatures and wrapped in `scipy.LowLevelCallable`:
.. code:: c
int callback(double *buffer, npy_intp filter_size,
double *return_value, void *user_data)
int callback(double *buffer, intptr_t filter_size,
double *return_value, void *user_data)
The calling function iterates over the elements of the input and
output arrays, calling the callback function at each element. The
elements within the footprint of the filter at the current element are
passed through the ``buffer`` parameter, and the number of elements
within the footprint through ``filter_size``. The calculated value is
returned in ``return_value``. ``user_data`` is the data pointer provided
to `scipy.LowLevelCallable` as-is.
The callback function must return an integer error status that is zero
if something went wrong and one otherwise. If an error occurs, you should
    normally set the Python error status with an informative message
before returning, otherwise a default error message is set by the
calling function.
In addition, some other low-level function pointer specifications
are accepted, but these are for backward compatibility only and should
not be used in new code.
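    Examples
    --------
    A pure-Python callable receives the footprint values as a 1-D array;
    with `numpy.amax` this reproduces `maximum_filter` (illustrative only):
    >>> from scipy import ndimage
    >>> a = np.array([[2., 8., 0.], [4., 1., 9.]])
    >>> out = ndimage.generic_filter(a, np.amax, size=2)
    >>> np.allclose(out, ndimage.maximum_filter(a, size=2))
    True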
"""
if (size is not None) and (footprint is not None):
warnings.warn("ignoring size because footprint is set", UserWarning, stacklevel=2)
if extra_keywords is None:
extra_keywords = {}
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
origins = _ni_support._normalize_sequence(origin, input.ndim)
if footprint is None:
if size is None:
raise RuntimeError("no footprint or filter size provided")
sizes = _ni_support._normalize_sequence(size, input.ndim)
footprint = numpy.ones(sizes, dtype=bool)
else:
footprint = numpy.asarray(footprint, dtype=bool)
fshape = [ii for ii in footprint.shape if ii > 0]
if len(fshape) != input.ndim:
raise RuntimeError('filter footprint array has incorrect shape.')
for origin, lenf in zip(origins, fshape):
if (lenf // 2 + origin < 0) or (lenf // 2 + origin >= lenf):
raise ValueError('invalid origin')
if not footprint.flags.contiguous:
footprint = footprint.copy()
output = _ni_support._get_output(output, input)
mode = _ni_support._extend_mode_to_code(mode)
_nd_image.generic_filter(input, function, footprint, output, mode,
cval, origins, extra_arguments, extra_keywords)
return output
| 34.312849 | 90 | 0.60998 |
39ad58763e4b5d5cacf4a591118e5f0bc2da982e | 24,501 | py | Python | sdk/python/pulumi_gcp/compute/get_instance.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_gcp/compute/get_instance.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_gcp/compute/get_instance.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetInstanceResult',
'AwaitableGetInstanceResult',
'get_instance',
'get_instance_output',
]
@pulumi.output_type
class GetInstanceResult:
"""
A collection of values returned by getInstance.
"""
def __init__(__self__, advanced_machine_features=None, allow_stopping_for_update=None, attached_disks=None, boot_disks=None, can_ip_forward=None, confidential_instance_configs=None, cpu_platform=None, current_status=None, deletion_protection=None, description=None, desired_status=None, enable_display=None, guest_accelerators=None, hostname=None, id=None, instance_id=None, label_fingerprint=None, labels=None, machine_type=None, metadata=None, metadata_fingerprint=None, metadata_startup_script=None, min_cpu_platform=None, name=None, network_interfaces=None, network_performance_configs=None, project=None, reservation_affinities=None, resource_policies=None, schedulings=None, scratch_disks=None, self_link=None, service_accounts=None, shielded_instance_configs=None, tags=None, tags_fingerprint=None, zone=None):
if advanced_machine_features and not isinstance(advanced_machine_features, list):
raise TypeError("Expected argument 'advanced_machine_features' to be a list")
pulumi.set(__self__, "advanced_machine_features", advanced_machine_features)
if allow_stopping_for_update and not isinstance(allow_stopping_for_update, bool):
raise TypeError("Expected argument 'allow_stopping_for_update' to be a bool")
pulumi.set(__self__, "allow_stopping_for_update", allow_stopping_for_update)
if attached_disks and not isinstance(attached_disks, list):
raise TypeError("Expected argument 'attached_disks' to be a list")
pulumi.set(__self__, "attached_disks", attached_disks)
if boot_disks and not isinstance(boot_disks, list):
raise TypeError("Expected argument 'boot_disks' to be a list")
pulumi.set(__self__, "boot_disks", boot_disks)
if can_ip_forward and not isinstance(can_ip_forward, bool):
raise TypeError("Expected argument 'can_ip_forward' to be a bool")
pulumi.set(__self__, "can_ip_forward", can_ip_forward)
if confidential_instance_configs and not isinstance(confidential_instance_configs, list):
raise TypeError("Expected argument 'confidential_instance_configs' to be a list")
pulumi.set(__self__, "confidential_instance_configs", confidential_instance_configs)
if cpu_platform and not isinstance(cpu_platform, str):
raise TypeError("Expected argument 'cpu_platform' to be a str")
pulumi.set(__self__, "cpu_platform", cpu_platform)
if current_status and not isinstance(current_status, str):
raise TypeError("Expected argument 'current_status' to be a str")
pulumi.set(__self__, "current_status", current_status)
if deletion_protection and not isinstance(deletion_protection, bool):
raise TypeError("Expected argument 'deletion_protection' to be a bool")
pulumi.set(__self__, "deletion_protection", deletion_protection)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if desired_status and not isinstance(desired_status, str):
raise TypeError("Expected argument 'desired_status' to be a str")
pulumi.set(__self__, "desired_status", desired_status)
if enable_display and not isinstance(enable_display, bool):
raise TypeError("Expected argument 'enable_display' to be a bool")
pulumi.set(__self__, "enable_display", enable_display)
if guest_accelerators and not isinstance(guest_accelerators, list):
raise TypeError("Expected argument 'guest_accelerators' to be a list")
pulumi.set(__self__, "guest_accelerators", guest_accelerators)
if hostname and not isinstance(hostname, str):
raise TypeError("Expected argument 'hostname' to be a str")
pulumi.set(__self__, "hostname", hostname)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if instance_id and not isinstance(instance_id, str):
raise TypeError("Expected argument 'instance_id' to be a str")
pulumi.set(__self__, "instance_id", instance_id)
if label_fingerprint and not isinstance(label_fingerprint, str):
raise TypeError("Expected argument 'label_fingerprint' to be a str")
pulumi.set(__self__, "label_fingerprint", label_fingerprint)
if labels and not isinstance(labels, dict):
raise TypeError("Expected argument 'labels' to be a dict")
pulumi.set(__self__, "labels", labels)
if machine_type and not isinstance(machine_type, str):
raise TypeError("Expected argument 'machine_type' to be a str")
pulumi.set(__self__, "machine_type", machine_type)
if metadata and not isinstance(metadata, dict):
raise TypeError("Expected argument 'metadata' to be a dict")
pulumi.set(__self__, "metadata", metadata)
if metadata_fingerprint and not isinstance(metadata_fingerprint, str):
raise TypeError("Expected argument 'metadata_fingerprint' to be a str")
pulumi.set(__self__, "metadata_fingerprint", metadata_fingerprint)
if metadata_startup_script and not isinstance(metadata_startup_script, str):
raise TypeError("Expected argument 'metadata_startup_script' to be a str")
pulumi.set(__self__, "metadata_startup_script", metadata_startup_script)
if min_cpu_platform and not isinstance(min_cpu_platform, str):
raise TypeError("Expected argument 'min_cpu_platform' to be a str")
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if network_interfaces and not isinstance(network_interfaces, list):
raise TypeError("Expected argument 'network_interfaces' to be a list")
pulumi.set(__self__, "network_interfaces", network_interfaces)
if network_performance_configs and not isinstance(network_performance_configs, list):
raise TypeError("Expected argument 'network_performance_configs' to be a list")
pulumi.set(__self__, "network_performance_configs", network_performance_configs)
if project and not isinstance(project, str):
raise TypeError("Expected argument 'project' to be a str")
pulumi.set(__self__, "project", project)
if reservation_affinities and not isinstance(reservation_affinities, list):
raise TypeError("Expected argument 'reservation_affinities' to be a list")
pulumi.set(__self__, "reservation_affinities", reservation_affinities)
if resource_policies and not isinstance(resource_policies, list):
raise TypeError("Expected argument 'resource_policies' to be a list")
pulumi.set(__self__, "resource_policies", resource_policies)
if schedulings and not isinstance(schedulings, list):
raise TypeError("Expected argument 'schedulings' to be a list")
pulumi.set(__self__, "schedulings", schedulings)
if scratch_disks and not isinstance(scratch_disks, list):
raise TypeError("Expected argument 'scratch_disks' to be a list")
pulumi.set(__self__, "scratch_disks", scratch_disks)
if self_link and not isinstance(self_link, str):
raise TypeError("Expected argument 'self_link' to be a str")
pulumi.set(__self__, "self_link", self_link)
if service_accounts and not isinstance(service_accounts, list):
raise TypeError("Expected argument 'service_accounts' to be a list")
pulumi.set(__self__, "service_accounts", service_accounts)
if shielded_instance_configs and not isinstance(shielded_instance_configs, list):
raise TypeError("Expected argument 'shielded_instance_configs' to be a list")
pulumi.set(__self__, "shielded_instance_configs", shielded_instance_configs)
if tags and not isinstance(tags, list):
raise TypeError("Expected argument 'tags' to be a list")
pulumi.set(__self__, "tags", tags)
if tags_fingerprint and not isinstance(tags_fingerprint, str):
raise TypeError("Expected argument 'tags_fingerprint' to be a str")
pulumi.set(__self__, "tags_fingerprint", tags_fingerprint)
if zone and not isinstance(zone, str):
raise TypeError("Expected argument 'zone' to be a str")
pulumi.set(__self__, "zone", zone)
@property
@pulumi.getter(name="advancedMachineFeatures")
def advanced_machine_features(self) -> Sequence['outputs.GetInstanceAdvancedMachineFeatureResult']:
return pulumi.get(self, "advanced_machine_features")
@property
@pulumi.getter(name="allowStoppingForUpdate")
def allow_stopping_for_update(self) -> bool:
return pulumi.get(self, "allow_stopping_for_update")
@property
@pulumi.getter(name="attachedDisks")
def attached_disks(self) -> Sequence['outputs.GetInstanceAttachedDiskResult']:
"""
List of disks attached to the instance. Structure is documented below.
"""
return pulumi.get(self, "attached_disks")
@property
@pulumi.getter(name="bootDisks")
def boot_disks(self) -> Sequence['outputs.GetInstanceBootDiskResult']:
"""
The boot disk for the instance. Structure is documented below.
"""
return pulumi.get(self, "boot_disks")
@property
@pulumi.getter(name="canIpForward")
def can_ip_forward(self) -> bool:
"""
Whether sending and receiving of packets with non-matching source or destination IPs is allowed.
"""
return pulumi.get(self, "can_ip_forward")
@property
@pulumi.getter(name="confidentialInstanceConfigs")
def confidential_instance_configs(self) -> Sequence['outputs.GetInstanceConfidentialInstanceConfigResult']:
return pulumi.get(self, "confidential_instance_configs")
@property
@pulumi.getter(name="cpuPlatform")
def cpu_platform(self) -> str:
"""
The CPU platform used by this instance.
"""
return pulumi.get(self, "cpu_platform")
@property
@pulumi.getter(name="currentStatus")
def current_status(self) -> str:
return pulumi.get(self, "current_status")
@property
@pulumi.getter(name="deletionProtection")
def deletion_protection(self) -> bool:
"""
Whether deletion protection is enabled on this instance.
"""
return pulumi.get(self, "deletion_protection")
@property
@pulumi.getter
def description(self) -> str:
"""
A brief description of the resource.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="desiredStatus")
def desired_status(self) -> str:
return pulumi.get(self, "desired_status")
@property
@pulumi.getter(name="enableDisplay")
def enable_display(self) -> bool:
return pulumi.get(self, "enable_display")
@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> Sequence['outputs.GetInstanceGuestAcceleratorResult']:
"""
List of the type and count of accelerator cards attached to the instance. Structure is documented below.
"""
return pulumi.get(self, "guest_accelerators")
@property
@pulumi.getter
def hostname(self) -> str:
return pulumi.get(self, "hostname")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="instanceId")
def instance_id(self) -> str:
"""
The server-assigned unique identifier of this instance.
"""
return pulumi.get(self, "instance_id")
@property
@pulumi.getter(name="labelFingerprint")
def label_fingerprint(self) -> str:
"""
The unique fingerprint of the labels.
"""
return pulumi.get(self, "label_fingerprint")
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
"""
A set of key/value label pairs assigned to the instance.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> str:
"""
The machine type to create.
"""
return pulumi.get(self, "machine_type")
@property
@pulumi.getter
def metadata(self) -> Mapping[str, str]:
"""
Metadata key/value pairs made available within the instance.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="metadataFingerprint")
def metadata_fingerprint(self) -> str:
"""
The unique fingerprint of the metadata.
"""
return pulumi.get(self, "metadata_fingerprint")
@property
@pulumi.getter(name="metadataStartupScript")
def metadata_startup_script(self) -> str:
return pulumi.get(self, "metadata_startup_script")
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> str:
"""
The minimum CPU platform specified for the VM instance.
"""
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter
def name(self) -> Optional[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="networkInterfaces")
def network_interfaces(self) -> Sequence['outputs.GetInstanceNetworkInterfaceResult']:
"""
The networks attached to the instance. Structure is documented below.
"""
return pulumi.get(self, "network_interfaces")
@property
@pulumi.getter(name="networkPerformanceConfigs")
def network_performance_configs(self) -> Sequence['outputs.GetInstanceNetworkPerformanceConfigResult']:
"""
The network performance configuration setting for the instance, if set. Structure is documented below.
"""
return pulumi.get(self, "network_performance_configs")
@property
@pulumi.getter
def project(self) -> Optional[str]:
return pulumi.get(self, "project")
@property
@pulumi.getter(name="reservationAffinities")
def reservation_affinities(self) -> Sequence['outputs.GetInstanceReservationAffinityResult']:
return pulumi.get(self, "reservation_affinities")
@property
@pulumi.getter(name="resourcePolicies")
def resource_policies(self) -> Sequence[str]:
return pulumi.get(self, "resource_policies")
@property
@pulumi.getter
def schedulings(self) -> Sequence['outputs.GetInstanceSchedulingResult']:
"""
The scheduling strategy being used by the instance.
"""
return pulumi.get(self, "schedulings")
@property
@pulumi.getter(name="scratchDisks")
def scratch_disks(self) -> Sequence['outputs.GetInstanceScratchDiskResult']:
"""
The scratch disks attached to the instance. Structure is documented below.
"""
return pulumi.get(self, "scratch_disks")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> Optional[str]:
"""
The URI of the created resource.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter(name="serviceAccounts")
def service_accounts(self) -> Sequence['outputs.GetInstanceServiceAccountResult']:
"""
The service account to attach to the instance. Structure is documented below.
"""
return pulumi.get(self, "service_accounts")
@property
@pulumi.getter(name="shieldedInstanceConfigs")
def shielded_instance_configs(self) -> Sequence['outputs.GetInstanceShieldedInstanceConfigResult']:
"""
The shielded vm config being used by the instance. Structure is documented below.
"""
return pulumi.get(self, "shielded_instance_configs")
@property
@pulumi.getter
def tags(self) -> Sequence[str]:
"""
The list of tags attached to the instance.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="tagsFingerprint")
def tags_fingerprint(self) -> str:
"""
The unique fingerprint of the tags.
"""
return pulumi.get(self, "tags_fingerprint")
@property
@pulumi.getter
def zone(self) -> Optional[str]:
return pulumi.get(self, "zone")
class AwaitableGetInstanceResult(GetInstanceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetInstanceResult(
advanced_machine_features=self.advanced_machine_features,
allow_stopping_for_update=self.allow_stopping_for_update,
attached_disks=self.attached_disks,
boot_disks=self.boot_disks,
can_ip_forward=self.can_ip_forward,
confidential_instance_configs=self.confidential_instance_configs,
cpu_platform=self.cpu_platform,
current_status=self.current_status,
deletion_protection=self.deletion_protection,
description=self.description,
desired_status=self.desired_status,
enable_display=self.enable_display,
guest_accelerators=self.guest_accelerators,
hostname=self.hostname,
id=self.id,
instance_id=self.instance_id,
label_fingerprint=self.label_fingerprint,
labels=self.labels,
machine_type=self.machine_type,
metadata=self.metadata,
metadata_fingerprint=self.metadata_fingerprint,
metadata_startup_script=self.metadata_startup_script,
min_cpu_platform=self.min_cpu_platform,
name=self.name,
network_interfaces=self.network_interfaces,
network_performance_configs=self.network_performance_configs,
project=self.project,
reservation_affinities=self.reservation_affinities,
resource_policies=self.resource_policies,
schedulings=self.schedulings,
scratch_disks=self.scratch_disks,
self_link=self.self_link,
service_accounts=self.service_accounts,
shielded_instance_configs=self.shielded_instance_configs,
tags=self.tags,
tags_fingerprint=self.tags_fingerprint,
zone=self.zone)
def get_instance(name: Optional[str] = None,
project: Optional[str] = None,
self_link: Optional[str] = None,
zone: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetInstanceResult:
"""
Get information about a VM instance resource within GCE. For more information see
[the official documentation](https://cloud.google.com/compute/docs/instances)
and
[API](https://cloud.google.com/compute/docs/reference/latest/instances).
## Example Usage
```python
import pulumi
import pulumi_gcp as gcp
appserver = gcp.compute.get_instance(name="primary-application-server",
zone="us-central1-a")
```
:param str name: The name of the instance. One of `name` or `self_link` must be provided.
:param str project: The ID of the project in which the resource belongs.
If `self_link` is provided, this value is ignored. If neither `self_link`
nor `project` are provided, the provider project is used.
:param str self_link: The self link of the instance. One of `name` or `self_link` must be provided.
:param str zone: The zone of the instance. If `self_link` is provided, this
value is ignored. If neither `self_link` nor `zone` are provided, the
provider zone is used.
"""
__args__ = dict()
__args__['name'] = name
__args__['project'] = project
__args__['selfLink'] = self_link
__args__['zone'] = zone
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('gcp:compute/getInstance:getInstance', __args__, opts=opts, typ=GetInstanceResult).value
return AwaitableGetInstanceResult(
advanced_machine_features=__ret__.advanced_machine_features,
allow_stopping_for_update=__ret__.allow_stopping_for_update,
attached_disks=__ret__.attached_disks,
boot_disks=__ret__.boot_disks,
can_ip_forward=__ret__.can_ip_forward,
confidential_instance_configs=__ret__.confidential_instance_configs,
cpu_platform=__ret__.cpu_platform,
current_status=__ret__.current_status,
deletion_protection=__ret__.deletion_protection,
description=__ret__.description,
desired_status=__ret__.desired_status,
enable_display=__ret__.enable_display,
guest_accelerators=__ret__.guest_accelerators,
hostname=__ret__.hostname,
id=__ret__.id,
instance_id=__ret__.instance_id,
label_fingerprint=__ret__.label_fingerprint,
labels=__ret__.labels,
machine_type=__ret__.machine_type,
metadata=__ret__.metadata,
metadata_fingerprint=__ret__.metadata_fingerprint,
metadata_startup_script=__ret__.metadata_startup_script,
min_cpu_platform=__ret__.min_cpu_platform,
name=__ret__.name,
network_interfaces=__ret__.network_interfaces,
network_performance_configs=__ret__.network_performance_configs,
project=__ret__.project,
reservation_affinities=__ret__.reservation_affinities,
resource_policies=__ret__.resource_policies,
schedulings=__ret__.schedulings,
scratch_disks=__ret__.scratch_disks,
self_link=__ret__.self_link,
service_accounts=__ret__.service_accounts,
shielded_instance_configs=__ret__.shielded_instance_configs,
tags=__ret__.tags,
tags_fingerprint=__ret__.tags_fingerprint,
zone=__ret__.zone)
@_utilities.lift_output_func(get_instance)
def get_instance_output(name: Optional[pulumi.Input[Optional[str]]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
self_link: Optional[pulumi.Input[Optional[str]]] = None,
zone: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetInstanceResult]:
"""
Get information about a VM instance resource within GCE. For more information see
[the official documentation](https://cloud.google.com/compute/docs/instances)
and
[API](https://cloud.google.com/compute/docs/reference/latest/instances).
## Example Usage
```python
import pulumi
import pulumi_gcp as gcp
appserver = gcp.compute.get_instance(name="primary-application-server",
zone="us-central1-a")
```
:param str name: The name of the instance. One of `name` or `self_link` must be provided.
:param str project: The ID of the project in which the resource belongs.
If `self_link` is provided, this value is ignored. If neither `self_link`
nor `project` are provided, the provider project is used.
:param str self_link: The self link of the instance. One of `name` or `self_link` must be provided.
:param str zone: The zone of the instance. If `self_link` is provided, this
value is ignored. If neither `self_link` nor `zone` are provided, the
provider zone is used.
"""
...
| 44.145946 | 821 | 0.685564 |
4db23badd3e491b966c6c1eb50f854e897a2a92a | 644 | py | Python | cybereason-threat-intel/connector.py | ftnt-cse/connector-cybereason-threat-intel | 912ff3823db51126672c11c3ecd9f5b82738fa5b | [
"MIT"
] | null | null | null | cybereason-threat-intel/connector.py | ftnt-cse/connector-cybereason-threat-intel | 912ff3823db51126672c11c3ecd9f5b82738fa5b | [
"MIT"
] | null | null | null | cybereason-threat-intel/connector.py | ftnt-cse/connector-cybereason-threat-intel | 912ff3823db51126672c11c3ecd9f5b82738fa5b | [
"MIT"
] | null | null | null | """ Connector """
from connectors.core.connector import Connector, get_logger, ConnectorError
from .operations import _run_operation, _check_health
logger = get_logger('Cybereason-Threat-Intel')
class CybereasonTI(Connector):
def execute(self, config, operation, params, **kwargs):
try:
params.update({"operation":operation})
return _run_operation(config, params)
except Exception as err:
logger.error('CybereasonTI:{}'.format(err))
raise ConnectorError('CybereasonTI:{}'.format(err))
def check_health(self, config):
return _check_health(config) | 33.894737 | 75 | 0.67236 |
775dba1f3db39f0ec1e60363bac0e139b77fdfca | 1,216 | py | Python | cupyx/scipy/special/bessel.py | svlandeg/cupy | 484e007d5bf58a0445af2f6e7aa3fdfe0fcc2363 | [
"MIT"
] | 2 | 2020-02-28T09:27:58.000Z | 2020-10-12T07:10:24.000Z | cupyx/scipy/special/bessel.py | svlandeg/cupy | 484e007d5bf58a0445af2f6e7aa3fdfe0fcc2363 | [
"MIT"
] | 4 | 2020-09-15T01:49:38.000Z | 2020-12-11T03:52:35.000Z | cupyx/scipy/special/bessel.py | svlandeg/cupy | 484e007d5bf58a0445af2f6e7aa3fdfe0fcc2363 | [
"MIT"
] | 2 | 2018-07-21T13:44:26.000Z | 2019-03-25T11:30:16.000Z | from cupy import core
j0 = core.create_ufunc(
'cupyx_scipy_j0', ('f->f', 'd->d'),
'out0 = j0(in0)',
doc='''Bessel function of the first kind of order 0.
.. seealso:: :meth:`scipy.special.j0`
''')
j1 = core.create_ufunc(
'cupyx_scipy_j1', ('f->f', 'd->d'),
'out0 = j1(in0)',
doc='''Bessel function of the first kind of order 1.
.. seealso:: :meth:`scipy.special.j1`
''')
y0 = core.create_ufunc(
'cupyx_scipy_y0', ('f->f', 'd->d'),
'out0 = y0(in0)',
doc='''Bessel function of the second kind of order 0.
.. seealso:: :meth:`scipy.special.y0`
''')
y1 = core.create_ufunc(
'cupyx_scipy_y1', ('f->f', 'd->d'),
'out0 = y1(in0)',
doc='''Bessel function of the second kind of order 1.
.. seealso:: :meth:`scipy.special.y1`
''')
i0 = core.create_ufunc(
'cupyx_scipy_i0', ('f->f', 'd->d'),
'out0 = cyl_bessel_i0(in0)',
doc='''Modified Bessel function of order 0.
.. seealso:: :meth:`scipy.special.i0`
''')
i1 = core.create_ufunc(
'cupyx_scipy_i1', ('f->f', 'd->d'),
'out0 = cyl_bessel_i1(in0)',
doc='''Modified Bessel function of order 1.
.. seealso:: :meth:`scipy.special.i1`
''')
| 19.612903 | 57 | 0.5625 |
e737efe35c9a861fc68864533fbc64060b2608d0 | 3,159 | py | Python | locatable-turk/generate_truth_gtjson.py | patcon/oldto | 44c099550a4e3cfafa85afbaebd3cd6c33325891 | [
"Apache-2.0"
] | 22 | 2018-04-25T22:03:53.000Z | 2021-07-13T18:43:23.000Z | locatable-turk/generate_truth_gtjson.py | patcon/oldto | 44c099550a4e3cfafa85afbaebd3cd6c33325891 | [
"Apache-2.0"
] | 17 | 2018-04-30T14:04:08.000Z | 2022-02-13T19:52:44.000Z | locatable-turk/generate_truth_gtjson.py | patcon/oldto | 44c099550a4e3cfafa85afbaebd3cd6c33325891 | [
"Apache-2.0"
] | 7 | 2018-05-08T23:32:44.000Z | 2022-01-27T17:49:30.000Z | #!/usr/bin/env python3
"""Generate truth GTJSON from the localturk CSV output.
See:
https://docs.google.com/spreadsheets/d/1AZ_X4YFPJF9-KdRxjdiDJhRF5z6tAd3fBBaAXBZLNHE/edit
"""
from collections import defaultdict
import json
import csv
def merge_entries(entries):
"""We have more than one source of truth data for a single entry. Merge them!
entries are (date, geometry, row) tuples.
"""
# If only one source thinks the record is locatable, use that.
located = [e for e in entries if e[1]]
rows_with_notes = [e[2] for e in entries if e[2]['user_notes']]
if len(rows_with_notes) == 0:
note = ''
elif len(rows_with_notes) == 1:
note = rows_with_notes[0]['user_notes']
else:
note = '\n'.join('%s: %s' % (row['source'], row['user_notes']) for row in rows_with_notes)
if len(located) == 0:
entries[0][2]['user_notes'] = note
return entries[0]
elif len(located) == 1:
located[0][2]['user_notes'] = note
return located[0]
# We've got multiple locations. Average them?
avg_lat = sum(float(e[2]['Lat']) for e in located) / len(located)
avg_lng = sum(float(e[2]['Lng']) for e in located) / len(located)
geometry = {
'type': 'Point',
'coordinates': [avg_lng, avg_lat]
}
located[0][2]['user_notes'] = note
return (located[0][0], geometry, located[0][2])
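# Illustrative example (hypothetical rows): two sources locate the same record
# at slightly different points, so the merged geometry is the coordinate-wise
# average of the located entries:
#   e1 = ('1923', {'type': 'Point', 'coordinates': [-79.38, 43.65]}, row1)
#   e2 = ('1923', {'type': 'Point', 'coordinates': [-79.40, 43.67]}, row2)
#   merge_entries([e1, e2])
#   # -> ('1923', {'type': 'Point', 'coordinates': [-79.39, 43.66]}, row1)
#   # with row1['user_notes'] merged from any non-empty source notes.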
if __name__ == '__main__':
id_to_data = defaultdict(list)
for row in csv.DictReader(open('locatable-turk/truth-combined.csv')):
id_ = row['uniqueID']
is_locatable = row['geolocatable'] == 'Locatable'
geometry = None
if is_locatable:
(lng, lat) = (float(row['Lng']), float(row['Lat']))
geometry = {
'type': 'Point',
'coordinates': [lng, lat]
}
date = None
if row['datable'] == 'yes':
start = row['date_start']
end = row['date_end']
if start and not end:
date = start
elif end and not start:
# TODO(danvk): this is kind of a lie; https://stackoverflow.com/q/48696238/388951
date = end
elif start and end:
if start == end:
date = start
else:
date = '%s/%s' % (start, end)
else:
raise ValueError('Empty start/end for %s' % id_)
id_to_data[id_].append((date, geometry, row))
features = []
for id_, entries in id_to_data.items():
if len(entries) == 1:
entry = entries[0]
else:
entry = merge_entries(entries)
date, geometry, row = entry
features.append({
'type': 'Feature',
'geometry': geometry,
'id': id_,
'properties': {
'date': date,
'title': row['title'],
'geocoding_notes': row['user_notes'],
'source': row['source']
}
})
print(json.dumps({
'type': 'FeatureCollection',
'features': features
}))
| 29.25 | 98 | 0.534663 |
8e69cb75d86865460995696e13255e837c4cd76f | 3,823 | py | Python | ex3_server.py | bokunimowakaru/ble_sensor | 09e3b8252390c5c0738ca37f13a1efd47eeb298c | [
"MIT"
] | null | null | null | ex3_server.py | bokunimowakaru/ble_sensor | 09e3b8252390c5c0738ca37f13a1efd47eeb298c | [
"MIT"
] | null | null | null | ex3_server.py | bokunimowakaru/ble_sensor | 09e3b8252390c5c0738ca37f13a1efd47eeb298c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# coding: utf-8
################################################################################
# BLE Sensor ex3_server.py
# Publishes the number of BLE beacons discovered (counted in 30-second windows)
# over the LAN via an HTTP server.
#
# Copyright (c) 2021 Wataru KUNINO
################################################################################
# [How to install]
#   Install bluepy (Bluetooth LE interface for Python):
#   sudo pip3 install bluepy
#
#   Download the collection of sample programs:
#   git clone https://bokunimo.net/git/ble_scan
#
# [How to run]
#   Run the script with sudo:
#   sudo ./ex3_server.py
#
# [References]
#   The following documentation was consulted when writing this program:
#   https://ianharvey.github.io/bluepy-doc/scanner.html
interval = 1.01                                         # scan interval (seconds)
target_rssi = -80                                       # minimum signal strength (RSSI)
counter = None                                          # number of BLE beacons found

from wsgiref.simple_server import make_server           # WSGI server
from bluepy import btle                                 # btle from the bluepy package
from sys import argv                                    # command-line argument access
from getpass import getuser                             # current user lookup
from time import time                                   # current time lookup
import threading                                        # thread management

def wsgi_app(environ, start_response):                  # handler for incoming HTTP requests
    res = 'counter = ' + str(counter) + '\r\n'          # build the response text
    print(res, end='')                                  # show the response text
    res = res.encode('utf-8')                           # convert it to a byte string
    start_response('200 OK', [('Content-type', 'text/plain; charset=utf-8')])
    return [res]                                        # return the response message

def httpd(port=80):
    htserv = make_server('', port, wsgi_app)            # instantiate the HTTP server
    print('HTTP port', port)                            # show the port number
    htserv.serve_forever()                              # run the HTTP server

if getuser() != 'root':                                 # when not executed as root
    print('Usage: sudo', argv[0])                       # show how to run the script
    exit()                                              # terminate the program

time_prev = time()                                      # remember the current time
MAC = list()                                            # list for storing found addresses
scanner = btle.Scanner()                                # create a Scanner instance
thread = threading.Thread(target=httpd, daemon=True)    # create the httpd thread
thread.start()                                          # start the httpd thread

while thread.is_alive():                                # loop while httpd is running
    devices = scanner.scan(interval)                    # receive BLE advertisements
    for dev in devices:                                 # for each discovered device
        if dev.rssi < target_rssi:                      # when RSSI is below the threshold
            continue                                    # skip to the next device
        if dev.addr not in MAC:                         # when the address is not listed yet
            MAC.append(dev.addr)                        # add the address to the list
            print(len(MAC), 'Devices found')            # show the number of devices found
    if time_prev + 30 < time():                         # when 30 seconds or more have passed
        counter = len(MAC)                              # keep the count of found devices
        print(counter, 'Counts/30seconds')              # show the counter (per 30 seconds)
        MAC = list()                                    # discard the stored addresses
        time_prev = time()                              # remember the current time
''' Example session:
pi@raspberrypi:~ $ cd ~/ble_scan
pi@raspberrypi:~/ble_scan $ sudo ./ex3_server.py
HTTP port 80
1 Devices found
2 Devices found
3 Devices found
3 Counts/30seconds
1 Devices found
192.168.1.5 - - [17/Feb/2021 22:26:12] "GET / HTTP/1.1" 200 14
counter = 3
2 Devices found
--------------------------------------------------------------------------------
pi@raspberrypi:~ $ hostname -I
192.168.1.5 XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX:XXXX
pi@raspberrypi:~ $ curl 192.168.1.5
counter = 3
pi@raspberrypi:~ $
'''
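# A client-side sketch (assumes the server address 192.168.1.5 seen in the
# sample session above; urllib.request is in the Python standard library):
#   from urllib.request import urlopen
#   print(urlopen('http://192.168.1.5/').read().decode())   # -> counter = 3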
| 42.010989 | 80 | 0.472404 |
a3cbb789c24ff31287739d9d7edec1028f8e8c38 | 13,852 | py | Python | tests/unit/modules/network_test.py | bogdanr/salt | 4f198525873a1b7da3fbb9994dbb40d381494922 | [
"Apache-2.0"
] | 1 | 2020-10-19T11:49:49.000Z | 2020-10-19T11:49:49.000Z | tests/unit/modules/network_test.py | bogdanr/salt | 4f198525873a1b7da3fbb9994dbb40d381494922 | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/network_test.py | bogdanr/salt | 4f198525873a1b7da3fbb9994dbb40d381494922 | [
"Apache-2.0"
] | 1 | 2020-10-19T11:49:50.000Z | 2020-10-19T11:49:50.000Z | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python Libs
from __future__ import absolute_import
import socket
import os.path
# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
mock_open,
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import Salt Libs
import salt.utils
import salt.ext.ipaddress
from salt.modules import network
from salt.exceptions import CommandExecutionError
# Globals
network.__grains__ = {}
network.__salt__ = {}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class NetworkTestCase(TestCase):
'''
Test cases for salt.modules.network
'''
def test_wol_bad_mac(self):
'''
        Test network.wol with a bad MAC address
'''
bad_mac = '31337'
self.assertRaises(ValueError, network.wol, bad_mac)
def test_wol_success(self):
'''
        Test network.wol success
'''
mac = '080027136977'
bcast = '255.255.255.255 7'
class MockSocket(object):
def __init__(self, *args, **kwargs):
pass
def __call__(self, *args, **kwargs):
pass
def setsockopt(self, *args, **kwargs):
pass
def sendto(self, *args, **kwargs):
pass
with patch('socket.socket', MockSocket):
self.assertTrue(network.wol(mac, bcast))
def test_ping(self):
'''
        Test performing a ping to a host
'''
with patch.object(salt.utils.network, 'sanitize_host',
return_value='A'):
mock_all = MagicMock(side_effect=[{'retcode': 1}, {'retcode': 0}])
with patch.dict(network.__salt__, {'cmd.run_all': mock_all}):
self.assertFalse(network.ping('host', return_boolean=True))
self.assertTrue(network.ping('host', return_boolean=True))
with patch.dict(network.__salt__, {'cmd.run':
MagicMock(return_value='A')}):
self.assertEqual(network.ping('host'), 'A')
def test_netstat(self):
'''
        Test returning information on open ports and states
'''
with patch.dict(network.__grains__, {'kernel': 'Linux'}):
with patch.object(network, '_netstat_linux', return_value='A'):
self.assertEqual(network.netstat(), 'A')
with patch.dict(network.__grains__, {'kernel': 'OpenBSD'}):
with patch.object(network, '_netstat_bsd', return_value='A'):
self.assertEqual(network.netstat(), 'A')
with patch.dict(network.__grains__, {'kernel': 'A'}):
self.assertRaises(CommandExecutionError, network.netstat)
def test_active_tcp(self):
'''
        Test returning a dict containing information on all
        of the running TCP connections
'''
with patch.object(salt.utils.network, 'active_tcp', return_value='A'):
self.assertEqual(network.active_tcp(), 'A')
def test_traceroute(self):
'''
        Test performing a traceroute to a third-party host
'''
with patch.object(salt.utils, 'which', side_effect=[False, True]):
self.assertListEqual(network.traceroute('host'), [])
with patch.object(salt.utils.network, 'sanitize_host',
return_value='A'):
with patch.dict(network.__salt__, {'cmd.run':
MagicMock(return_value="")}):
self.assertListEqual(network.traceroute('host'), [])
def test_dig(self):
'''
        Test performing a DNS lookup with dig
'''
with patch.object(salt.utils.network, 'sanitize_host',
return_value='A'):
with patch.dict(network.__salt__, {'cmd.run':
MagicMock(return_value='A')}):
self.assertEqual(network.dig('host'), 'A')
@patch('salt.utils.which', MagicMock(return_value=''))
def test_arp(self):
'''
        Test returning the ARP table from the minion
'''
with patch.dict(network.__salt__,
{'cmd.run':
MagicMock(return_value='A,B,C,D\nE,F,G,H\n')}):
self.assertDictEqual(network.arp(), {})
def test_interfaces(self):
'''
        Test returning a dictionary of information about
        all the interfaces on the minion
'''
with patch.object(salt.utils.network, 'interfaces', return_value={}):
self.assertDictEqual(network.interfaces(), {})
def test_hw_addr(self):
'''
        Test returning the hardware address (a.k.a. MAC address)
        for a given interface
'''
with patch.object(salt.utils.network, 'hw_addr', return_value={}):
self.assertDictEqual(network.hw_addr('iface'), {})
def test_interface(self):
'''
        Test returning the inet address for a given interface
'''
with patch.object(salt.utils.network, 'interface', return_value={}):
self.assertDictEqual(network.interface('iface'), {})
def test_interface_ip(self):
'''
        Test returning the IP address for a given interface
'''
with patch.object(salt.utils.network, 'interface_ip', return_value={}):
self.assertDictEqual(network.interface_ip('iface'), {})
def test_subnets(self):
'''
        Test returning a list of subnets to which the host belongs
'''
with patch.object(salt.utils.network, 'subnets', return_value={}):
self.assertDictEqual(network.subnets(), {})
def test_in_subnet(self):
'''
        Test that in_subnet returns True if the host is within the
        specified subnet, otherwise False.
'''
with patch.object(salt.utils.network, 'in_subnet', return_value={}):
self.assertDictEqual(network.in_subnet('iface'), {})
def test_ip_addrs(self):
'''
        Test returning a list of IPv4 addresses assigned to the host.
'''
with patch.object(salt.utils.network, 'ip_addrs',
return_value=['0.0.0.0']):
with patch.object(salt.utils.network, 'in_subnet',
return_value=True):
self.assertListEqual(network.ip_addrs('interface',
'include_loopback',
'cidr'), ['0.0.0.0'])
self.assertListEqual(network.ip_addrs('interface',
'include_loopback'),
['0.0.0.0'])
def test_ip_addrs6(self):
'''
        Test returning a list of IPv6 addresses assigned to the host.
'''
with patch.object(salt.utils.network, 'ip_addrs6',
return_value=['A']):
self.assertListEqual(network.ip_addrs6('int', 'include'), ['A'])
def test_get_hostname(self):
'''
        Test getting the hostname
'''
with patch.object(network.socket, 'gethostname', return_value='A'):
self.assertEqual(network.get_hostname(), 'A')
def test_mod_hostname(self):
'''
        Test modifying the hostname
'''
self.assertFalse(network.mod_hostname(None))
with patch.object(salt.utils, 'which', return_value='hostname'):
with patch.dict(network.__salt__,
{'cmd.run': MagicMock(return_value=None)}):
file_d = '\n'.join(['#', 'A B C D,E,F G H'])
with patch('salt.utils.fopen', mock_open(read_data=file_d),
create=True) as mfi:
mfi.return_value.__iter__.return_value = file_d.splitlines()
with patch.dict(network.__grains__, {'os_family': 'A'}):
self.assertTrue(network.mod_hostname('hostname'))
@patch('socket.socket')
def test_connect(self, mock_socket):
'''
        Test connectivity to a host using a particular
        port from the minion.
'''
self.assertDictEqual(network.connect(False, 'port'),
{'comment': 'Required argument, host, is missing.',
'result': False})
self.assertDictEqual(network.connect('host', False),
{'comment': 'Required argument, port, is missing.',
'result': False})
ret = 'Unable to connect to host (0) on tcp port port'
mock_socket.side_effect = Exception('foo')
with patch.object(salt.utils.network, 'sanitize_host',
return_value='A'):
with patch.object(socket, 'getaddrinfo',
return_value=[['ipv4', 'A', 6, 'B', '0.0.0.0']]):
self.assertDictEqual(network.connect('host', 'port'),
{'comment': ret, 'result': False})
ret = 'Successfully connected to host (0) on tcp port port'
mock_socket.side_effect = MagicMock()
mock_socket.settimeout().return_value = None
mock_socket.connect().return_value = None
mock_socket.shutdown().return_value = None
with patch.object(salt.utils.network, 'sanitize_host',
return_value='A'):
with patch.object(socket,
'getaddrinfo',
return_value=[['ipv4',
'A', 6, 'B', '0.0.0.0']]):
self.assertDictEqual(network.connect('host', 'port'),
{'comment': ret, 'result': True})
def test_is_private(self):
'''
        Test checking whether the given IP address is a private address
'''
with patch.object(salt.ext.ipaddress.IPv4Address, 'is_private',
return_value=True):
self.assertTrue(network.is_private('0.0.0.0'))
with patch.object(salt.ext.ipaddress.IPv6Address, 'is_private',
return_value=True):
self.assertTrue(network.is_private('::1'))
def test_is_loopback(self):
'''
        Test checking whether the given IP address is a loopback address
'''
with patch.object(salt.ext.ipaddress.IPv4Address, 'is_loopback',
return_value=True):
self.assertTrue(network.is_loopback('127.0.0.1'))
with patch.object(salt.ext.ipaddress.IPv6Address, 'is_loopback',
return_value=True):
self.assertTrue(network.is_loopback('::1'))
def test_get_bufsize(self):
'''
        Test returning network buffer sizes as a dict
'''
with patch.dict(network.__grains__, {'kernel': 'Linux'}):
with patch.object(os.path, 'exists', return_value=True):
with patch.object(network, '_get_bufsize_linux',
return_value={'size': 1}):
self.assertDictEqual(network.get_bufsize('iface'),
{'size': 1})
with patch.dict(network.__grains__, {'kernel': 'A'}):
self.assertDictEqual(network.get_bufsize('iface'), {})
def test_mod_bufsize(self):
'''
        Test modifying network interface buffers (currently Linux only)
'''
with patch.dict(network.__grains__, {'kernel': 'Linux'}):
with patch.object(os.path, 'exists', return_value=True):
with patch.object(network, '_mod_bufsize_linux',
return_value={'size': 1}):
self.assertDictEqual(network.mod_bufsize('iface'),
{'size': 1})
with patch.dict(network.__grains__, {'kernel': 'A'}):
self.assertFalse(network.mod_bufsize('iface'))
def test_routes(self):
'''
        Test returning currently configured routes from the routing table
'''
self.assertRaises(CommandExecutionError, network.routes, 'family')
with patch.dict(network.__grains__, {'kernel': 'A', 'os': 'B'}):
self.assertRaises(CommandExecutionError, network.routes, 'inet')
with patch.dict(network.__grains__, {'kernel': 'Linux'}):
with patch.object(network, '_netstat_route_linux',
side_effect=['A', [{'addr_family': 'inet'}]]):
self.assertEqual(network.routes(None), 'A')
self.assertListEqual(network.routes('inet'),
[{'addr_family': 'inet'}])
def test_default_route(self):
'''
        Test returning default route(s) from the routing table
'''
self.assertRaises(CommandExecutionError, network.default_route,
'family')
with patch.object(network, 'routes',
side_effect=[[{'addr_family': 'inet'},
{'destination': 'A'}], []]):
with patch.dict(network.__grains__, {'kernel': 'A',
'os': 'B'}):
self.assertRaises(CommandExecutionError,
network.default_route, 'inet')
with patch.dict(network.__grains__, {'kernel': 'Linux'}):
self.assertListEqual(network.default_route('inet'), [])
if __name__ == '__main__':
from integration import run_tests
run_tests(NetworkTestCase, needs_daemon=False)
| 38.477778 | 80 | 0.545625 |
8ed4c3a04fc6bc1966de2c63fe8911dbe83bcc74 | 8,226 | py | Python | code/GSE84433_K-medoids.py | mrshihabullah/federated-predicted-euclidean-distance | 4b7ec03e9a93cdac9d64032728028c2bec46c916 | [
"BSD-3-Clause"
] | 2 | 2022-01-09T18:39:16.000Z | 2022-01-09T18:40:14.000Z | code/GSE84433_K-medoids.py | mrshihabullah/federated-predicted-euclidean-distance | 4b7ec03e9a93cdac9d64032728028c2bec46c916 | [
"BSD-3-Clause"
] | null | null | null | code/GSE84433_K-medoids.py | mrshihabullah/federated-predicted-euclidean-distance | 4b7ec03e9a93cdac9d64032728028c2bec46c916 | [
"BSD-3-Clause"
] | null | null | null | from pathlib import Path
import numpy as np
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.metrics.pairwise import euclidean_distances
import participant_utils as pu
import coordinator_utils as cu
from sklearn_extra.cluster import KMedoids
import matplotlib.pyplot as plt
# Read the dataset accordingly
path = Path(__file__).parent / "../dataset/GSE84433_series_matrix.txt"
clustered_dataset = pd.read_csv(path, comment='!', sep="\t", header=0)
clustered_dataset = clustered_dataset.T
clustered_dataset.dropna(inplace=True)
clustered_dataset, clustered_dataset.columns = clustered_dataset[1:], clustered_dataset.iloc[0]
#Without using sklearn's LabelEncoder()
# true_label = clustered_dataset.iloc[:,0].astype('category').cat.codes
label_encoder = LabelEncoder()
true_label = label_encoder.fit_transform(clustered_dataset.iloc[:,0])
# clustered_dataset = clustered_dataset.drop(columns="Gene_ID")
# Convert the dataset into numpy ndarray for further computation
clustered_dataset = clustered_dataset.to_numpy(dtype='float64')
#For simulating equal distribution
# D1, D2 = np.array_split(clustered_dataset, 2)
#For simulating uneven distribution
np.random.shuffle(clustered_dataset)
D1, D2, D3, D4 = np.array_split(clustered_dataset, 4)
D2 = np.concatenate((D2, D3, D4))
#### Participant Based Computation ####
# Each participant generates random spike-in points which, in a production
# environment, would be shared with the coordinator to build the overall spike array
generated_spikes_D1 = pu.generate_spikes_using_PCA_and_variance(D1)
generated_spikes_D2 = pu.generate_spikes_using_PCA_and_variance(D2)
#### Coordinator Based Computation ####
generated_spikes = np.concatenate((generated_spikes_D1, generated_spikes_D2))
print("Shape of Generated Spikes",generated_spikes.shape)
# pu.plot3dwithspike(width=9, height=6, title= "Clustering with actual labels", datapoints = clustered_dataset, spikes=generated_spikes, myLabel=true_label)
# # rows are s1,s2..sn while columns are datapoints
euc_dist_D1_spikes = euclidean_distances(D1,generated_spikes)
# print("Spike local distance matrix of 1st participant: \n", euc_dist_D1_spikes)
# # rows are s1,s2..sn while columns are datapoints
euc_dist_D2_spikes = euclidean_distances(D2,generated_spikes)
# print("Spike local distance matrix of 2nd participant: \n", euc_dist_D2_spikes)
slope_intercept_D1 = pu.regression_per_client(data= D1,
euc_dist_data_spike= euc_dist_D1_spikes,
regressor="Linear")
slope_intercept_D2 = pu.regression_per_client(data= D2,
euc_dist_data_spike= euc_dist_D2_spikes,
regressor="Huber")
#### Coordinator Based Computation ####
# FOR EVALUATION PURPOSE ONLY
global_true_euc_dist = euclidean_distances(clustered_dataset)
# Transform the dataset and spike points into 2D and 3D for visualization purpose
# pca,clustered_dataset_2d = pu.perform_PCA(3, clustered_dataset)
# variance_percentage = str(float(np.round(pca.explained_variance_ratio_.cumsum()[2]*100, 1)))
# plot_title = "GSE84426 dataset with 3 Principal Components covering " + variance_percentage+"% variance"
clustered_dataset_2d = cu.perform_PCA(2, clustered_dataset)
generated_spikes_2d = cu.perform_PCA(2, generated_spikes)
# generated_spikes_3d = cu.perform_PCA(3, generated_spikes)
# plt.title(plot_title, fontsize='medium', pad=20)
# ax = plt.axes(projection='3d')
# ax.scatter3D(clustered_dataset_2d[:,0] , clustered_dataset_2d[:,1], clustered_dataset_2d[:,2])
# plt.show()
# pca,clustered_dataset_2d = pu.perform_PCA(2, clustered_dataset)
# variance_percentage = str(float(np.round(pca.explained_variance_ratio_.cumsum()[1]*100, 1)))
# plot_title = "GSE84426 dataset with 2 Principal Component covering " + variance_percentage+"% variance"
# clustered_dataset_3d = cu.perform_PCA(3, clustered_dataset)
# generated_spikes_2d = cu.perform_PCA(2, generated_spikes)
# generated_spikes_3d = cu.perform_PCA(3, generated_spikes)
# plt.title(plot_title, fontsize='medium', pad=20)
# plt.scatter(clustered_dataset_2d[:,0] , clustered_dataset_2d[:,1])
# plt.show()
# https://stackoverflow.com/questions/59765712/optics-parallelism
label = KMedoids(n_clusters=32, metric='precomputed',method='pam').fit_predict(global_true_euc_dist)
#Getting unique labels
u_labels_2 = np.unique(label)
pred_label_gtdm = np.array(label).tolist()
plt.figure(figsize=(15,15))
plt.subplots_adjust(bottom=.05, top=.9, left=.05, right=.95)
plt.subplot(325)
plt.title("Clustering with true aggregated distance matrix", fontsize='medium')
for i in u_labels_2:
plt.scatter(clustered_dataset_2d[label == i , 0] , clustered_dataset_2d[label == i , 1] , label = i)
plt.scatter(generated_spikes_2d[:,0] , generated_spikes_2d[:,1] , s = 80, color = 'k')
plt.legend()
plt.savefig("GSE84433_K-medoids_ADM_2d.png")
# pu.plot3dwithspike(width=9, height=6, title= "Clustering with true aggregated distance matrix", datapoints = clustered_dataset_3d, spikes=generated_spikes_3d, myLabel=pred_label_gtdm)
# cu.unsupervised_evaluation_scores(global_true_euc_dist, "Aggregated True Distance Matrix", true_label, pred_label_gtdm, adj_rand=True, adj_mutual_info=True, f1=True, silhouette=False, davies_bouldin=True)
global_fed_euc_dist = cu.calc_fed_euc_dist([euc_dist_D1_spikes, euc_dist_D2_spikes])
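# Sketch of the idea behind calc_fed_euc_dist (an assumption about its
# internals; the authoritative code lives in coordinator_utils): stacking the
# participants' point-to-spike distance matrices gives every sample a distance
# vector to the shared spikes, and the federated matrix is built from pairwise
# distances between those vectors, roughly:
#   stacked = np.concatenate((euc_dist_D1_spikes, euc_dist_D2_spikes))
#   fedm_approx = euclidean_distances(stacked)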
label = KMedoids(n_clusters=32, metric='precomputed',method='pam').fit_predict(global_fed_euc_dist)
#Getting unique labels
u_labels_2 = np.unique(label)
pred_label_gfdm = np.array(label).tolist()
plt.figure(figsize=(15,15))
plt.subplots_adjust(bottom=.05, top=.9, left=.05, right=.95)
plt.subplot(325)
plt.title("Clustering with federated distance matrix", fontsize='medium')
for i in u_labels_2:
plt.scatter(clustered_dataset_2d[label == i , 0] , clustered_dataset_2d[label == i , 1] , label = i)
plt.scatter(generated_spikes_2d[:,0] , generated_spikes_2d[:,1] , s = 80, color = 'k')
plt.legend()
plt.savefig("GSE84433_K-medoids_FEDM_2d.png")
# pu.plot3dwithspike(width=9, height=6, title= "Clustering with globally federated distance matrix", datapoints = clustered_dataset_3d, spikes=generated_spikes_3d, myLabel=pred_label_gfdm)
cu.unsupervised_evaluation_scores(global_fed_euc_dist, "Global Federated Distance Matrix", pred_label_gtdm, pred_label_gfdm, adj_rand=True, adj_mutual_info=True, f1=True, silhouette=False, davies_bouldin=True)
MxCx = []
MxCx.append(slope_intercept_D1)
MxCx.append(slope_intercept_D2)
global_Mx, global_Cx = cu.construct_global_Mx_Cx_matrix(MxCx,[euc_dist_D1_spikes.shape[0], euc_dist_D2_spikes.shape[0]])
global_pred_euc_dist = cu.calc_pred_dist_matrix(global_Mx, global_fed_euc_dist, global_Cx)
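# Per the slope/intercept naming above, the predicted matrix is in effect a
# per-entry linear correction of the federated one (a sketch inferred from the
# variable names; calc_pred_dist_matrix in coordinator_utils is authoritative):
#   PEDM = global_Mx * global_fed_euc_dist + global_Cx   # elementwise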
label = KMedoids(n_clusters=32, metric='precomputed',method='pam').fit_predict(global_pred_euc_dist)
#Getting unique labels
u_labels_2 = np.unique(label)
pred_label_2 = np.array(label).tolist()
plt.figure(figsize=(15,15))
plt.subplots_adjust(bottom=.05, top=.9, left=.05, right=.95)
plt.subplot(325)
plt.title("Clustering with predicted distance matrix", fontsize='medium')
for i in u_labels_2:
plt.scatter(clustered_dataset_2d[label == i , 0] , clustered_dataset_2d[label == i , 1] , label = i)
plt.scatter(generated_spikes_2d[:,0] , generated_spikes_2d[:,1] , s = 80, color = 'k')
plt.legend()
plt.savefig("GSE84433_K-medoids_PEDM_2d.png")
# pu.plot3dwithspike(width=9, height=6, title= "Clustering with globally predicted distance matrix", datapoints = clustered_dataset_3d, spikes=generated_spikes_3d, myLabel=pred_label_2)
cu.unsupervised_evaluation_scores(global_pred_euc_dist, "Global Predicted Distance Matrix", pred_label_gtdm, pred_label_2, adj_rand=True, adj_mutual_info=True, f1=True, silhouette=False, davies_bouldin=True)
cu.plotDistanceMatrix(global_fed_euc_dist, title="Federated Global Distance Matrix")
cu.plotDistanceMatrix(global_true_euc_dist, title="True Global Distance Matrix")
cu.plotDistanceMatrix(global_pred_euc_dist, title="Predicted Global Distance Matrix")
cu.pearson_corr_coeff(global_true_euc_dist, global_fed_euc_dist, global_pred_euc_dist)
cu.spearman_corr_coeff(global_true_euc_dist, global_fed_euc_dist, global_pred_euc_dist) | 51.093168 | 210 | 0.782276 |
bf698ceab92c0643a996065de58cd501336dad32 | 1,139 | py | Python | src/megapy/pin.py | aakash-sahai/megapy | 12425622937debb5323486b703361cf77986a478 | [
"MIT"
] | null | null | null | src/megapy/pin.py | aakash-sahai/megapy | 12425622937debb5323486b703361cf77986a478 | [
"MIT"
] | null | null | null | src/megapy/pin.py | aakash-sahai/megapy | 12425622937debb5323486b703361cf77986a478 | [
"MIT"
] | null | null | null | from arduino import ArduinoConnection, ArduinoObject
class DigitalPin(ArduinoObject):
def __init__(self, conn, pin, mode='input'):
ArduinoObject.__init__(self, conn, 'dp' + str(pin), 'pin digital')
self._pin = pin
self._mode = mode
super(DigitalPin, self).create("{} {}".format(mode, pin))
def _get_value(self):
return int(super(DigitalPin, self).get("value"))
def _set_value(self, value):
return super(DigitalPin, self).set("value", value)
def _get_pin(self):
return self._pin
def _get_mode(self):
return self._mode
value = property(_get_value, _set_value)
pin = property(_get_pin)
mode = property(_get_mode)
class AnalogPin(ArduinoObject):
def __init__(self, conn, pin):
ArduinoObject.__init__(self, conn, 'ap' + str(pin), 'pin analog')
self._pin = pin
super(AnalogPin, self).create("input {}".format(pin))
def _get_value(self):
return int(super(AnalogPin, self).get("value"))
def _get_pin(self):
return self._pin
value = property(_get_value)
pin = property(_get_pin)
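# Usage sketch (hypothetical wiring; the ArduinoConnection constructor
# argument is assumed -- see megapy's arduino module for the real signature):
#   conn = ArduinoConnection('/dev/ttyACM0')
#   led = DigitalPin(conn, 13, mode='output')
#   led.value = 1             # drive digital pin 13 high
#   pot = AnalogPin(conn, 0)
#   print(pot.value)          # read analog pin A0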
| 27.119048 | 75 | 0.639157 |
aecd7b5b3e5ad0d6ec34ed308ffc2e4f8488ff2f | 5,847 | py | Python | gridnode/main/persistence/model_storage.py | gmuraru/PyGridNode | f820b237e684733aa60d3fa691366c324e843276 | [
"Apache-2.0"
] | null | null | null | gridnode/main/persistence/model_storage.py | gmuraru/PyGridNode | f820b237e684733aa60d3fa691366c324e843276 | [
"Apache-2.0"
] | null | null | null | gridnode/main/persistence/model_storage.py | gmuraru/PyGridNode | f820b237e684733aa60d3fa691366c324e843276 | [
"Apache-2.0"
] | 1 | 2021-07-06T04:32:18.000Z | 2021-07-06T04:32:18.000Z | from typing import List, Dict, Union
from .database import db_instance
from .model_cache import ModelCache
from ..codes import MODEL
from syft.serde import serialize, deserialize
import hashlib
class ModelStorage:
""" Manage all models hosted by an specific worker. """
def __init__(self, worker):
self.worker = worker
self.cache = ModelCache()
@property
def id(self) -> str:
""" Returns worker's id."""
return self.worker.id
@property
def models(self) -> List:
""" Returns a list of model ids hosted by this storage instance.
If persistence mode isn't enabled, it will return models stored in cache memory.
Returns:
model_ids : List of model ids hosted by this storage instance.
"""
# If persistence mode enabled
if db_instance():
key = self._generate_hash_key()
size = db_instance().llen(key)
model_ids = db_instance().lrange(key, 0, size)
model_ids = [id.decode("utf-8") for id in model_ids]
return model_ids
return self.cache.models
def save_model(
self,
serialized_model: bytes,
model_id: str,
allow_download: bool,
allow_remote_inference: bool,
mpc: bool,
):
""" Save the desired model at database and load it in cache memory.
Args:
serialized_model: Model serialized.
model_id: Model's ID.
allow_download: Flag to enable/disable download.
allow_remote_inference: Flag to enable/disable remote inference.
mpc: Flag used to identify if it is an encrypted model.
"""
# If persistence mode enabled
if db_instance():
key = self._generate_hash_key(model_id)
model = {
MODEL.MODEL: serialized_model,
MODEL.ALLOW_DOWNLOAD: int(allow_download),
MODEL.ALLOW_REMOTE_INFERENCE: int(allow_remote_inference),
MODEL.MPC: int(mpc),
}
# Save serialized model into db
# Format: { hash(worker_id + model_id) : dict( serialized_model, allow_download, allow_inference, mpc) }
result = db_instance().hmset(key, model)
primary_key = self._generate_hash_key()
# Save model id
db_instance().lpush(primary_key, model_id)
self.cache.save(
serialized_model,
model_id,
allow_download,
allow_remote_inference,
mpc,
serialized=True,
)
def get(self, model_id: str) -> Union[Dict, None]:
""" Retrieves model from cache/database by model_id.
        If persistence mode isn't enabled, the model will be looked up only in cache memory.
Args:
model_id: Model's ID.
Returns:
result : Dict Model properties or None it not found.
"""
if self.cache.contains(model_id):
return self.cache.get(model_id)
# If persistence mode enabled
if db_instance():
key = self._generate_hash_key(model_id)
raw_data = db_instance().hgetall(key)
# Decode binary keys
raw_data = {key.decode("utf-8"): value for key, value in raw_data.items()}
# Decode binary values
raw_data[MODEL.ALLOW_DOWNLOAD] = bool(
int(raw_data[MODEL.ALLOW_DOWNLOAD].decode("utf-8"))
)
raw_data[MODEL.ALLOW_REMOTE_INFERENCE] = bool(
int(raw_data[MODEL.ALLOW_REMOTE_INFERENCE].decode("utf-8"))
)
raw_data[MODEL.MPC] = bool(int(raw_data[MODEL.MPC].decode("utf-8")))
# Save model in cache
self.cache.save(
raw_data[MODEL.MODEL],
model_id,
raw_data[MODEL.ALLOW_DOWNLOAD],
raw_data[MODEL.ALLOW_REMOTE_INFERENCE],
raw_data[MODEL.MPC],
True,
)
return self.cache.get(model_id)
else:
return None
def remove(self, model_id: str) -> bool:
""" Remove the specific model from cache/database.
Args:
model_id: Model's ID
Returns:
result: True if it was removed, otherwise returns False.
"""
# Remove model from cache
self.cache.remove(model_id)
if db_instance():
# Remove model ID from id's list
ids_list_key = self._generate_hash_key()
db_instance().lrem(ids_list_key, 0, model_id)
# Remove model from database
key = self._generate_hash_key(model_id)
return db_instance().delete(key)
else:
return True
def contains(self, model_id: str) -> bool:
""" Verify if this storage instance contains the desired model.
Args:
model_id: Model's ID.
Returns:
result: True if contains, otherwise returns False.
"""
key = self._generate_hash_key(model_id)
if not db_instance():
return self.cache.contains(model_id)
else:
return self.cache.contains(model_id) or bool(db_instance().hgetall(key))
def _generate_hash_key(self, primary_key: str = "") -> str:
""" To improve performance our queries will be made by hashkeys generated by
the aggregation between storage's id and primary key.
Args:
primary_key: Key/ID used to map an object.
Returns:
hashcode: Generated hashcode.
"""
return hashlib.sha256(bytes(self.id + primary_key, "utf-8")).hexdigest()
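# Key-derivation example (illustrative IDs): for a storage bound to worker
# "alice", the model "mnist" is stored under
#   hashlib.sha256(b"alicemnist").hexdigest()
# while the list of hosted model ids lives under sha256 of "alice" alone.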
| 34.597633 | 116 | 0.571404 |
67aca4ea26d8b86d58f6ccdcf6f510e70e65c65a | 5,599 | py | Python | evaluation_service_pb2.py | lerrytang/es_on_gke | fd498f12db73a22726b3e70362bdcbc41e75b46a | [
"Apache-2.0"
] | 38 | 2019-06-18T00:13:18.000Z | 2020-10-08T18:30:23.000Z | evaluation_service_pb2.py | BorjaEst/es_on_gke | fd498f12db73a22726b3e70362bdcbc41e75b46a | [
"Apache-2.0"
] | null | null | null | evaluation_service_pb2.py | BorjaEst/es_on_gke | fd498f12db73a22726b3e70362bdcbc41e75b46a | [
"Apache-2.0"
] | 7 | 2019-06-18T08:09:41.000Z | 2021-03-08T18:10:34.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: evaluation_service.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='evaluation_service.proto',
package='es_on_gke',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x18\x65valuation_service.proto\x12\tes_on_gke\"c\n\x0eRolloutRequest\x12\x15\n\rrollout_index\x18\x01 \x01(\x05\x12\x10\n\x08\x65nv_seed\x18\x02 \x01(\x05\x12\x16\n\x0epolicy_weights\x18\x03 \x03(\x01\x12\x10\n\x08\x65valuate\x18\x04 \x01(\x08\"@\n\x0fRolloutResponse\x12\x15\n\rrollout_index\x18\x01 \x01(\x05\x12\x16\n\x0erollout_reward\x18\x02 \x01(\x01\x32Z\n\x07Rollout\x12O\n\x14RolloutWithParameter\x12\x19.es_on_gke.RolloutRequest\x1a\x1a.es_on_gke.RolloutResponse\"\x00\x62\x06proto3')
)
_ROLLOUTREQUEST = _descriptor.Descriptor(
name='RolloutRequest',
full_name='es_on_gke.RolloutRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='rollout_index', full_name='es_on_gke.RolloutRequest.rollout_index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='env_seed', full_name='es_on_gke.RolloutRequest.env_seed', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='policy_weights', full_name='es_on_gke.RolloutRequest.policy_weights', index=2,
number=3, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='evaluate', full_name='es_on_gke.RolloutRequest.evaluate', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=39,
serialized_end=138,
)
_ROLLOUTRESPONSE = _descriptor.Descriptor(
name='RolloutResponse',
full_name='es_on_gke.RolloutResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='rollout_index', full_name='es_on_gke.RolloutResponse.rollout_index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rollout_reward', full_name='es_on_gke.RolloutResponse.rollout_reward', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=140,
serialized_end=204,
)
DESCRIPTOR.message_types_by_name['RolloutRequest'] = _ROLLOUTREQUEST
DESCRIPTOR.message_types_by_name['RolloutResponse'] = _ROLLOUTRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
RolloutRequest = _reflection.GeneratedProtocolMessageType('RolloutRequest', (_message.Message,), dict(
DESCRIPTOR = _ROLLOUTREQUEST,
__module__ = 'evaluation_service_pb2'
# @@protoc_insertion_point(class_scope:es_on_gke.RolloutRequest)
))
_sym_db.RegisterMessage(RolloutRequest)
RolloutResponse = _reflection.GeneratedProtocolMessageType('RolloutResponse', (_message.Message,), dict(
DESCRIPTOR = _ROLLOUTRESPONSE,
__module__ = 'evaluation_service_pb2'
# @@protoc_insertion_point(class_scope:es_on_gke.RolloutResponse)
))
_sym_db.RegisterMessage(RolloutResponse)
_ROLLOUT = _descriptor.ServiceDescriptor(
name='Rollout',
full_name='es_on_gke.Rollout',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=206,
serialized_end=296,
methods=[
_descriptor.MethodDescriptor(
name='RolloutWithParameter',
full_name='es_on_gke.Rollout.RolloutWithParameter',
index=0,
containing_service=None,
input_type=_ROLLOUTREQUEST,
output_type=_ROLLOUTRESPONSE,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_ROLLOUT)
DESCRIPTOR.services_by_name['Rollout'] = _ROLLOUT
# @@protoc_insertion_point(module_scope)
| 34.561728 | 517 | 0.757992 |
5df4d9fbecbb2cc92e23f82cc300df8454298060 | 652 | py | Python | setup.py | covalentcareers/wagtail-polls | 70c7ab57e5560feee603c9b5f2236f0436ac226c | [
"MIT"
] | null | null | null | setup.py | covalentcareers/wagtail-polls | 70c7ab57e5560feee603c9b5f2236f0436ac226c | [
"MIT"
] | null | null | null | setup.py | covalentcareers/wagtail-polls | 70c7ab57e5560feee603c9b5f2236f0436ac226c | [
"MIT"
] | null | null | null | import os
from setuptools import setup, find_packages
import polls
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setup(
name='wagtail-rest-polls',
version=polls.__version__,
description='A simple polls app for wagtail in django',
long_description=long_description,
long_description_content_type="text/markdown",
license='MIT License',
author='jkearney126',
author_email='josh.kearney@covalentcareers.com',
url='https://github.com/covalentcareers/wagtail-polls',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django',
],
) | 26.08 | 59 | 0.710123 |
6f47c87a669aff866bcda779598feea0ed4b57e1 | 731 | py | Python | variation/translators/genomic_substitution.py | cancervariants/variant-normalization | e89a9f8366a659c82b2042aeb7effe339851bfb4 | [
"MIT"
] | 1 | 2022-01-19T18:17:49.000Z | 2022-01-19T18:17:49.000Z | variation/translators/genomic_substitution.py | cancervariants/variation-normalization | 9c8fbab1562591ae9445d82ddd15df29f1ea1f5a | [
"MIT"
] | 99 | 2021-06-07T12:50:34.000Z | 2022-03-23T13:38:29.000Z | variation/translators/genomic_substitution.py | cancervariants/variant-normalization | e89a9f8366a659c82b2042aeb7effe339851bfb4 | [
"MIT"
] | null | null | null | """Module for Genomic Substitution Translation."""
from variation.translators.translator import Translator
from variation.schemas.classification_response_schema import ClassificationType
from variation.schemas.token_response_schema import GenomicSubstitutionToken
class GenomicSubstitution(Translator):
"""The Genomic Substitution Translator class."""
def can_translate(self, type: ClassificationType) -> bool:
"""Return if classification type is Genomic Substitution."""
return type == ClassificationType.GENOMIC_SUBSTITUTION
def is_token_instance(self, token):
"""Return if the token is an Genomic Substitution token instance."""
return isinstance(token, GenomicSubstitutionToken)
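# Dispatch sketch (illustrative): a registry of translators can probe each
# translator in turn, e.g.
#   t = GenomicSubstitution()
#   t.can_translate(ClassificationType.GENOMIC_SUBSTITUTION)   # -> True
#   t.is_token_instance(token)   # True only for GenomicSubstitutionToken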
| 43 | 79 | 0.78249 |
2bea676b32223bbb1395381005b33a4ec835d906 | 251 | py | Python | myapp/main/__init__.py | menghao2015/MyBlog | 8cc7b74295bfbdfc0f825665c915bf36bbf772fe | [
"MIT"
] | null | null | null | myapp/main/__init__.py | menghao2015/MyBlog | 8cc7b74295bfbdfc0f825665c915bf36bbf772fe | [
"MIT"
] | null | null | null | myapp/main/__init__.py | menghao2015/MyBlog | 8cc7b74295bfbdfc0f825665c915bf36bbf772fe | [
"MIT"
] | null | null | null | from flask import Blueprint
main = Blueprint('main', __name__)
from . import views, errors
from ..models import Permission
# Make the Permission class available in all templates
@main.app_context_processor
def inject_permissions():
return dict(Permission=Permission)
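# With Permission in the template context, a Jinja template can gate markup on
# permissions, e.g. (assuming the Permission model defines a WRITE flag; the
# real flag names live in ..models):
#   {% if current_user.can(Permission.WRITE) %} ... {% endif %}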
| 19.307692 | 35 | 0.796813 |
5270a13f4462fc48eadd31c88bbe272ddbda57d5 | 3,739 | py | Python | datadog_checks_downloader/datadog_checks/downloader/cli.py | jfmyers9/integrations-core | 8793c784f1d5b2c9541b2dd4214dd91584793ced | [
"BSD-3-Clause"
] | null | null | null | datadog_checks_downloader/datadog_checks/downloader/cli.py | jfmyers9/integrations-core | 8793c784f1d5b2c9541b2dd4214dd91584793ced | [
"BSD-3-Clause"
] | null | null | null | datadog_checks_downloader/datadog_checks/downloader/cli.py | jfmyers9/integrations-core | 8793c784f1d5b2c9541b2dd4214dd91584793ced | [
"BSD-3-Clause"
] | null | null | null | # (C) Datadog, Inc. 2019-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
# 1st party.
import argparse
import os
import re
import sys
# 2nd party.
from .download import DEFAULT_ROOT_LAYOUT_TYPE, REPOSITORY_URL_PREFIX, ROOT_LAYOUTS, TUFDownloader
from .exceptions import NonCanonicalVersion, NonDatadogPackage
# Private module functions.
def __is_canonical(version):
'''
https://www.python.org/dev/peps/pep-0440/#appendix-b-parsing-version-strings-with-regular-expressions
'''
P = r'^([1-9]\d*!)?(0|[1-9]\d*)(\.(0|[1-9]\d*))*((a|b|rc)(0|[1-9]\d*))?(\.post(0|[1-9]\d*))?(\.dev(0|[1-9]\d*))?$'
return re.match(P, version) is not None
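# For illustration (made-up inputs), the PEP 440 check accepts only canonical
# version strings:
#   __is_canonical('1.0.0')      # -> True
#   __is_canonical('1.0.0rc1')   # -> True
#   __is_canonical('v1.0')       # -> False (leading 'v' is not canonical)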
def __find_shipped_integrations():
# Recurse up from site-packages until we find the Agent root directory.
# The relative path differs between operating systems.
root = os.path.dirname(os.path.abspath(__file__))
filename = 'requirements-agent-release.txt'
integrations = set()
while True:
file_path = os.path.join(root, filename)
if os.path.isfile(file_path):
break
new_root = os.path.dirname(root)
if new_root == root:
return integrations
root = new_root
with open(file_path, 'rb') as f:
contents = f.read().decode('utf-8')
for line in contents.splitlines():
integration, separator, _ = line.strip().partition('==')
if separator:
integrations.add(integration)
return integrations
# Public module functions.
def download():
parser = argparse.ArgumentParser()
parser.add_argument(
'standard_distribution_name', type=str, help='Standard distribution name of the desired Datadog check.'
)
parser.add_argument(
'--repository', type=str, default=REPOSITORY_URL_PREFIX, help='The complete URL prefix for the TUF repository.'
)
parser.add_argument('--version', type=str, default=None, help='The version number of the desired Datadog check.')
parser.add_argument(
'--type',
type=str,
default=DEFAULT_ROOT_LAYOUT_TYPE,
choices=list(ROOT_LAYOUTS),
help='The type of integration.',
)
parser.add_argument(
'--force', action='store_true', help='Force download even if the type of integration may be incorrect.'
)
parser.add_argument(
'-v', '--verbose', action='count', default=0, help='Show verbose information about TUF and in-toto.'
)
args = parser.parse_args()
repository_url_prefix = args.repository
standard_distribution_name = args.standard_distribution_name
version = args.version
root_layout_type = args.type
force = args.force
verbose = args.verbose
if not standard_distribution_name.startswith('datadog-'):
raise NonDatadogPackage(standard_distribution_name)
if version and not __is_canonical(version):
raise NonCanonicalVersion(version)
if root_layout_type != 'core':
shipped_integrations = __find_shipped_integrations()
if standard_distribution_name in shipped_integrations:
sys.stderr.write(
'{}: {} is a known core integration'.format('WARNING' if force else 'ERROR', standard_distribution_name)
)
sys.stderr.flush()
if not force:
sys.exit(1)
tuf_downloader = TUFDownloader(
repository_url_prefix=repository_url_prefix, root_layout_type=root_layout_type, verbose=verbose
)
wheel_relpath = tuf_downloader.get_wheel_relpath(standard_distribution_name, version=version)
wheel_abspath = tuf_downloader.download(wheel_relpath)
print(wheel_abspath) # pylint: disable=print-statement
| 30.900826 | 120 | 0.677186 |
7c9e0689f9cf7cbfb2758a84620823459083a93f | 21,792 | py | Python | src/scripts/utility_source_generator.py | TrevorDev/OpenXR-SDK | 5ca8a38cb7a65fc0ffcea88f565f27465d3b5be8 | [
"Apache-2.0"
] | null | null | null | src/scripts/utility_source_generator.py | TrevorDev/OpenXR-SDK | 5ca8a38cb7a65fc0ffcea88f565f27465d3b5be8 | [
"Apache-2.0"
] | null | null | null | src/scripts/utility_source_generator.py | TrevorDev/OpenXR-SDK | 5ca8a38cb7a65fc0ffcea88f565f27465d3b5be8 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3 -i
#
# Copyright (c) 2017-2019 The Khronos Group Inc.
# Copyright (c) 2017-2019 Valve Corporation
# Copyright (c) 2017-2019 LunarG, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author(s): Mark Young <marky@lunarg.com>
#
# Purpose: This file utilizes the content formatted in the
# automatic_source_generator.py class to produce the
# generated source code for the loader.
import os
import re
import sys
from automatic_source_generator import *
from collections import namedtuple
# The following commands should only exist in the loader, and only as a trampoline
# (i.e., don't add them to the dispatch table)
NO_TRAMPOLINE_OR_TERMINATOR = [
'xrEnumerateApiLayerProperties',
'xrEnumerateInstanceExtensionProperties',
]
# UtilitySourceGeneratorOptions - subclass of AutomaticSourceGeneratorOptions.
class UtilitySourceGeneratorOptions(AutomaticSourceGeneratorOptions):
def __init__(self,
filename=None,
directory='.',
apiname=None,
profile=None,
versions='.*',
emitversions='.*',
defaultExtensions=None,
addExtensions=None,
removeExtensions=None,
emitExtensions=None,
sortProcedure=regSortFeatures,
prefixText="",
genFuncPointers=True,
protectFile=True,
protectFeature=True,
protectProto=None,
protectProtoStr=None,
apicall='',
apientry='',
apientryp='',
indentFuncProto=True,
indentFuncPointer=False,
alignFuncParam=0,
genEnumBeginEndRange=False):
AutomaticSourceGeneratorOptions.__init__(self, filename, directory, apiname, profile,
versions, emitversions, defaultExtensions,
addExtensions, removeExtensions,
emitExtensions, sortProcedure)
# Instead of using prefixText, we write our own
self.prefixText = None
self.genFuncPointers = genFuncPointers
self.protectFile = protectFile
self.protectFeature = protectFeature
self.protectProto = protectProto
self.protectProtoStr = protectProtoStr
self.apicall = apicall
self.apientry = apientry
self.apientryp = apientryp
self.indentFuncProto = indentFuncProto
self.indentFuncPointer = indentFuncPointer
self.alignFuncParam = alignFuncParam
self.genEnumBeginEndRange = genEnumBeginEndRange
# UtilitySourceOutputGenerator - subclass of AutomaticSourceOutputGenerator.
class UtilitySourceOutputGenerator(AutomaticSourceOutputGenerator):
"""Generate loader source using XML element attributes from registry"""
def __init__(self,
errFile=sys.stderr,
warnFile=sys.stderr,
diagFile=sys.stdout):
AutomaticSourceOutputGenerator.__init__(
self, errFile, warnFile, diagFile)
# Override the base class header warning so the comment indicates this file.
# self the UtilitySourceOutputGenerator object
def outputGeneratedHeaderWarning(self):
generated_warning = '// *********** THIS FILE IS GENERATED - DO NOT EDIT ***********\n'
generated_warning += '// See utility_source_generator.py for modifications\n'
generated_warning += '// ************************************************************\n'
write(generated_warning, file=self.outFile)
# Call the base class to properly begin the file, and then add
# the file-specific header information.
# self the UtilitySourceOutputGenerator object
# gen_opts the UtilitySourceGeneratorOptions object
def beginFile(self, genOpts):
AutomaticSourceOutputGenerator.beginFile(self, genOpts)
preamble = ''
if self.genOpts.filename == 'xr_generated_dispatch_table.h':
preamble += '#pragma once\n'
elif self.genOpts.filename == 'xr_generated_dispatch_table.c':
preamble += '#include "xr_dependencies.h"\n'
preamble += '#include <openxr/openxr.h>\n'
preamble += '#include <openxr/openxr_platform.h>\n\n'
preamble += '#include "xr_generated_dispatch_table.h"\n'
elif self.genOpts.filename == 'xr_generated_utilities.h':
preamble += '#ifndef XR_GENERATED_UTILITIES_HEADER_FILE\n'
preamble += '#define XR_GENERATED_UTILITIES_HEADER_FILE\n\n'
elif self.genOpts.filename == 'xr_generated_utilities.c':
preamble += '#ifdef _WIN32\n'
preamble += '// Disable Windows warning about using strncpy_s instead of strncpy\n'
preamble += '#define _CRT_SECURE_NO_WARNINGS 1\n'
preamble += '#endif // _WIN32\n\n'
preamble += '#include <stdio.h>\n'
preamble += '#include <string.h>\n\n'
preamble += '#include "xr_dependencies.h"\n'
preamble += '#include <openxr/openxr.h>\n'
preamble += '#include <openxr/openxr_platform.h>\n\n'
preamble += '#include "xr_generated_utilities.h"\n\n'
write(preamble, file=self.outFile)
# Write out all the information for the appropriate file,
# and then call down to the base class to wrap everything up.
    #   self                the UtilitySourceOutputGenerator object
def endFile(self):
file_data = ''
if self.genOpts.filename == 'xr_generated_dispatch_table.h':
file_data += '#ifdef __cplusplus\n'
file_data += 'extern "C" { \n'
file_data += '#endif\n'
file_data += self.outputDispatchTable()
file_data += self.outputDispatchPrototypes()
file_data += '\n'
file_data += '#ifdef __cplusplus\n'
file_data += '} // extern "C"\n'
file_data += '#endif\n'
elif self.genOpts.filename == 'xr_generated_dispatch_table.c':
file_data += '#ifdef __cplusplus\n'
file_data += 'extern "C" { \n'
file_data += '#endif\n'
file_data += self.outputDispatchTableHelper()
file_data += '#ifdef __cplusplus\n'
file_data += '} // extern "C"\n'
file_data += '#endif\n'
elif self.genOpts.filename == 'xr_generated_utilities.h':
file_data += '#ifdef __cplusplus\n'
file_data += 'extern "C" { \n'
file_data += '#endif\n'
file_data += self.outputUtilityPrototypes()
file_data += '#ifdef __cplusplus\n'
file_data += '} // extern "C"\n'
file_data += '#endif\n'
file_data += self.outputUtilityVersionDefine()
file_data += '#endif // XR_GENERATED_UTILITIES_HEADER_FILE\n'
elif self.genOpts.filename == 'xr_generated_utilities.c':
file_data += '#ifdef __cplusplus\n'
file_data += 'extern "C" { \n'
file_data += '#endif\n'
file_data += self.outputUtilityFuncs()
file_data += '#ifdef __cplusplus\n'
file_data += '} // extern "C"\n'
file_data += '#endif\n'
write(file_data, file=self.outFile)
# Finish processing in superclass
AutomaticSourceOutputGenerator.endFile(self)
# Create a prototype for each of the utility objects.
    #   self                the UtilitySourceOutputGenerator object
def outputUtilityPrototypes(self):
utility_prototypes = '\n'
utility_prototypes += 'XrResult GeneratedXrUtilitiesResultToString(XrResult result,\n'
utility_prototypes += ' char buffer[XR_MAX_RESULT_STRING_SIZE]);\n'
utility_prototypes += 'XrResult GeneratedXrUtilitiesStructureTypeToString(XrStructureType struct_type,\n'
utility_prototypes += ' char buffer[XR_MAX_STRUCTURE_NAME_SIZE]);\n'
return utility_prototypes
# Generate a variable the loader can use to indicate what API version it is.
    #   self                the UtilitySourceOutputGenerator object
def outputUtilityVersionDefine(self):
        cur_loader_version = '\n// Current API version of the utilities\n#define XR_UTILITIES_API_VERSION '
cur_loader_version += self.api_version_define
cur_loader_version += '\n'
return cur_loader_version
# A special-case handling of the "xrResultToString" command. Since we can actually
# do the work in the loader, write the command to convert from a result to the
# appropriate string. We need the command information from automatic_source_generator
# so we can use the correct names for each parameter when writing the output.
    #   self                the UtilitySourceOutputGenerator object
def outputResultToString(self):
result_to_str = ''
count = 0
result_to_str += 'XrResult GeneratedXrUtilitiesResultToString(XrResult result,\n'
result_to_str += ' char buffer[XR_MAX_RESULT_STRING_SIZE]) {\n'
indent = 1
result_to_str += self.writeIndent(indent)
result_to_str += 'XrResult int_result = XR_SUCCESS;\n'
result_to_str += self.writeIndent(indent)
result_to_str += 'switch (result) {\n'
indent = indent + 1
for enum_tuple in self.api_enums:
if enum_tuple.name == 'XrResult':
if enum_tuple.protect_value:
result_to_str += '#if %s\n' % enum_tuple.protect_string
for cur_value in enum_tuple.values:
if cur_value.protect_value and enum_tuple.protect_value != cur_value.protect_value:
result_to_str += '#if %s\n' % cur_value.protect_string
result_to_str += self.writeIndent(indent)
result_to_str += 'case %s:\n' % cur_value.name
indent = indent + 1
result_to_str += self.writeIndent(indent)
result_to_str += 'strncpy(buffer, "%s", XR_MAX_RESULT_STRING_SIZE);\n' % cur_value.name
result_to_str += self.writeIndent(indent)
result_to_str += 'break;\n'
indent = indent - 1
if cur_value.protect_value and enum_tuple.protect_value != cur_value.protect_value:
result_to_str += '#endif // %s\n' % cur_value.protect_string
count = count + 1
if enum_tuple.protect_value:
result_to_str += '#endif // %s\n' % enum_tuple.protect_string
break
result_to_str += self.writeIndent(indent)
result_to_str += 'default:\n'
result_to_str += self.writeIndent(indent + 1)
result_to_str += '// Unknown result type\n'
result_to_str += self.writeIndent(indent + 1)
result_to_str += 'if (XR_SUCCEEDED(result)) {\n'
result_to_str += self.writeIndent(indent + 2)
result_to_str += 'snprintf(buffer, XR_MAX_RESULT_STRING_SIZE, "XR_UNKNOWN_SUCCESS_%d", result);\n'
result_to_str += self.writeIndent(indent + 1)
result_to_str += '} else {\n'
result_to_str += self.writeIndent(indent + 2)
result_to_str += 'snprintf(buffer, XR_MAX_RESULT_STRING_SIZE, "XR_UNKNOWN_FAILURE_%d", result);\n'
result_to_str += self.writeIndent(indent + 1)
result_to_str += '}\n'
result_to_str += self.writeIndent(indent + 1)
result_to_str += 'int_result = XR_ERROR_VALIDATION_FAILURE;\n'
result_to_str += self.writeIndent(indent + 1)
result_to_str += 'break;\n'
indent = indent - 1
result_to_str += self.writeIndent(indent)
result_to_str += '}\n'
result_to_str += self.writeIndent(indent)
result_to_str += 'return int_result;\n'
result_to_str += '}\n\n'
return result_to_str
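    # For reference, the C code emitted by outputResultToString looks roughly
    # like this (abridged; the real case labels come from the XrResult enum in
    # the registry):
    #   XrResult GeneratedXrUtilitiesResultToString(XrResult result,
    #                                               char buffer[XR_MAX_RESULT_STRING_SIZE]) {
    #       XrResult int_result = XR_SUCCESS;
    #       switch (result) {
    #           case XR_SUCCESS:
    #               strncpy(buffer, "XR_SUCCESS", XR_MAX_RESULT_STRING_SIZE);
    #               break;
    #           ...
    #       }
    #       return int_result;
    #   }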
# A special-case handling of the "StructureTypeToString" command. Since we can actually
# do the work in the loader, write the command to convert from a structure type to the
# appropriate string. We need the command information from automatic_source_generator
# so we can use the correct names for each parameter when writing the output.
    #   self                the UtilitySourceOutputGenerator object
def outputStructTypeToString(self):
struct_to_str = ''
count = 0
struct_to_str = 'XrResult GeneratedXrUtilitiesStructureTypeToString(XrStructureType struct_type,\n'
struct_to_str += ' char buffer[XR_MAX_STRUCTURE_NAME_SIZE]) {\n'
indent = 1
struct_to_str += self.writeIndent(indent)
struct_to_str += 'XrResult int_result = XR_SUCCESS;\n'
struct_to_str += self.writeIndent(indent)
struct_to_str += 'switch (struct_type) {\n'
indent = indent + 1
for enum_tuple in self.api_enums:
if enum_tuple.name == 'XrStructureType':
if enum_tuple.protect_value:
struct_to_str += '#if %s\n' % enum_tuple.protect_string
for cur_value in enum_tuple.values:
if cur_value.protect_value and enum_tuple.protect_value != cur_value.protect_value:
struct_to_str += '#if %s\n' % cur_value.protect_string
struct_to_str += self.writeIndent(indent)
struct_to_str += 'case %s:\n' % cur_value.name
indent = indent + 1
struct_to_str += self.writeIndent(indent)
struct_to_str += 'strncpy(buffer, "%s", XR_MAX_STRUCTURE_NAME_SIZE);\n' % cur_value.name
struct_to_str += self.writeIndent(indent)
struct_to_str += 'break;\n'
indent = indent - 1
if cur_value.protect_value and enum_tuple.protect_value != cur_value.protect_value:
struct_to_str += '#endif // %s\n' % cur_value.protect_string
count = count + 1
if enum_tuple.protect_value:
struct_to_str += '#endif // %s\n' % enum_tuple.protect_string
break
struct_to_str += self.writeIndent(indent)
struct_to_str += 'default:\n'
struct_to_str += self.writeIndent(indent + 1)
struct_to_str += '// Unknown structure type\n'
struct_to_str += self.writeIndent(indent + 1)
struct_to_str += 'snprintf(buffer, XR_MAX_STRUCTURE_NAME_SIZE, "XR_UNKNOWN_STRUCTURE_TYPE_%d", struct_type);\n'
struct_to_str += self.writeIndent(indent + 1)
struct_to_str += 'int_result = XR_ERROR_VALIDATION_FAILURE;\n'
struct_to_str += self.writeIndent(indent + 1)
struct_to_str += 'break;\n'
indent = indent - 1
struct_to_str += self.writeIndent(indent)
struct_to_str += '}\n'
struct_to_str += self.writeIndent(indent)
struct_to_str += 'return int_result;\n'
struct_to_str += '}\n\n'
return struct_to_str
# Output utility generated functions.
    # self the UtilitySourceOutputGenerator object
def outputUtilityFuncs(self):
utility_funcs = '\n'
utility_funcs += self.outputResultToString()
utility_funcs += self.outputStructTypeToString()
return utility_funcs
# Write out a prototype for a C-style command to populate a Dispatch table
# self the ApiDumpOutputGenerator object
def outputDispatchPrototypes(self):
table_helper = '\n'
table_helper += '// Prototype for dispatch table helper function\n'
table_helper += 'void GeneratedXrPopulateDispatchTable(struct XrGeneratedDispatchTable *table,\n'
table_helper += ' XrInstance instance,\n'
table_helper += ' PFN_xrGetInstanceProcAddr get_inst_proc_addr);\n'
return table_helper
# Write out a C-style structure used to store the Dispatch table information
# self the ApiDumpOutputGenerator object
def outputDispatchTable(self):
commands = []
table = ''
cur_extension_name = ''
table += '// Generated dispatch table\n'
table += 'struct XrGeneratedDispatchTable {\n'
# Loop through both core commands, and extension commands
# Outputting the core commands first, and then the extension commands.
for x in range(0, 2):
if x == 0:
commands = self.core_commands
else:
commands = self.ext_commands
for cur_cmd in commands:
# If we've switched to a new "feature" print out a comment on what it is. Usually,
# this is a group of core commands or a group of commands in an extension.
if cur_cmd.ext_name != cur_extension_name:
if self.isCoreExtensionName(cur_cmd.ext_name):
table += '\n // ---- Core %s commands\n' % cur_cmd.ext_name[11:].replace(
"_", ".")
else:
table += '\n // ---- %s extension commands\n' % cur_cmd.ext_name
cur_extension_name = cur_cmd.ext_name
# Remove 'xr' from proto name
base_name = cur_cmd.name[2:]
# If a protect statement exists, use it.
if cur_cmd.protect_value:
table += '#if %s\n' % cur_cmd.protect_string
                # Write out each command using its function pointer
table += ' PFN_%s %s;\n' % (cur_cmd.name, base_name)
# If a protect statement exists, wrap it up.
if cur_cmd.protect_value:
table += '#endif // %s\n' % cur_cmd.protect_string
table += '};\n\n'
return table
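
    # The struct assembled above renders to C along these lines (a sketch;
    # the command list and extension groupings depend on the registry):
    #
    #   // Generated dispatch table
    #   struct XrGeneratedDispatchTable {
    #
    #       // ---- Core 1.0 commands
    #       PFN_xrGetInstanceProcAddr GetInstanceProcAddr;
    #       ...
    #   };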
# Write out the helper function that will populate a dispatch table using
# an instance handle and a corresponding xrGetInstanceProcAddr command.
# self the ApiDumpOutputGenerator object
def outputDispatchTableHelper(self):
commands = []
table_helper = ''
cur_extension_name = ''
table_helper += '// Helper function to populate an instance dispatch table\n'
table_helper += 'void GeneratedXrPopulateDispatchTable(struct XrGeneratedDispatchTable *table,\n'
table_helper += ' XrInstance instance,\n'
table_helper += ' PFN_xrGetInstanceProcAddr get_inst_proc_addr) {\n'
# Loop through both core commands, and extension commands
# Outputting the core commands first, and then the extension commands.
for x in range(0, 2):
if x == 0:
commands = self.core_commands
else:
commands = self.ext_commands
for cur_cmd in commands:
# If the command is only manually implemented in the loader,
# it is not needed anywhere else, so skip it.
if cur_cmd.name in NO_TRAMPOLINE_OR_TERMINATOR:
continue
# If we've switched to a new "feature" print out a comment on what it is. Usually,
# this is a group of core commands or a group of commands in an extension.
if cur_cmd.ext_name != cur_extension_name:
if self.isCoreExtensionName(cur_cmd.ext_name):
table_helper += '\n // ---- Core %s commands\n' % cur_cmd.ext_name[11:].replace(
"_", ".")
else:
table_helper += '\n // ---- %s extension commands\n' % cur_cmd.ext_name
cur_extension_name = cur_cmd.ext_name
# Remove 'xr' from proto name
base_name = cur_cmd.name[2:]
if cur_cmd.protect_value:
table_helper += '#if %s\n' % cur_cmd.protect_string
if cur_cmd.name == 'xrGetInstanceProcAddr':
# If the command we're filling in is the xrGetInstanceProcAddr command, use
# the one passed into this helper function.
table_helper += ' table->GetInstanceProcAddr = get_inst_proc_addr;\n'
else:
# Otherwise, fill in the dispatch table with an xrGetInstanceProcAddr call
# to the appropriate command.
table_helper += ' (get_inst_proc_addr(instance, "%s", (PFN_xrVoidFunction*)&table->%s));\n' % (
cur_cmd.name, base_name)
if cur_cmd.protect_value:
table_helper += '#endif // %s\n' % cur_cmd.protect_string
table_helper += '}\n\n'
return table_helper
| 49.191874 | 126 | 0.598109 |
058cd126080b8dcb22d6d4f9d19205851c09eec7 | 6,131 | py | Python | tests/cupy_tests/cuda_tests/test_memory.py | PhysicsTeacher13/CHAINER | 64018f7c6956c8ea42220e2e4bd55f7ff30df097 | [
"BSD-3-Clause"
] | null | null | null | tests/cupy_tests/cuda_tests/test_memory.py | PhysicsTeacher13/CHAINER | 64018f7c6956c8ea42220e2e4bd55f7ff30df097 | [
"BSD-3-Clause"
] | null | null | null | tests/cupy_tests/cuda_tests/test_memory.py | PhysicsTeacher13/CHAINER | 64018f7c6956c8ea42220e2e4bd55f7ff30df097 | [
"BSD-3-Clause"
] | null | null | null | import ctypes
import unittest
import cupy.cuda
from cupy.cuda import memory
from cupy import testing
class MockMemory(memory.Memory):
cur_ptr = 1
def __init__(self, size):
self.ptr = MockMemory.cur_ptr
MockMemory.cur_ptr += size
self.size = size
self.device = None
def __del__(self):
self.ptr = 0
def mock_alloc(size):
mem = MockMemory(size)
return memory.MemoryPointer(mem, 0)
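

# Illustrative note (not part of the original suite): MockMemory hands out
# monotonically increasing fake addresses, so successive mock allocations
# never alias and the pointer-identity assertions below are meaningful.
#
#   p1 = mock_alloc(8)   # p1 points at the previous MockMemory.cur_ptr
#   p2 = mock_alloc(8)   # int(p2) == int(p1) + 8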
# -----------------------------------------------------------------------------
# Memory pointer
@testing.gpu
class TestMemoryPointer(unittest.TestCase):
def test_int(self):
pval = MockMemory.cur_ptr
memptr = mock_alloc(1)
self.assertEqual(pval, int(memptr))
def test_add(self):
pval = MockMemory.cur_ptr
memptr = mock_alloc(8)
memptr2 = memptr + 4
self.assertIsInstance(memptr2, memory.MemoryPointer)
self.assertEqual(pval + 4, int(memptr2))
memptr3 = 4 + memptr
self.assertIsInstance(memptr3, memory.MemoryPointer)
self.assertEqual(pval + 4, int(memptr3))
memptr += 4
self.assertIsInstance(memptr, memory.MemoryPointer)
self.assertEqual(pval + 4, int(memptr))
def test_sub(self):
pval = MockMemory.cur_ptr
memptr = mock_alloc(8) + 4
memptr2 = memptr - 4
self.assertIsInstance(memptr2, memory.MemoryPointer)
self.assertEqual(pval, int(memptr2))
memptr -= 4
self.assertIsInstance(memptr, memory.MemoryPointer)
self.assertEqual(pval, int(memptr))
def test_copy_to_and_from_host(self):
a_gpu = memory.alloc(4)
a_cpu = ctypes.c_int(100)
a_gpu.copy_from(ctypes.cast(ctypes.byref(a_cpu), ctypes.c_void_p), 4)
b_cpu = ctypes.c_int()
a_gpu.copy_to_host(
ctypes.cast(ctypes.byref(b_cpu), ctypes.c_void_p), 4)
self.assertEqual(b_cpu.value, a_cpu.value)
def test_copy_from_device(self):
a_gpu = memory.alloc(4)
a_cpu = ctypes.c_int(100)
a_gpu.copy_from(ctypes.cast(ctypes.byref(a_cpu), ctypes.c_void_p), 4)
b_gpu = memory.alloc(4)
b_gpu.copy_from(a_gpu, 4)
b_cpu = ctypes.c_int()
b_gpu.copy_to_host(
ctypes.cast(ctypes.byref(b_cpu), ctypes.c_void_p), 4)
self.assertEqual(b_cpu.value, a_cpu.value)
def test_memset(self):
a_gpu = memory.alloc(4)
a_gpu.memset(1, 4)
a_cpu = ctypes.c_ubyte()
for i in range(4):
a_gpu.copy_to_host(
ctypes.cast(ctypes.byref(a_cpu), ctypes.c_void_p), 1)
self.assertEqual(a_cpu.value, 1)
a_gpu += 1
# -----------------------------------------------------------------------------
# Memory pool
@testing.gpu
class TestSingleDeviceMemoryPool(unittest.TestCase):
def setUp(self):
self.pool = memory.SingleDeviceMemoryPool(allocator=mock_alloc)
def test_alloc(self):
p1 = self.pool.malloc(1000)
p2 = self.pool.malloc(1000)
p3 = self.pool.malloc(2000)
self.assertNotEqual(p1.ptr, p2.ptr)
self.assertNotEqual(p1.ptr, p3.ptr)
self.assertNotEqual(p2.ptr, p3.ptr)
def test_free(self):
p1 = self.pool.malloc(1000)
ptr1 = p1.ptr
del p1
p2 = self.pool.malloc(1000)
self.assertEqual(ptr1, p2.ptr)
def test_free_different_size(self):
p1 = self.pool.malloc(1000)
ptr1 = p1.ptr
del p1
p2 = self.pool.malloc(2000)
self.assertNotEqual(ptr1, p2.ptr)
def test_free_all_blocks(self):
p1 = self.pool.malloc(1000)
ptr1 = p1.ptr
del p1
self.pool.free_all_blocks()
p2 = self.pool.malloc(1000)
self.assertNotEqual(ptr1, p2.ptr)
def test_free_all_free(self):
p1 = self.pool.malloc(1000)
ptr1 = p1.ptr
del p1
self.pool.free_all_free()
p2 = self.pool.malloc(1000)
self.assertNotEqual(ptr1, p2.ptr)
@testing.gpu
class TestMemoryPool(unittest.TestCase):
def setUp(self):
self.pool = memory.MemoryPool()
def test_zero_size_alloc(self):
with cupy.cuda.Device(0):
mem = self.pool.malloc(0).mem
self.assertIsInstance(mem, memory.Memory)
self.assertNotIsInstance(mem, memory.PooledMemory)
def test_double_free(self):
with cupy.cuda.Device(0):
mem = self.pool.malloc(1).mem
mem.free()
mem.free()
def test_free_all_blocks(self):
with cupy.cuda.Device(0):
mem = self.pool.malloc(1).mem
self.assertIsInstance(mem, memory.Memory)
self.assertIsInstance(mem, memory.PooledMemory)
self.assertEqual(self.pool.n_free_blocks(), 0)
mem.free()
self.assertEqual(self.pool.n_free_blocks(), 1)
self.pool.free_all_blocks()
self.assertEqual(self.pool.n_free_blocks(), 0)
def test_free_all_blocks_without_malloc(self):
with cupy.cuda.Device(0):
# call directly without malloc.
self.pool.free_all_blocks()
self.assertEqual(self.pool.n_free_blocks(), 0)
def test_free_all_free(self):
with cupy.cuda.Device(0):
mem = self.pool.malloc(1).mem
self.assertIsInstance(mem, memory.Memory)
self.assertIsInstance(mem, memory.PooledMemory)
self.assertEqual(self.pool.n_free_blocks(), 0)
mem.free()
self.assertEqual(self.pool.n_free_blocks(), 1)
self.pool.free_all_free()
self.assertEqual(self.pool.n_free_blocks(), 0)
def test_free_all_free_without_malloc(self):
with cupy.cuda.Device(0):
# call directly without malloc.
self.pool.free_all_free()
self.assertEqual(self.pool.n_free_blocks(), 0)
def test_n_free_blocks_without_malloc(self):
with cupy.cuda.Device(0):
# call directly without malloc/free_all_free.
self.assertEqual(self.pool.n_free_blocks(), 0)
| 30.053922 | 79 | 0.602349 |
5dc20ed4d850bac830e27240c8ca5946967c396e | 1,712 | py | Python | aliyun-python-sdk-reid/aliyunsdkreid/request/v20190928/PullTakeShoesEventRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 1,001 | 2015-07-24T01:32:41.000Z | 2022-03-25T01:28:18.000Z | aliyun-python-sdk-reid/aliyunsdkreid/request/v20190928/PullTakeShoesEventRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 363 | 2015-10-20T03:15:00.000Z | 2022-03-08T12:26:19.000Z | aliyun-python-sdk-reid/aliyunsdkreid/request/v20190928/PullTakeShoesEventRequest.py | yndu13/aliyun-openapi-python-sdk | 12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5 | [
"Apache-2.0"
] | 682 | 2015-09-22T07:19:02.000Z | 2022-03-22T09:51:46.000Z | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkreid.endpoint import endpoint_data
class PullTakeShoesEventRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'reid', '2019-09-28', 'PullTakeShoesEvent','1.1.8.5')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Date(self):
return self.get_body_params().get('Date')
def set_Date(self,Date):
self.add_body_params('Date', Date)
def get_StoreId(self):
return self.get_body_params().get('StoreId')
def set_StoreId(self,StoreId):
self.add_body_params('StoreId', StoreId)
def get_SkuId(self):
return self.get_body_params().get('SkuId')
def set_SkuId(self,SkuId):
self.add_body_params('SkuId', SkuId) | 34.24 | 82 | 0.752336 |
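
# Minimal usage sketch (assumes valid Alibaba Cloud credentials; AcsClient and
# do_action_with_exception are standard aliyunsdkcore usage, while the region
# and IDs below are placeholders):
#
#   from aliyunsdkcore.client import AcsClient
#   client = AcsClient('<access-key-id>', '<access-key-secret>', 'cn-hangzhou')
#   request = PullTakeShoesEventRequest()
#   request.set_StoreId('store-001')
#   request.set_Date('2021-01-01')
#   response = client.do_action_with_exception(request)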
3a7ba97d6bfc04a4b61127a34d5b2ad3d7354bef | 5,985 | py | Python | sdks/python/http_client/v1/polyaxon_sdk/models/v1_list_searches_response.py | onilton/polyaxon | 3b0d7cbeead74e62eb0eedbb2962f605ebb9fa81 | [
"Apache-2.0"
] | null | null | null | sdks/python/http_client/v1/polyaxon_sdk/models/v1_list_searches_response.py | onilton/polyaxon | 3b0d7cbeead74e62eb0eedbb2962f605ebb9fa81 | [
"Apache-2.0"
] | null | null | null | sdks/python/http_client/v1/polyaxon_sdk/models/v1_list_searches_response.py | onilton/polyaxon | 3b0d7cbeead74e62eb0eedbb2962f605ebb9fa81 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.9.4
Contact: contact@polyaxon.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1ListSearchesResponse(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'count': 'int',
'results': 'list[V1Search]',
'previous': 'str',
'next': 'str'
}
attribute_map = {
'count': 'count',
'results': 'results',
'previous': 'previous',
'next': 'next'
}
def __init__(self, count=None, results=None, previous=None, next=None, local_vars_configuration=None): # noqa: E501
"""V1ListSearchesResponse - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._count = None
self._results = None
self._previous = None
self._next = None
self.discriminator = None
if count is not None:
self.count = count
if results is not None:
self.results = results
if previous is not None:
self.previous = previous
if next is not None:
self.next = next
@property
def count(self):
"""Gets the count of this V1ListSearchesResponse. # noqa: E501
:return: The count of this V1ListSearchesResponse. # noqa: E501
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this V1ListSearchesResponse.
:param count: The count of this V1ListSearchesResponse. # noqa: E501
:type: int
"""
self._count = count
@property
def results(self):
"""Gets the results of this V1ListSearchesResponse. # noqa: E501
:return: The results of this V1ListSearchesResponse. # noqa: E501
:rtype: list[V1Search]
"""
return self._results
@results.setter
def results(self, results):
"""Sets the results of this V1ListSearchesResponse.
:param results: The results of this V1ListSearchesResponse. # noqa: E501
:type: list[V1Search]
"""
self._results = results
@property
def previous(self):
"""Gets the previous of this V1ListSearchesResponse. # noqa: E501
:return: The previous of this V1ListSearchesResponse. # noqa: E501
:rtype: str
"""
return self._previous
@previous.setter
def previous(self, previous):
"""Sets the previous of this V1ListSearchesResponse.
:param previous: The previous of this V1ListSearchesResponse. # noqa: E501
:type: str
"""
self._previous = previous
@property
def next(self):
"""Gets the next of this V1ListSearchesResponse. # noqa: E501
:return: The next of this V1ListSearchesResponse. # noqa: E501
:rtype: str
"""
return self._next
@next.setter
def next(self, next):
"""Sets the next of this V1ListSearchesResponse.
:param next: The next of this V1ListSearchesResponse. # noqa: E501
:type: str
"""
self._next = next
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1ListSearchesResponse):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1ListSearchesResponse):
return True
return self.to_dict() != other.to_dict()
| 27.708333 | 120 | 0.597327 |
b8be9c32211bd94d19dd347849f3c6fdd709c5d3 | 1,145 | py | Python | UserModel/migrations/0001_initial.py | CryptoCompetition2019-RNG/AuthServer | c22e2b13af2cc51f62fdc55e3f682eb344d4fbcb | [
"Apache-2.0"
] | null | null | null | UserModel/migrations/0001_initial.py | CryptoCompetition2019-RNG/AuthServer | c22e2b13af2cc51f62fdc55e3f682eb344d4fbcb | [
"Apache-2.0"
] | 10 | 2020-06-05T23:28:04.000Z | 2022-03-12T00:02:52.000Z | UserModel/migrations/0001_initial.py | CryptoCompetition2019-RNG/AuthServer | c22e2b13af2cc51f62fdc55e3f682eb344d4fbcb | [
"Apache-2.0"
] | null | null | null | # Generated by Django 2.2.5 on 2019-10-21 03:24
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='UserModel',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user_name', models.CharField(help_text='At most 128 characters; uniquely identifies the user, but is not the primary key', max_length=64, unique=True, verbose_name='username')),
                ('hash_IMEI', models.CharField(help_text='Hash of the phone IMEI; must be unique (choose a hash function that avoids collisions)', max_length=64, unique=True, verbose_name='IMEI hash')),
                ('salt', models.CharField(help_text='This salt value changes with the timestamp', max_length=64, verbose_name='salt')),
('A_pwd', models.CharField(max_length=64)),
('B_pwd', models.CharField(max_length=64)),
('random_value1', models.CharField(max_length=64)),
('random_value2', models.CharField(max_length=64)),
('random_value3', models.CharField(max_length=64)),
],
),
]
| 39.482759 | 148 | 0.61048 |
4814c0724f27b85529dac2f350d37b70f1e89d31 | 5,855 | py | Python | docs/conf.py | mt-inside/tarmak | ba3d95dbf54cc321156a0c21399249768d4e37c1 | [
"Apache-2.0"
] | 1 | 2021-10-15T13:43:54.000Z | 2021-10-15T13:43:54.000Z | docs/conf.py | mt-inside/tarmak | ba3d95dbf54cc321156a0c21399249768d4e37c1 | [
"Apache-2.0"
] | 6 | 2021-09-28T06:04:47.000Z | 2022-02-26T12:25:30.000Z | docs/conf.py | mt-inside/tarmak | ba3d95dbf54cc321156a0c21399249768d4e37c1 | [
"Apache-2.0"
] | 1 | 2021-10-15T13:43:45.000Z | 2021-10-15T13:43:45.000Z | # Copyright Jetstack Ltd. See LICENSE for details.
# -*- coding: utf-8 -*-
#
# Tarmak documentation build configuration file, created by
# sphinx-quickstart on Mon Sep 4 15:49:00 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinxcontrib.spelling',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Tarmak'
copyright = u'2017, Jetstack'
author = u'Jetstack'
html_logo = "static/logo-tarmak.png"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u"0.3"
# The full version, including alpha/beta/rc tags.
release = u"0.3"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'venv']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'logo_only': True,
}
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Tarmakdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Tarmak.tex', u'Tarmak Documentation', u'Jetstack', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'tarmak', u'Tarmak Documentation', [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Tarmak', u'Tarmak Documentation', author, 'Tarmak',
'One line description of project.', 'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
spelling_lang = "en_GB"
def setup(app):
app.add_stylesheet('css/custom.css')
| 30.180412 | 79 | 0.680102 |
3fc5341451e4c0605177b6e4f86ea507d19449e9 | 2,066 | py | Python | workflow/scripts/tsinfer-gnn.py | percyfal/wg-genealogy-smk | f9ed6e5876ca20c6693625237e57b22acd3f4cee | [
"MIT"
] | 1 | 2022-03-21T17:43:08.000Z | 2022-03-21T17:43:08.000Z | workflow/scripts/tsinfer-gnn.py | percyfal/wg-genealogy-smk | f9ed6e5876ca20c6693625237e57b22acd3f4cee | [
"MIT"
] | null | null | null | workflow/scripts/tsinfer-gnn.py | percyfal/wg-genealogy-smk | f9ed6e5876ca20c6693625237e57b22acd3f4cee | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import re
import sys
import json
import pandas as pd
import tskit
ts = tskit.load(snakemake.input.trees)
mode = snakemake.wildcards.mode
if mode == "population":
samples_listed_by_group = [
ts.samples(population=pop_id) for pop_id in range(ts.num_populations)
]
elif mode == "individual":
samples_listed_by_group = [ind.nodes for ind in ts.individuals()]
gnn = ts.genealogical_nearest_neighbours(ts.samples(), samples_listed_by_group)
# Recall: a sample is one observed sequence. A diploid individual
# consists of two samples.
sample_nodes = [ts.node(n) for n in ts.samples()]
sample_node_ids = [n.id for n in sample_nodes]
sample_names = [
json.loads(ts.individual(n.individual).metadata)["SM"] for n in sample_nodes
]
def make_unique(x):
indices = []
seen = {}
for y in x:
if y not in seen:
seen[y] = 0
else:
seen[y] = seen[y] + 1
indices.append(seen[y])
return indices
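
# Quick illustration of make_unique (hypothetical input, not pipeline data):
# repeated names receive per-occurrence counters, first occurrences counting 0.
#
#   make_unique(["s1", "s1", "s2"])  # -> [0, 1, 0]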
sample_names_unique = list(
map(lambda x: f"{x[0]}/{x[1]}", zip(sample_names, make_unique(sample_names)))
)
sample_node_pop_ids = [ts.population(n.population).id for n in sample_nodes]
sample_node_pops = [
json.loads(ts.population(i).metadata.decode())["population"]
for i in sample_node_pop_ids
]
if mode == "population":
columns = [json.loads(p.metadata)["population"] for p in ts.populations()]
elif mode == "individual":
columns = [json.loads(ind.metadata)["SM"] for ind in ts.individuals()]
# Could possibly add metadata information in populations here
gnn_table = pd.DataFrame(
data=gnn,
index=[
pd.Index(sample_node_ids, name="sample_node_id"),
pd.Index(sample_names, name="sample_name"),
pd.Index(sample_names_unique, name="sample_name_unique"),
pd.Index(sample_node_pop_ids, name="sample_node_population_id"),
pd.Index(sample_node_pops, name="sample_node_population"),
],
columns=columns,
)
# Save gnn table
gnn_table.to_csv(snakemake.output.gnn, header=True)
| 28.30137 | 81 | 0.689255 |
d84d74041cedc2ff191ea7bdde87fccf3b1477b7 | 681 | py | Python | compliance/utils/__init__.py | Invidence/auditree-framework | 39fbef32aa5066449485af501a74051838dc45f6 | [
"Apache-2.0"
] | null | null | null | compliance/utils/__init__.py | Invidence/auditree-framework | 39fbef32aa5066449485af501a74051838dc45f6 | [
"Apache-2.0"
] | 15 | 2020-11-10T23:01:35.000Z | 2021-08-19T23:30:27.000Z | compliance/utils/__init__.py | dlminvestments/auditree-framework | 19858c17797a7626fe20f0489d1aab163c6d69ec | [
"Apache-2.0"
] | null | null | null | # -*- mode:python; coding:utf-8 -*-
# Copyright (c) 2020 IBM Corp. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compliance automation utilities package."""
| 42.5625 | 74 | 0.748899 |
c65df0aa2ecca35449ea4c52883c674c0b732ff8 | 159 | py | Python | fundamentals/list_unpacking.py | fredsonchaves07/python-fundamentals | 4aee479c48f86319a2041e35ea985f971393c2ce | [
"MIT"
] | null | null | null | fundamentals/list_unpacking.py | fredsonchaves07/python-fundamentals | 4aee479c48f86319a2041e35ea985f971393c2ce | [
"MIT"
] | null | null | null | fundamentals/list_unpacking.py | fredsonchaves07/python-fundamentals | 4aee479c48f86319a2041e35ea985f971393c2ce | [
"MIT"
] | null | null | null | """
- Podemos realizar a operação de desempacotamento de listas
"""
lista = ['Python', 'Java', 'Javascript']
python, java, javascript = lista
print(python) | 17.666667 | 59 | 0.698113 |
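
# Starred unpacking extends the example above to lists whose length does not
# match the number of plain targets (variable names here are illustrative):
first_language, *remaining = lista
print(remaining)  # ['Java', 'Javascript']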
0357ef08a55981e29f07ac5b3b07f8ad534c3ff4 | 144 | py | Python | DungeonCrawl/Model/GameObjects/Sword.py | BoogyWinterfell/friday-fun | 686e8c53415e748db437e74c3e2b02afcd9695bf | [
"MIT"
] | null | null | null | DungeonCrawl/Model/GameObjects/Sword.py | BoogyWinterfell/friday-fun | 686e8c53415e748db437e74c3e2b02afcd9695bf | [
"MIT"
] | null | null | null | DungeonCrawl/Model/GameObjects/Sword.py | BoogyWinterfell/friday-fun | 686e8c53415e748db437e74c3e2b02afcd9695bf | [
"MIT"
] | null | null | null | from dataclasses import dataclass
from DungeonCrawl.Model.GameObjects.Abstract.Weapon import Weapon
@dataclass
class Sword(Weapon):
pass
| 16 | 65 | 0.8125 |
7a95643dfa8762f85fec236a361696f90866b5a6 | 3,552 | py | Python | libs/telegram/dice.py | rocketbot-cl/Telegram | e44713f6eb15460d4609d844ed5cccbbc84d4309 | [
"MIT"
] | null | null | null | libs/telegram/dice.py | rocketbot-cl/Telegram | e44713f6eb15460d4609d844ed5cccbbc84d4309 | [
"MIT"
] | null | null | null | libs/telegram/dice.py | rocketbot-cl/Telegram | e44713f6eb15460d4609d844ed5cccbbc84d4309 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# pylint: disable=R0903
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2020
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram Dice."""
from typing import Any, List, ClassVar
from telegram import TelegramObject, constants
class Dice(TelegramObject):
"""
This object represents an animated emoji with a random value for currently supported base
emoji. (The singular form of "dice" is "die". However, PTB mimics the Telegram API, which uses
the term "dice".)
Objects of this class are comparable in terms of equality. Two objects of this class are
considered equal, if their :attr:`value` and :attr:`emoji` are equal.
Note:
If :attr:`emoji` is "🎯", a value of 6 currently represents a bullseye, while a value of 1
indicates that the dartboard was missed. However, this behaviour is undocumented and might
be changed by Telegram.
If :attr:`emoji` is "🏀", a value of 4 or 5 currently score a basket, while a value of 1 to
3 indicates that the basket was missed. However, this behaviour is undocumented and might
be changed by Telegram.
If :attr:`emoji` is "⚽", a value of 3 to 5 currently scores a goal, while a value of 1 to
3 indicates that the goal was missed. However, this behaviour is undocumented and might
be changed by Telegram.
If :attr:`emoji` is "🎰", each value corresponds to a unique combination of symbols, which
can be found at our `wiki <https://git.io/JkeC6>`_. However, this behaviour is undocumented
and might be changed by Telegram.
Attributes:
value (:obj:`int`): Value of the dice.
emoji (:obj:`str`): Emoji on which the dice throw animation is based.
Args:
value (:obj:`int`): Value of the dice. 1-6 for dice and darts, 1-5 for basketball and
football/soccer ball, 1-64 for slot machine.
emoji (:obj:`str`): Emoji on which the dice throw animation is based.
"""
def __init__(self, value: int, emoji: str, **_kwargs: Any):
self.value = value
self.emoji = emoji
self._id_attrs = (self.value, self.emoji)
DICE: ClassVar[str] = constants.DICE_DICE
""":const:`telegram.constants.DICE_DICE`"""
DARTS: ClassVar[str] = constants.DICE_DARTS
""":const:`telegram.constants.DICE_DARTS`"""
BASKETBALL: ClassVar[str] = constants.DICE_BASKETBALL
""":const:`telegram.constants.DICE_BASKETBALL`"""
FOOTBALL: ClassVar[str] = constants.DICE_FOOTBALL
""":const:`telegram.constants.DICE_FOOTBALL`"""
SLOT_MACHINE: ClassVar[str] = constants.DICE_SLOT_MACHINE
""":const:`telegram.constants.DICE_SLOT_MACHINE`"""
ALL_EMOJI: ClassVar[List[str]] = constants.DICE_ALL_EMOJI
""":const:`telegram.constants.DICE_ALL_EMOJI`"""
| 44.4 | 99 | 0.701014 |
b2a8e74da3cd6d797f2e8539cbac096dbddf826d | 1,384 | py | Python | CLRS/Chapter4/max_subarray_recursive.py | Niranjan-Ananth/DS-and-Algorithms | 8a0c7331fa18dde24ff50888714bb1949af821f4 | [
"MIT"
] | null | null | null | CLRS/Chapter4/max_subarray_recursive.py | Niranjan-Ananth/DS-and-Algorithms | 8a0c7331fa18dde24ff50888714bb1949af821f4 | [
"MIT"
] | null | null | null | CLRS/Chapter4/max_subarray_recursive.py | Niranjan-Ananth/DS-and-Algorithms | 8a0c7331fa18dde24ff50888714bb1949af821f4 | [
"MIT"
] | null | null | null | def find_max_crossing_subarray(arr, low, mid, high):
left_sum = -10000
sum = 0
left = 0
right = 0
for i in range(mid, low-1, -1):
sum += arr[i]
if sum > left_sum:
left_sum = sum
left = i
    right_sum = float('-inf')
sum = 0
for i in range(mid+1, high+1):
sum += arr[i]
if sum > right_sum:
right_sum = sum
right = i
return left, right, left_sum+right_sum
def find_max_subarray_recursive(arr, low, high):
    if low == high:
        return low, high, arr[low]
    mid = (low + high) // 2
left_low, left_high, left_sum = find_max_subarray_recursive(arr, low, mid)
right_low, right_high, right_sum = find_max_subarray_recursive(arr, mid+1, high)
cross_low, cross_high, cross_sum = find_max_crossing_subarray(arr, low, mid, high)
if left_sum > right_sum and left_sum > cross_sum:
return left_low, left_high, left_sum
if right_sum > left_sum and right_sum > cross_sum:
return right_low, right_high, right_sum
return cross_low, cross_high, cross_sum
if __name__=='__main__':
arr = [12, -3, -25, 20, -3, -16, -23, 18, 20, -7, 12, -5, -22, 15, -4, 7]
left, right, max_sum = find_max_subarray_recursive(arr, 0, len(arr)-1)
print("Max subarray of sum: " + str(max_sum) + " is found between indices " + str(left) + " and " + str(right))
| 37.405405 | 115 | 0.620665 |
8aebbc975ba3b764d39a708483b8860ff79580c6 | 1,314 | py | Python | samples/getting-started/qrng/host.py | Rayislandstyle/Quantum | fa1eb9dcc64785eeefc25ba332967fc67789bc78 | [
"MIT"
] | 2 | 2020-12-21T05:59:27.000Z | 2021-03-07T01:55:57.000Z | samples/getting-started/qrng/host.py | Rayislandstyle/Quantum | fa1eb9dcc64785eeefc25ba332967fc67789bc78 | [
"MIT"
] | 2 | 2021-05-13T03:33:09.000Z | 2022-03-25T19:18:38.000Z | samples/getting-started/qrng/host.py | Rayislandstyle/Quantum | fa1eb9dcc64785eeefc25ba332967fc67789bc78 | [
"MIT"
] | 1 | 2020-07-28T01:34:44.000Z | 2020-07-28T01:34:44.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# This Python script contains a quantum random integer generator
# using the operation QuantumRandomNumberGenerator defined in
# the file qrng.qs.
# For instructions on how to install the qsharp package,
# see: https://docs.microsoft.com/quantum/install-guide/python
import qsharp
from Qrng import SampleQuantumRandomNumberGenerator # We import the
# quantum operation from the namespace defined in the file Qrng.qs
max = 50 # Here we set the maximum of our range
output = max + 1  # Start above the range so the loop body runs at least once
while output > max:
bit_string = [] # We initialise a list to store the bits that
# will define our random integer
for i in range(0, max.bit_length()): # We need to call the quantum
# operation as many times as bits are needed to define the
# maximum of our range. For example, if max=7 we need 3 bits
# to generate all the numbers from 0 to 7.
bit_string.append(SampleQuantumRandomNumberGenerator.simulate())
# Here we call the quantum operation and store the random bit
# in the list
output = int("".join(str(x) for x in bit_string), 2)
# Transform bit string to integer
print("The random number generated is " + str(output))
# We print the random number
| 42.387097 | 73 | 0.727549 |
e41d10bf745acbb9eeb46e9f07d077f0f14cc477 | 6,172 | py | Python | tracking/tracks.py | XiaoJake/EagerMOT | 99ffeee5a9d2ff9950701b00e0daedb1704e3812 | [
"MIT"
] | 2 | 2021-12-30T14:41:05.000Z | 2022-01-21T20:58:31.000Z | tracking/tracks.py | chisyliu/EagerMOT | 3733e6070e7ac644315d28c1c5c4c525cce64746 | [
"MIT"
] | null | null | null | tracking/tracks.py | chisyliu/EagerMOT | 3733e6070e7ac644315d28c1c5c4c525cce64746 | [
"MIT"
] | 1 | 2021-07-04T06:16:57.000Z | 2021-07-04T06:16:57.000Z | import copy
import pickle
from pathlib import Path
import time
from statistics import median
from typing import List, Optional, Dict, Any, Mapping
import numpy as np
from filterpy.kalman import KalmanFilter
from inputs.bbox import Bbox3d, Bbox2d, ProjectsToCam
from objects.fused_instance import FusedInstance, Source
from configs.local_variables import MOUNT_PATH
import tracking.utils_tracks as utils
from transform.transformation import Transformation
from utils.utils_geometry import convert_bbox_coordinates_to_corners, project_bbox_3d_to_2d
class Track(ProjectsToCam):
count = 0
def __init__(self, instance: FusedInstance, is_angular: bool):
"""
        Initializes a tracker using an initial bounding box.
"""
self.instance = instance
self.is_angular = is_angular
self.id = Track.count
Track.count += 1
self.age_total = 1
self.hits = 1 # number of total hits including the first detection
self.time_since_update = 0
self.time_since_3d_update = 0 if instance.bbox3d is not None else 10
self.time_since_2d_update = 0 if instance.detection_2d is not None else 10
self.mask_score_history: List[float] = []
self.kf_3d = None
self.obs_angle: Optional[float] = None
self.confidence: Optional[float] = None
if instance.bbox3d is not None:
self.init_motion_model(instance.bbox3d)
self.predicted_translation = None
self._predicted_bbox_2d_in_cam: Dict[str, Optional[Bbox2d]] = {}
def init_motion_model(self, bbox3d):
assert bbox3d is not None
self.kf_3d = utils.default_kf_3d(self.is_angular)
self.kf_3d.x[:7] = bbox3d.kf_coordinates.reshape(7, 1)
self._set_info(bbox3d)
def _set_info(self, bbox3d):
self.obs_angle = bbox3d.obs_angle
self.confidence = bbox3d.confidence
@property
def has_motion_model(self):
return self.kf_3d is not None
def predict_motion(self):
""" Advances the state vector and returns the predicted bounding box estimate. """
assert self.has_motion_model
self.instance.bbox3d.clear_2d()
old_x = self.kf_3d.x.copy()
self.kf_3d.predict()
        # keep the predicted translation so the point cloud can still be shifted by the KF prediction if this track receives no update this frame (see below)
self.predicted_translation = self.kf_3d.x[:3] - old_x[:3]
        return self.kf_3d.x.flatten()  # shape (10,)
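
    # State layout assumed above (a sketch inferred from the 7-element
    # kf_coordinates fed to the filter and the 10-element state returned):
    #
    #   x = [x, y, z, theta, l, w, h, vx, vy, vz]
    #
    # i.e. a constant-velocity model on the translation components; with
    # is_angular the filter may carry an extra angular-velocity term.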
def update_with_match(self, matched_instance: FusedInstance):
if matched_instance.bbox3d is not None:
self._update_3d_info(matched_instance)
if matched_instance.detection_2d is not None:
self._update_2d_info(matched_instance)
self.time_since_update = 0
self.hits += 1
def _update_3d_info(self, matched_instance: FusedInstance):
""" Updates the state vector with observed bbox. """
assert matched_instance.bbox3d is not None
self.time_since_3d_update = 0
if self.has_motion_model:
assert self.kf_3d is not None
# new angle needs to be corrected to be the closest match to the current angle
new_angle = matched_instance.bbox3d.kf_coordinates[3]
new_angle, angle_diff = utils.correct_new_angle_and_diff(self.kf_3d.x[3], new_angle)
assert angle_diff <= np.pi / 2, f"angle_diff {angle_diff}"
matched_instance.bbox3d.kf_coordinates[3] = new_angle
self.kf_3d.update(matched_instance.bbox3d.kf_coordinates)
else:
self.init_motion_model(matched_instance.bbox3d)
self._set_info(matched_instance.bbox3d)
self.instance = matched_instance
def _update_2d_info(self, instance_from_mask: FusedInstance):
# set mask, bbox_2d, etc. but keep 3D fields
self.instance.set_with_instance_from_mask(instance_from_mask)
self.time_since_2d_update = 0
def reset_for_new_frame(self):
self.age_total += 1
self.time_since_update += 1
self.time_since_3d_update += 1
self.time_since_2d_update += 1
self.instance.reset_seg(keep_matching_info=True)
self._predicted_bbox_2d_in_cam = {}
@property
def current_bbox_3d_coordinates(self):
assert self.has_motion_model
return self.kf_3d.x[:7].reshape(7,)
def current_bbox_3d(self, ego_transform, angle_around_y) -> Optional[Bbox3d]:
""" Returns the current bounding box estimate. """
if not self.has_motion_model:
return None
bbox = Bbox3d.from_pointrcnn(self.current_bbox_3d_coordinates.copy())
if ego_transform is not None and angle_around_y is not None:
bbox.inverse_transform(ego_transform, angle_around_y)
bbox.obs_angle = self.obs_angle
bbox.confidence = self.confidence
return bbox
def current_instance(self, ego_transform, angle_around_y, min_hits=1) -> FusedInstance:
if ego_transform is None or angle_around_y is None:
return copy.deepcopy(self.instance)
local_frame_instance = copy.deepcopy(self.instance)
local_frame_instance.inverse_transform(ego_transform, angle_around_y)
return local_frame_instance
def bbox_2d_in_cam(self, cam: str) -> Optional[Bbox2d]:
return self._predicted_bbox_2d_in_cam[cam]
def predicted_bbox_2d_in_cam(self, ego_transform, angle_around_y,
transformation: Transformation, img_shape_per_cam: Mapping[str, Any],
cam: str, frame_data: Mapping[str, Any]) -> Optional[Bbox2d]:
self._predicted_bbox_2d_in_cam[cam] = self.instance.bbox_2d_best(cam)
bbox_3d = self.current_bbox_3d(ego_transform, angle_around_y)
if bbox_3d is not None:
bbox_2d = project_bbox_3d_to_2d(bbox_3d, transformation, img_shape_per_cam, cam, frame_data)
if bbox_2d is not None:
self._predicted_bbox_2d_in_cam[cam] = Bbox2d(*bbox_2d)
return self._predicted_bbox_2d_in_cam[cam]
@property
def class_id(self):
return self.instance.class_id
| 40.077922 | 105 | 0.68908 |