hexsha
stringlengths 40
40
| size
int64 2
1.05M
| ext
stringclasses 9
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
193
| max_stars_repo_name
stringlengths 6
109
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
36.6k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
193
| max_issues_repo_name
stringlengths 6
109
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
29.8k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
193
| max_forks_repo_name
stringlengths 6
109
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
11.2k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
1.05M
| avg_line_length
float64 1
404k
| max_line_length
int64 1
1.03M
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f70001f658d4dfaa72dd4f0d1b3176492f6658bb | 6,442 | py | Python | spider/openwrt.py | CNDB/CNDB | 2e3a41111f604cf2f4f22a7c9370bb3f753e3e88 | [
"BSD-3-Clause"
] | null | null | null | spider/openwrt.py | CNDB/CNDB | 2e3a41111f604cf2f4f22a7c9370bb3f753e3e88 | [
"BSD-3-Clause"
] | null | null | null | spider/openwrt.py | CNDB/CNDB | 2e3a41111f604cf2f4f22a7c9370bb3f753e3e88 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# #*** <License> ************************************************************#
# This module is part of the repository CNDB.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# #*** </License> ***********************************************************#
from _TFL.pyk import pyk
from rsclib.HTML_Parse import tag, Page_Tree
from rsclib.autosuper import autosuper
from spider.common import Interface, Inet4, Inet6, unroutable
from spider.common import WLAN_Config
from spider.luci import Version_Mixin
class Status (Page_Tree, Version_Mixin) :
    """Scrape the LuCI freifunk status page of an OpenWRT node.

    After ``parse`` has run, ``self.wlans`` holds one ``WLAN_Config``
    per row of the wireless table and ``self.routes`` maps an
    interface name to its gateway address.
    """
    url = 'cgi-bin/luci/freifunk/status/status'
    retries = 2
    timeout = 10
    html_charset = 'utf-8' # force utf-8 encoding
    # Map input-field ids (as found in the HTML) to the keyword names
    # expected by WLAN_Config.
    # NOTE(review): '_bsiid' looks like a typo for '_bssid' -- as written
    # this key can never match an input id and the bssid is never
    # renamed; confirm against the actual status-page HTML.
    wl_names = dict \
        ( ssid = 'ssid'
        , _bsiid = 'bssid'
        , channel = 'channel'
        , mode = 'mode'
        )
    def parse (self) :
        """Extract the wireless and route tables from the parsed HTML."""
        root = self.tree.getroot ()
        self.wlans = []
        self.routes = {}
        for div in root.findall (".//%s" % tag ("div")) :
            id = div.get ('id')
            if id == 'cbi-wireless' :
                wlan_div = div
            elif id == 'cbi-routes' :
                route_div = div
            self.try_get_version (div)
        # NOTE(review): if the page lacks a 'cbi-wireless' or 'cbi-routes'
        # div, wlan_div/route_div are unbound and a NameError aborts the
        # parse -- presumably handled by the Page_Tree retry machinery;
        # confirm before relying on it.
        for d in self.tbl_iter (wlan_div) :
            # Rename parsed keys to the names WLAN_Config expects.
            for k, newkey in pyk.iteritems (self.wl_names) :
                if k in d :
                    d [newkey] = d [k]
            wl = WLAN_Config (** d)
            self.wlans.append (wl)
        for d in self.tbl_iter (route_div) :
            iface = d.get ('iface')
            gw = d.get ('gateway')
            if iface and gw :
                self.routes [iface] = gw
        self.set_version (root)
    # end def parse
    def tbl_iter (self, div) :
        """Yield one dict per 'cbi-section-table-row' of the table found
        in *div*, mapping the trailing component of each input id to the
        input's value.

        NOTE(review): ``d`` is shared across iterations (never reset), so
        keys from earlier rows persist into later ones; each yield is
        consumed by the caller before the next row mutates it.
        """
        tbl = div.find (".//%s" % tag ("table"))
        assert tbl.get ('class') == 'cbi-section-table'
        d = {}
        for tr in tbl :
            if 'cbi-section-table-row' not in tr.get ('class').split () :
                continue
            for input in tr.findall (".//%s" % tag ('input')) :
                name = input.get ('id').split ('.') [-1]
                val = input.get ('value')
                d [name] = val
            if not d :
                continue
            yield d
    # end def tbl_iter
# end class Status
class Table_Iter(Page_Tree):
    """Mixin that iterates the rows of the first table inside the
    'maincontent' div of a parsed LuCI page.
    """

    def table_iter(self):
        """Generate one item per non-header table row.

        Each item is a generator over the text contents of the row's
        cells.  Yields nothing when no table is present.
        """
        tree_root = self.tree.getroot()
        for container in tree_root.findall(".//%s" % tag("div")):
            if container.get("id") == "maincontent":
                break
        # NOTE: mirrors the original fall-through -- when no div has
        # id 'maincontent', the last div seen is searched instead.
        table = container.find(".//%s" % tag("table"))
        if table is None:
            return
        header_tag = tag("th")
        for row in table:
            if row[0].tag != header_tag:
                yield (self.tree.get_text(cell) for cell in row)
class OLSR_Connections(Table_Iter):
    """Scrape the OLSR neighbor table of a freifunk/LuCI node."""

    url = 'cgi-bin/luci/freifunk/olsr/'
    retries = 2
    timeout = 10
    html_charset = 'utf-8' # force utf-8 encoding

    def parse(self):
        """Fill ``self.neighbors``: neighbor name -> [ip, lq, nlq, etx],
        with the three link-quality metrics converted to float.
        """
        self.neighbors = {}
        for row in self.table_iter():
            name, addr, lq, nlq, etx = row
            self.neighbors[name] = [addr, float(lq), float(nlq), float(etx)]
class OLSR_Routes(Table_Iter):
    """Scrape the OLSR routing table and record, per gateway, the
    interface used to reach it.
    """

    url = 'cgi-bin/luci/freifunk/olsr/routes'
    retries = 2
    timeout = 10
    html_charset = 'utf-8' # force utf-8 encoding

    def parse(self):
        """Fill ``self.iface_by_gw``: gateway -> interface name.

        A gateway must always be reached via the same interface;
        a contradiction trips the assertion.
        """
        self.iface_by_gw = {}
        for announced, gw, iface, metric, etx in self.table_iter():
            if gw not in self.iface_by_gw:
                self.iface_by_gw[gw] = iface
            else:
                assert iface == self.iface_by_gw[gw]
class OpenWRT (autosuper) :
    """Aggregate Status / OLSR scrape results for one OpenWRT node and
    store the derived interface and ip information into the request.
    """
    def __init__ (self, site, request) :
        """Fill request ['ips'], request ['interfaces'] and
        request ['version'] -- but only when the caller asked for
        'interfaces' or 'ips'.
        """
        self.site = site
        self.request = request
        if 'interfaces' in self.request or 'ips' in self.request :
            st = Status (site = site)
            conn = OLSR_Connections (site = site)
            route = OLSR_Routes (site = site)
            self.version = st.version
            # The status page is expected to list at most one wlan.
            assert len (st.wlans) <= 1
            interfaces = {}
            ips = {}
            count = 0
            for gw, ifname in pyk.iteritems (route.iface_by_gw) :
                ip, lq, nlq, etx = conn.neighbors [gw]
                i4 = Inet4 (ip, None, None, iface = ifname)
                ips [i4] = 1
                is_wlan = True
                # Perfect link quality on all three metrics indicates a
                # wired link.
                if lq == nlq == etx == 1.0 :
                    is_wlan = False
                if ifname in interfaces :
                    iface = interfaces [ifname]
                    # Upgrade an already-seen interface to wlan status.
                    if not iface.is_wlan and is_wlan :
                        iface.is_wlan = True
                        iface.wlan_info = st.wlans [0]
                else :
                    iface = Interface (count, ifname, None)
                    iface.is_wlan = is_wlan
                    if is_wlan :
                        iface.wlan_info = st.wlans [0]
                    count += 1
                    interfaces [ifname] = iface
                if i4 not in iface.inet4 :
                    iface.append_inet4 (i4)
            # At most one wlan interface may exist per node.
            wl_if = None
            for iface in pyk.itervalues (interfaces) :
                if iface.is_wlan :
                    if wl_if :
                        m = "Duplicate wlan: %s/%s" % (iface.name, wl_if.name)
                        raise ValueError (m)
                    wl_if = iface
            # check own ip
            n = 'unknown'
            i4 = Inet4 (self.request ['ip'], None, None, iface = n)
            if i4 not in ips :
                # The node's own address was not seen via OLSR: record it
                # on a synthetic 'unknown' interface.
                assert n not in interfaces
                iface = interfaces [n] = Interface (count, n, None)
                iface.append_inet4 (i4)
                iface.is_wlan = False
                if not wl_if and st.wlans :
                    iface.is_wlan = True
                    iface.wlan_info = st.wlans [0]
                ips [i4] = True
            self.request ['ips'] = ips
            self.request ['interfaces'] = interfaces
            self.request ['version'] = st.version
    # end def __init__
# end class OpenWRT
| 34.449198 | 78 | 0.472369 |
f7001ad17b839c3551d7b4c8edcc8b1d1d322b6f | 6,412 | py | Python | asv/plugins/conda.py | prisae/asv | 57c386d7cc27f91ecd8daf1ad2e0413f2efdd39c | [
"BSD-3-Clause"
] | 2 | 2019-08-18T11:05:25.000Z | 2019-11-17T02:07:18.000Z | asv/plugins/conda.py | prisae/asv | 57c386d7cc27f91ecd8daf1ad2e0413f2efdd39c | [
"BSD-3-Clause"
] | 1 | 2019-02-19T17:11:38.000Z | 2019-02-19T17:11:38.000Z | asv/plugins/conda.py | prisae/asv | 57c386d7cc27f91ecd8daf1ad2e0413f2efdd39c | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals, print_function
import re
import os
import tempfile
import six
from .. import environment
from ..console import log
from .. import util
WIN = (os.name == "nt")
def _find_conda():
    """Find the conda executable robustly across conda versions.

    Returns
    -------
    conda : str
        Path to the conda executable.

    Raises
    ------
    IOError
        If the executable cannot be found in either the CONDA_EXE environment
        variable or in the PATH.

    Notes
    -----
    In POSIX platforms in conda >= 4.4, conda can be set up as a bash function
    rather than an executable. (This is to enable the syntax
    ``conda activate env-name``.) In this case, the environment variable
    ``CONDA_EXE`` contains the path to the conda executable. In other cases,
    we use standard search for the appropriate name in the PATH.

    See https://github.com/airspeed-velocity/asv/issues/645 for more details.
    """
    try:
        return os.environ['CONDA_EXE']
    except KeyError:
        return util.which('conda')
class Conda(environment.Environment):
    """
    Manage an environment using conda.

    Dependencies are installed using ``conda``. The benchmarked
    project is installed using ``pip`` (since ``conda`` doesn't have a
    method to install from an arbitrary ``setup.py``).
    """
    tool_name = "conda"
    # Class-level cache shared by all instances: python version -> bool.
    _matches_cache = {}
    def __init__(self, conf, python, requirements):
        """
        Parameters
        ----------
        conf : Config instance
        python : str
            Version of Python. Must be of the form "MAJOR.MINOR".
        requirements : dict
            Dictionary mapping a PyPI package name to a version
            identifier string.
        """
        self._python = python
        self._requirements = requirements
        self._conda_channels = conf.conda_channels
        super(Conda, self).__init__(conf, python, requirements)
    @classmethod
    def matches(cls, python):
        """Memoized front-end for :meth:`_matches`."""
        # Calling conda can take a long time, so remember the result
        if python not in cls._matches_cache:
            cls._matches_cache[python] = cls._matches(python)
        return cls._matches_cache[python]
    @classmethod
    def _matches(cls, python):
        """Return True when conda can create an env for this *python*."""
        if not re.match(r'^[0-9].*$', python):
            # The python name should be a version number
            return False
        try:
            conda = _find_conda()
        except IOError:
            return False
        else:
            # This directory never gets created, since we're just
            # doing a dry run below. All it needs to be is something
            # that doesn't already exist.
            path = os.path.join(tempfile.gettempdir(), 'check')
            # Check that the version number is valid
            try:
                util.check_call([
                    conda,
                    'create',
                    '--yes',
                    '-p',
                    path,
                    'python={0}'.format(python),
                    '--dry-run'], display_error=False, dots=False)
            except util.ProcessError:
                return False
            else:
                return True
    def _setup(self):
        """Create the conda environment from a generated environment.yml."""
        try:
            conda = _find_conda()
        except IOError as e:
            raise util.UserError(str(e))
        log.info("Creating conda environment for {0}".format(self.name))
        # create a temporary environment.yml file
        # and use that to generate the env for benchmarking
        env_file = tempfile.NamedTemporaryFile(mode='w', delete=False, suffix=".yml")
        try:
            env_file.write('name: {0}\n'
                           'channels:\n'.format(self.name))
            env_file.writelines((' - %s\n' % ch for ch in self._conda_channels))
            env_file.write('dependencies:\n'
                           ' - python={0}\n'
                           ' - wheel\n'
                           ' - pip\n'.format(self._python))
            # categorize & write dependencies based on pip vs. conda
            conda_args, pip_args = self._get_requirements(conda)
            env_file.writelines((' - %s\n' % s for s in conda_args))
            if pip_args:
                # and now specify the packages that are to be installed in
                # the pip subsection
                env_file.write(' - pip:\n')
                env_file.writelines((' - %s\n' % s for s in pip_args))
            env_file.close()
            util.check_output([conda] + ['env', 'create', '-f', env_file.name,
                                         '-p', self._path, '--force'])
        except Exception as exc:
            # On failure, dump the generated yml to the log for debugging,
            # then re-raise the original error.
            if os.path.isfile(env_file.name):
                with open(env_file.name, 'r') as f:
                    text = f.read()
                log.info("conda env create failed: in {} with:\n{}".format(self._path, text))
            raise
        finally:
            os.unlink(env_file.name)
    def _get_requirements(self, conda):
        """Split self._requirements into ``(conda_args, pip_args)``.

        Keys prefixed with 'pip+' are routed to pip; everything else to
        conda.  A non-empty value is treated as a version pin.
        """
        if self._requirements:
            # retrieve and return all conda / pip dependencies
            conda_args = []
            pip_args = []
            for key, val in six.iteritems(self._requirements):
                if key.startswith('pip+'):
                    if val:
                        pip_args.append("{0}=={1}".format(key[4:], val))
                    else:
                        pip_args.append(key[4:])
                else:
                    if val:
                        conda_args.append("{0}={1}".format(key, val))
                    else:
                        conda_args.append(key)
            return conda_args, pip_args
        else:
            return [], []
    def run(self, args, **kwargs):
        """Run the environment's ``python`` with *args*."""
        log.debug("Running '{0}' in {1}".format(' '.join(args), self.name))
        return self.run_executable('python', args, **kwargs)
    def run_executable(self, executable, args, **kwargs):
        """Run *executable* with user site-packages disabled."""
        # Conda doesn't guarantee that user site directories are excluded
        kwargs["env"] = dict(kwargs.pop("env", os.environ),
                             PYTHONNOUSERSITE=str("True"))
        return super(Conda, self).run_executable(executable, args, **kwargs)
| 33.570681 | 93 | 0.547723 |
f700701e51582a6f314450ea9547949094b4db62 | 3,429 | py | Python | fineract/objects/group.py | mobidevke/py-fineract | 712b0c20686accd7d7e0a2356ccaf59c5fe4f7dd | [
"Apache-2.0"
] | 7 | 2019-03-11T16:17:33.000Z | 2020-10-22T21:57:51.000Z | fineract/objects/group.py | mobidevke/py-fineract | 712b0c20686accd7d7e0a2356ccaf59c5fe4f7dd | [
"Apache-2.0"
] | 3 | 2019-11-05T20:22:16.000Z | 2019-12-11T17:09:04.000Z | fineract/objects/group.py | mobidevke/py-fineract | 712b0c20686accd7d7e0a2356ccaf59c5fe4f7dd | [
"Apache-2.0"
] | 2 | 2020-11-19T16:00:36.000Z | 2021-11-19T09:36:13.000Z | from fineract.objects.fineract_object import DataFineractObject
from fineract.objects.types import Type
class Group(DataFineractObject):
    """
    This class represents a Fineract Group.
    """

    def __repr__(self):
        return self.get__repr__({'group_id': self.id})

    def _init_attributes(self):
        # Default every attribute so a partially populated API payload
        # still leaves the object in a consistent state.
        self.id = None
        self.account_no = None
        self.external_id = None
        self.name = None
        self.status = None
        self.active = None
        self.activation_date = None
        self.office_id = None
        self.office_name = None
        self.hierarchy = None

    def _use_attributes(self, attributes):
        # Populate attributes from a /groups API payload.
        self.id = attributes.get('id', None)
        self.account_no = attributes.get('accountNo', None)
        self.external_id = attributes.get('externalId', None)
        self.name = attributes.get('name', None)
        self.status = self._make_fineract_object(GroupStatus, attributes.get('status', None))
        self.active = attributes.get('active', None)
        self.activation_date = self._make_date_object(attributes.get('activationDate', None))
        self.office_id = attributes.get('officeId', None)
        self.office_name = attributes.get('officeName', None)
        self.hierarchy = attributes.get('hierarchy', None)

    def _update_members(self, command, members_list):
        """POST a member association *command* for this group.

        :param command: 'associateClients' or 'disassociateClients'
        :param members_list: list of client ids
        :return: True when the API confirms the change for this group
        """
        params = {
            'clientMembers': members_list
        }
        data = self.request_handler.make_request(
            'POST',
            '/groups/{}?command={}'.format(self.id, command),
            json=params
        )
        return data['groupId'] == self.id

    def add_members(self, members_list):
        """Associate the given client ids with this group.

        :param members_list: list of client ids
        :return: True on success
        """
        return self._update_members('associateClients', members_list)

    def remove_members(self, members_list):
        """Disassociate the given client ids from this group.

        :param members_list: list of client ids
        :return: True on success
        """
        return self._update_members('disassociateClients', members_list)

    @classmethod
    def create(cls, request_handler, name, office_id, active=True, activation_date=None):
        """Create a group and return the freshly fetched object.

        :param request_handler:
        :param name:
        :param office_id:
        :param active:
        :param activation_date: defaults to the current date
        :rtype: :class:`fineract.objects.group.Group`
        """
        data = {
            'name': name,
            'officeId': office_id,
            'active': active,
            'activationDate': activation_date or cls._get_current_date()
        }
        res = request_handler.make_request(
            'POST',
            '/groups',
            json=data
        )
        group_id = res['groupId']
        return cls(request_handler,
                   request_handler.make_request(
                       'GET',
                       '/groups/{}'.format(group_id)
                   ), False)

    @classmethod
    def get_group_by_name(cls, request_handler, name):
        """Get a group by name, or None when no group matches.

        :param request_handler:
        :param name:
        :rtype: :class:`fineract.objects.group.Group` or None
        """
        data = request_handler.make_request(
            'GET',
            '/groups'
        )
        if data:
            for item in data:
                if item['name'] == name:
                    return cls(request_handler, item, False)
        return None
class GroupStatus(Type):
    """
    This class represents a Group status (see Group._use_attributes,
    which builds one from the 'status' field of a /groups payload).
    """
    pass
| 29.307692 | 93 | 0.567221 |
f7007e7d6cadbb4707818ec05e6fcbc50ba52dfb | 2,656 | py | Python | sysinv/sysinv/sysinv/sysinv/common/service.py | starlingx-staging/stx-config | ccbf0392d1941e7cad6673f6351bd905a5a5d419 | [
"Apache-2.0"
] | null | null | null | sysinv/sysinv/sysinv/sysinv/common/service.py | starlingx-staging/stx-config | ccbf0392d1941e7cad6673f6351bd905a5a5d419 | [
"Apache-2.0"
] | null | null | null | sysinv/sysinv/sysinv/sysinv/common/service.py | starlingx-staging/stx-config | ccbf0392d1941e7cad6673f6351bd905a5a5d419 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 eNovance <licensing@enovance.com>
#
# Author: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
from oslo_config import cfg
from sysinv.openstack.common import context
from sysinv.openstack.common import log
from sysinv.openstack.common import periodic_task
from sysinv.openstack.common import rpc
from sysinv.openstack.common.rpc import service as rpc_service
from oslo_service import service
# Service-level configuration options: how often periodic tasks run, and
# the identifier of this node within the cluster.
cfg.CONF.register_opts([
    cfg.IntOpt('periodic_interval',
               default=60,
               help='seconds between running periodic tasks'),
    cfg.StrOpt('host',
               default=socket.getfqdn(),
               help='Name of this node. This can be an opaque identifier. '
                    'It is not necessarily a hostname, FQDN, or IP address. '
                    'However, the node name must be valid within '
                    'an AMQP key, and if using ZeroMQ, a valid '
                    'hostname, FQDN, or IP address'),
])

# Module-level alias used by the helpers below.
CONF = cfg.CONF
class PeriodicService(rpc_service.Service, periodic_task.PeriodicTasks):
    """RPC service that additionally schedules the manager's periodic tasks."""

    def start(self):
        """Start the RPC service, then arm the periodic-task timer."""
        super(PeriodicService, self).start()
        ctxt = context.RequestContext('admin', 'admin', is_admin=True)
        interval = cfg.CONF.periodic_interval
        self.tg.add_timer(interval,
                          self.manager.periodic_tasks,
                          context=ctxt)
def prepare_service(argv=None):
    """Configure RPC, logging defaults and oslo.config for sysinv.

    :param argv: full command line; argv[0] is skipped when parsing.
                 Defaults to an empty list.
    """
    argv = [] if argv is None else argv
    rpc.set_defaults(control_exchange='sysinv')
    default_levels = [
        'amqplib=WARN',
        'qpid.messaging=INFO',
        'sqlalchemy=WARN',
        'keystoneclient=INFO',
        'stevedore=INFO',
        'eventlet.wsgi.server=WARN',
    ]
    cfg.set_defaults(log.log_opts, default_log_levels=default_levels)
    cfg.CONF(argv[1:], project='sysinv')
    log.setup('sysinv')
def process_launcher():
    """Return a new oslo.service ProcessLauncher bound to the global CONF."""
    return service.ProcessLauncher(CONF)
| 34.947368 | 79 | 0.622364 |
f7008bf87b5a1c0780d7272b314fa3142ffb3ef3 | 18,984 | py | Python | netbox_agent/server.py | freberkivra/netbox-agent | 5f0aae6c011cd43f7d9e6d322f90a6b0f5195c61 | [
"Apache-2.0"
] | 24 | 2019-08-05T15:14:20.000Z | 2020-02-02T11:05:45.000Z | netbox_agent/server.py | freberkivra/netbox-agent | 5f0aae6c011cd43f7d9e6d322f90a6b0f5195c61 | [
"Apache-2.0"
] | 39 | 2019-08-04T18:12:07.000Z | 2020-01-30T21:42:38.000Z | netbox_agent/server.py | freberkivra/netbox-agent | 5f0aae6c011cd43f7d9e6d322f90a6b0f5195c61 | [
"Apache-2.0"
] | 8 | 2019-09-03T20:51:22.000Z | 2020-01-15T06:00:23.000Z | import netbox_agent.dmidecode as dmidecode
from netbox_agent.config import config
from netbox_agent.config import netbox_instance as nb
from netbox_agent.inventory import Inventory
from netbox_agent.location import Datacenter, Rack, Tenant
from netbox_agent.misc import create_netbox_tags, get_device_role, get_device_type, get_device_platform
from netbox_agent.network import ServerNetwork
from netbox_agent.power import PowerSupply
from pprint import pprint
import subprocess
import logging
import socket
import sys
class ServerBase():
def __init__(self, dmi=None):
if dmi:
self.dmi = dmi
else:
self.dmi = dmidecode.parse()
self.baseboard = dmidecode.get_by_type(self.dmi, 'Baseboard')
self.bios = dmidecode.get_by_type(self.dmi, 'BIOS')
self.chassis = dmidecode.get_by_type(self.dmi, 'Chassis')
self.system = dmidecode.get_by_type(self.dmi, 'System')
self.device_platform = get_device_platform(config.device.platform)
self.network = None
self.tags = list(set([
x.strip() for x in config.device.tags.split(',') if x.strip()
])) if config.device.tags else []
self.nb_tags = list(create_netbox_tags(self.tags))
config_cf = set([
f.strip() for f in config.device.custom_fields.split(",")
if f.strip()
])
self.custom_fields = {}
self.custom_fields.update(dict([
(k.strip(), v.strip()) for k, v in
[f.split("=", 1) for f in config_cf]
]))
def get_tenant(self):
tenant = Tenant()
return tenant.get()
def get_netbox_tenant(self):
tenant = self.get_tenant()
if tenant is None:
return None
nb_tenant = nb.tenancy.tenants.get(
slug=self.get_tenant()
)
return nb_tenant
def get_datacenter(self):
dc = Datacenter()
return dc.get()
def get_netbox_datacenter(self):
dc = self.get_datacenter()
if dc is None:
logging.error("Specificing a datacenter (Site) is mandatory in Netbox")
sys.exit(1)
nb_dc = nb.dcim.sites.get(
slug=dc,
)
if nb_dc is None:
logging.error("Site (slug: {}) has not been found".format(dc))
sys.exit(1)
return nb_dc
def update_netbox_location(self, server):
dc = self.get_datacenter()
nb_rack = self.get_netbox_rack()
nb_dc = self.get_netbox_datacenter()
update = False
if dc and server.site and server.site.slug != nb_dc.slug:
logging.info('Datacenter location has changed from {} to {}, updating'.format(
server.site.slug,
nb_dc.slug,
))
update = True
server.site = nb_dc.id
if (
server.rack
and nb_rack
and server.rack.id != nb_rack.id
):
logging.info('Rack location has changed from {} to {}, updating'.format(
server.rack,
nb_rack,
))
update = True
server.rack = nb_rack
if nb_rack is None:
server.face = None
server.position = None
return update, server
def update_netbox_expansion_location(self, server, expansion):
update = False
if expansion.tenant != server.tenant:
expansion.tenant = server.tenant
update = True
if expansion.site != server.site:
expansion.site = server.site
update = True
if expansion.rack != server.rack:
expansion.rack = server.rack
update = True
return update
def get_rack(self):
rack = Rack()
return rack.get()
def get_netbox_rack(self):
rack = self.get_rack()
datacenter = self.get_netbox_datacenter()
if not rack:
return None
if rack and not datacenter:
logging.error("Can't get rack if no datacenter is configured or found")
sys.exit(1)
return nb.dcim.racks.get(
name=rack,
site_id=datacenter.id,
)
def get_product_name(self):
"""
Return the Chassis Name from dmidecode info
"""
return self.system[0]['Product Name'].strip()
def get_service_tag(self):
"""
Return the Service Tag from dmidecode info
"""
return self.system[0]['Serial Number'].strip()
def get_expansion_service_tag(self):
"""
Return the virtual Service Tag from dmidecode info host
with 'expansion'
"""
return self.system[0]['Serial Number'].strip() + " expansion"
def get_hostname(self):
if config.hostname_cmd is None:
return '{}'.format(socket.gethostname())
return subprocess.getoutput(config.hostname_cmd)
def is_blade(self):
raise NotImplementedError
def get_blade_slot(self):
raise NotImplementedError
def get_chassis(self):
raise NotImplementedError
def get_chassis_name(self):
raise NotImplementedError
def get_chassis_service_tag(self):
raise NotImplementedError
def get_bios_version(self):
raise NotImplementedError
def get_bios_version_attr(self):
raise NotImplementedError
def get_bios_release_date(self):
raise NotImplementedError
def get_power_consumption(self):
raise NotImplementedError
def get_expansion_product(self):
raise NotImplementedError
def _netbox_create_chassis(self, datacenter, tenant, rack):
device_type = get_device_type(self.get_chassis())
device_role = get_device_role(config.device.chassis_role)
serial = self.get_chassis_service_tag()
logging.info('Creating chassis blade (serial: {serial})'.format(
serial=serial))
new_chassis = nb.dcim.devices.create(
name=self.get_chassis_name(),
device_type=device_type.id,
serial=serial,
device_role=device_role.id,
site=datacenter.id if datacenter else None,
tenant=tenant.id if tenant else None,
rack=rack.id if rack else None,
tags=[{'name': x} for x in self.tags],
custom_fields=self.custom_fields,
)
return new_chassis
def _netbox_create_blade(self, chassis, datacenter, tenant, rack):
device_role = get_device_role(config.device.blade_role)
device_type = get_device_type(self.get_product_name())
serial = self.get_service_tag()
hostname = self.get_hostname()
logging.info(
'Creating blade (serial: {serial}) {hostname} on chassis {chassis_serial}'.format(
serial=serial, hostname=hostname, chassis_serial=chassis.serial
))
new_blade = nb.dcim.devices.create(
name=hostname,
serial=serial,
device_role=device_role.id,
device_type=device_type.id,
parent_device=chassis.id,
site=datacenter.id if datacenter else None,
tenant=tenant.id if tenant else None,
rack=rack.id if rack else None,
tags=[{'name': x} for x in self.tags],
custom_fields=self.custom_fields,
)
return new_blade
def _netbox_create_blade_expansion(self, chassis, datacenter, tenant, rack):
device_role = get_device_role(config.device.blade_role)
device_type = get_device_type(self.get_expansion_product())
serial = self.get_expansion_service_tag()
hostname = self.get_hostname() + " expansion"
logging.info(
'Creating expansion (serial: {serial}) {hostname} on chassis {chassis_serial}'.format(
serial=serial, hostname=hostname, chassis_serial=chassis.serial
))
new_blade = nb.dcim.devices.create(
name=hostname,
serial=serial,
device_role=device_role.id,
device_type=device_type.id,
parent_device=chassis.id,
site=datacenter.id if datacenter else None,
tenant=tenant.id if tenant else None,
rack=rack.id if rack else None,
tags=[{'name': x} for x in self.tags],
)
return new_blade
def _netbox_deduplicate_server(self):
serial = self.get_service_tag()
hostname = self.get_hostname()
server = nb.dcim.devices.get(name=hostname)
if server and server.serial != serial:
server.delete()
def _netbox_create_server(self, datacenter, tenant, rack):
device_role = get_device_role(config.device.server_role)
device_type = get_device_type(self.get_product_name())
if not device_type:
raise Exception('Chassis "{}" doesn\'t exist'.format(self.get_chassis()))
serial = self.get_service_tag()
hostname = self.get_hostname()
logging.info('Creating server (serial: {serial}) {hostname}'.format(
serial=serial, hostname=hostname))
new_server = nb.dcim.devices.create(
name=hostname,
serial=serial,
device_role=device_role.id,
device_type=device_type.id,
platform=self.device_platform,
site=datacenter.id if datacenter else None,
tenant=tenant.id if tenant else None,
rack=rack.id if rack else None,
tags=[{'name': x} for x in self.tags],
)
return new_server
def get_netbox_server(self, expansion=False):
if expansion is False:
return nb.dcim.devices.get(serial=self.get_service_tag())
else:
return nb.dcim.devices.get(serial=self.get_expansion_service_tag())
def _netbox_set_or_update_blade_slot(self, server, chassis, datacenter):
# before everything check if right chassis
actual_device_bay = server.parent_device.device_bay \
if server.parent_device else None
actual_chassis = actual_device_bay.device \
if actual_device_bay else None
slot = self.get_blade_slot()
if actual_chassis and \
actual_chassis.serial == chassis.serial and \
actual_device_bay.name == slot:
return
real_device_bays = nb.dcim.device_bays.filter(
device_id=chassis.id,
name=slot,
)
real_device_bays = nb.dcim.device_bays.filter(
device_id=chassis.id,
name=slot,
)
if real_device_bays:
logging.info(
'Setting device ({serial}) new slot on {slot} '
'(Chassis {chassis_serial})..'.format(
serial=server.serial, slot=slot, chassis_serial=chassis.serial
))
# reset actual device bay if set
if actual_device_bay:
# Forces the evaluation of the installed_device attribute to
# workaround a bug probably due to lazy loading optimization
# that prevents the value change detection
actual_device_bay.installed_device
actual_device_bay.installed_device = None
actual_device_bay.save()
# setup new device bay
real_device_bay = next(real_device_bays)
real_device_bay.installed_device = server
real_device_bay.save()
else:
logging.error('Could not find slot {slot} for chassis'.format(
slot=slot
))
def _netbox_set_or_update_blade_expansion_slot(self, expansion, chassis, datacenter):
# before everything check if right chassis
actual_device_bay = expansion.parent_device.device_bay if expansion.parent_device else None
actual_chassis = actual_device_bay.device if actual_device_bay else None
slot = self.get_blade_expansion_slot()
if actual_chassis and \
actual_chassis.serial == chassis.serial and \
actual_device_bay.name == slot:
return
real_device_bays = nb.dcim.device_bays.filter(
device_id=chassis.id,
name=slot,
)
if not real_device_bays:
logging.error('Could not find slot {slot} expansion for chassis'.format(
slot=slot
))
return
logging.info(
'Setting device expansion ({serial}) new slot on {slot} '
'(Chassis {chassis_serial})..'.format(
serial=expansion.serial, slot=slot, chassis_serial=chassis.serial
))
# reset actual device bay if set
if actual_device_bay:
# Forces the evaluation of the installed_device attribute to
# workaround a bug probably due to lazy loading optimization
# that prevents the value change detection
actual_device_bay.installed_device
actual_device_bay.installed_device = None
actual_device_bay.save()
# setup new device bay
real_device_bay = next(real_device_bays)
real_device_bay.installed_device = expansion
real_device_bay.save()
def netbox_create_or_update(self, config):
"""
Netbox method to create or update info about our server/blade
Handle:
* new chassis for a blade
* new slot for a blade
* hostname update
* Network infos
* Inventory management
* PSU management
"""
datacenter = self.get_netbox_datacenter()
rack = self.get_netbox_rack()
tenant = self.get_netbox_tenant()
if config.purge_old_devices:
self._netbox_deduplicate_server()
if self.is_blade():
chassis = nb.dcim.devices.get(
serial=self.get_chassis_service_tag()
)
# Chassis does not exist
if not chassis:
chassis = self._netbox_create_chassis(datacenter, tenant, rack)
server = nb.dcim.devices.get(serial=self.get_service_tag())
if not server:
server = self._netbox_create_blade(chassis, datacenter, tenant, rack)
# Set slot for blade
self._netbox_set_or_update_blade_slot(server, chassis, datacenter)
else:
server = nb.dcim.devices.get(serial=self.get_service_tag())
if not server:
server = self._netbox_create_server(datacenter, tenant, rack)
logging.debug('Updating Server...')
# check network cards
if config.register or config.update_all or config.update_network:
self.network = ServerNetwork(server=self)
self.network.create_or_update_netbox_network_cards()
update_inventory = config.inventory and (config.register or
config.update_all or config.update_inventory)
# update inventory if feature is enabled
self.inventory = Inventory(server=self)
if update_inventory:
self.inventory.create_or_update()
# update psu
if config.register or config.update_all or config.update_psu:
self.power = PowerSupply(server=self)
self.power.create_or_update_power_supply()
self.power.report_power_consumption()
expansion = nb.dcim.devices.get(serial=self.get_expansion_service_tag())
if self.own_expansion_slot() and config.expansion_as_device:
logging.debug('Update Server expansion...')
if not expansion:
expansion = self._netbox_create_blade_expansion(chassis, datacenter, tenant, rack)
# set slot for blade expansion
self._netbox_set_or_update_blade_expansion_slot(expansion, chassis, datacenter)
if update_inventory:
# Updates expansion inventory
inventory = Inventory(server=self, update_expansion=True)
inventory.create_or_update()
elif self.own_expansion_slot() and expansion:
expansion.delete()
expansion = None
update = 0
# for every other specs
# check hostname
if server.name != self.get_hostname():
server.name = self.get_hostname()
update += 1
server_tags = sorted(set([x.name for x in server.tags]))
tags = sorted(set(self.tags))
if server_tags != tags:
new_tags_ids = [x.id for x in self.nb_tags]
if not config.preserve_tags:
server.tags = new_tags_ids
else:
server_tags_ids = [x.id for x in server.tags]
server.tags = sorted(set(new_tags_ids + server_tags_ids))
update += 1
if server.custom_fields != self.custom_fields:
server.custom_fields = self.custom_fields
update += 1
if config.update_all or config.update_location:
ret, server = self.update_netbox_location(server)
update += ret
if server.platform != self.device_platform:
server.platform = self.device_platform
update += 1
if update:
server.save()
if expansion:
update = 0
expansion_name = server.name + ' expansion'
if expansion.name != expansion_name:
expansion.name = expansion_name
update += 1
if self.update_netbox_expansion_location(server, expansion):
update += 1
if update:
expansion.save()
logging.debug('Finished updating Server!')
def print_debug(self):
    """Dump the discovered server facts to stdout for troubleshooting.

    Instantiates the network helper (stored on ``self.network``) as a side
    effect so the NIC listing at the end can be printed.
    """
    self.network = ServerNetwork(server=self)
    print('Datacenter:', self.get_datacenter())
    print('Netbox Datacenter:', self.get_netbox_datacenter())
    print('Rack:', self.get_rack())
    print('Netbox Rack:', self.get_netbox_rack())
    print('Is blade:', self.is_blade())
    print('Got expansion:', self.own_expansion_slot())
    print('Product Name:', self.get_product_name())
    print('Platform:', self.device_platform)
    print('Chassis:', self.get_chassis())
    print('Chassis service tag:', self.get_chassis_service_tag())
    print('Service tag:', self.get_service_tag())
    print('NIC:',)
    pprint(self.network.get_network_cards())
    # The dead trailing "pass" statement was removed; it was unreachable
    # clutter after the final pprint call.
def own_expansion_slot(self):
    """Indicates if the device hosts an expansion card.

    Base implementation: a generic server exposes no expansion slot;
    hardware-specific subclasses override this hook when they do.
    """
    return False
def own_gpu_expansion_slot(self):
    """Indicates if the device hosts a GPU expansion card.

    Default hook returning False; overridden by models that carry one.
    """
    return False
def own_drive_expansion_slot(self):
    """Indicates if the device hosts a drive expansion bay.

    Default hook returning False; overridden by models that carry one.
    """
    return False
| 36.43762 | 103 | 0.608038 |
f700bd3e668d5f4fe3f075fecf18bb44137fc1c9 | 11,470 | py | Python | tools/azure-sdk-tools/packaging_tools/swaggertosdk/SwaggerToSdkCore.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | tools/azure-sdk-tools/packaging_tools/swaggertosdk/SwaggerToSdkCore.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | tools/azure-sdk-tools/packaging_tools/swaggertosdk/SwaggerToSdkCore.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | """SwaggerToSdk core tools.
"""
from enum import Enum, unique
import json
import logging
import os
import re
import tempfile
from pathlib import Path
import requests
from github import Github, UnknownObjectException
from .autorest_tools import (
autorest_latest_version_finder,
autorest_bootstrap_version_finder,
autorest_swagger_to_sdk_conf,
)
from azure_devtools.ci_tools.github_tools import get_files, GithubLink
# Module-level logger for the SwaggerToSdk core helpers.
_LOGGER = logging.getLogger(__name__)

# File names (relative to the SDK repo root) of the SwaggerToSdk
# configuration for the Autorest and the DPG generation flows.
CONFIG_FILE = "swagger_to_sdk_config_autorest.json"
CONFIG_FILE_DPG = "swagger_to_sdk_config_dpg.json"

# Default commit message template; "{hexsha}" is a placeholder for a commit SHA.
DEFAULT_COMMIT_MESSAGE = "Generated from {hexsha}"
def build_file_content():
    """Return a metadata dict describing the Autorest toolchain versions."""
    latest_version = autorest_latest_version_finder()
    bootstrap_version = autorest_bootstrap_version_finder()
    return {
        "autorest": latest_version,
        "autorest_bootstrap": bootstrap_version,
    }
def get_repo_tag_meta(meta_conf):
    """Return the SDK repository tag for a SwaggerToSdk project conf.

    The explicit "repotag" key wins; otherwise the repo is inferred from
    the Autorest language flag present in "autorest_options".

    :param dict meta_conf: a project configuration dict.
    :rtype: str
    :raises ValueError: if no repotag is given and none can be inferred.
    """
    repotag = meta_conf.get("repotag")
    if repotag:
        return repotag
    # Guess for now, "repotag" should be added everywhere.
    # Checked in order; the first language flag found wins (matches the
    # behavior of the previous if-chain).
    language_to_repotag = (
        ("go", "azure-sdk-for-go"),
        ("ruby", "azure-sdk-for-ruby"),
        ("java", "azure-sdk-for-java"),
        ("nodejs", "azure-sdk-for-node"),
        ("typescript", "azure-sdk-for-js"),
    )
    autorest_options = meta_conf["autorest_options"]
    for language, repo in language_to_repotag:
        if language in autorest_options:
            return repo
    # Typo fix: "infered" -> "inferred".
    raise ValueError("No repotag found or inferred")
@unique
class Language(str, Enum):
    """Languages SwaggerToSdk can generate for.

    Member values match the Autorest command-line language flags
    (e.g. ``--python``). The ``str`` mixin lets members compare and hash
    like their flag string, so they can be tested directly against the
    keys of ``autorest_options``.
    """
    GOLANG = "go"
    RUBY = "ruby"
    JAVA = "java"
    NODEJS = "nodejs"
    CSHARP = "csharp"
    PYTHON = "python"
    TYPESCRIPT = "typescript"
def get_language_from_conf(meta_conf):
    """Detect the language based on the default Autorest options.

    Assuming all language use --mylanguage in the config file.
    If I don't find anything, well just say I don't know...

    This is based on autorest language flags.
    :rtype: Language
    """
    option_keys = set(meta_conf["autorest_options"].keys())
    detected = {candidate for candidate in Language if candidate in option_keys}
    if not detected:
        _LOGGER.warning("No detected language from this conf")
        return None  # I don't what this conf is about?
    language = detected.pop()
    if detected:
        # More than one language flag found: ambiguous conf.
        _LOGGER.warning("This SwaggerToSdk conf seems to generate too much language in one call, assume we don't know")
        return None
    return language
def get_context_tag_from_git_object(git_object):
    """Return the context tags touched by a PR/commit object."""
    touched_files = [changed.filename for changed in get_files(git_object)]
    return get_context_tag_from_file_list(touched_files)
def get_context_tag_from_file_list(files_list):
    """Extract the set of context tags (service folders) from file paths."""
    # Tried in order for each file; first pattern that matches wins.
    patterns = (
        # RP-style path: specification/<context>/Microsoft.Xxx/(stable|preview)/
        r"specification/(.*)/Microsoft.\w*/(stable|preview)/",
        # stable/preview without an RP segment (i.e. Cognitive Services)
        r"specification/(.*)/(stable|preview)/",
        # Readme; matched last because of unusual layouts (e.g. ServiceFabric)
        r"specification/(.*)/readme.\w*.?md",
    )
    context_tags = set()
    for raw_name in files_list:
        posix_name = Path(raw_name).as_posix()
        if "/examples/" in posix_name:
            # Examples are not used in SDK generation: no context for them.
            continue
        for pattern in patterns:
            match = re.match(pattern, posix_name, re.I)
            if match:
                context_tags.add(match.groups()[0])
                break
    return context_tags
def this_conf_will_generate_for_this_pr(git_object, config):
    """Try to guess if this PR has a chance to generate something for this conf.

    Right now, just match the language in the conf with the presence
    of ONLY "readme.language.md" files.
    """
    lang = get_language_from_conf(config)
    filenames = [file.filename.lower() for file in get_files(git_object)]
    lang_readmes = [name for name in filenames if re.match(r"(.*)readme.\w+.md", name)]
    if len(lang_readmes) != len(filenames):
        # At least one touched file is not a language-specific readme.
        return True
    suffix = "readme.{}.md".format(lang)
    return any(name.endswith(suffix) for name in lang_readmes)
def get_readme_files_from_git_object(git_object, base_dir=Path(".")):
    """Return the readme files implied by the changes in a PR/commit."""
    touched_files = [changed.filename for changed in get_files(git_object)]
    return get_readme_files_from_file_list(touched_files, base_dir)
def get_readme_files_from_file_list(files_list, base_dir=Path(".")):
    """Get readme files from this PR.

    Algo is to look for context, and then search for Readme inside this context.
    """
    base_path = Path(base_dir)
    readme_files = set()
    for context_tag in get_context_tag_from_file_list(files_list):
        folder = base_path / Path("specification/{}".format(context_tag))
        if not folder.is_dir():
            _LOGGER.warning("From context {} I didn't find folder {}".format(context_tag, folder))
            continue
        for candidate in folder.iterdir():
            # Case-insensitive readme match (readme.md, Readme.python.md, ...)
            if candidate.is_file() and re.match(r"readme.\w*.?md", candidate.name, re.I):
                readme_files.add(candidate.relative_to(base_path))
    return readme_files
def read_config(sdk_git_folder, config_file):
    """Read the configuration file and return JSON"""
    config_path = os.path.join(sdk_git_folder, config_file)
    with open(config_path, "r") as config_fd:
        return json.load(config_fd)
def read_config_from_github(sdk_id, branch="main", gh_token=None):
    """Download and parse the SwaggerToSdk conf of an SDK repo from GitHub raw."""
    raw_link = str(get_configuration_github_path(sdk_id, branch))
    _LOGGER.debug("Will try to download: %s", raw_link)
    _LOGGER.debug("Token is defined: %s", gh_token is not None)
    headers = {}
    if gh_token:
        headers["Authorization"] = "token {}".format(gh_token)
    response = requests.get(raw_link, headers=headers)
    if response.status_code == 200:
        return json.loads(response.text)
    raise ValueError(
        "Unable to download conf file for SDK {} branch {}: status code {}".format(
            sdk_id, branch, response.status_code
        )
    )
def extract_conf_from_readmes(swagger_files_in_pr, restapi_git_folder, sdk_git_id, config, force_generation=False):
    """Build a SwaggerToSdk conf entry for every readme.md touched by the PR."""
    def _is_readme(candidate):
        # PR entries may be plain strings or objects exposing ".name".
        return getattr(candidate, "name", candidate).lower().endswith("readme.md")

    for readme_file in {entry for entry in swagger_files_in_pr if _is_readme(entry)}:
        build_swaggertosdk_conf_from_json_readme(
            readme_file,
            sdk_git_id,
            config,
            base_folder=restapi_git_folder,
            force_generation=force_generation,
        )
def get_readme_path(readme_file, base_folder="."):
    """Get a readable Readme path.

    If start with http, assume online, ignore base_folder and convert to raw link if necessary.
    If base_folder is not None, assume relative to base_folder.
    """
    is_url = not isinstance(readme_file, Path) and readme_file.startswith("http")
    if is_url:
        return GithubLink.from_string(readme_file).as_raw_link()
    root = "." if base_folder is None else base_folder
    return str(Path(root) / Path(readme_file))
def build_swaggertosdk_conf_from_json_readme(readme_file, sdk_git_id, config, base_folder=".", force_generation=False):
    """Get the JSON conf of this README, and create SwaggerToSdk conf.

    Readme path can be any readme syntax accepted by autorest.
    readme_file will be project key as-is.

    :param str readme_file: A path that Autorest accepts. Raw GH link or absolute path.
    :param str sdk_git_id: Repo ID. If org/login is provided, will be stripped.
    :param dict config: Config where to update the "projects" key.
    :param bool force_generation: If no Swagger to SDK section is found, force once with the Readme as input
    """
    readme_full_path = get_readme_path(readme_file, base_folder)
    # Run Autorest against a scratch dir just to extract the
    # swagger-to-sdk sections of the readme.
    with tempfile.TemporaryDirectory() as temp_dir:
        readme_as_conf = autorest_swagger_to_sdk_conf(readme_full_path, temp_dir, config)
    generated_config = {
        "markdown": readme_full_path,
    }
    sdk_git_short_id = sdk_git_id.split("/")[-1].lower()
    _LOGGER.info("Looking for tag {} in readme {}".format(sdk_git_short_id, readme_file))
    for swagger_to_sdk_conf in readme_as_conf:
        if not isinstance(swagger_to_sdk_conf, dict):
            continue
        repo = swagger_to_sdk_conf.get("repo", "")
        repo = repo.split("/")[-1].lower()  # Be sure there is no org/login part
        if repo == sdk_git_short_id:
            _LOGGER.info("This Readme contains a swagger-to-sdk section for repo {}".format(repo))
            generated_config.update(
                {
                    "autorest_options": swagger_to_sdk_conf.get("autorest_options", {}),
                    "after_scripts": swagger_to_sdk_conf.get("after_scripts", []),
                }
            )
            # First matching section wins; register the project and stop.
            config.setdefault("projects", {})[str(readme_file)] = generated_config
            return generated_config
        else:
            _LOGGER.info("Skip mismatch {} from {}".format(repo, sdk_git_short_id))
    # No section matched this SDK repo.
    if not force_generation:
        _LOGGER.info(
            "Didn't find tag {} in readme {}. Did you forget to update the SwaggerToSdk section?".format(
                sdk_git_short_id, readme_file
            )
        )
    else:
        # Forced mode: register the readme with only the markdown key.
        _LOGGER.info("Didn't find tag {} in readme {}. Forcing it.".format(sdk_git_short_id, readme_file))
        config.setdefault("projects", {})[str(readme_file)] = generated_config
def get_input_paths(global_conf, local_conf):
    """Returns a 2-tuple:
    - Markdown Path or None
    - Input-file Paths or empty list
    """
    del global_conf  # Unused
    markdown_path = Path(local_conf["markdown"]) if "markdown" in local_conf else None
    raw_inputs = local_conf.get("autorest_options", {}).get("input-file", [])
    # "input-file" may be a single string; normalize to a list first.
    if raw_inputs and not isinstance(raw_inputs, list):
        raw_inputs = [raw_inputs]
    input_paths = [Path(entry) for entry in raw_inputs]
    if not markdown_path and not input_paths:
        raise ValueError("No input file found")
    return (markdown_path, input_paths)
def solve_relative_path(autorest_options, sdk_root):
    """Solve relative path in conf.

    If a key is prefixed by "sdkrel:", it's solved against SDK root.
    """
    prefix = "sdkrel:"
    resolved_options = {}
    for key, value in autorest_options.items():
        if not key.startswith(prefix):
            resolved_options[key] = value
            continue
        _LOGGER.debug("Found a sdkrel pair: %s/%s", key, value)
        stripped_key = key[len(prefix):]
        resolved_options[stripped_key] = str(Path(sdk_root, value).resolve())
    return resolved_options
def get_configuration_github_path(sdk_id, branch="master"):
    """Return the GithubLink pointing at CONFIG_FILE in *sdk_id* on *branch*."""
    return GithubLink(sdk_id, "raw", branch, CONFIG_FILE)
| 38.233333 | 119 | 0.682476 |
f700c767ff92c13aef1a23a878df02eea4e86053 | 3,656 | py | Python | src/Application/PythonScriptModule/pymodules_old/circuits/core/values.py | antont/tundra | 5c9b0a3957071f08ab425dff701cdbb34f9e1868 | [
"Apache-2.0"
] | 1 | 2018-04-02T15:38:10.000Z | 2018-04-02T15:38:10.000Z | src/Application/PythonScriptModule/pymodules_old/circuits/core/values.py | antont/tundra | 5c9b0a3957071f08ab425dff701cdbb34f9e1868 | [
"Apache-2.0"
] | null | null | null | src/Application/PythonScriptModule/pymodules_old/circuits/core/values.py | antont/tundra | 5c9b0a3957071f08ab425dff701cdbb34f9e1868 | [
"Apache-2.0"
] | 1 | 2021-09-04T12:37:34.000Z | 2021-09-04T12:37:34.000Z | # Package: values
# Date: 11th April 2010
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Values
This defines the Value object used by components and events.
"""
from types import ListType
from itertools import imap
from events import Event
class ValueChanged(Event):
    """Value Changed Event

    This Event is triggered when the return Value of an Event Handler has
    changed its value.
    """

    def __init__(self, value):
        "x.__init__(...) initializes x; see x.__class__.__doc__ for signature"
        # The changed Value instance is passed as the single event argument.
        super(ValueChanged, self).__init__(value)
class Value(object):
    """Create a new future Value Object

    Creates a new future Value Object which is used by Event Objects and the
    Manager to store the result(s) of an Event Handler's execution of some
    Event in the system.

    :param event: The Event this Value is associated with.
    :type event: Event instance

    :param manager: The Manager/Component used to trigger notifications.
    :type manager: A Manager/Component instance.

    :param onSet: The channel used when triggering ValueChanged events.
    :type onSet: A (channel, target) tuple.

    :ivar result: True if this value has been changed.
    :ivar errors: True if while setting this value an exception occurred.

    This is a Future/Promise implementation.
    """

    def __init__(self, event=None, manager=None, onSet=None):
        "x.__init__(...) initializes x; see x.__class__.__doc__ for signature"
        self.event = event
        self.manager = manager
        self.onSet = onSet
        self.result = False
        self.errors = False
        # Values may be chained (a handler can return another Value); the
        # parent link lets notifications propagate up the chain.
        self._parent = self
        self._value = None

    def __getstate__(self):
        # Only pickle the picklable subset; manager and parent links are
        # deliberately excluded.
        keys = ("event", "onSet", "result", "errors", "_value")
        return dict([(k, getattr(self, k, None)) for k in keys])

    def __contains__(self, y):
        # Membership test on a list result, equality otherwise.
        value = self.value
        return y in value if type(value) is ListType else y == value

    def __getitem__(self, y):
        # Nested Values are transparently dereferenced.
        v = self.value[y]
        if isinstance(v, Value):
            return v.value
        else:
            return v

    def __iter__(self):
        # Python 2 imap: lazily dereference nested Values while iterating.
        return imap(lambda v: v.value if isinstance(v, Value) else v,
                self.value)

    def __repr__(self):
        "x.__repr__() <==> repr(x)"
        value = ""
        if self.result:
            value = repr(self.value)
        # NOTE(review): the format string has no closing ">" -- appears to be
        # a long-standing cosmetic oversight; kept as-is.
        format = "<Value (%s) result: %r errors: %r for %r"
        return format % (value, self.result, self.errors, self.event)

    def __str__(self):
        "x.__str__() <==> str(x)"
        return str(self.value)

    def getValue(self):
        # Follow the chain of nested Values down to the concrete result.
        value = self._value
        while isinstance(value, Value):
            value = value._value
        return value

    def setValue(self, value):
        if isinstance(value, Value):
            value._parent = self
        # Repeated assignments accumulate results into a list.
        if self.result and type(self._value) is ListType:
            self._value.append(value)
        elif self.result:
            self._value = [self._value]
            self._value.append(value)
        else:
            self._value = value

        def notify(o, v):
            # Mark the Value as set and fire ValueChanged on the configured
            # channel; then propagate the state up through parent Values.
            if not isinstance(v, Value) and v is not None:
                o.result = True
                if o.manager is not None and o.onSet is not None:
                    o.manager.fireEvent(ValueChanged(o), *o.onSet)
            elif isinstance(v, Value):
                o.errors = v.errors
                o.result = v.result
            if not o._parent == o:
                notify(o._parent, v)

        notify(self, value)

    value = property(getValue, setValue, None, "Value of this Value")
f700e68836d56c80b1eb23849bcf903eda4dfa6c | 5,105 | py | Python | nova/virt/hyperv/imagecache.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | null | null | null | nova/virt/hyperv/imagecache.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | null | null | null | nova/virt/hyperv/imagecache.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Image caching and management.
"""
import os
from os_win import utilsfactory
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
import nova.conf
from nova import exception
from nova import utils
from nova.virt.hyperv import pathutils
from nova.virt import images
# Module-level logger and the global nova configuration object.
LOG = logging.getLogger(__name__)
CONF = nova.conf.CONF
class ImageCache(object):
    """Fetch, cache and resize instance root images (VHD/VHDX) on the host."""

    def __init__(self):
        self._pathutils = pathutils.PathUtils()
        self._vhdutils = utilsfactory.get_vhdutils()

    def _get_root_vhd_size_gb(self, instance):
        """Return the root disk size in GB.

        During a resize the old flavor is authoritative; otherwise use the
        instance's current root_gb.
        """
        if instance.old_flavor:
            return instance.old_flavor.root_gb
        else:
            return instance.root_gb

    def _resize_and_cache_vhd(self, instance, vhd_path):
        """Grow the cached VHD to the flavor's root size.

        Returns the path of the resized copy, or None (implicitly) when the
        image already matches the requested size.

        :raises exception.FlavorDiskSmallerThanImage: when the flavor's root
            disk cannot hold the image.
        """
        vhd_size = self._vhdutils.get_vhd_size(vhd_path)['VirtualSize']
        root_vhd_size_gb = self._get_root_vhd_size_gb(instance)
        root_vhd_size = root_vhd_size_gb * units.Gi
        # Account for VHD format overhead: the usable (internal) size of a
        # file of root_vhd_size bytes is smaller than the file itself.
        root_vhd_internal_size = (
            self._vhdutils.get_internal_vhd_size_by_file_size(
                vhd_path, root_vhd_size))

        if root_vhd_internal_size < vhd_size:
            raise exception.FlavorDiskSmallerThanImage(
                flavor_size=root_vhd_size, image_size=vhd_size)
        if root_vhd_internal_size > vhd_size:
            # The resized copy is cached alongside the base image with the
            # target size embedded in the file name, e.g. image_40.vhd.
            path_parts = os.path.splitext(vhd_path)
            resized_vhd_path = '%s_%s%s' % (path_parts[0],
                                            root_vhd_size_gb,
                                            path_parts[1])

            # Serialize on the target path so concurrent spawns do not race
            # while copying/resizing the same cached image.
            @utils.synchronized(resized_vhd_path)
            def copy_and_resize_vhd():
                if not self._pathutils.exists(resized_vhd_path):
                    try:
                        LOG.debug("Copying VHD %(vhd_path)s to "
                                  "%(resized_vhd_path)s",
                                  {'vhd_path': vhd_path,
                                   'resized_vhd_path': resized_vhd_path})
                        self._pathutils.copyfile(vhd_path, resized_vhd_path)
                        LOG.debug("Resizing VHD %(resized_vhd_path)s to new "
                                  "size %(root_vhd_size)s",
                                  {'resized_vhd_path': resized_vhd_path,
                                   'root_vhd_size': root_vhd_size})
                        self._vhdutils.resize_vhd(resized_vhd_path,
                                                  root_vhd_internal_size,
                                                  is_file_max_size=False)
                    except Exception:
                        # Remove the partial copy before re-raising.
                        with excutils.save_and_reraise_exception():
                            if self._pathutils.exists(resized_vhd_path):
                                self._pathutils.remove(resized_vhd_path)

            copy_and_resize_vhd()
            return resized_vhd_path

    def get_cached_image(self, context, instance):
        """Return the local path of the cached (and possibly resized) image,
        downloading it via nova.virt.images on first use.
        """
        image_id = instance.image_ref

        base_vhd_dir = self._pathutils.get_base_vhd_dir()
        base_vhd_path = os.path.join(base_vhd_dir, image_id)

        # Serialize per image so only one download happens at a time.
        @utils.synchronized(base_vhd_path)
        def fetch_image_if_not_existing():
            vhd_path = None
            # The cached file keeps the format as its extension.
            for format_ext in ['vhd', 'vhdx']:
                test_path = base_vhd_path + '.' + format_ext
                if self._pathutils.exists(test_path):
                    vhd_path = test_path
                    break

            if not vhd_path:
                try:
                    images.fetch(context, image_id, base_vhd_path)

                    format_ext = self._vhdutils.get_vhd_format(base_vhd_path)
                    vhd_path = base_vhd_path + '.' + format_ext.lower()
                    self._pathutils.rename(base_vhd_path, vhd_path)
                except Exception:
                    # Remove the partial download before re-raising.
                    with excutils.save_and_reraise_exception():
                        if self._pathutils.exists(base_vhd_path):
                            self._pathutils.remove(base_vhd_path)

            return vhd_path

        vhd_path = fetch_image_if_not_existing()

        if CONF.use_cow_images and vhd_path.split('.')[-1].lower() == 'vhd':
            # Resize the base VHD image as it's not possible to resize a
            # differencing VHD. This does not apply to VHDX images.
            resized_vhd_path = self._resize_and_cache_vhd(instance, vhd_path)
            if resized_vhd_path:
                return resized_vhd_path

        return vhd_path
| 39.573643 | 78 | 0.591773 |
f700f20444454593e2536cb9e2591f4eae5a213c | 7,178 | py | Python | src/config.py | volovodenko/English | 860ae0f971909b9aa299c193ea7d0161c88d0b22 | [
"Apache-2.0"
] | null | null | null | src/config.py | volovodenko/English | 860ae0f971909b9aa299c193ea7d0161c88d0b22 | [
"Apache-2.0"
] | null | null | null | src/config.py | volovodenko/English | 860ae0f971909b9aa299c193ea7d0161c88d0b22 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import re
import json
import os.path
import unittest
# Matches C-style /* ... */ block comments (DOTALL: may span lines).
reg_cmnt = re.compile(r"/\*.*?\*/", re.DOTALL)
class Config:
    """Access to the application configuration.

    The effective configuration is the main JSON5-ish file overlaid with a
    user file, with hard-coded defaults filled in for every missing key.
    """

    # Strips C-style /* ... */ block comments so the files can be parsed
    # by the standard json module (class-local so the class is
    # self-contained; same pattern as the module-level reg_cmnt).
    _COMMENT_RE = re.compile(r"/\*.*?\*/", re.DOTALL)

    def __init__(self, main_path=None, user_path=None):
        """Remember the config file paths.

        :param main_path: main config path (default "config.json5").
        :param user_path: user overrides path (default "config_user.json5").
        """
        self._main_path = "config.json5" if main_path is None else main_path
        self._user_path = "config_user.json5" if user_path is None else user_path
        self._cfg_dict = {}

    def __getitem__(self, key):
        return self._cfg_dict[key]

    def __len__(self):
        return len(self._cfg_dict)

    def _load_json(self, path):
        """Parse *path* as comment-tolerant JSON; a missing file yields {}."""
        if not os.path.exists(path):
            return {}
        # "with" guarantees the handle is closed even if parsing fails
        # (the original left the file object dangling).
        with open(path) as config_file:
            txt = self._COMMENT_RE.sub("", config_file.read())  # remove comments
        return json.loads(txt)

    def _set_default(self, cfg):
        """Fill in application defaults for every missing key (in place)."""
        cfg["path_to_dict"] = cfg.get("path_to_dict", "dict.json")
        cfg["path_to_stat"] = cfg.get("path_to_stat", "statistic.json")
        cfg["words_per_lesson"] = int(cfg.get("words_per_lesson", 5))
        cfg["CntStudyWords"] = int(cfg.get("CntStudyWords", 50))
        cfg["MinPercent"] = float(cfg.get("MinPercent", 97.0))
        cfg["MinSuccessCnt"] = int(cfg.get("MinSuccessCnt", 10))
        cfg["retry_time"] = int(cfg.get("retry_time", 1800))
        cfg["hide_transcription"] = cfg.get("hide_transcription", "no")
        cfg["start_time_delay"] = int(cfg.get("start_time_delay", 1))
        cfg["stat_count_row"] = int(cfg.get("stat_count_row", 200))
        cfg["right_answer_percent"] = float(cfg.get("right_answer_percent", 10.0))
        cfg["wrong_answer_percent"] = float(cfg.get("wrong_answer_percent", 40.0))
        cfg["empty_answer_is_error"] = cfg.get("empty_answer_is_error", "no")
        cfg["internet_dictionary_url"] = cfg.get("internet_dictionary_url",
            {"EN_RU": "http://slovari.yandex.ru/{word}/en-ru/#lingvo/",
             "RU_EN": "http://slovari.yandex.ru/{word}/en/#lingvo/"})

    def create_default_user_config(self):
        """Create a skeleton user config file if it does not exist yet."""
        if not os.path.isfile(self._user_path):
            txt = "{\n /*\n User config\n */\n\n}"
            # "with" ensures the handle is flushed and closed.
            with open(self._user_path, "wt") as user_file:
                user_file.write(txt)

    def reload(self):
        """Re-read main + user files, apply defaults; return the config dict."""
        self._cfg_dict = {}
        self._cfg_dict.update(self._load_json(self._main_path))
        self._cfg_dict.update(self._load_json(self._user_path))
        self._set_default(self._cfg_dict)
        return self._cfg_dict

    def get_dict(self):
        return self._cfg_dict
class ConfigTestCase(unittest.TestCase):
    "Test suite for the Config class"

    def setUp(self):
        # Ensure a clean slate: the user-config fixture must not leak
        # between tests.
        if os.path.isfile("test_config_user.json"):
            os.remove("test_config_user.json")

    def tearDown(self):
        if os.path.isfile("test_config_user.json"):
            os.remove("test_config_user.json")

    def equal_cfg(self, cfg, test_dict):
        # Every expected key must match and the effective config must hold
        # exactly the 14 known settings.
        for key, val in test_dict.items():
            self.assertEqual(cfg[key], val)
        self.assertEqual(len(cfg), 14)

    def test_main(self):
        "Loading of the main configuration file"
        test_dict = {
            "path_to_dict": "dict.json",
            "path_to_stat": "statistic.json",
            "words_per_lesson": 5,
            "CntStudyWords": 50,
            "MinPercent": 97.0,
            "MinSuccessCnt": 10,
            "retry_time": 1800,
            "hide_transcription": "no",
            "start_time_delay": 1,
            "stat_count_row": 200,
            "right_answer_percent": 10.0,
            "wrong_answer_percent": 40.0,
            "empty_answer_is_error": "no",
            "internet_dictionary_url": {"EN_RU": "http://slovari.yandex.ru/{word}/en-ru/#lingvo/",
                                        "RU_EN": "http://slovari.yandex.ru/{word}/en/#lingvo/"}}
        cfg = Config("config.json5", "fake_config_user.json")
        cfg.reload()
        self.equal_cfg(cfg, test_dict)

    def test_user(self):
        "Loading of the user configuration file"
        test_dict = {
            "path_to_dict": "dict1.json",
            "path_to_stat": "statistic1.json",
            "words_per_lesson": 6,
            "CntStudyWords": 60,
            "MinPercent": 98.0,
            "MinSuccessCnt": 11,
            "retry_time": 1801,
            "hide_transcription": "yes",
            "start_time_delay": 2,
            "stat_count_row": 300,
            "right_answer_percent": 20.0,
            "wrong_answer_percent": 50.0,
            "empty_answer_is_error": "yes",
            "internet_dictionary_url": {"EN_RU": "http1://slovari.yandex.ru/{word}/en-ru/#lingvo/",
                                        "RU_EN": "http1://slovari.yandex.ru/{word}/en/#lingvo/"}}
        # NOTE(review): the file handle is not closed explicitly here;
        # CPython closes it on refcount drop.
        json.dump(test_dict, open("test_config_user.json", "w"))
        cfg = Config("config.json5", "test_config_user.json")
        cfg.reload()
        self.equal_cfg(cfg, test_dict)

    def test_user_part(self):
        "Loading of a user config file that overrides only part of the settings"
        test_dict = {
            "path_to_dict": "dict1.json",
            "path_to_stat": "statistic1.json",
            "words_per_lesson": 6,
            "CntStudyWords": 60,
            "MinPercent": 98.0,
            "MinSuccessCnt": 11}
        json.dump(test_dict, open("test_config_user.json", "w"))
        # The remaining keys must come from the built-in defaults.
        test_dict.update({
            "retry_time": 1800,
            "hide_transcription": "no",
            "start_time_delay": 1,
            "stat_count_row": 200,
            "right_answer_percent": 10.0,
            "wrong_answer_percent": 40.0,
            "empty_answer_is_error": "no"})
        cfg = Config("config.json5", "test_config_user.json")
        cfg.reload()
        self.equal_cfg(cfg, test_dict)

    def test_not_exists(self):
        "Defaults are applied when config files are missing"
        test_dict = {
            "path_to_dict": "dict.json",
            "path_to_stat": "statistic.json",
            "words_per_lesson": 5,
            "CntStudyWords": 50,
            "MinPercent": 97.0,
            "MinSuccessCnt": 10,
            "retry_time": 1800,
            "hide_transcription": "no",
            "start_time_delay": 1,
            "stat_count_row": 200,
            "right_answer_percent": 10.0,
            "wrong_answer_percent": 40.0,
            "empty_answer_is_error": "no",
            "internet_dictionary_url": {"EN_RU": "http://slovari.yandex.ru/{word}/en-ru/#lingvo/",
                                        "RU_EN": "http://slovari.yandex.ru/{word}/en/#lingvo/"}}
        cfg = Config("config.json5", "fake_config_user.json")
        cfg.reload()
        self.equal_cfg(cfg, test_dict)
        # NOTE(review): no assertion after this reload -- presumably it only
        # checks that a fully missing main config does not raise.
        cfg = Config("fake_config.json", "fake_config_user.json")
        cfg.reload()
if __name__ == "__main__":
    # Run from the project root so relative config paths resolve correctly.
    os.chdir(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    suite = unittest.TestLoader().loadTestsFromTestCase(ConfigTestCase)
    unittest.TextTestRunner(verbosity=2).run(suite)
| 36.622449 | 114 | 0.57314 |
f70101d2e677dfa1c95b8d12717565b56481d031 | 11,171 | py | Python | server/server/organizations/models.py | connectiveproject/connective | 8866082b2147feef0e5254ac4215987b9d881396 | [
"MIT"
] | 4 | 2021-07-05T10:49:26.000Z | 2021-11-24T11:34:43.000Z | server/server/organizations/models.py | connectiveproject/connective | 8866082b2147feef0e5254ac4215987b9d881396 | [
"MIT"
] | 39 | 2021-06-21T15:02:37.000Z | 2022-02-28T15:07:42.000Z | server/server/organizations/models.py | connectiveproject/connective | 8866082b2147feef0e5254ac4215987b9d881396 | [
"MIT"
] | 17 | 2021-06-16T08:59:45.000Z | 2021-09-29T11:35:38.000Z | from django.core.validators import RegexValidator
from django.db import models
from django.utils.translation import gettext_lazy as _
from taggit.managers import TaggableManager
from server.connective_tags.models import ConnectiveTaggedItem
from server.schools.models import School
from server.utils.db_utils import get_base_model
from server.utils.model_fields import random_slug
class SchoolActivityGroupManager(models.Manager):
    """Custom queryset helpers for SchoolActivityGroup."""

    def get_activity_container_only_group(self, activity_group):
        """Return the CONTAINER_ONLY group that shares this group's
        activity order, or None when no such group exists.
        """
        matches = self.filter(
            activity_order=activity_group.activity_order,
            group_type=SchoolActivityGroup.GroupTypes.CONTAINER_ONLY,
        )
        if not matches.exists():
            return None
        return matches[0]
class ImportedOrganization(get_base_model()):
    """Organization record imported from an external data source.

    Mirrors the fields of :class:`Organization` but with much looser
    length limits, so raw imported data can be stored before curation.
    """

    # Stable public identifier used instead of the numeric pk.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    organization_number = models.CharField(max_length=10, unique=True)
    email = models.EmailField(null=True, blank=True)
    description = models.CharField(max_length=4096, null=True, blank=True)
    website_url = models.URLField(null=True, blank=True)
    name = models.CharField(max_length=256, null=True, blank=True)
    goal = models.CharField(max_length=4096, null=True, blank=True)
    year_founded = models.CharField(max_length=128, null=True, blank=True)
    status = models.CharField(max_length=50, null=True, blank=True)
    target_audience = models.JSONField(null=True, blank=True)
    number_of_employees = models.PositiveIntegerField(null=True, blank=True)
    number_of_members = models.PositiveIntegerField(null=True, blank=True)
    number_of_volunteers = models.PositiveIntegerField(null=True, blank=True)
    # Geographic coordinates (decimal degrees).
    location_lon = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        null=True,
        blank=True,
    )
    location_lat = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        null=True,
        blank=True,
    )
    address_city = models.CharField(max_length=256, null=True, blank=True)
    address_street = models.CharField(max_length=256, null=True, blank=True)
    address_house_num = models.CharField(max_length=30, null=True, blank=True)
    address_zipcode = models.CharField(max_length=9, null=True, blank=True)
    cities = models.JSONField(null=True, blank=True)
    districts = models.JSONField(null=True, blank=True)
    union_type = models.CharField(max_length=50, null=True, blank=True)

    def __str__(self):
        return f"{self.name} | {self.organization_number} | {self.slug}"
class Organization(get_base_model()):
    """An organization offering activities on the platform.

    Curated counterpart of :class:`ImportedOrganization`, with tighter
    field-length constraints.
    """

    # Stable public identifier used instead of the numeric pk.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    organization_number = models.CharField(max_length=10, unique=True, null=True)
    email = models.EmailField()
    description = models.CharField(max_length=300)
    website_url = models.URLField(null=True, blank=True)
    name = models.CharField(max_length=100)
    goal = models.CharField(max_length=300, null=True, blank=True)
    year_founded = models.CharField(max_length=4, null=True, blank=True)
    status = models.CharField(max_length=50, null=True, blank=True)
    target_audience = models.JSONField(null=True, blank=True)
    number_of_employees = models.PositiveIntegerField(null=True, blank=True)
    number_of_members = models.PositiveIntegerField(null=True, blank=True)
    number_of_volunteers = models.PositiveIntegerField(null=True, blank=True)
    # Geographic coordinates (decimal degrees).
    location_lon = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        null=True,
        blank=True,
    )
    location_lat = models.DecimalField(
        max_digits=9,
        decimal_places=6,
        null=True,
        blank=True,
    )
    address_city = models.CharField(max_length=150, null=True, blank=True)
    address_street = models.CharField(max_length=150, null=True, blank=True)
    address_house_num = models.CharField(max_length=20, null=True, blank=True)
    address_zipcode = models.CharField(max_length=9, null=True, blank=True)
    cities = models.JSONField(null=True, blank=True)
    districts = models.JSONField(null=True, blank=True)
    union_type = models.CharField(max_length=50, null=True, blank=True)

    def __str__(self):
        return f"{self.name} | {self.organization_number} | {self.slug}"
class Activity(get_base_model()):
    """An activity (program) offered by an organization."""

    class Domain(models.TextChoices):
        # Broad subject-area classification for an activity.
        SCIENCE_AND_TECH = "SCIENCE_AND_TECH", "Science And Tech"
        EXTREME_SPORTS = "EXTREME_SPORTS", "Extreme Sports"
        FIELD = "FIELD", "Field"
        OTHER = "OTHER", "Other"

    tags = TaggableManager(blank=True, through=ConnectiveTaggedItem)
    # Stable public identifier used instead of the numeric pk.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    name = models.CharField(max_length=35)
    target_audience = models.JSONField()
    domain = models.CharField(max_length=55, null=True, choices=Domain.choices)
    # NOTE(review): "originization" is a long-standing misspelling of
    # "organization"; renaming the field would require a DB migration and
    # break existing callers, so it is kept as-is.
    originization = models.ForeignKey(
        Organization,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="activities",
    )
    activity_website_url = models.URLField(max_length=750, null=True, blank=True)
    activity_email = models.EmailField(null=True, blank=True)
    description = models.CharField(max_length=550, default="")
    contact_name = models.CharField(max_length=60, default="")
    logo = models.ImageField(blank=True, null=True)
    # Digits-only phone number, 9-15 digits (no separators or "+" prefix).
    phone_number = models.CharField(
        blank=True,
        max_length=15,
        validators=[
            RegexValidator(
                regex=r"^\d{9,15}$",
                message=_("phone number must be between 9-15 digits"),
            )
        ],
    )

    def __str__(self):
        # The organization FK is nullable (SET_NULL on delete), so fall back
        # to a shorter representation when it is absent.
        try:
            return f"{self.name} | {self.slug} | {self.originization.name}"
        except AttributeError:
            return f"{self.name} | {self.slug}"
class ImportedActivity(get_base_model()):
    """An activity record ingested from an external data source.

    Mirrors :class:`Activity` but keeps the raw imported fields (activity
    code, organization number, targeting JSON blobs) so the original payload
    can be audited.
    """

    # Short random identifier used in URLs instead of the numeric pk.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    # Identifier of the activity in the external source system.
    activity_code = models.IntegerField()
    name = models.CharField(max_length=550)
    # The name exactly as it appeared in the import, before any cleanup.
    raw_name = models.CharField(max_length=550)
    target_audience = models.JSONField()
    organization_number = models.IntegerField()
    organization_name = models.CharField(max_length=1550, default="")
    # Targeting attributes imported as raw JSON blobs -- schemas come from
    # the external source; TODO document them once confirmed.
    # (Bug fix: ``target_gender`` was declared twice; the second, identical
    # assignment was redundant and has been removed.)
    target_gender = models.JSONField()
    target_population = models.JSONField()
    target_time = models.JSONField()
    target_size = models.JSONField()
    target_migzar = models.JSONField()
    target_pikuah = models.JSONField()
    profession = models.JSONField()
    goal = models.CharField(max_length=1550, default="")
    is_active = models.BooleanField()
    activity_website_url = models.URLField(max_length=750, null=True, blank=True)
    activity_email = models.EmailField(null=True, blank=True)
    description = models.CharField(max_length=1550, default="")
    contact_name = models.CharField(max_length=100, default="")
    phone_number = models.CharField(
        blank=True,
        max_length=15,
        validators=[
            RegexValidator(
                regex=r"^\d{9,15}$",
                message=_("phone number must be between 9-15 digits"),
            )
        ],
    )

    def __str__(self):
        return f"{self.name} | {self.slug} | {self.activity_code}"
class ActivityMedia(get_base_model()):
    """A media asset (image or video) attached to an :class:`Activity`."""

    # Short random identifier used in URLs instead of the numeric pk.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    name = models.CharField(max_length=40, null=True, blank=True)
    # Despite the "_url" suffix this is an uploaded image, not a URL field.
    image_url = models.ImageField(blank=True, null=True)
    video_url = models.URLField(blank=True, null=True)
    activity = models.ForeignKey(
        Activity,
        on_delete=models.CASCADE,
        related_name="rich_media",
    )

    def __str__(self):
        return f"{self.name} | {self.slug} | {self.activity.name}"
class OrganizationMember(get_base_model()):
    """Links a user account to the single organization they belong to."""

    # OneToOne: a user can be a member of at most one organization.
    user = models.OneToOneField(
        "users.User", on_delete=models.CASCADE, related_name="organization_member"
    )
    organization = models.ForeignKey(
        Organization,
        on_delete=models.CASCADE,
        related_name="organization_member",
    )

    def __str__(self):
        return f"{self.user.email} | {self.organization.name}"
class SchoolActivityOrder(get_base_model()):
    """A school's request to run a given activity, with an approval workflow."""

    class Meta:
        # A school may order a given activity at most once.
        constraints = [
            models.UniqueConstraint(fields=["school", "activity"], name="unique_order")
        ]

    class Status(models.TextChoices):
        CANCELLED = "CANCELLED", "Cancelled"
        PENDING_ADMIN_APPROVAL = "PENDING_ADMIN_APPROVAL", "Pending Admin Approval"
        APPROVED = "APPROVED", "Approved"
        DENIED = "DENIED", "Denied"

    # New orders start out awaiting admin approval.
    base_status = Status.PENDING_ADMIN_APPROVAL
    # Short random identifier used in URLs instead of the numeric pk.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    # SET_NULL keeps the order history even if the requesting user is deleted.
    requested_by = models.ForeignKey(
        "users.User",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="requested_orders",
    )
    last_updated_by = models.ForeignKey(
        "users.User",
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name="last_updated_by_me_orders",
    )
    school = models.ForeignKey(
        School, on_delete=models.CASCADE, related_name="school_activity_orders"
    )
    activity = models.ForeignKey(
        Activity, on_delete=models.CASCADE, related_name="school_activity_orders"
    )
    status = models.CharField(
        _("status"), max_length=50, choices=Status.choices, default=base_status
    )
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    # Optional human-readable explanation for the current status.
    status_reason = models.CharField(
        max_length=250,
        blank=True,
    )

    def __str__(self):
        return f"{self.activity} | {self.school} | {self.status} | {self.pk}"
class SchoolActivityGroup(get_base_model()):
    """A group of consumers (students) attached to an approved activity order."""

    class GroupTypes(models.TextChoices):
        # Special group roles -- presumably CONTAINER_ONLY holds unassigned
        # students and NO_REGISTRATION skips sign-up; TODO confirm semantics.
        CONTAINER_ONLY = "CONTAINER_ONLY", "Container Only"
        DISABLED_CONSUMERS = "DISABLED_CONSUMERS", "Disabled Consumers"
        NO_REGISTRATION = "NO_REGISTRATION", "No Registration"
        DEFAULT = "DEFAULT", "Default"

    # Custom manager with group-specific query helpers.
    objects = SchoolActivityGroupManager()
    # Short random identifier used in URLs instead of the numeric pk.
    slug = models.CharField(max_length=40, default=random_slug, unique=True)
    activity_order = models.ForeignKey(
        SchoolActivityOrder, on_delete=models.CASCADE, related_name="activity_groups"
    )
    name = models.CharField(_("name"), max_length=50)
    description = models.CharField(_("description"), max_length=550)
    consumers = models.ManyToManyField(
        "users.Consumer",
        related_name="activity_groups",
        blank=True,
    )
    group_type = models.CharField(
        _("group type"),
        max_length=50,
        choices=GroupTypes.choices,
        default=GroupTypes.DEFAULT,
    )
    # SET_NULL so the group survives if its instructor account is removed.
    instructor = models.ForeignKey(
        "users.Instructor",
        on_delete=models.SET_NULL,
        related_name="managed_activity_groups",
        null=True,
        blank=True,
    )

    def __str__(self):
        return f"""
        {self.name} : {self.group_type} : {self.slug} :
        {self.activity_order.activity.name} : {self.activity_order.school.name}
        """
| 37.612795 | 87 | 0.688569 |
f70153728cb260c3c86bc652b2c6fedfd73c3c53 | 4,548 | py | Python | core/assembly_system.py | YifanQie/Deep_Learning_for_Manufacturing | 9ba19e41f69c561b04b8573ab9c52c0969f45bfd | [
"MIT"
] | 27 | 2019-10-31T15:16:13.000Z | 2022-03-29T03:56:57.000Z | core/assembly_system.py | YifanQie/Deep_Learning_for_Manufacturing | 9ba19e41f69c561b04b8573ab9c52c0969f45bfd | [
"MIT"
] | 4 | 2020-03-25T14:18:04.000Z | 2022-02-10T00:34:58.000Z | core/assembly_system.py | YifanQie/Deep_Learning_for_Manufacturing | 9ba19e41f69c561b04b8573ab9c52c0969f45bfd | [
"MIT"
] | 7 | 2020-02-23T22:12:37.000Z | 2021-12-08T20:14:41.000Z | import numpy as np
import pandas as pd
""" Contains core classes and methods for initializing a Assembly System, the inputs are provided in assemblyconfig file in utilities"""
class AssemblySystem:
"""Assembly System Class
:param assembly_type: Type of assembly Single-Station/Multi-Station
:type assembly_system: str (required)
:param assembly_kccs: Number of KCCs for the assembly
:type assembly_kccs: int (required)
:param assembly_kpis: Number of Kpis for the assembly
:type assembly_kpis: int (required)
"""
def __init__(self,assembly_type,assembly_kccs,assembly_kpis):
self.assembly_type=assembly_type
self.assembly_kccs=assembly_kccs
self.assembly_kpis=assembly_kpis
class PartType(AssemblySystem):
"""Part System Class, inherits the Assembly System Class, additional parameters for this class include
:param voxel_dim: Dimension of the voxel
:type assembly_system: int (required)
:param voxel_dim: Dimension of the voxel Channel, single channel output - 1 or multi channel - 2,3 (use 1 for deviations in one direction, 2 or 3 if data for multiple deviation directions are present)
:type assembly_system: int (required)
:param voxel_dim: Dimension of the voxel
:type assembly_system: int (required)
The class contains two functions - get_nominal_cop and get_nominal_cop_database
"""
def __init__(self,assembly_type,assembly_kccs,assembly_kpis,part_name,part_type,voxel_dim,voxel_channels,point_dim):
super().__init__(assembly_type,assembly_kccs,assembly_kpis)
self.part_name=part_name
self.part_type=part_type
self.voxel_dim=voxel_dim
self.voxel_channels=voxel_channels
self.point_dim=point_dim
def get_nominal_cop(self,file_name):
"""Import nominal cloud-of-point of the assembly from a text/csv file
:param file_name: Name of the input file
:type file_name: str (required)
:returns: numpy array of nominal COP
:rtype: numpy.array [point_dim,3]
"""
df=pd.read_csv(file_name, sep=',',header=None)
nominal_cop=df.values
return nominal_cop
def get_nominal_cop_database(self,conn_str,table_name):
"""Import nominal cloud-of-point of the assembly from a SQL database assumes the table only contains three columns of the nominal COPs in order of the Node IDs
:param conn_str: Connection String for Database
:type conn_str: str (required)
:param table_name: Name of table in the database
:type table_name: str (required)
:returns: numpy array of dim points * 3
:rtype: numpy.array [point_dim,3]
"""
engine = create_engine(conn_str)
squery ='select * from '+table_name
df_nom = pd.read_sql_query(squery,con=engine)
df_nom = df_nom.values
return df_nom
class VRMSimulationModel(PartType):
    """VRM Simulation Model class, inherits the part type class.

    :param noise_level: level of artificial noise added to simulated data,
        typically 0.1 mm depending on the measurement system/scanner
    :type noise_level: float
    :param noise_type: type of noise; for 'gaussian' the noise_level is the
        standard deviation (zero mean), for 'uniform' the range is
        [-noise_level, +noise_level]
    :type noise_type: str
    :param convergency_flag: 1 if the simulation converged (default)
    :type convergency_flag: int
    """

    def __init__(self, assembly_type, assembly_kccs, assembly_kpis, part_name, part_type,
                 voxel_dim, voxel_channels, point_dim, noise_level, noise_type='uniform',
                 convergency_flag=1):
        super().__init__(assembly_type, assembly_kccs, assembly_kpis, part_name,
                         part_type, voxel_dim, voxel_channels, point_dim)
        self.noise_level = noise_level
        self.noise_type = noise_type
        self.convergency_flag = convergency_flag

    def kpi_calculator(self, cop_data, kpi_params=[]):
        """User defined function to calculate KPI from Cloud of Point data,
        [KPI] = f(CoP).

        :param cop_data: CoP data for a given sample
        :type cop_data: np_array [point_dim,3]
        :param kpi_params: parameters required to calculate the KPI; may be
            empty when none are needed
        :type kpi_params: list
        :returns: list of multivariate KPIs for the given CoP
        :rtype: list
        """
        # NOTE(review): mutable default ``kpi_params=[]`` is shared across
        # calls; harmless here since it is never mutated, but worth fixing
        # upstream. Placeholder implementation returns a list of Nones.
        kpi = [None] * self.assembly_kpis
        # define function here
        return kpi
f701a87736fbc584f7e9ffd3e6d8d63f457be0ba | 2,204 | py | Python | lhotse/manipulation.py | freewym/lhotse | 66e9bbaf25b75011388ab00189baa162c3c1d435 | [
"Apache-2.0"
] | null | null | null | lhotse/manipulation.py | freewym/lhotse | 66e9bbaf25b75011388ab00189baa162c3c1d435 | [
"Apache-2.0"
] | null | null | null | lhotse/manipulation.py | freewym/lhotse | 66e9bbaf25b75011388ab00189baa162c3c1d435 | [
"Apache-2.0"
] | null | null | null | from functools import reduce
from itertools import chain
from operator import add
from typing import Iterable, Optional, TypeVar
from lhotse.audio import Recording, RecordingSet
from lhotse.cut import Cut, CutSet, MixedCut
from lhotse.features import FeatureSet, Features
from lhotse.supervision import SupervisionSegment, SupervisionSet
from lhotse.utils import Pathlike, load_yaml
ManifestItem = TypeVar('ManifestItem', Recording, SupervisionSegment, Features, Cut, MixedCut)
Manifest = TypeVar('Manifest', RecordingSet, SupervisionSet, FeatureSet, CutSet)
def combine(*manifests: Manifest) -> Manifest:
    """Combine multiple manifests of the same type into one."""
    # Fold the manifests together with their native ``+`` semantics;
    # raises TypeError when called with no arguments (like reduce()).
    return reduce(lambda merged, manifest: merged + manifest, manifests)
def to_manifest(items: Iterable[ManifestItem]) -> Optional[Manifest]:
    """
    Take an iterable of data types in Lhotse such as Recording, SupervisonSegment or Cut, and create the manifest of the
    corresponding type. When the iterable is empty, returns None.
    """
    iterator = iter(items)
    try:
        head = next(iterator)
    except StopIteration:
        return None
    # Re-attach the consumed first element in front of the stream.
    stream = chain([head], iterator)
    if isinstance(head, Recording):
        return RecordingSet.from_recordings(stream)
    if isinstance(head, SupervisionSegment):
        return SupervisionSet.from_segments(stream)
    if isinstance(head, (Cut, MixedCut)):
        return CutSet.from_cuts(stream)
    if isinstance(head, Features):
        raise ValueError("FeatureSet generic construction from iterable is not possible, as the config information "
                         "would have been lost. Call FeatureSet.from_features() directly instead.")
    raise ValueError(f"Unknown type of manifest item: {head}")
def load_manifest(path: Pathlike) -> Manifest:
    """Generic utility for reading an arbitrary manifest.

    Probes each known manifest type in turn and returns the first one that
    parses the YAML payload successfully.

    :param path: path to the YAML manifest file.
    :raises ValueError: when no manifest type can parse the file.
    """
    raw_data = load_yaml(path)
    data_set = None
    for manifest_type in [RecordingSet, SupervisionSet, FeatureSet, CutSet]:
        try:
            data_set = manifest_type.from_dicts(raw_data)
            # Bug fix: stop at the first type that parses successfully.
            # Without the break, a later type that also "succeeds" would
            # silently overwrite the correct earlier result.
            break
        except Exception:
            # Probing: a parse failure only means "try the next type".
            pass
    if data_set is None:
        raise ValueError(f'Unknown type of manifest: {path}')
    return data_set
| 38 | 120 | 0.72686 |
f701ad039addc3139e0d9bb52293365f52a99e55 | 5,544 | py | Python | tests/unit/modules/brew_test.py | skrobul/salt | ef7fb71082cce7a9783e00b9c65062fefae09263 | [
"Apache-2.0"
] | 2 | 2017-09-17T21:10:35.000Z | 2019-08-26T03:00:12.000Z | tests/unit/modules/brew_test.py | skrobul/salt | ef7fb71082cce7a9783e00b9c65062fefae09263 | [
"Apache-2.0"
] | null | null | null | tests/unit/modules/brew_test.py | skrobul/salt | ef7fb71082cce7a9783e00b9c65062fefae09263 | [
"Apache-2.0"
] | 3 | 2021-02-23T08:12:48.000Z | 2021-02-23T08:13:13.000Z | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Nicole Thomas <nicole@satlstack.com>`
'''
# Import Salt Testing Libs
from salttesting import TestCase
from salttesting.mock import MagicMock, patch
from salttesting.helpers import ensure_in_syspath
ensure_in_syspath('../../')
# Import Salt Libs
from salt.modules import brew
# Global Variables
# The salt loader normally injects these dunders; give the module empty
# dicts so patch.dict() can target them in the tests below.
brew.__context__ = {}
brew.__salt__ = {}

# Sample ``brew tap`` output and its expected parsed form.
TAPS_STRING = 'homebrew/dupes\nhomebrew/science\nhomebrew/x11'
TAPS_LIST = ['homebrew/dupes', 'homebrew/science', 'homebrew/x11']
# Expected filesystem location of the Homebrew binary.
HOMEBREW_BIN = '/usr/local/bin/brew'
class BrewTestCase(TestCase):
    '''
    TestCase for salt.modules.brew module
    '''
    # Each test patches the salt dunders (``__salt__``/``__context__``) with
    # mocks so no real ``brew`` commands are executed.

    # '_list_taps' function tests: 1

    def test_list_taps(self):
        '''
        Tests the return of the list of taps
        '''
        mock_taps = MagicMock(return_value=TAPS_STRING)
        with patch.dict(brew.__salt__, {'cmd.run': mock_taps}):
            self.assertEqual(brew._list_taps(), TAPS_LIST)

    # '_tap' function tests: 3

    @patch('salt.modules.brew._list_taps', MagicMock(return_value=TAPS_LIST))
    def test_tap_installed(self):
        '''
        Tests if tap argument is already installed or not
        '''
        self.assertTrue(brew._tap('homebrew/science'))

    @patch('salt.modules.brew._list_taps', MagicMock(return_value={}))
    def test_tap_failure(self):
        '''
        Tests if the tap installation failed
        '''
        # Non-zero retcode from ``brew tap`` means failure.
        mock_failure = MagicMock(return_value=1)
        with patch.dict(brew.__salt__, {'cmd.retcode': mock_failure}):
            self.assertFalse(brew._tap('homebrew/test'))

    @patch('salt.modules.brew._list_taps', MagicMock(return_value=TAPS_LIST))
    def test_tap(self):
        '''
        Tests adding unofficial Github repos to the list of brew taps
        '''
        mock_success = MagicMock(return_value=0)
        with patch.dict(brew.__salt__, {'cmd.retcode': mock_success}):
            self.assertTrue(brew._tap('homebrew/test'))

    # '_homebrew_bin' function tests: 1

    def test_homebrew_bin(self):
        '''
        Tests the path to the homebrew binary
        '''
        # ``brew --prefix`` output is combined with ``/bin/brew``.
        mock_path = MagicMock(return_value='/usr/local')
        with patch.dict(brew.__salt__, {'cmd.run': mock_path}):
            self.assertEqual(brew._homebrew_bin(), '/usr/local/bin/brew')

    # 'list_pkgs' function tests: 2
    # Only tested a few basics
    # Full functionality should be tested in integration phase

    def test_list_pkgs_removed(self):
        '''
        Tests removed implementation
        '''
        # Homebrew has no concept of "removed" packages, so this is empty.
        self.assertEqual(brew.list_pkgs(removed=True), {})

    def test_list_pkgs_versions_true(self):
        '''
        Tests if pkg.list_pkgs is already in context and is a list
        '''
        mock_context = {'foo': ['bar']}
        with patch.dict(brew.__context__, {'pkg.list_pkgs': mock_context}):
            self.assertEqual(brew.list_pkgs(versions_as_list=True),
                             mock_context)

    # 'version' function tests: 1

    def test_version(self):
        '''
        Tests version name returned
        '''
        mock_version = MagicMock(return_value='0.1.5')
        with patch.dict(brew.__salt__, {'pkg_resource.version': mock_version}):
            self.assertEqual(brew.version('foo'), '0.1.5')

    # 'latest_version' function tests: 0
    # It has not been fully implemented

    # 'remove' function tests: 1
    # Only tested a few basics
    # Full functionality should be tested in integration phase

    @patch('salt.modules.brew.list_pkgs',
           MagicMock(return_value={'test': '0.1.5'}))
    def test_remove(self):
        '''
        Tests if package to be removed exists
        '''
        # 'foo' is not in the installed-package mock, so nothing is removed.
        mock_params = MagicMock(return_value=({'foo': None}, 'repository'))
        with patch.dict(brew.__salt__,
                        {'pkg_resource.parse_targets': mock_params}):
            self.assertEqual(brew.remove('foo'), {})

    # 'refresh_db' function tests: 2

    @patch('salt.modules.brew._homebrew_bin',
           MagicMock(return_value=HOMEBREW_BIN))
    def test_refresh_db_failure(self):
        '''
        Tests an update of homebrew package repository failure
        '''
        mock_user = MagicMock(return_value='foo')
        mock_failure = MagicMock(return_value=1)
        with patch.dict(brew.__salt__, {'file.get_user': mock_user,
                                        'cmd.retcode': mock_failure}):
            self.assertFalse(brew.refresh_db())

    @patch('salt.modules.brew._homebrew_bin',
           MagicMock(return_value=HOMEBREW_BIN))
    def test_refresh_db(self):
        '''
        Tests a successful update of homebrew package repository
        '''
        mock_user = MagicMock(return_value='foo')
        mock_success = MagicMock(return_value=0)
        with patch.dict(brew.__salt__, {'file.get_user': mock_user,
                                        'cmd.retcode': mock_success}):
            self.assertTrue(brew.refresh_db())

    # 'install' function tests: 1
    # Only tested a few basics
    # Full functionality should be tested in integration phase

    def test_install(self):
        '''
        Tests if package to be installed exists
        '''
        # parse_targets returning no targets means nothing gets installed.
        mock_params = MagicMock(return_value=[None, None])
        with patch.dict(brew.__salt__,
                        {'pkg_resource.parse_targets': mock_params}):
            self.assertEqual(brew.install('name=foo'), {})
if __name__ == '__main__':
    # Allow running this test module directly, without a running salt daemon.
    from integration import run_tests
    run_tests(BrewTestCase, needs_daemon=False)
| 33.197605 | 79 | 0.628066 |
f701dbb60581a894fa82d654ad38824ba276b7a5 | 4,113 | py | Python | model/seg_models/pspnet.py | AceCoooool/segmentation | 2f4d5ac193cab580eb8ba789e79db6dadcfecfd0 | [
"MIT"
] | 2 | 2019-06-08T13:09:08.000Z | 2020-09-21T04:03:09.000Z | model/seg_models/pspnet.py | AceCoooool/segmentation | 2f4d5ac193cab580eb8ba789e79db6dadcfecfd0 | [
"MIT"
] | 2 | 2019-05-20T11:56:02.000Z | 2019-06-02T13:22:55.000Z | model/seg_models/pspnet.py | AceCoooool/segmentation | 2f4d5ac193cab580eb8ba789e79db6dadcfecfd0 | [
"MIT"
] | 1 | 2020-09-22T03:55:39.000Z | 2020-09-22T03:55:39.000Z | """Pyramid Scene Parsing Network"""
import os
import torch
from torch import nn
import torch.nn.functional as F
from model.seg_models.segbase import SegBaseModel
from model.module.basic import _FCNHead
__all__ = ['PSPNet', 'get_psp',
'get_psp_resnet101_voc',
'get_psp_resnet101_citys']
# head
def _PSP1x1Conv(in_channels, out_channels):
return nn.Sequential(nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False),
nn.BatchNorm2d(out_channels), nn.ReLU(inplace=True))
class _PyramidPooling(nn.Module):
    """Pyramid pooling: pool the input at 1x1, 2x2, 3x3 and 4x4 grids,
    project each with a 1x1 conv block, upsample back to the input size
    and concatenate everything along the channel axis (doubling the
    channel count)."""

    def __init__(self, in_channels):
        super(_PyramidPooling, self).__init__()
        # Each branch compresses channels by 4x so that input + 4 branches
        # concatenate to exactly 2 * in_channels.
        branch_channels = in_channels // 4
        self.conv1 = _PSP1x1Conv(in_channels, branch_channels)
        self.conv2 = _PSP1x1Conv(in_channels, branch_channels)
        self.conv3 = _PSP1x1Conv(in_channels, branch_channels)
        self.conv4 = _PSP1x1Conv(in_channels, branch_channels)

    @staticmethod
    def pool(x, size):
        return F.adaptive_avg_pool2d(x, output_size=size)

    @staticmethod
    def upsample(x, h, w):
        return F.interpolate(x, (h, w), mode='bilinear', align_corners=True)

    def forward(self, x):
        _, _, height, width = x.shape
        branches = (self.conv1, self.conv2, self.conv3, self.conv4)
        # Pool sizes 1..4 pair with conv1..conv4 respectively.
        feats = [
            self.upsample(conv(self.pool(x, size)), height, width)
            for size, conv in enumerate(branches, start=1)
        ]
        return torch.cat([x] + feats, dim=1)
class _PSPHead(nn.Module):
    """PSPNet decode head: pyramid pooling followed by a small classifier."""

    def __init__(self, nclass, **kwargs):
        super(_PSPHead, self).__init__(**kwargs)
        self.psp = _PyramidPooling(2048)
        # Same module order (hence identical state_dict keys 0..4) as the
        # list-and-append construction it replaces.
        self.block = nn.Sequential(
            nn.Conv2d(4096, 512, kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),
            nn.Dropout(0.1),
            nn.Conv2d(512, nclass, kernel_size=1),
        )

    def forward(self, x):
        return self.block(self.psp(x))
class PSPNet(SegBaseModel):
    """Pyramid Scene Parsing Network.

    Produces the main segmentation logits from the deepest backbone stage
    and, when ``aux`` is enabled, an auxiliary FCN output from the previous
    stage (used as a training-time loss aid).
    """

    def __init__(self, nclass, backbone='resnet50', aux=True, dilated=True, jpu=False,
                 pretrained_base=True, base_size=520, crop_size=480, **kwargs):
        super(PSPNet, self).__init__(nclass, aux, backbone, base_size=base_size, dilated=dilated, jpu=jpu,
                                     crop_size=crop_size, pretrained_base=pretrained_base, **kwargs)
        self.head = _PSPHead(nclass, **kwargs)
        if self.aux:
            # Auxiliary head over the stage-3 (1024-channel) feature map.
            self.auxlayer = _FCNHead(1024, nclass, **kwargs)
        # Record the names of the newly added (non-backbone) modules,
        # e.g. so fine-tuning can give them a different learning rate.
        self.__setattr__('others', ['head', 'auxlayer'] if self.aux else ['head'])

    def forward(self, x):
        c3, c4 = self.base_forward(x)
        outputs = []
        x = self.head(c4)
        # NOTE(review): ``self._up_kwargs`` is passed positionally as the
        # ``size`` argument of F.interpolate, so it presumably holds the
        # target (height, width) despite the "_kwargs" name -- confirm in
        # SegBaseModel.
        x = F.interpolate(x, self._up_kwargs, mode='bilinear', align_corners=True)
        outputs.append(x)
        if self.aux:
            auxout = self.auxlayer(c3)
            auxout = F.interpolate(auxout, self._up_kwargs, mode='bilinear', align_corners=True)
            outputs.append(auxout)
        # (main_logits,) or (main_logits, aux_logits).
        return tuple(outputs)
def get_psp(dataset='pascal_voc', backbone='resnet101', pretrained=False, pretrained_base=True,
            jpu=False, root=os.path.expanduser('~/.torch/models'), **kwargs):
    """Construct a PSPNet for the given dataset, optionally loading weights.

    :param dataset: dataset key; must be one of the ``acronyms`` keys below.
    :param backbone: backbone network name, e.g. 'resnet101'.
    :param pretrained: when True, load a full pretrained checkpoint.
    :param pretrained_base: when True, initialize the backbone with
        ImageNet-pretrained weights.
    :param jpu: use the Joint Pyramid Upsampling variant (also selects a
        different checkpoint name).
    :param root: local directory where checkpoints are cached.
    """
    # Maps dataset keys to the short suffix used in checkpoint file names.
    acronyms = {
        'pascal_voc': 'voc',
        'citys': 'citys',
    }
    from data import datasets
    # infer number of classes
    model = PSPNet(datasets[dataset].NUM_CLASS, backbone=backbone,
                   pretrained_base=pretrained_base, jpu=jpu, **kwargs)
    if pretrained:
        from model.model_store import get_model_file
        name = 'psp_%s_%s' % (backbone, acronyms[dataset])
        # JPU-trained weights are stored under a distinct name.
        name = name + '_jpu' if jpu else name
        model.load_state_dict(torch.load(get_model_file(name, root=root)))
    return model
def get_psp_resnet101_voc(**kwargs):
    """PSPNet with a ResNet-101 backbone configured for Pascal VOC."""
    return get_psp(dataset='pascal_voc', backbone='resnet101', **kwargs)
def get_psp_resnet101_citys(**kwargs):
    """PSPNet with a ResNet-101 backbone configured for Cityscapes."""
    return get_psp(dataset='citys', backbone='resnet101', **kwargs)
| 36.39823 | 106 | 0.644299 |
f701f97e1f188d4e04e78e513ce8208e4d9f71ef | 1,360 | py | Python | deploy.py | blockchainhelppro/CelvinRost | aa2661747d06e4610928466521e4da1db77aeadc | [
"MIT"
] | 2 | 2018-08-15T21:27:59.000Z | 2018-08-21T17:56:12.000Z | deploy.py | blockchainhelppro/CelvinRost | aa2661747d06e4610928466521e4da1db77aeadc | [
"MIT"
] | null | null | null | deploy.py | blockchainhelppro/CelvinRost | aa2661747d06e4610928466521e4da1db77aeadc | [
"MIT"
] | 1 | 2021-12-06T04:03:32.000Z | 2021-12-06T04:03:32.000Z | import itertools
import toposort
from populus.utils.contracts import (
compute_direct_dependency_graph,
compute_recursive_contract_dependencies,
)
def compute_deploy_order(dependency_graph):
    """
    Given a dictionary that maps contract to their dependencies,
    determine the overall dependency ordering for that set of contracts.
    """
    # toposort_flatten wants a plain dict; normalize whatever mapping we got.
    graph = dict(dependency_graph)
    return toposort.toposort_flatten(graph)
def get_deploy_order(contracts_to_deploy, compiled_contracts):
    """Return the requested contracts plus their link dependencies, ordered
    so that every dependency is deployed before its dependents."""
    # Library-linking dependencies between the compiled contracts.
    dependency_graph = compute_direct_dependency_graph(compiled_contracts.values())
    global_order = compute_deploy_order(dependency_graph)

    # Every contract transitively required by the requested ones.
    required = set()
    for contract_name in contracts_to_deploy:
        required.update(
            compute_recursive_contract_dependencies(contract_name, dependency_graph)
        )
    required.update(contracts_to_deploy)

    # Filter the global topological order down to the required contracts.
    return tuple(name for name in global_order if name in required)
| 33.170732 | 83 | 0.772794 |
f7020126c0821383f6a8544cd6c1e7094992bb87 | 25 | py | Python | btd6_memory_info/generated/Unity/Collections/LowLevel/Unsafe/UnsafeUtility/unsafe_utility.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/Unity/Collections/LowLevel/Unsafe/UnsafeUtility/unsafe_utility.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/Unity/Collections/LowLevel/Unsafe/UnsafeUtility/unsafe_utility.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | class UnsafeUtility: pass | 25 | 25 | 0.88 |
f702024c3c01565b670bab7999a264ce4f0d7f8d | 260 | py | Python | slack_app/tasks.py | webscopeio/django-slack-app | 65abb3717460c51a19c1238eb0572f25c47b2a42 | [
"MIT"
] | 3 | 2020-06-23T10:02:48.000Z | 2020-10-28T11:59:28.000Z | slack_app/tasks.py | webscopeio/django-slack-integration | 65abb3717460c51a19c1238eb0572f25c47b2a42 | [
"MIT"
] | 2 | 2020-02-17T11:42:03.000Z | 2020-02-18T13:46:38.000Z | slack_app/tasks.py | webscopeio/django-slack-integration | 65abb3717460c51a19c1238eb0572f25c47b2a42 | [
"MIT"
] | 4 | 2020-10-11T11:02:58.000Z | 2022-03-14T08:23:42.000Z | from celery import shared_task
from .signals import slack_event_received
@shared_task
def receive_slack_signal_task(sender, event_type, event_data, **data):
slack_event_received.send(sender=sender, event_type=event_type, event_data=event_data, **data)
| 26 | 98 | 0.819231 |
f70218f2b4f389dac4b6b4a28a071cb1c97475d0 | 5,869 | py | Python | office365/sharepoint/tenant/administration/tenant.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | null | null | null | office365/sharepoint/tenant/administration/tenant.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | null | null | null | office365/sharepoint/tenant/administration/tenant.py | wreiner/Office365-REST-Python-Client | 476bbce4f5928a140b4f5d33475d0ac9b0783530 | [
"MIT"
] | null | null | null | from office365.runtime.client_value_collection import ClientValueCollection
from office365.runtime.queries.service_operation_query import ServiceOperationQuery
from office365.runtime.resource_path import ResourcePath
from office365.sharepoint.base_entity import BaseEntity
from office365.sharepoint.tenant.administration.hubSiteProperties import HubSiteProperties
from office365.sharepoint.tenant.administration.secondary_administrators_fields_data import \
SecondaryAdministratorsFieldsData
from office365.sharepoint.tenant.administration.secondary_administrators_info import SecondaryAdministratorsInfo
from office365.sharepoint.tenant.administration.site_properties import SiteProperties
from office365.sharepoint.tenant.administration.site_properties_collection import SitePropertiesCollection
from office365.sharepoint.tenant.administration.sitePropertiesEnumerableFilter import SitePropertiesEnumerableFilter
from office365.sharepoint.tenant.administration.spo_operation import SpoOperation
class Tenant(BaseEntity):
    """Client-side proxy for the SharePoint tenant administration entity.

    Each method builds a ServiceOperationQuery and registers it on the
    shared ClientContext; nothing is sent until the context executes its
    pending queries.
    """

    def __init__(self, context):
        # The resource path and namespace are fixed by the SharePoint
        # tenant-administration API surface.
        super().__init__(context, ResourcePath("Microsoft.Online.SharePoint.TenantAdministration.Tenant"),
                         "Microsoft.Online.SharePoint.TenantAdministration")

    def get_site_secondary_administrators(self, site_id):
        """
        Gets site collection administrators

        :type site_id: str
        """
        return_type = ClientValueCollection(SecondaryAdministratorsInfo)
        payload = SecondaryAdministratorsFieldsData(site_id)
        qry = ServiceOperationQuery(self, "GetSiteSecondaryAdministrators", None, payload,
                                    "secondaryAdministratorsFieldsData", return_type)
        self.context.add_query(qry)
        # Populated after the context executes the queued query.
        return return_type

    def set_site_secondary_administrators(self, site_id, emails, names=None):
        """
        Sets site collection administrators

        :type names: list[str] or None
        :type emails: list[str]
        :type site_id: str
        """
        payload = SecondaryAdministratorsFieldsData(site_id, emails, names)
        qry = ServiceOperationQuery(self, "SetSiteSecondaryAdministrators", None, payload,
                                    "secondaryAdministratorsFieldsData", None)
        self.context.add_query(qry)
        return self

    def register_hub_site(self, site_url):
        """
        Registers an existing site as a hub site.

        :param str site_url: URL of the site to promote to a hub site.
        :return: HubSiteProperties, populated after execution.
        """
        return_type = HubSiteProperties(self.context)
        params = {"siteUrl": site_url}
        qry = ServiceOperationQuery(self, "RegisterHubSite", None, params, None, return_type)
        self.context.add_query(qry)
        return return_type

    def unregister_hub_site(self, siteUrl):
        """
        Unregisters a hub site so that it is no longer a hub site.

        :param str siteUrl: URL of the hub site to demote.
        :return: self, for chaining.
        """
        params = {"siteUrl": siteUrl}
        qry = ServiceOperationQuery(self, "UnregisterHubSite", None, params, None, None)
        self.context.add_query(qry)
        return self

    def create_site(self, site_create_props):
        """Queues a site collection for creation with the specified properties.

        :param SiteCreationProperties site_create_props:
            A SiteCreationProperties object that contains the initial properties
            of the new site collection.
        :return: SpoOperation tracking the server-side (asynchronous) creation.
        """
        result = SpoOperation(self.context)
        qry = ServiceOperationQuery(self, "CreateSite", None, site_create_props, "siteCreationProperties", result)
        self.context.add_query(qry)
        return result

    def remove_site(self, site_url):
        """Deletes the site with the specified URL

        :param str site_url: A string representing the URL of the site.
        :return: SpoOperation tracking the server-side deletion.
        """
        result = SpoOperation(self.context)
        qry = ServiceOperationQuery(self, "removeSite", [site_url], None, None, result)
        self.context.add_query(qry)
        return result

    def remove_deleted_site(self, site_url):
        # Not implemented yet: should permanently remove a site from the
        # tenant recycle bin.
        pass

    def restore_deleted_site(self, site_url):
        # Not implemented yet: should restore a site from the tenant
        # recycle bin.
        pass

    def get_site_properties_by_url(self, url, include_detail):
        """
        :param str url: A string that represents the site URL.
        :param bool include_detail: A Boolean value that indicates whether to
            include all of the SPSite properties.
        """
        site_props = SiteProperties(self.context)
        # Attach to the sites collection so the loaded payload is tracked.
        self._sites.add_child(site_props)
        payload = {
            'url': url,
            'includeDetail': include_detail
        }
        qry = ServiceOperationQuery(self, "getSitePropertiesByUrl", None, payload, None, site_props)
        self.context.add_query(qry)
        return site_props

    def get_site_properties_from_sharepoint_by_filters(self, _filter, start_index=0, include_detail=False):
        """
        :param bool include_detail: include all SPSite properties per result.
        :param int start_index: paging offset into the result set.
        :param str _filter: server-side filter expression.
        """
        site_props_col = SitePropertiesCollection(self.context)
        qry = ServiceOperationQuery(self, "getSitePropertiesFromSharePointByFilters",
                                    None,
                                    SitePropertiesEnumerableFilter(_filter, start_index, include_detail),
                                    "speFilter",
                                    site_props_col)
        self.context.add_query(qry)
        return site_props_col

    @property
    def root_site_url(self):
        """URL of the tenant root site collection, once loaded.

        :rtype: str or None
        """
        return self.properties.get('RootSiteUrl', None)

    @property
    def _sites(self):
        """Gets a collection of sites."""
        if self.is_property_available('sites'):
            return self.properties['sites']
        else:
            return SitePropertiesCollection(self.context, ResourcePath("sites", self.resource_path))
| 39.655405 | 116 | 0.679332 |
f7022b106191f7e769f494a9e9e6e19c38892823 | 1,472 | py | Python | qnarre/doc/justifier.py | quantapix/qnarre.com | f51d5945c20ef8182c4aa11f1b407d064c190c70 | [
"MIT"
] | null | null | null | qnarre/doc/justifier.py | quantapix/qnarre.com | f51d5945c20ef8182c4aa11f1b407d064c190c70 | [
"MIT"
] | null | null | null | qnarre/doc/justifier.py | quantapix/qnarre.com | f51d5945c20ef8182c4aa11f1b407d064c190c70 | [
"MIT"
] | null | null | null | # Copyright 2019 Quantapix Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
class Justifier:
    """Accumulates per-column justification offsets and maps a column index
    to a Bootstrap ``justify-content-*`` class.

    ``justs`` holds nine running justification levels; ``offsets`` holds
    three one-shot adjustment rows, each consumed at most once.
    """

    def __init__(self, **kw):
        super().__init__(**kw)
        self.justs = [0] * 9
        self.offsets = [
            (0, 0, 0, 1, 1, 1, 1, 1, 1),
            (0, -1, -2, 0, 0, 0, 1, 1, 1),
            (0, -1, -2, 0, -1, -2, 0, 0, 0),
        ]

    def init_justs(self, justs):
        """Apply the offset row for each index's group (index // 3), once."""
        for index in justs:
            group = index // 3
            row = self.offsets[group]
            if row:
                # Element-wise accumulate, then retire this offset row.
                self.justs = [level + delta for level, delta in zip(self.justs, row)]
                self.offsets[group] = None

    def calc_just(self, justs):
        """Return the justify class for the first index that resolves to a
        non-start level; default to start alignment."""
        for index in justs:
            level = self.justs[index] + index % 3
            if level == 1:
                return 'justify-content-center'
            if level > 1:
                return 'justify-content-end'
        return 'justify-content-start'
| 35.902439 | 79 | 0.536005 |
f7022f7075bdd6537b307688382d872a3f7fd177 | 53 | py | Python | Interfaces/__init__.py | ahmadryan/TurbAn | b8866d103a2ca2f5fbad73bcd4416f19299f22b2 | [
"BSD-2-Clause-Patent"
] | null | null | null | Interfaces/__init__.py | ahmadryan/TurbAn | b8866d103a2ca2f5fbad73bcd4416f19299f22b2 | [
"BSD-2-Clause-Patent"
] | null | null | null | Interfaces/__init__.py | ahmadryan/TurbAn | b8866d103a2ca2f5fbad73bcd4416f19299f22b2 | [
"BSD-2-Clause-Patent"
] | 10 | 2019-03-22T15:30:12.000Z | 2021-02-10T02:55:50.000Z | from . import Simulations
from . import Spacecraft
| 17.666667 | 26 | 0.773585 |
f7023d3f50a4bcdd656f0e33b9e318facfcd714f | 892 | py | Python | kubi_ecs_logger/models/fields/destination.py | kumina/kubi_ecs_logger | 64d9519e0759a24253a4edc53e0c024675033d1c | [
"BSD-3-Clause"
] | 6 | 2019-12-15T12:47:06.000Z | 2022-01-11T08:54:58.000Z | kubi_ecs_logger/models/fields/destination.py | kumina/kubi_ecs_logger | 64d9519e0759a24253a4edc53e0c024675033d1c | [
"BSD-3-Clause"
] | null | null | null | kubi_ecs_logger/models/fields/destination.py | kumina/kubi_ecs_logger | 64d9519e0759a24253a4edc53e0c024675033d1c | [
"BSD-3-Clause"
] | null | null | null | from marshmallow import fields
from .field_set import FieldSet, FieldSetSchema
class Destination(FieldSet):
def __init__(self,
address: str = None,
bytes: int = None,
domain: str = None,
ip: str = None,
mac: str = None,
packets: int = None,
port: int = None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.address = address
self.bytes = bytes
self.domain = domain
self.ip = ip
self.mac = mac
self.packets = packets
self.port = port
class DestinationSchema(FieldSetSchema):
address = fields.String()
bytes = fields.Integer()
domain = fields.String()
ip = fields.String()
mac = fields.String()
packets = fields.Integer()
port = fields.Integer()
| 24.777778 | 47 | 0.533632 |
f7025167168843760aa99b53b10d6a7a0fc912e1 | 2,035 | py | Python | .eggs/boto-2.48.0-py2.7.egg/boto/sdb/db/key.py | MQQ/git-bigstore | 95f1e37fcda7fdce80502593cec31a44c604cf8a | [
"Apache-2.0"
] | null | null | null | .eggs/boto-2.48.0-py2.7.egg/boto/sdb/db/key.py | MQQ/git-bigstore | 95f1e37fcda7fdce80502593cec31a44c604cf8a | [
"Apache-2.0"
] | null | null | null | .eggs/boto-2.48.0-py2.7.egg/boto/sdb/db/key.py | MQQ/git-bigstore | 95f1e37fcda7fdce80502593cec31a44c604cf8a | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2006,2007,2008 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class Key(object):
@classmethod
def from_path(cls, *args, **kwds):
raise NotImplementedError("Paths are not currently supported")
def __init__(self, encoded=None, obj=None):
self.name = None
if obj:
self.id = obj.id
self.kind = obj.kind()
else:
self.id = None
self.kind = None
def app(self):
raise NotImplementedError("Applications are not currently supported")
def kind(self):
return self.kind
def id(self):
return self.id
def name(self):
raise NotImplementedError("Key Names are not currently supported")
def id_or_name(self):
return self.id
def has_id_or_name(self):
return self.id is not None
def parent(self):
raise NotImplementedError("Key parents are not currently supported")
def __str__(self):
return self.id_or_name()
| 33.916667 | 77 | 0.696314 |
f7025258811b22755058146106a8a59727a8d6a1 | 14,181 | py | Python | lib/geomet/wkt.py | davasqueza/eriskco_conector_CloudSQL | 99304b5eed06e9bba3646535a82d7fc98b0838b7 | [
"Apache-2.0"
] | null | null | null | lib/geomet/wkt.py | davasqueza/eriskco_conector_CloudSQL | 99304b5eed06e9bba3646535a82d7fc98b0838b7 | [
"Apache-2.0"
] | null | null | null | lib/geomet/wkt.py | davasqueza/eriskco_conector_CloudSQL | 99304b5eed06e9bba3646535a82d7fc98b0838b7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tokenize
try:
import StringIO
except ImportError:
import io
StringIO = io
INVALID_WKT_FMT = 'Invalid WKT: `%s`'
def dump(obj, dest_file):
    """
    Dump GeoJSON-like `dict` to WKT and write it to the `dest_file`.
    :param dict obj:
        A GeoJSON-like dictionary. It must have at least the keys 'type' and
        'coordinates'.
    :param dest_file:
        Open and writable file-like object.
    :raises ValueError:
        If the geometry type of ``obj`` is unsupported.
    """
    dest_file.write(dumps(obj))
def load(source_file):
    """
    Load a GeoJSON `dict` object from a ``source_file`` containing WKT.
    The entire file is read into memory before parsing.
    :param source_file:
        Open and readable file-like object.
    :returns:
        A GeoJSON `dict` representing the geometry read from the file.
    :raises ValueError:
        If the file's WKT is malformed or uses an unsupported geometry.
    """
    return loads(source_file.read())
def dumps(obj, decimals=16):
    """
    Dump a GeoJSON-like `dict` to a WKT string.
    :param dict obj:
        GeoJSON-like mapping with at least 'type' and 'coordinates'
        (or 'geometries' for a GeometryCollection).
    :param int decimals:
        Number of digits after the decimal point used when formatting
        coordinate values.
    :raises ValueError:
        If ``obj['type']`` is not a supported geometry type.
    """
    geom_type = obj['type']
    exporter = _dumps_registry.get(geom_type)
    if exporter is None:
        _unsupported_geom_type(geom_type)
    # Build a printf-style float format such as '%.16f'.
    fmt = '%%.%df' % decimals
    return exporter(obj, fmt)
def loads(string):
    """
    Construct a GeoJSON `dict` from WKT (`string`).
    :raises ValueError:
        If the geometry keyword is unsupported or the WKT is malformed.
    """
    sio = StringIO.StringIO(string)
    # NOTE: This is not the intended purpose of `tokenize`, but it works.
    tokens = (x[1] for x in tokenize.generate_tokens(sio.readline))
    # Fold unary '-' tokens into the numeric token that follows them.
    tokens = _tokenize_wkt(tokens)
    # The first token is the geometry keyword, e.g. 'POINT'.
    geom_type = next(tokens)
    importer = _loads_registry.get(geom_type)
    if importer is None:
        _unsupported_geom_type(geom_type)
    return importer(tokens, string)
def _tokenize_wkt(tokens):
"""
Since the tokenizer treats "-" and numeric strings as separate values,
combine them and yield them as a single token. This utility encapsulates
parsing of negative numeric values from WKT can be used generically in all
parsers.
"""
negative = False
for t in tokens:
if t == '-':
negative = True
continue
else:
if negative:
yield '-%s' % t
else:
yield t
negative = False
def _unsupported_geom_type(geom_type):
raise ValueError("Unsupported geometry type '%s'" % geom_type)
def _dump_point(obj, fmt):
"""
Dump a GeoJSON-like Point object to WKT.
:param dict obj:
A GeoJSON-like `dict` representing a Point.
:param str fmt:
Format string which indicates the number of digits to display after the
decimal point when formatting coordinates.
:returns:
WKT representation of the input GeoJSON Point ``obj``.
"""
coords = obj['coordinates']
pt = 'POINT (%s)' % ' '.join(fmt % c for c in coords)
return pt
def _dump_linestring(obj, fmt):
"""
Dump a GeoJSON-like LineString object to WKT.
Input parameters and return value are the LINESTRING equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
ls = 'LINESTRING (%s)'
ls %= ', '.join(' '.join(fmt % c for c in pt) for pt in coords)
return ls
def _dump_polygon(obj, fmt):
"""
Dump a GeoJSON-like Polygon object to WKT.
Input parameters and return value are the POLYGON equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
poly = 'POLYGON (%s)'
rings = (', '.join(' '.join(fmt % c for c in pt) for pt in ring)
for ring in coords)
rings = ('(%s)' % r for r in rings)
poly %= ', '.join(rings)
return poly
def _dump_multipoint(obj, fmt):
"""
Dump a GeoJSON-like MultiPoint object to WKT.
Input parameters and return value are the MULTIPOINT equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
mp = 'MULTIPOINT (%s)'
points = (' '.join(fmt % c for c in pt) for pt in coords)
# Add parens around each point.
points = ('(%s)' % pt for pt in points)
mp %= ', '.join(points)
return mp
def _dump_multilinestring(obj, fmt):
"""
Dump a GeoJSON-like MultiLineString object to WKT.
Input parameters and return value are the MULTILINESTRING equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
mlls = 'MULTILINESTRING (%s)'
linestrs = ('(%s)' % ', '.join(' '.join(fmt % c for c in pt)
for pt in linestr) for linestr in coords)
mlls %= ', '.join(ls for ls in linestrs)
return mlls
def _dump_multipolygon(obj, fmt):
"""
Dump a GeoJSON-like MultiPolygon object to WKT.
Input parameters and return value are the MULTIPOLYGON equivalent to
:func:`_dump_point`.
"""
coords = obj['coordinates']
mp = 'MULTIPOLYGON (%s)'
polys = (
# join the polygons in the multipolygon
', '.join(
# join the rings in a polygon,
# and wrap in parens
'(%s)' % ', '.join(
# join the points in a ring,
# and wrap in parens
'(%s)' % ', '.join(
# join coordinate values of a vertex
' '.join(fmt % c for c in pt)
for pt in ring)
for ring in poly)
for poly in coords)
)
mp %= polys
return mp
def _dump_geometrycollection(obj, fmt):
    """
    Dump a GeoJSON-like GeometryCollection object to WKT.
    Input parameters and return value are the GEOMETRYCOLLECTION equivalent to
    :func:`_dump_point`.
    The WKT conversions for each geometry in the collection are delegated to
    their respective functions.
    """
    gc = 'GEOMETRYCOLLECTION (%s)'
    geoms = obj['geometries']
    geoms_wkt = []
    for geom in geoms:
        # Dispatch each member geometry to its own serializer.
        geom_type = geom['type']
        geoms_wkt.append(_dumps_registry.get(geom_type)(geom, fmt))
    # NOTE(review): members are joined with ',' while the other dump
    # helpers join with ', '; the output is still valid WKT but the
    # spacing is inconsistent.
    gc %= ','.join(geoms_wkt)
    return gc
def _load_point(tokens, string):
"""
:param tokens:
A generator of string tokens for the input WKT, begining just after the
geometry type. The geometry type is consumed before we get to here. For
example, if :func:`loads` is called with the input 'POINT(0.0 1.0)',
``tokens`` would generate the following values:
.. code-block:: python
['(', '0.0', '1.0', ')']
:param str string:
The original WKT string.
:returns:
A GeoJSON `dict` Point representation of the WKT ``string``.
"""
if not next(tokens) == '(':
raise ValueError(INVALID_WKT_FMT % string)
coords = []
try:
for t in tokens:
if t == ')':
break
else:
coords.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='Point', coordinates=coords)
def _load_linestring(tokens, string):
"""
Has similar inputs and return value to to :func:`_load_point`, except is
for handling LINESTRING geometry.
:returns:
A GeoJSON `dict` LineString representation of the WKT ``string``.
"""
if not next(tokens) == '(':
raise ValueError(INVALID_WKT_FMT % string)
# a list of lists
# each member list represents a point
coords = []
try:
pt = []
for t in tokens:
if t == ')':
coords.append(pt)
break
elif t == ',':
# it's the end of the point
coords.append(pt)
pt = []
else:
pt.append(float(t))
except tokenize.TokenError:
raise ValueError(INVALID_WKT_FMT % string)
return dict(type='LineString', coordinates=coords)
def _load_polygon(tokens, string):
    """
    Has similar inputs and return value to :func:`_load_point`, except is
    for handling POLYGON geometry.
    :returns:
        A GeoJSON `dict` Polygon representation of the WKT ``string``.
    """
    # A POLYGON opens with two parens: one for the polygon and one for
    # its first ring.
    open_parens = next(tokens), next(tokens)
    if not open_parens == ('(', '('):
        raise ValueError(INVALID_WKT_FMT % string)
    # coords contains a list of rings
    # each ring contains a list of points
    # each point is a list of 2-4 values
    coords = []
    ring = []
    on_ring = True
    try:
        pt = []
        for t in tokens:
            if t == ')' and on_ring:
                # The ring is finished
                ring.append(pt)
                coords.append(ring)
                on_ring = False
            elif t == ')' and not on_ring:
                # it's the end of the polygon
                break
            elif t == '(':
                # it's a new ring
                ring = []
                pt = []
                on_ring = True
            elif t == ',' and on_ring:
                # it's the end of a point
                ring.append(pt)
                pt = []
            elif t == ',' and not on_ring:
                # there's another ring.
                # do nothing
                pass
            else:
                pt.append(float(t))
    except tokenize.TokenError:
        raise ValueError(INVALID_WKT_FMT % string)
    return dict(type='Polygon', coordinates=coords)
def _load_multipoint(tokens, string):
    """
    Has similar inputs and return value to :func:`_load_point`, except is
    for handling MULTIPOINT geometry.
    :returns:
        A GeoJSON `dict` MultiPoint representation of the WKT ``string``.
    """
    open_paren = next(tokens)
    if not open_paren == '(':
        raise ValueError(INVALID_WKT_FMT % string)
    coords = []
    pt = []
    # Track paren nesting so both the wrapped form
    # 'MULTIPOINT ((0 0), (1 1))' and the bare form
    # 'MULTIPOINT (0 0, 1 1)' terminate on the outer ')'.
    paren_depth = 1
    try:
        for t in tokens:
            if t == '(':
                paren_depth += 1
            elif t == ')':
                paren_depth -= 1
                if paren_depth == 0:
                    break
            elif t == '':
                pass
            elif t == ',':
                # the point is done
                coords.append(pt)
                pt = []
            else:
                pt.append(float(t))
    except tokenize.TokenError:
        raise ValueError(INVALID_WKT_FMT % string)
    # Given the way we're parsing, we'll probably have to deal with the last
    # point after the loop
    if len(pt) > 0:
        coords.append(pt)
    return dict(type='MultiPoint', coordinates=coords)
def _load_multipolygon(tokens, string):
    """
    Has similar inputs and return value to :func:`_load_point`, except is
    for handling MULTIPOLYGON geometry.
    :returns:
        A GeoJSON `dict` MultiPolygon representation of the WKT ``string``.
    """
    open_paren = next(tokens)
    if not open_paren == '(':
        raise ValueError(INVALID_WKT_FMT % string)
    polygons = []
    while True:
        try:
            # Delegate each member polygon to the POLYGON parser, which
            # consumes tokens up to and including its closing paren.
            poly = _load_polygon(tokens, string)
            polygons.append(poly['coordinates'])
            t = next(tokens)
            if t == ')':
                # we're done; no more polygons.
                break
        except StopIteration:
            # If we reach this, the WKT is not valid.
            raise ValueError(INVALID_WKT_FMT % string)
    return dict(type='MultiPolygon', coordinates=polygons)
def _load_multilinestring(tokens, string):
    """
    Has similar inputs and return value to :func:`_load_point`, except is
    for handling MULTILINESTRING geometry.
    :returns:
        A GeoJSON `dict` MultiLineString representation of the WKT ``string``.
    """
    open_paren = next(tokens)
    if not open_paren == '(':
        raise ValueError(INVALID_WKT_FMT % string)
    linestrs = []
    while True:
        try:
            # Delegate each member linestring to the LINESTRING parser,
            # which consumes tokens through its closing paren.
            linestr = _load_linestring(tokens, string)
            linestrs.append(linestr['coordinates'])
            t = next(tokens)
            if t == ')':
                # we're done; no more linestrings.
                break
        except StopIteration:
            # If we reach this, the WKT is not valid.
            raise ValueError(INVALID_WKT_FMT % string)
    return dict(type='MultiLineString', coordinates=linestrs)
def _load_geometrycollection(tokens, string):
    """
    Has similar inputs and return value to :func:`_load_point`, except is
    for handling GEOMETRYCOLLECTIONs.
    Delegates parsing to the parsers for the individual geometry types.
    :returns:
        A GeoJSON `dict` GeometryCollection representation of the WKT
        ``string``.
    """
    open_paren = next(tokens)
    if not open_paren == '(':
        raise ValueError(INVALID_WKT_FMT % string)
    geoms = []
    # `geoms` is mutated in place below, so the result can be built up front.
    result = dict(type='GeometryCollection', geometries=geoms)
    while True:
        try:
            t = next(tokens)
            if t == ')':
                # End of the collection.
                break
            elif t == ',':
                # another geometry still
                continue
            else:
                # `t` is a geometry keyword such as 'POINT'; dispatch to
                # the matching parser.
                geom_type = t
                load_func = _loads_registry.get(geom_type)
                geom = load_func(tokens, string)
                geoms.append(geom)
        except StopIteration:
            raise ValueError(INVALID_WKT_FMT % string)
    return result
# Dispatch table mapping GeoJSON geometry types to their WKT serializers
# (used by :func:`dumps`).
_dumps_registry = {
    'Point': _dump_point,
    'LineString': _dump_linestring,
    'Polygon': _dump_polygon,
    'MultiPoint': _dump_multipoint,
    'MultiLineString': _dump_multilinestring,
    'MultiPolygon': _dump_multipolygon,
    'GeometryCollection': _dump_geometrycollection,
}
# Dispatch table mapping WKT geometry keywords to their parsers
# (used by :func:`loads`).
_loads_registry = {
    'POINT': _load_point,
    'LINESTRING': _load_linestring,
    'POLYGON': _load_polygon,
    'MULTIPOINT': _load_multipoint,
    'MULTILINESTRING': _load_multilinestring,
    'MULTIPOLYGON': _load_multipolygon,
    'GEOMETRYCOLLECTION': _load_geometrycollection,
}
| 28.192843 | 79 | 0.586348 |
f70271444a8a7d243bda48a6efd9534b633a6c2b | 1,169 | py | Python | server/openapi_server/controllers/text_date_annotation_controller.py | cascadianblue/phi-annotator | 0da6c102ec1068e6b15c613e2a90a78f79d15935 | [
"Apache-2.0"
] | null | null | null | server/openapi_server/controllers/text_date_annotation_controller.py | cascadianblue/phi-annotator | 0da6c102ec1068e6b15c613e2a90a78f79d15935 | [
"Apache-2.0"
] | 19 | 2021-07-29T03:14:38.000Z | 2022-03-01T06:03:14.000Z | server/openapi_server/controllers/text_date_annotation_controller.py | cascadianblue/phi-annotator | 0da6c102ec1068e6b15c613e2a90a78f79d15935 | [
"Apache-2.0"
] | null | null | null | import connexion
from openapi_server.annotator.phi_types import PhiType
from openapi_server.get_annotations import get_annotations
from openapi_server.models.error import Error # noqa: E501
from openapi_server.models.text_date_annotation_request import \
TextDateAnnotationRequest # noqa: E501
from openapi_server.models.text_date_annotation_response import \
TextDateAnnotationResponse # noqa: E501
def create_text_date_annotations():  # noqa: E501
    """Annotate dates in a clinical note

    Return the date annotations found in a clinical note # noqa: E501

    :rtype: TextDateAnnotations
    """
    res = None
    status = None
    if connexion.request.is_json:
        try:
            annotation_request = TextDateAnnotationRequest.from_dict(
                connexion.request.get_json())  # noqa: E501
            note = annotation_request.note
            annotations = get_annotations(note, phi_type=PhiType.DATE)
            res = TextDateAnnotationResponse(annotations)
            status = 200
        except Exception as error:
            status = 500
            res = Error("Internal error", status, str(error))
    else:
        # Previously a non-JSON request fell through and the handler
        # returned (None, None); report a client error instead.
        status = 400
        res = Error("Bad request", status, "Request body must be JSON")
    return res, status
| 35.424242 | 70 | 0.707442 |
f702747b82118bbd64d8fc67a01e1f638cbb45dd | 26,042 | py | Python | src/transformersX/models/cutoffbert/modeling_cutoffbert.py | stevezheng23/fewshot_nlp_pt | aaca4658aaa48a5a45dfd7d5ee7282d7f7c74be2 | [
"Apache-2.0"
] | 2 | 2021-08-06T05:43:55.000Z | 2022-03-17T22:31:21.000Z | src/transformersX/models/cutoffbert/modeling_cutoffbert.py | stevezheng23/fewshot_nlp_pt | aaca4658aaa48a5a45dfd7d5ee7282d7f7c74be2 | [
"Apache-2.0"
] | null | null | null | src/transformersX/models/cutoffbert/modeling_cutoffbert.py | stevezheng23/fewshot_nlp_pt | aaca4658aaa48a5a45dfd7d5ee7282d7f7c74be2 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch CUTOFFBERT model. """
import math
import os
import warnings
import numpy as np
from dataclasses import dataclass
from typing import Optional, Tuple
import torch
import torch.utils.checkpoint
import torch.nn.functional as F
from packaging import version
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss, KLDivLoss
from torch.distributions.beta import Beta
from ...activations import ACT2FN
from ...file_utils import (
ModelOutput,
add_code_sample_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import (
BaseModelOutputWithPastAndCrossAttentions,
BaseModelOutputWithPoolingAndCrossAttentions,
CausalLMOutputWithCrossAttentions,
MaskedLMOutput,
MultipleChoiceModelOutput,
NextSentencePredictorOutput,
QuestionAnsweringModelOutput,
SequenceClassifierOutput,
TokenClassifierOutput,
DualPassageEncoderModelOutput,
)
from ...modeling_utils import (
PreTrainedModel,
apply_chunking_to_forward,
find_pruneable_heads_and_indices,
prune_linear_layer,
)
from ...utils import logging
from .configuration_cutoffbert import CutoffBertConfig
from ..bert.modeling_bert import BertEmbeddings as CutoffBertEmbeddings
from ..bert.modeling_bert import BertEncoder as CutoffBertEncoder
from ..bert.modeling_bert import BertPooler as CutoffBertPooler
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "bert-base-uncased"
_CONFIG_FOR_DOC = "CutoffBertConfig"
_TOKENIZER_FOR_DOC = "CutoffBertTokenizer"
CUTOFFBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [
"bert-base-uncased",
"bert-large-uncased",
"bert-base-cased",
"bert-large-cased",
"bert-base-multilingual-uncased",
"bert-base-multilingual-cased",
# See all BERT models at https://huggingface.co/models?filter=bert
]
def load_tf_weights_in_cutoffbert(model, config, tf_checkpoint_path):
    """Load tf checkpoints in a pytorch model.

    Walks every variable in the TensorFlow checkpoint, maps its
    slash-separated scope name onto the PyTorch module tree and copies the
    array into the matching parameter in place.

    :param model: target PyTorch model (modified in place and returned).
    :param config: model configuration (unused here; kept for API parity).
    :param tf_checkpoint_path: path to the TensorFlow checkpoint.
    """
    try:
        import re
        import numpy as np
        import tensorflow as tf
    except ImportError:
        logger.error(
            "Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
            "https://www.tensorflow.org/install/ for installation instructions."
        )
        raise
    tf_path = os.path.abspath(tf_checkpoint_path)
    logger.info(f"Converting TensorFlow checkpoint from {tf_path}")
    # Load weights from TF model
    init_vars = tf.train.list_variables(tf_path)
    names = []
    arrays = []
    for name, shape in init_vars:
        logger.info(f"Loading TF weight {name} with shape {shape}")
        array = tf.train.load_variable(tf_path, name)
        names.append(name)
        arrays.append(array)
    for name, array in zip(names, arrays):
        name = name.split("/")
        # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v
        # which are not required for using pretrained model
        if any(
            n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"]
            for n in name
        ):
            logger.info(f"Skipping {'/'.join(name)}")
            continue
        pointer = model
        for m_name in name:
            # Scope components such as 'layer_3' are split into the
            # attribute name and an index into a module list.
            if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
                scope_names = re.split(r"_(\d+)", m_name)
            else:
                scope_names = [m_name]
            if scope_names[0] == "kernel" or scope_names[0] == "gamma":
                pointer = getattr(pointer, "weight")
            elif scope_names[0] == "output_bias" or scope_names[0] == "beta":
                pointer = getattr(pointer, "bias")
            elif scope_names[0] == "output_weights":
                pointer = getattr(pointer, "weight")
            elif scope_names[0] == "squad":
                pointer = getattr(pointer, "classifier")
            else:
                try:
                    pointer = getattr(pointer, scope_names[0])
                except AttributeError:
                    logger.info(f"Skipping {'/'.join(name)}")
                    continue
            if len(scope_names) >= 2:
                num = int(scope_names[1])
                pointer = pointer[num]
        if m_name[-11:] == "_embeddings":
            pointer = getattr(pointer, "weight")
        elif m_name == "kernel":
            # TF stores dense kernels transposed relative to torch.nn.Linear.
            array = np.transpose(array)
        try:
            assert (
                pointer.shape == array.shape
            ), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched"
        except AssertionError as e:
            e.args += (pointer.shape, array.shape)
            raise
        logger.info(f"Initialize PyTorch weight {name}")
        pointer.data = torch.from_numpy(array)
    return model
class CutoffBertPreTrainedModel(PreTrainedModel):
    """
    An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
    models.
    """
    config_class = CutoffBertConfig
    load_tf_weights = load_tf_weights_in_cutoffbert
    base_model_prefix = "bert"
    _keys_to_ignore_on_load_missing = [r"position_ids"]
    def _init_weights(self, module):
        """Initialize the weights"""
        if isinstance(module, nn.Linear):
            # Slightly different from the TF version which uses truncated_normal for initialization
            # cf https://github.com/pytorch/pytorch/pull/5617
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.bias is not None:
                module.bias.data.zero_()
        elif isinstance(module, nn.Embedding):
            module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
            if module.padding_idx is not None:
                # Zero the embedding row reserved for padding tokens.
                module.weight.data[module.padding_idx].zero_()
        elif isinstance(module, nn.LayerNorm):
            # LayerNorm starts as the identity transform (bias 0, scale 1).
            module.bias.data.zero_()
            module.weight.data.fill_(1.0)
CUTOFFBERT_START_DOCSTRING = r"""
This model inherits from :class:`~transformers.PreTrainedModel`. Check the superclass documentation for the generic
methods the library implements for all its model (such as downloading or saving, resizing the input embeddings,
pruning heads etc.)
This model is also a PyTorch `torch.nn.Module <https://pytorch.org/docs/stable/nn.html#torch.nn.Module>`__
subclass. Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to
general usage and behavior.
Parameters:
config (:class:`~transformers.BertConfig`): Model configuration class with all the parameters of the model.
Initializing with a config file does not load the weights associated with the model, only the
configuration. Check out the :meth:`~transformers.PreTrainedModel.from_pretrained` method to load the model
weights.
"""
CUTOFFBERT_INPUTS_DOCSTRING = r"""
Args:
input_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`):
Indices of input sequence tokens in the vocabulary.
Indices can be obtained using :class:`~transformers.BertTokenizer`. See
:meth:`transformers.PreTrainedTokenizer.encode` and :meth:`transformers.PreTrainedTokenizer.__call__` for
details.
`What are input IDs? <../glossary.html#input-ids>`__
attention_mask (:obj:`torch.FloatTensor` of shape :obj:`({0})`, `optional`):
Mask to avoid performing attention on padding token indices. Mask values selected in ``[0, 1]``:
- 1 for tokens that are **not masked**,
- 0 for tokens that are **masked**.
`What are attention masks? <../glossary.html#attention-mask>`__
token_type_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
Segment token indices to indicate first and second portions of the inputs. Indices are selected in ``[0,
1]``:
- 0 corresponds to a `sentence A` token,
- 1 corresponds to a `sentence B` token.
`What are token type IDs? <../glossary.html#token-type-ids>`_
position_ids (:obj:`torch.LongTensor` of shape :obj:`({0})`, `optional`):
Indices of positions of each input sequence tokens in the position embeddings. Selected in the range ``[0,
config.max_position_embeddings - 1]``.
`What are position IDs? <../glossary.html#position-ids>`_
head_mask (:obj:`torch.FloatTensor` of shape :obj:`(num_heads,)` or :obj:`(num_layers, num_heads)`, `optional`):
Mask to nullify selected heads of the self-attention modules. Mask values selected in ``[0, 1]``:
- 1 indicates the head is **not masked**,
- 0 indicates the head is **masked**.
inputs_embeds (:obj:`torch.FloatTensor` of shape :obj:`({0}, hidden_size)`, `optional`):
Optionally, instead of passing :obj:`input_ids` you can choose to directly pass an embedded representation.
This is useful if you want more control over how to convert :obj:`input_ids` indices into associated
vectors than the model's internal embedding lookup matrix.
output_attentions (:obj:`bool`, `optional`):
Whether or not to return the attentions tensors of all attention layers. See ``attentions`` under returned
tensors for more detail.
output_hidden_states (:obj:`bool`, `optional`):
Whether or not to return the hidden states of all layers. See ``hidden_states`` under returned tensors for
more detail.
return_dict (:obj:`bool`, `optional`):
Whether or not to return a :class:`~transformers.file_utils.ModelOutput` instead of a plain tuple.
"""
@add_start_docstrings(
    "The bare CutoffBert Model transformer outputting raw hidden-states without any specific head on top.",
    CUTOFFBERT_START_DOCSTRING,
)
class CutoffBertModel(CutoffBertPreTrainedModel):
    """
    The model can behave as an encoder (with only self-attention) as well as a decoder, in which case a layer of
    cross-attention is added between the self-attention layers, following the architecture described in `Attention is
    all you need <https://arxiv.org/abs/1706.03762>`__ by Ashish Vaswani, Noam Shazeer, Niki Parmar, Jakob Uszkoreit,
    Llion Jones, Aidan N. Gomez, Lukasz Kaiser and Illia Polosukhin.
    To behave as an decoder the model needs to be initialized with the :obj:`is_decoder` argument of the configuration
    set to :obj:`True`. To be used in a Seq2Seq model, the model needs to initialized with both :obj:`is_decoder`
    argument and :obj:`add_cross_attention` set to :obj:`True`; an :obj:`encoder_hidden_states` is then expected as an
    input to the forward pass.
    """
    def __init__(self, config, add_pooling_layer=True):
        super().__init__(config)
        self.config = config
        # Standard BERT pipeline: embeddings -> transformer encoder -> pooler.
        self.embeddings = CutoffBertEmbeddings(config)
        self.encoder = CutoffBertEncoder(config)
        # The pooler is optional so heads that only need token-level
        # states can skip it.
        self.pooler = CutoffBertPooler(config) if add_pooling_layer else None
        self.init_weights()
    def get_input_embeddings(self):
        # Expose the word-embedding table (used for resizing/weight tying).
        return self.embeddings.word_embeddings
    def set_input_embeddings(self, value):
        self.embeddings.word_embeddings = value
    def _prune_heads(self, heads_to_prune):
        """
        Prunes heads of the model. heads_to_prune: dict of {layer_num: list of heads to prune in this layer} See base
        class PreTrainedModel
        """
        for layer, heads in heads_to_prune.items():
            self.encoder.layer[layer].attention.prune_heads(heads)
    @add_start_docstrings_to_model_forward(CUTOFFBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
    @add_code_sample_docstrings(
        tokenizer_class=_TOKENIZER_FOR_DOC,
        checkpoint=_CHECKPOINT_FOR_DOC,
        output_type=BaseModelOutputWithPoolingAndCrossAttentions,
        config_class=_CONFIG_FOR_DOC,
    )
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        token_type_ids=None,
        position_ids=None,
        head_mask=None,
        inputs_embeds=None,
        encoder_hidden_states=None,
        encoder_attention_mask=None,
        past_key_values=None,
        use_cache=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
    ):
        r"""
        encoder_hidden_states  (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length, hidden_size)`, `optional`):
            Sequence of hidden-states at the output of the last layer of the encoder. Used in the cross-attention if
            the model is configured as a decoder.
        encoder_attention_mask (:obj:`torch.FloatTensor` of shape :obj:`(batch_size, sequence_length)`, `optional`):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used in
            the cross-attention if the model is configured as a decoder. Mask values selected in ``[0, 1]``:
            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
        past_key_values (:obj:`tuple(tuple(torch.FloatTensor))` of length :obj:`config.n_layers` with each tuple having 4 tensors of shape :obj:`(batch_size, num_heads, sequence_length - 1, embed_size_per_head)`):
            Contains precomputed key and value hidden states of the attention blocks. Can be used to speed up decoding.
            If :obj:`past_key_values` are used, the user can optionally input only the last :obj:`decoder_input_ids`
            (those that don't have their past key value states given to this model) of shape :obj:`(batch_size, 1)`
            instead of all :obj:`decoder_input_ids` of shape :obj:`(batch_size, sequence_length)`.
        use_cache (:obj:`bool`, `optional`):
            If set to :obj:`True`, :obj:`past_key_values` key value states are returned and can be used to speed up
            decoding (see :obj:`past_key_values`).
        """
        # Resolve output/caching switches from the config when not given.
        output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions
        output_hidden_states = (
            output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states
        )
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        if self.config.is_decoder:
            use_cache = use_cache if use_cache is not None else self.config.use_cache
        else:
            # Caching only makes sense for auto-regressive (decoder) use.
            use_cache = False
        if input_ids is not None and inputs_embeds is not None:
            raise ValueError("You cannot specify both input_ids and inputs_embeds at the same time")
        elif input_ids is not None:
            input_shape = input_ids.size()
            batch_size, seq_length = input_shape
        elif inputs_embeds is not None:
            input_shape = inputs_embeds.size()[:-1]
            batch_size, seq_length = input_shape
        else:
            raise ValueError("You have to specify either input_ids or inputs_embeds")
        device = input_ids.device if input_ids is not None else inputs_embeds.device
        # past_key_values_length
        past_key_values_length = past_key_values[0][0].shape[2] if past_key_values is not None else 0
        if attention_mask is None:
            # Default: attend to every position (no padding mask supplied).
            attention_mask = torch.ones(((batch_size, seq_length + past_key_values_length)), device=device)
        if token_type_ids is None:
            if hasattr(self.embeddings, "token_type_ids"):
                # Reuse the registered buffer, expanded to the batch size.
                buffered_token_type_ids = self.embeddings.token_type_ids[:, :seq_length]
                buffered_token_type_ids_expanded = buffered_token_type_ids.expand(batch_size, seq_length)
                token_type_ids = buffered_token_type_ids_expanded
            else:
                token_type_ids = torch.zeros(input_shape, dtype=torch.long, device=device)
        # We can provide a self-attention mask of dimensions [batch_size, from_seq_length, to_seq_length]
        # ourselves in which case we just need to make it broadcastable to all heads.
        extended_attention_mask: torch.Tensor = self.get_extended_attention_mask(attention_mask, input_shape, device)
        # If a 2D or 3D attention mask is provided for the cross-attention
        # we need to make broadcastable to [batch_size, num_heads, seq_length, seq_length]
        if self.config.is_decoder and encoder_hidden_states is not None:
            encoder_batch_size, encoder_sequence_length, _ = encoder_hidden_states.size()
            encoder_hidden_shape = (encoder_batch_size, encoder_sequence_length)
            if encoder_attention_mask is None:
                encoder_attention_mask = torch.ones(encoder_hidden_shape, device=device)
            encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)
        else:
            encoder_extended_attention_mask = None
        # Prepare head mask if needed
        # 1.0 in head_mask indicate we keep the head
        # attention_probs has shape bsz x n_heads x N x N
        # input head_mask has shape [num_heads] or [num_hidden_layers x num_heads]
        # and head_mask is converted to shape [num_hidden_layers x batch x num_heads x seq_length x seq_length]
        head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)
        embedding_output = self.embeddings(
            input_ids=input_ids,
            position_ids=position_ids,
            token_type_ids=token_type_ids,
            inputs_embeds=inputs_embeds,
            past_key_values_length=past_key_values_length,
        )
        encoder_outputs = self.encoder(
            embedding_output,
            attention_mask=extended_attention_mask,
            head_mask=head_mask,
            encoder_hidden_states=encoder_hidden_states,
            encoder_attention_mask=encoder_extended_attention_mask,
            past_key_values=past_key_values,
            use_cache=use_cache,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        sequence_output = encoder_outputs[0]
        pooled_output = self.pooler(sequence_output) if self.pooler is not None else None
        if not return_dict:
            # Tuple output: (sequence_output, pooled_output, *extras).
            return (sequence_output, pooled_output) + encoder_outputs[1:]
        return BaseModelOutputWithPoolingAndCrossAttentions(
            last_hidden_state=sequence_output,
            pooler_output=pooled_output,
            past_key_values=encoder_outputs.past_key_values,
            hidden_states=encoder_outputs.hidden_states,
            attentions=encoder_outputs.attentions,
            cross_attentions=encoder_outputs.cross_attentions,
        )
@add_start_docstrings(
"""
CutoffBert Model transformer with a sequence classification head on top (a linear layer on top of the pooled
output) + Cut-off data augmentation support.
""",
CUTOFFBERT_START_DOCSTRING,
)
class CutoffBertForSequenceClassification(CutoffBertPreTrainedModel):
def __init__(self, config):
super().__init__(config)
self.num_labels = config.num_labels
self.cls_token_id = config.cls_token_id
self.sep_token_id = config.sep_token_id
self.mask_token_id = config.mask_token_id
self.masking_prob = config.cutoff_masking_prob
self.temperature = config.cutoff_temperature
self.mask_loss_wgt = config.cutoff_mask_loss_wgt
self.js_loss_wgt = config.cutoff_js_loss_wgt
self.config = config
self.bert = CutoffBertModel(config)
classifier_dropout = (
config.classifier_dropout if config.classifier_dropout is not None else config.hidden_dropout_prob
)
self.dropout = nn.Dropout(classifier_dropout)
self.classifier = nn.Linear(config.hidden_size, config.num_labels)
self.init_weights()
def _apply_cutoff(self, inputs):
masked_inputs = inputs.clone()
valid_masking_indices = (inputs != self.cls_token_id) & (inputs != self.sep_token_id)
random_masking_indices = torch.bernoulli(torch.full(inputs.shape, self.masking_prob, device=inputs.device)).bool()
masking_indices = random_masking_indices & valid_masking_indices
masked_inputs[masking_indices] = self.mask_token_id
return masked_inputs
@add_start_docstrings_to_model_forward(CUTOFFBERT_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
@add_code_sample_docstrings(
tokenizer_class=_TOKENIZER_FOR_DOC,
checkpoint=_CHECKPOINT_FOR_DOC,
output_type=SequenceClassifierOutput,
config_class=_CONFIG_FOR_DOC,
)
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
labels=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
r"""
labels (:obj:`torch.LongTensor` of shape :obj:`(batch_size,)`, `optional`):
Labels for computing the sequence classification/regression loss. Indices should be in :obj:`[0, ...,
config.num_labels - 1]`. If :obj:`config.num_labels == 1` a regression loss is computed (Mean-Square loss),
If :obj:`config.num_labels > 1` a classification loss is computed (Cross-Entropy).
"""
return_dict = return_dict if return_dict is not None else self.config.use_return_dict
if labels is None:
outputs = self.bert(
input_ids,
attention_mask=attention_mask,
token_type_ids=token_type_ids,
position_ids=position_ids,
head_mask=head_mask,
inputs_embeds=inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
pooled_output = self.dropout(outputs[1])
logits = self.classifier(pooled_output)
if not return_dict:
return (logits,) + outputs[2:]
return SequenceClassifierOutput(
logits=logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
)
b, l = input_ids.size()
masked_input_ids = self._apply_cutoff(input_ids.clone())
flatten_input_ids = torch.stack((input_ids, masked_input_ids), dim=1).reshape(-1, l)
flatten_attention_mask = attention_mask.unsqueeze(1).expand(-1, 2, -1).reshape(-1, l) if attention_mask is not None else None
flatten_token_type_ids = token_type_ids.unsqueeze(1).expand(-1, 2, -1).reshape(-1, l) if token_type_ids is not None else None
flatten_position_ids = position_ids.unsqueeze(1).expand(-1, 2, -1).reshape(-1, l) if position_ids is not None else None
flatten_inputs_embeds = inputs_embeds.unsqueeze(1).expand(-1, 2, -1, -1).reshape(-1, l, self.config.hidden_size) if inputs_embeds is not None else None
flatten_outputs = self.bert(
flatten_input_ids,
attention_mask=flatten_attention_mask,
token_type_ids=flatten_token_type_ids,
position_ids=flatten_position_ids,
head_mask=head_mask,
inputs_embeds=flatten_inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
flatten_pooled_output = self.dropout(flatten_outputs[1])
flatten_logits = self.classifier(flatten_pooled_output)
logits, masked_logits = flatten_logits.reshape(b, 2, self.config.num_labels).chunk(2, dim=1)
logits, masked_logits = logits.squeeze(dim=1).contiguous(), masked_logits.squeeze(dim=1).contiguous()
loss_fct = CrossEntropyLoss()
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
if self.mask_loss_wgt is not None and self.mask_loss_wgt > 0.0:
mask_loss = loss_fct(masked_logits.view(-1, self.num_labels), labels.view(-1))
loss += mask_loss * self.mask_loss_wgt
if self.js_loss_wgt is not None and self.js_loss_wgt > 0.0:
kl_loss_fct = KLDivLoss(reduction="batchmean")
src_logits, trg_logits = logits, masked_logits
mean_logits = (src_logits + trg_logits) * 0.5
src_loss = kl_loss_fct(
F.log_softmax(src_logits / self.temperature, dim=-1),
F.softmax(mean_logits / self.temperature, dim=-1)
) * (self.temperature ** 2)
trg_loss = kl_loss_fct(
F.log_softmax(trg_logits / self.temperature, dim=-1),
F.softmax(mean_logits / self.temperature, dim=-1)
) * (self.temperature ** 2)
js_loss = (src_loss + trg_loss) * 0.5
loss += js_loss * self.js_loss_wgt
if not return_dict:
return (loss, logits)
return SequenceClassifierOutput(
loss=loss,
logits=logits,
)
| 44.9 | 213 | 0.672183 |
f7028f059677a83cf6bbecfd7df23260f585b48f | 785 | py | Python | sdk/media/azure-mgmt-media/azure/mgmt/media/aio/__init__.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | sdk/media/azure-mgmt-media/azure/mgmt/media/aio/__init__.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/media/azure-mgmt-media/azure/mgmt/media/aio/__init__.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._azure_media_services import AzureMediaServices
__all__ = ['AzureMediaServices']
# `._patch.py` is used for handwritten extensions to the generated code
# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md
from ._patch import patch_sdk
patch_sdk()
| 49.0625 | 114 | 0.636943 |
f702939c992f164058c986345c72844ea2c3df0a | 2,852 | py | Python | tests/data_tests/writer_tests/json_writer_test.py | alueschow/polymatheia | e46a38b3686139bbab3a2fcfaa914d4ca938654e | [
"MIT"
] | 3 | 2020-09-15T15:15:34.000Z | 2021-06-15T10:35:07.000Z | tests/data_tests/writer_tests/json_writer_test.py | alueschow/polymatheia | e46a38b3686139bbab3a2fcfaa914d4ca938654e | [
"MIT"
] | 7 | 2020-09-03T12:53:34.000Z | 2020-10-05T09:14:29.000Z | tests/data_tests/writer_tests/json_writer_test.py | alueschow/polymatheia | e46a38b3686139bbab3a2fcfaa914d4ca938654e | [
"MIT"
] | 2 | 2020-10-13T09:12:21.000Z | 2021-04-15T14:19:06.000Z | """Tests for the :mod:`~polymatheia.data.writer` package."""
import json
import os
from shutil import rmtree
from polymatheia.data import NavigableDict
from polymatheia.data.writer import JSONWriter
# Fixture records shared by the writer tests: three documents with nested
# name structures.  Document 1 carries a key containing a space
# ("special tags"); document 2 has a list-valued first name mixing a string
# and a nested dict.
DOCUMENTS = [NavigableDict(r) for r in [
    {
        'id': '1',
        'name': {
            'first': 'A',
            'last': 'Person'
        },
        'age': 32,
        'special tags': 'The first'
    },
    {
        'id': '2',
        'name': {
            'first': ['Another', {'abbr': 'Nameless'}],
            'last': 'Parrot'
        },
        'age': 23,
    },
    {
        'id': '3',
        'name': {
            'first': 'The',
            'last': 'Last'
        },
        'age': 65,
    },
]]
def test_local_json_writing():
    """Test writing to the local filesystem.

    Writes ``DOCUMENTS`` via :class:`JSONWriter`, then walks the output tree
    and checks that exactly one JSON file exists per document and that each
    file round-trips the expected fields.
    """
    rmtree('tmp/json_writer_test', ignore_errors=True)
    writer = JSONWriter('tmp/json_writer_test', 'id')
    writer.write(DOCUMENTS)
    count = 0
    for basepath, _, filenames in os.walk('tmp/json_writer_test'):
        for filename in filenames:
            if filename.endswith('.json'):
                # Count each JSON file once.  (The previous code added
                # len(filenames) per file, over-counting any directory that
                # holds more than one file.)
                count = count + 1
                with open(os.path.join(basepath, filename)) as in_f:
                    doc = json.load(in_f)
                    assert 'id' in doc
                    assert 'name' in doc
                    if doc['id'] == '2':
                        # Document 2 has a two-element list as first name.
                        assert 'first' in doc['name']
                        assert len(doc['name']['first']) == 2
                    else:
                        assert 'first' in doc['name']
                        assert 'last' in doc['name']
                    assert 'age' in doc
                    if doc['id'] == '1':
                        assert 'special tags' in doc
    assert count == 3
def test_local_json_writing_pre_split_id_path():
    """Test writing to the local filesystem with a pre-split id path.

    Identical to :func:`test_local_json_writing` except the id path is passed
    as a list (``['id']``) rather than a dotted string.
    """
    rmtree('tmp/json_writer_test', ignore_errors=True)
    writer = JSONWriter('tmp/json_writer_test', ['id'])
    writer.write(DOCUMENTS)
    count = 0
    for basepath, _, filenames in os.walk('tmp/json_writer_test'):
        for filename in filenames:
            if filename.endswith('.json'):
                # Count each JSON file once.  (The previous code added
                # len(filenames) per file, over-counting any directory that
                # holds more than one file.)
                count = count + 1
                with open(os.path.join(basepath, filename)) as in_f:
                    doc = json.load(in_f)
                    assert 'id' in doc
                    assert 'name' in doc
                    if doc['id'] == '2':
                        # Document 2 has a two-element list as first name.
                        assert 'first' in doc['name']
                        assert len(doc['name']['first']) == 2
                    else:
                        assert 'first' in doc['name']
                        assert 'last' in doc['name']
                    assert 'age' in doc
                    if doc['id'] == '1':
                        assert 'special tags' in doc
    assert count == 3
f702a1da97b07980eb0080e9eb0cd536cdc7c88d | 28,785 | py | Python | python/ray/tests/test_runtime_env.py | RaphaelCS/ray | 5f4d9085d2452186bff563fb2856e643c4c82095 | [
"Apache-2.0"
] | 1 | 2022-02-24T02:38:04.000Z | 2022-02-24T02:38:04.000Z | python/ray/tests/test_runtime_env.py | swag1ong/ray | fdbeef604692aa308973988b32405ec0d70f9f40 | [
"Apache-2.0"
] | null | null | null | python/ray/tests/test_runtime_env.py | swag1ong/ray | fdbeef604692aa308973988b32405ec0d70f9f40 | [
"Apache-2.0"
] | null | null | null | import os
import pytest
import sys
import random
import tempfile
import requests
from pathlib import Path
import ray
from ray.test_utils import (run_string_as_driver,
run_string_as_driver_nonblocking)
from ray._private.utils import (get_wheel_filename, get_master_wheel_url,
get_release_wheel_url)
import ray.experimental.internal_kv as kv
from time import sleep
driver_script = """
from time import sleep
import sys
import logging
sys.path.insert(0, "{working_dir}")
import ray
import ray.util
import os
try:
import test_module
except:
pass
try:
job_config = ray.job_config.JobConfig(
runtime_env={runtime_env}
)
if not job_config.runtime_env:
job_config=None
if os.environ.get("USE_RAY_CLIENT"):
ray.client("{address}").env({runtime_env}).namespace("").connect()
else:
ray.init(address="{address}",
job_config=job_config,
logging_level=logging.DEBUG,
namespace=""
)
except ValueError:
print("ValueError")
sys.exit(0)
except TypeError:
print("TypeError")
sys.exit(0)
except:
print("ERROR")
sys.exit(0)
if os.environ.get("EXIT_AFTER_INIT"):
sys.exit(0)
@ray.remote
def run_test():
return test_module.one()
@ray.remote
def check_file(name):
try:
with open(name) as f:
return f.read()
except:
return "FAILED"
@ray.remote
class TestActor(object):
@ray.method(num_returns=1)
def one(self):
return test_module.one()
{execute_statement}
if os.environ.get("USE_RAY_CLIENT"):
ray.util.disconnect()
else:
ray.shutdown()
sleep(10)
"""
def create_file(p):
    """Create file *p* with content ``Test``, creating missing parents.

    ``parents=True``/``exist_ok=True`` makes the helper safe for arbitrarily
    deep paths and for repeated calls into the same directory, matching the
    behaviour of the local helper defined inside ``test_exclusion_2`` (the
    previous plain ``mkdir()`` failed when more than one parent level was
    missing).
    """
    p.parent.mkdir(parents=True, exist_ok=True)
    with p.open("w") as f:
        f.write("Test")
@pytest.fixture(scope="function")
def working_dir():
with tempfile.TemporaryDirectory() as tmp_dir:
path = Path(tmp_dir)
module_path = path / "test_module"
module_path.mkdir(parents=True)
init_file = module_path / "__init__.py"
test_file = module_path / "test.py"
with test_file.open(mode="w") as f:
f.write("""
def one():
return 1
""")
with init_file.open(mode="w") as f:
f.write("""
from test_module.test import one
""")
old_dir = os.getcwd()
os.chdir(tmp_dir)
yield tmp_dir
os.chdir(old_dir)
def start_client_server(cluster, client_mode):
    """Return ``(address, extra_env, PKG_DIR)`` for driver scripts.

    In client mode a Ray Client server is started on port 10003 and the
    returned env dict tells the driver template to connect through it;
    otherwise the raw cluster address is returned with an empty env.
    """
    from ray._private.runtime_env import PKG_DIR
    if client_mode:
        node = ray.worker._global_node
        node._ray_params.ray_client_server_port = "10003"
        node.start_ray_client_server()
        return ("localhost:10003", {"USE_RAY_CLIENT": "1"}, PKG_DIR)
    return (cluster.address, {}, PKG_DIR)
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_travel():
    """Randomly build a directory tree with excluded subtrees and check that
    ``_dir_travel`` visits exactly the non-excluded files and directories."""
    import uuid
    with tempfile.TemporaryDirectory() as tmp_dir:
        dir_paths = set()
        file_paths = set()
        item_num = 0
        excludes = []
        root = Path(tmp_dir) / "test"
        def construct(path, excluded=False, depth=0):
            # Recursively create a random tree; record expected (non-excluded)
            # dirs/files and collect exclude patterns relative to `root`.
            nonlocal item_num
            path.mkdir(parents=True)
            if not excluded:
                dir_paths.add(str(path))
            if depth > 8:
                return
            if item_num > 500:
                return
            dir_num = random.randint(0, 10)
            file_num = random.randint(0, 10)
            for _ in range(dir_num):
                uid = str(uuid.uuid4()).split("-")[0]
                dir_path = path / uid
                exclud_sub = random.randint(0, 5) == 0
                if not excluded and exclud_sub:
                    excludes.append(str(dir_path.relative_to(root)))
                if not excluded:
                    construct(dir_path, exclud_sub or excluded, depth + 1)
                item_num += 1
            if item_num > 1000:
                return
            for _ in range(file_num):
                uid = str(uuid.uuid4()).split("-")[0]
                with (path / uid).open("w") as f:
                    v = random.randint(0, 1000)
                    f.write(str(v))
                if not excluded:
                    if random.randint(0, 5) == 0:
                        # Roughly 1 in 6 files is excluded by pattern.
                        excludes.append(
                            str((path / uid).relative_to(root)))
                    else:
                        file_paths.add((str(path / uid), str(v)))
                item_num += 1
        construct(root)
        exclude_spec = ray._private.runtime_env._get_excludes(root, excludes)
        visited_dir_paths = set()
        visited_file_paths = set()
        def handler(path):
            # Record everything _dir_travel hands us, with file contents.
            if path.is_dir():
                visited_dir_paths.add(str(path))
            else:
                with open(path) as f:
                    visited_file_paths.add((str(path), f.read()))
        ray._private.runtime_env._dir_travel(root, [exclude_spec], handler)
        assert file_paths == visited_file_paths
        assert dir_paths == visited_dir_paths
"""
The following test cases are related with runtime env. It following these steps
1) Creating a temporary dir with fixture working_dir
2) Using a template named driver_script defined globally
3) Overwrite runtime_env and execute_statement in the template
4) Execute it as a separate driver and return the result
"""
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_empty_working_dir(ray_start_cluster_head, client_mode):
    """An empty working_dir / py_modules dir must not crash driver init."""
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    env["EXIT_AFTER_INIT"] = "1"
    with tempfile.TemporaryDirectory() as working_dir:
        runtime_env = f"""{{
            "working_dir": r"{working_dir}",
            "py_modules": [r"{working_dir}"]
        }}"""
        # Execute the following cmd in driver with runtime_env
        execute_statement = "sys.exit(0)"
        script = driver_script.format(**locals())
        out = run_string_as_driver(script, env)
        assert out != "ERROR"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_invalid_working_dir(ray_start_cluster_head, working_dir, client_mode):
    """Wrong types raise TypeError and nonexistent paths raise ValueError,
    both of which the driver template prints as its last output line."""
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    env["EXIT_AFTER_INIT"] = "1"
    runtime_env = "{ 'working_dir': 10 }"
    # Execute the following cmd in driver with runtime_env
    execute_statement = ""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env).strip().split()[-1]
    assert out == "TypeError"
    runtime_env = "{ 'py_modules': [10] }"
    # Execute the following cmd in driver with runtime_env
    execute_statement = ""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env).strip().split()[-1]
    assert out == "TypeError"
    runtime_env = f"{{ 'working_dir': os.path.join(r'{working_dir}', 'na') }}"
    # Execute the following cmd in driver with runtime_env
    execute_statement = ""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env).strip().split()[-1]
    assert out == "ValueError"
    runtime_env = f"{{ 'py_modules': [os.path.join(r'{working_dir}', 'na')] }}"
    # Execute the following cmd in driver with runtime_env
    execute_statement = ""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env).strip().split()[-1]
    assert out == "ValueError"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_single_node(ray_start_cluster_head, working_dir, client_mode):
    """working_dir tasks run on a single node; the package dir is cleaned up
    (one leftover entry) and no GCS-pinned package remains."""
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    # Setup runtime env here
    runtime_env = f"""{{  "working_dir": "{working_dir}" }}"""
    # Execute the following cmd in driver with runtime_env
    execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    assert len(list(Path(PKG_DIR).iterdir())) == 1
    assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node(two_node_cluster, working_dir, client_mode):
    """working_dir is distributed to a second node so remote tasks can import
    test_module there."""
    cluster, _ = two_node_cluster
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    # Testing runtime env with working_dir
    runtime_env = f"""{{  "working_dir": "{working_dir}" }}"""
    # Execute the following cmd in driver with runtime_env
    execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    assert len(list(Path(PKG_DIR).iterdir())) == 1
    assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node_module(two_node_cluster, working_dir, client_mode):
    """Same as test_two_node but ships the module via py_modules instead of
    working_dir."""
    cluster, _ = two_node_cluster
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    # test runtime_env iwth py_modules
    runtime_env = """{  "py_modules": [test_module.__path__[0]] }"""
    # Execute the following cmd in driver with runtime_env
    execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    assert len(list(Path(PKG_DIR).iterdir())) == 1
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node_local_file(two_node_cluster, working_dir, client_mode):
    """A plain data file inside working_dir is readable from remote tasks on
    both nodes."""
    with open(os.path.join(working_dir, "test_file"), "w") as f:
        f.write("1")
    cluster, _ = two_node_cluster
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    # test runtime_env iwth working_dir
    runtime_env = f"""{{  "working_dir": "{working_dir}" }}"""
    # Execute the following cmd in driver with runtime_env
    execute_statement = """
vals = ray.get([check_file.remote('test_file')] * 1000)
print(sum([int(v) for v in vals]))
"""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    assert len(list(Path(PKG_DIR).iterdir())) == 1
    assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_exclusion(ray_start_cluster_head, working_dir, client_mode):
    """Entries listed under runtime_env["excludes"] (relative paths, whole
    directories, and individual files inside a directory) are not shipped
    with the working_dir; excluded files read back as FAILED.
    """
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    working_path = Path(working_dir)
    create_file(working_path / "tmp_dir" / "test_1")
    create_file(working_path / "tmp_dir" / "test_2")
    create_file(working_path / "tmp_dir" / "test_3")
    create_file(working_path / "tmp_dir" / "sub_dir" / "test_1")
    create_file(working_path / "tmp_dir" / "sub_dir" / "test_2")
    create_file(working_path / "test1")
    create_file(working_path / "test2")
    create_file(working_path / "test3")
    # (Removed an unused local `tmp_dir_test_3`; it was never referenced by
    # the driver template.)
    runtime_env = f"""{{
        "working_dir": r"{working_dir}",
    }}"""
    execute_statement = """
vals = ray.get([
    check_file.remote('test1'),
    check_file.remote('test2'),
    check_file.remote('test3'),
    check_file.remote(os.path.join('tmp_dir', 'test_1')),
    check_file.remote(os.path.join('tmp_dir', 'test_2')),
    check_file.remote(os.path.join('tmp_dir', 'test_3')),
    check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_1')),
    check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_2')),
])
print(','.join(vals))
"""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    # Test it works before
    assert out.strip().split("\n")[-1] == \
        "Test,Test,Test,Test,Test,Test,Test,Test"
    runtime_env = f"""{{
        "working_dir": r"{working_dir}",
        "excludes": [
            # exclude by relative path
            r"test2",
            # exclude by dir
            r"{str(Path("tmp_dir") / "sub_dir")}",
            # exclude part of the dir
            r"{str(Path("tmp_dir") / "test_1")}",
            # exclude part of the dir
            r"{str(Path("tmp_dir") / "test_2")}",
        ]
    }}"""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split("\n")[-1] == \
        "Test,FAILED,Test,FAILED,FAILED,Test,FAILED,FAILED"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_exclusion_2(ray_start_cluster_head, working_dir, client_mode):
    """A ``.gitignore`` in the working_dir is honoured: ignored patterns
    (including negations like ``!/tmp_dir/sub_dir/test_1`` and directory
    patterns like ``cache/``) control which files ship with the package.
    """
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    working_path = Path(working_dir)
    def create_file(p):
        # Local variant with parents=True; needed for the depth-2 paths
        # below (e.g. another_dir/cache/test_1).
        if not p.parent.exists():
            p.parent.mkdir(parents=True)
        with p.open("w") as f:
            f.write("Test")
    create_file(working_path / "tmp_dir" / "test_1")
    create_file(working_path / "tmp_dir" / "test_2")
    create_file(working_path / "tmp_dir" / "test_3")
    create_file(working_path / "tmp_dir" / "sub_dir" / "test_1")
    create_file(working_path / "tmp_dir" / "sub_dir" / "test_2")
    create_file(working_path / "test1")
    create_file(working_path / "test2")
    create_file(working_path / "test3")
    create_file(working_path / "cache" / "test_1")
    create_file(working_path / "tmp_dir" / "cache" / "test_1")
    create_file(working_path / "another_dir" / "cache" / "test_1")
    # (Removed unused locals `tmp_dir_test_3` and `t`; neither was referenced
    # by the driver template or the assertions.)
    runtime_env = f"""{{
        "working_dir": r"{working_dir}",
    }}"""
    execute_statement = """
vals = ray.get([
    check_file.remote('test1'),
    check_file.remote('test2'),
    check_file.remote('test3'),
    check_file.remote(os.path.join('tmp_dir', 'test_1')),
    check_file.remote(os.path.join('tmp_dir', 'test_2')),
    check_file.remote(os.path.join('tmp_dir', 'test_3')),
    check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_1')),
    check_file.remote(os.path.join('tmp_dir', 'sub_dir', 'test_2')),
    check_file.remote(os.path.join("cache", "test_1")),
    check_file.remote(os.path.join("tmp_dir", "cache", "test_1")),
    check_file.remote(os.path.join("another_dir", "cache", "test_1")),
])
print(','.join(vals))
"""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    # Test it works before
    assert out.strip().split("\n")[-1] == \
        "Test,Test,Test,Test,Test,Test,Test,Test,Test,Test,Test"
    with open(f"{working_dir}/.gitignore", "w") as f:
        f.write("""
# Comment
test_[12]
/test1
!/tmp_dir/sub_dir/test_1
cache/
""")
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split("\n")[-1] == \
        "FAILED,Test,Test,FAILED,FAILED,Test,Test,FAILED,FAILED,FAILED,FAILED"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_runtime_env_getter(ray_start_cluster_head, working_dir, client_mode):
    """get_runtime_context().runtime_env reflects the working_dir the driver
    was started with."""
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    runtime_env = f"""{{  "working_dir": "{working_dir}" }}"""
    # Execute the following cmd in driver with runtime_env
    execute_statement = """
print(ray.get_runtime_context().runtime_env["working_dir"])
"""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == working_dir
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_two_node_uri(two_node_cluster, working_dir, client_mode):
    """A manually pushed package referenced by URI works across nodes and a
    PIN_GCS-protocol URI is not garbage-collected after the driver exits."""
    cluster, _ = two_node_cluster
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    import ray._private.runtime_env as runtime_env
    import tempfile
    with tempfile.NamedTemporaryFile(suffix="zip") as tmp_file:
        pkg_name = runtime_env.get_project_package_name(working_dir, [], [])
        pkg_uri = runtime_env.Protocol.PIN_GCS.value + "://" + pkg_name
        runtime_env.create_project_package(working_dir, [], [], tmp_file.name)
        runtime_env.push_package(pkg_uri, tmp_file.name)
        # NOTE: this rebinding shadows the `runtime_env` module imported
        # above; from here on the name is the template string.
        runtime_env = f"""{{  "uris": ["{pkg_uri}"] }}"""
        # Execute the following cmd in driver with runtime_env
        execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    assert len(list(Path(PKG_DIR).iterdir())) == 1
    # pinned uri will not be deleted
    print(list(kv._internal_kv_list("")))
    assert len(kv._internal_kv_list("pingcs://")) == 1
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_regular_actors(ray_start_cluster_head, working_dir, client_mode):
    """A regular (non-detached) actor can import test_module from the
    working_dir, and the package is cleaned up after the driver exits."""
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    runtime_env = f"""{{  "working_dir": "{working_dir}" }}"""
    # Execute the following cmd in driver with runtime_env
    execute_statement = """
test_actor = TestActor.options(name="test_actor").remote()
print(sum(ray.get([test_actor.one.remote()] * 1000)))
"""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    assert len(list(Path(PKG_DIR).iterdir())) == 1
    assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
@pytest.mark.parametrize("client_mode", [True, False])
def test_detached_actors(ray_start_cluster_head, working_dir, client_mode):
    """A detached actor keeps its working_dir package alive after the driver
    exits; killing the actor releases the package."""
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, client_mode)
    runtime_env = f"""{{  "working_dir": "{working_dir}" }}"""
    # Execute the following cmd in driver with runtime_env
    execute_statement = """
test_actor = TestActor.options(name="test_actor", lifetime="detached").remote()
print(sum(ray.get([test_actor.one.remote()] * 1000)))
"""
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    # It's a detached actors, so it should still be there
    assert len(kv._internal_kv_list("gcs://")) == 1
    assert len(list(Path(PKG_DIR).iterdir())) == 2
    pkg_dir = [f for f in Path(PKG_DIR).glob("*") if f.is_dir()][0]
    import sys
    # Make the unpacked package importable in *this* process so we can call
    # the detached actor from the test itself.
    sys.path.insert(0, str(pkg_dir))
    test_actor = ray.get_actor("test_actor")
    assert sum(ray.get([test_actor.one.remote()] * 1000)) == 1000
    ray.kill(test_actor)
    from time import sleep
    sleep(5)
    # After the actor dies the package dir and GCS entry are cleaned up.
    assert len(list(Path(PKG_DIR).iterdir())) == 1
    assert len(kv._internal_kv_list("gcs://")) == 0
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_jobconfig_compatible_1(ray_start_cluster_head, working_dir):
    """A second client driver with a working_dir job config can connect while
    a first driver with job_config=None is still running."""
    # start job_config=None
    # start job_config=something
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, True)
    runtime_env = None
    # To make the first one hanging there
    execute_statement = """
sleep(600)
"""
    script = driver_script.format(**locals())
    # Have one running with job config = None
    proc = run_string_as_driver_nonblocking(script, env)
    # waiting it to be up
    sleep(5)
    runtime_env = f"""{{  "working_dir": "{working_dir}" }}"""
    # Execute the second one which should work because Ray Client servers.
    execute_statement = "print(sum(ray.get([run_test.remote()] * 1000)))"
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "1000"
    proc.kill()
    proc.wait()
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_jobconfig_compatible_2(ray_start_cluster_head, working_dir):
    """A second driver with job_config=None can connect while a first driver
    with a py_modules job config is still running."""
    # start job_config=something
    # start job_config=None
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, True)
    runtime_env = """{  "py_modules": [test_module.__path__[0]] }"""
    # To make the first one hanging there
    execute_statement = """
sleep(600)
"""
    script = driver_script.format(**locals())
    proc = run_string_as_driver_nonblocking(script, env)
    sleep(5)
    runtime_env = None
    # Execute the following in the second one which should
    # succeed
    execute_statement = "print('OK')"
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    assert out.strip().split()[-1] == "OK", out
    proc.kill()
    proc.wait()
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_jobconfig_compatible_3(ray_start_cluster_head, working_dir):
    """Two concurrent client drivers may use two *different* runtime envs."""
    # start job_config=something
    # start job_config=something else
    cluster = ray_start_cluster_head
    (address, env, PKG_DIR) = start_client_server(cluster, True)
    runtime_env = """{  "py_modules": [test_module.__path__[0]] }"""
    # To make the first one hanging ther
    execute_statement = """
sleep(600)
"""
    script = driver_script.format(**locals())
    proc = run_string_as_driver_nonblocking(script, env)
    sleep(5)
    runtime_env = f"""
{{  "working_dir": test_module.__path__[0] }}"""  # noqa: F541
    # Execute the following cmd in the second one and ensure that
    # it is able to run.
    execute_statement = "print('OK')"
    script = driver_script.format(**locals())
    out = run_string_as_driver(script, env)
    proc.kill()
    proc.wait()
    assert out.strip().split()[-1] == "OK"
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_util_without_job_config(shutdown_only):
    """ray.util.connect with job_config=None still picks up the server-side
    working directory, so tasks can import a sibling module (lib.py)."""
    from ray.cluster_utils import Cluster
    with tempfile.TemporaryDirectory() as tmp_dir:
        with (Path(tmp_dir) / "lib.py").open("w") as f:
            f.write("""
def one():
    return 1
""")
        old_dir = os.getcwd()
        os.chdir(tmp_dir)
        cluster = Cluster()
        cluster.add_node(num_cpus=1)
        ray.init(address=cluster.address)
        (address, env, PKG_DIR) = start_client_server(cluster, True)
        script = f"""
import ray
import ray.util
import os
ray.util.connect("{address}", job_config=None)
@ray.remote
def run():
    from lib import one
    return one()
print(ray.get([run.remote()])[0])
"""
        out = run_string_as_driver(script, env)
        print(out)
        os.chdir(old_dir)
@pytest.mark.skipif(sys.platform == "win32", reason="Fail to create temp dir.")
def test_init(shutdown_only):
    """working_dir='.' in the JobConfig passed to ray.init makes files in the
    current directory visible to actors."""
    with tempfile.TemporaryDirectory() as tmp_dir:
        old_dir = os.getcwd()
        os.chdir(tmp_dir)
        with open("hello", "w") as f:
            f.write("world")
        job_config = ray.job_config.JobConfig(runtime_env={"working_dir": "."})
        ray.init(job_config=job_config)
        @ray.remote
        class Test:
            def test(self):
                with open("hello") as f:
                    return f.read()
        t = Test.remote()
        assert ray.get(t.test.remote()) == "world"
        os.chdir(old_dir)
def test_get_wheel_filename():
    """Every (platform, python) combination yields a wheel filename that
    exists under the nightly "latest" S3 prefix.

    The previous code computed ``filename`` but never used it, probing a
    corrupted constant URL instead; the probed URL must be prefix+filename.
    """
    ray_version = "2.0.0.dev0"
    for sys_platform in ["darwin", "linux", "win32"]:
        for py_version in ["36", "37", "38"]:
            filename = get_wheel_filename(sys_platform, ray_version,
                                          py_version)
            prefix = "https://s3-us-west-2.amazonaws.com/ray-wheels/latest/"
            url = f"{prefix}{filename}"
            # Include the URL in the failure message, matching the sibling
            # wheel-URL tests below.
            assert requests.head(url).status_code == 200, url
def test_get_master_wheel_url():
    """Master-branch wheel URLs for a known commit exist for every supported
    (platform, python) combination."""
    ray_version = "2.0.0.dev0"
    # A commit known to have published master wheels.
    test_commit = "ba6cebe30fab6925e5b2d9e859ad064d53015246"
    for sys_platform in ["darwin", "linux", "win32"]:
        for py_version in ["36", "37", "38"]:
            url = get_master_wheel_url(test_commit, sys_platform, ray_version,
                                       py_version)
            assert requests.head(url).status_code == 200, url
def test_get_release_wheel_url():
    """Release wheel URLs exist for several released versions, each pinned to
    its release commit, across all supported (platform, python) combos."""
    test_commits = {
        "1.4.0rc1": "e7c7f6371a69eb727fa469e4cd6f4fbefd143b4c",
        "1.3.0": "0b4b444fadcdc23226e11fef066b982175804232",
        "1.2.0": "1b1a2496ca51b745c07c79fb859946d3350d471b"
    }
    for sys_platform in ["darwin", "linux", "win32"]:
        for py_version in ["36", "37", "38"]:
            for version, commit in test_commits.items():
                url = get_release_wheel_url(commit, sys_platform, version,
                                            py_version)
                assert requests.head(url).status_code == 200, url
@pytest.mark.skipif(
    sys.platform == "win32", reason="runtime_env unsupported on Windows.")
def test_decorator_task(ray_start_cluster_head):
    """env_vars from a task-level runtime_env are visible inside the task."""
    @ray.remote(runtime_env={"env_vars": {"foo": "bar"}})
    def f():
        return os.environ.get("foo")
    assert ray.get(f.remote()) == "bar"
@pytest.mark.skipif(
    sys.platform == "win32", reason="runtime_env unsupported on Windows.")
def test_decorator_actor(ray_start_cluster_head):
    """env_vars from an actor-level runtime_env are visible in its methods."""
    @ray.remote(runtime_env={"env_vars": {"foo": "bar"}})
    class A:
        def g(self):
            return os.environ.get("foo")
    a = A.remote()
    assert ray.get(a.g.remote()) == "bar"
@pytest.mark.skipif(
    sys.platform == "win32", reason="runtime_env unsupported on Windows.")
def test_decorator_complex(shutdown_only):
    """runtime_env precedence: job-level env is inherited by default,
    overridden by decorator-level env, which is in turn overridden by
    .options() at call time."""
    ray.init(
        job_config=ray.job_config.JobConfig(
            runtime_env={"env_vars": {
                "foo": "job"
            }}))
    # No decorator env -> inherits the job-level value.
    @ray.remote
    def env_from_job():
        return os.environ.get("foo")
    assert ray.get(env_from_job.remote()) == "job"
    # Decorator env overrides the job-level value for tasks...
    @ray.remote(runtime_env={"env_vars": {"foo": "task"}})
    def f():
        return os.environ.get("foo")
    assert ray.get(f.remote()) == "task"
    # ...and for actors.
    @ray.remote(runtime_env={"env_vars": {"foo": "actor"}})
    class A:
        def g(self):
            return os.environ.get("foo")
    a = A.remote()
    assert ray.get(a.g.remote()) == "actor"
    # Test that runtime_env can be overridden by specifying .options().
    assert ray.get(
        f.options(runtime_env={
            "env_vars": {
                "foo": "new"
            }
        }).remote()) == "new"
    a = A.options(runtime_env={"env_vars": {"foo": "new2"}}).remote()
    assert ray.get(a.g.remote()) == "new2"
def test_container_option_serialize():
    """Container options must survive JobConfig protobuf serialization."""
    runtime_env = {
        "container": {
            "image": "ray:latest",
            "run_options": ["--name=test"]
        }
    }
    job_config = ray.job_config.JobConfig(runtime_env=runtime_env)
    job_config_serialized = job_config.serialize()
    # job_config_serialized is JobConfig protobuf serialized string,
    # job_config.runtime_env.raw_json has container_option info
    # job_config.serialized_runtime_env also has container_option info
    # -> "image" therefore appears exactly twice in the serialized bytes.
    assert job_config_serialized.count(b"image") == 2
def test_working_dir_override_failure(shutdown_only):
    """Overriding working_dir at the task/actor level (decorator or
    .options()) is unsupported and must raise NotImplementedError."""
    ray.init()
    # Decorator-level override on a task.
    @ray.remote(runtime_env={"working_dir": "."})
    def f():
        pass
    with pytest.raises(NotImplementedError):
        f.remote()
    # .options()-level override on a task.
    @ray.remote
    def g():
        pass
    with pytest.raises(NotImplementedError):
        g.options(runtime_env={"working_dir": "."}).remote()
    # Decorator-level override on an actor.
    @ray.remote(runtime_env={"working_dir": "."})
    class A:
        pass
    with pytest.raises(NotImplementedError):
        A.remote()
    # .options()-level override on an actor.
    @ray.remote
    class B:
        pass
    with pytest.raises(NotImplementedError):
        B.options(runtime_env={"working_dir": "."}).remote()
if __name__ == "__main__":
    import sys
    # Run this file's own tests verbosely when executed directly.
    sys.exit(pytest.main(["-sv", __file__]))
| 35.537037 | 79 | 0.640021 |
f7030d07bc7c0ce56ea130285e2eff935e0bf461 | 30 | py | Python | anchor/__init__.py | forest-snow/anchor-topic | ad947f2ff6aefc28394531fa74ba3e94e5a01fc2 | [
"MIT"
] | 13 | 2019-02-14T15:55:55.000Z | 2022-03-03T01:01:28.000Z | anchor/__init__.py | gkaramanolakis/anchor-topic | e1637fa3965bfe14d8a5241b070c675bcdf4df18 | [
"MIT"
] | 1 | 2020-11-12T22:49:16.000Z | 2020-11-12T22:49:16.000Z | anchor/__init__.py | gkaramanolakis/anchor-topic | e1637fa3965bfe14d8a5241b070c675bcdf4df18 | [
"MIT"
] | 2 | 2019-02-14T15:26:56.000Z | 2021-08-21T02:37:53.000Z | import anchor
name = 'anchor' | 10 | 15 | 0.733333 |
f7031222207e58ab6b118aabf23323dbda72761a | 1,391 | py | Python | svhn/evaluate_calibration.py | mvaldenegro/paper-subensembles-image-classification | cc3a6567b1de82b9bfb1612ad8d0e73cdd7ae09b | [
"BSD-3-Clause"
] | 5 | 2020-06-04T19:54:29.000Z | 2021-12-13T06:19:48.000Z | svhn/evaluate_calibration.py | mvaldenegro/paper-subensembles-image-classification | cc3a6567b1de82b9bfb1612ad8d0e73cdd7ae09b | [
"BSD-3-Clause"
] | null | null | null | svhn/evaluate_calibration.py | mvaldenegro/paper-subensembles-image-classification | cc3a6567b1de82b9bfb1612ad8d0e73cdd7ae09b | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import h5py
import pandas as pd
from svhn_io import load_svhn
from keras_uncertainty.utils import classifier_calibration_curve, classifier_calibration_error
EPSILON = 1e-10
def load_hdf5_data(filename):
inp = h5py.File(filename, "r")
preds = inp["preds"][...]
inp.close()
return preds
NUM_ENSEMBLES = 15
NUM_BINS=7
#IOD_FILE_PATTERN = "cnn_svhn-num_ens-{}-preds.hdf5"
#OUTPUT_PATTERN = "svhn-calibration-sub-deepensembles_1_num-ens-{}_cnn_svhn.csv"
IOD_FILE_PATTERN = "deepensembles-cnn_svhn-num_ens-{}-preds.hdf5"
OUTPUT_PATTERN = "svhn-calibration-deepensembles-num-ens-{}_cnn_svhn.csv"
if __name__ == "__main__":
for num_ens in range(1, NUM_ENSEMBLES + 1):
(_, __), (___, y_true) = load_svhn()
y_true = y_true.flatten()
y_probs = load_hdf5_data(IOD_FILE_PATTERN.format(num_ens))
y_confs = np.max(y_probs, axis=1)
y_pred = np.argmax(y_probs, axis=1)
curve_conf, curve_acc = classifier_calibration_curve(y_pred, y_true, y_confs, num_bins=NUM_BINS)
error = classifier_calibration_error(y_pred, y_true, y_confs, num_bins=NUM_BINS)
print("Processing calibration curve for {} ensembles. Error: {}".format(num_ens, error))
output_df = pd.DataFrame(data={"conf": curve_conf, "acc": curve_acc})
output_df.to_csv(OUTPUT_PATTERN.format(num_ens), sep=';', index=False) | 33.119048 | 104 | 0.716751 |
f70332380c19749fdaaa89b3db41948706f18fba | 228 | py | Python | main.py | ConnorDoesDev/cozmo | dc2e4574d056b6e61e1e7042c3b32d7e0d00f055 | [
"CC0-1.0"
] | null | null | null | main.py | ConnorDoesDev/cozmo | dc2e4574d056b6e61e1e7042c3b32d7e0d00f055 | [
"CC0-1.0"
] | null | null | null | main.py | ConnorDoesDev/cozmo | dc2e4574d056b6e61e1e7042c3b32d7e0d00f055 | [
"CC0-1.0"
] | null | null | null | import cozmo
name = input("What is your name? ")
def cozmo_program(robot: cozmo.robot.Robot):
    """Greet the user by the name entered at startup, speaking aloud."""
    robot.say_text(
        f"Hi! My name is Cozmo. How are you, {name}?").wait_for_completed()
cozmo.run_program(cozmo_program)
| 19 | 75 | 0.697368 |
f7037afae16524014eb92d458ac6b7aedaa8b9b9 | 1,107 | py | Python | porcupine/__init__.py | Akuli/editor | cf98c538e75a07d825f9066e25a3752fdf7c3c29 | [
"MIT"
] | 1 | 2021-07-28T10:09:26.000Z | 2021-07-28T10:09:26.000Z | porcupine/__init__.py | Akuli/editor | cf98c538e75a07d825f9066e25a3752fdf7c3c29 | [
"MIT"
] | null | null | null | porcupine/__init__.py | Akuli/editor | cf98c538e75a07d825f9066e25a3752fdf7c3c29 | [
"MIT"
] | null | null | null | """Porcupine is a simple editor.
You are probably reading this because you want to learn how Porcupine
works or write fun plugins for it. I recommend getting started with the
plugin API documentation:
https://akuli.github.io/porcupine/
"""
import sys
import appdirs
version_info = (0, 99, 2) # this is updated with scripts/release.py
__version__ = "%d.%d.%d" % version_info
__author__ = "Akuli"
__copyright__ = "Copyright (c) 2017-2022 Akuli"
__license__ = "MIT"
if sys.platform in {"win32", "darwin"}:
    # these platforms like path names like "Program Files" or "Application Support"
    dirs = appdirs.AppDirs("Porcupine", "Akuli")
else:
    # lowercase names fit the dotted XDG-style paths used elsewhere
    dirs = appdirs.AppDirs("porcupine", "akuli")
# Must be after creating dirs
from porcupine import _state
# TODO: document get_*_panedwindow
get_main_window = _state.get_main_window
get_parsed_args = _state.get_parsed_args
get_horizontal_panedwindow = _state.get_horizontal_panedwindow
get_vertical_panedwindow = _state.get_vertical_panedwindow
get_tab_manager = _state.get_tab_manager
filedialog_kwargs = _state.filedialog_kwargs
quit = _state.quit
| 29.918919 | 83 | 0.775068 |
f703a7594603428c46c23789bb1ed339b09979a3 | 1,136 | py | Python | kubi_ecs_logger/models/root_schema.py | kumina/kubi_ecs_logger | 64d9519e0759a24253a4edc53e0c024675033d1c | [
"BSD-3-Clause"
] | 6 | 2019-12-15T12:47:06.000Z | 2022-01-11T08:54:58.000Z | kubi_ecs_logger/models/root_schema.py | kumina/kubi_ecs_logger | 64d9519e0759a24253a4edc53e0c024675033d1c | [
"BSD-3-Clause"
] | null | null | null | kubi_ecs_logger/models/root_schema.py | kumina/kubi_ecs_logger | 64d9519e0759a24253a4edc53e0c024675033d1c | [
"BSD-3-Clause"
] | null | null | null | """
TODO: Add doc what this file is doing
"""
from marshmallow import Schema, post_dump
class RootSchema(Schema):
SKIP_VALUES = [None]
@post_dump
def remove_skip_values(self, data, many, **kwargs):
return {
key: value for key, value in data.items()
if value not in self.SKIP_VALUES
}
@post_dump(pass_original=True)
def add_extra(self, serialized, original, many, **kwargs):
from kubi_ecs_logger.models.include import INCLUDE_FIELDS
for k, v in original.__dict__.items():
if k not in serialized and v is not None:
type_name = str(type(v).__name__).lower()
if type_name in INCLUDE_FIELDS:
schema = INCLUDE_FIELDS[type_name].schema
data = schema.dump(v)
if "kind" not in data:
data["kind"] = type_name
serialized[k] = data
elif isinstance(v, (int, float, str, bool, dict)):
if not str(k).startswith('_'):
serialized[k] = v
return serialized
| 32.457143 | 66 | 0.553697 |
f703c343b85bbcb32c648a5f8668d2512894a13a | 2,586 | py | Python | IMLearn/learners/regressors/polynomial_fitting.py | RotemBadash/IML.HUJI | 2b20d074c159123f61b321a7e84312ab82400949 | [
"MIT"
] | null | null | null | IMLearn/learners/regressors/polynomial_fitting.py | RotemBadash/IML.HUJI | 2b20d074c159123f61b321a7e84312ab82400949 | [
"MIT"
] | null | null | null | IMLearn/learners/regressors/polynomial_fitting.py | RotemBadash/IML.HUJI | 2b20d074c159123f61b321a7e84312ab82400949 | [
"MIT"
] | null | null | null | from __future__ import annotations
from typing import NoReturn
from . import LinearRegression
from ...base import BaseEstimator
import numpy as np
class PolynomialFitting(BaseEstimator):
"""
Polynomial Fitting using Least Squares estimation
"""
def __init__(self, k: int) -> PolynomialFitting:
"""
Instantiate a polynomial fitting estimator
Parameters
----------
k : int
Degree of polynomial to fit
"""
super().__init__()
self.degree = k
self.linear_regression_model = LinearRegression(
include_intercept=False)
def _fit(self, X: np.ndarray, y: np.ndarray) -> NoReturn:
"""
Fit Least Squares model to polynomial transformed samples
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input data to fit an estimator for
y : ndarray of shape (n_samples, )
Responses of input data to fit to
"""
x = self.__transform(X)
self.linear_regression_model.fit(x, y)
def _predict(self, X: np.ndarray) -> np.ndarray:
"""
Predict responses for given samples using fitted estimator
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input data to predict responses for
Returns
-------
responses : ndarray of shape (n_samples, )
Predicted responses of given samples
"""
x = self.__transform(X)
return self.linear_regression_model.predict(x)
def _loss(self, X: np.ndarray, y: np.ndarray) -> float:
"""
Evaluate performance under MSE loss function
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Test samples
y : ndarray of shape (n_samples, )
True labels of test samples
Returns
-------
loss : float
Performance under MSE loss function
"""
x = self.__transform(X)
return self.linear_regression_model.loss(x, y)
def __transform(self, X: np.ndarray) -> np.ndarray:
"""
Transform given input according to the univariate polynomial
transformation
Parameters
----------
X: ndarray of shape (n_samples,)
Returns
-------
transformed: ndarray of shape (n_samples, k+1)
Vandermonde matrix of given samples up to degree k
"""
return np.vander(X, N=self.degree+1, increasing=True) | 28.108696 | 68 | 0.5785 |
f703c645aa7c72fe4e04cdbf60f8a8cc84e7b617 | 1,238 | py | Python | alipay/aop/api/domain/PriceInformation.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 213 | 2018-08-27T16:49:32.000Z | 2021-12-29T04:34:12.000Z | alipay/aop/api/domain/PriceInformation.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 29 | 2018-09-29T06:43:00.000Z | 2021-09-02T03:27:32.000Z | alipay/aop/api/domain/PriceInformation.py | antopen/alipay-sdk-python-all | 8e51c54409b9452f8d46c7bb10eea7c8f7e8d30c | [
"Apache-2.0"
] | 59 | 2018-08-27T16:59:26.000Z | 2022-03-25T10:08:15.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class PriceInformation(object):
def __init__(self):
self._amount = None
self._type = None
@property
def amount(self):
return self._amount
@amount.setter
def amount(self, value):
self._amount = value
@property
def type(self):
return self._type
@type.setter
def type(self, value):
self._type = value
def to_alipay_dict(self):
params = dict()
if self.amount:
if hasattr(self.amount, 'to_alipay_dict'):
params['amount'] = self.amount.to_alipay_dict()
else:
params['amount'] = self.amount
if self.type:
if hasattr(self.type, 'to_alipay_dict'):
params['type'] = self.type.to_alipay_dict()
else:
params['type'] = self.type
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = PriceInformation()
if 'amount' in d:
o.amount = d['amount']
if 'type' in d:
o.type = d['type']
return o
| 22.107143 | 63 | 0.53958 |
f703ed2aa1c7638b37cd9459328cc99a0a5f16f3 | 34,077 | py | Python | main.py | RedaLegzali/mr-driller | c944b10ff17ece1445b55cf3b44a4aca411dcc0d | [
"MIT"
] | null | null | null | main.py | RedaLegzali/mr-driller | c944b10ff17ece1445b55cf3b44a4aca411dcc0d | [
"MIT"
] | null | null | null | main.py | RedaLegzali/mr-driller | c944b10ff17ece1445b55cf3b44a4aca411dcc0d | [
"MIT"
] | null | null | null | import pygame, time
from pygame.locals import *
from random import *
pygame.init()
# Variables Pygame
white = (255, 255, 255)
crystal = (162,162,162)
black = (0, 0, 0)
rose = (236,28,115)
red = pygame.Color('#ff0000')
green = pygame.Color('#00ff62')
blue = pygame.Color('#0026ff')
yellow = (222,207,4)
width = 800
height = 600
clock = pygame.time.Clock()
pop_block = pygame.mixer.Sound("Music/pop_block.wav")
# Images
walkRight = [pygame.image.load('Driller/droite1.png'), pygame.image.load('Driller/droite2.png'),
pygame.image.load('Driller/droite3.png'),pygame.image.load('Driller/droite4.png'),
pygame.image.load('Driller/droite5.png'), pygame.image.load('Driller/droite6.png'),
pygame.image.load('Driller/droite7.png'), pygame.image.load('Driller/droite8.png'),
pygame.image.load('Driller/droite9.png')]
walkLeft = [pygame.image.load('Driller/gauche1.png'), pygame.image.load('Driller/gauche2.png'),
pygame.image.load('Driller/gauche3.png'),pygame.image.load('Driller/gauche4.png'),
pygame.image.load('Driller/gauche5.png'),pygame.image.load('Driller/gauche6.png'),
pygame.image.load('Driller/gauche7.png'),pygame.image.load('Driller/gauche8.png'),
pygame.image.load('Driller/gauche9.png')]
fall = [
pygame.image.load('Driller/fall.png'),
pygame.image.load('Driller/fall1.png')
]
centre = pygame.image.load('Driller/centre.png')
blocks = [
pygame.image.load("Blocks/block_jaune.png"),
pygame.image.load("Blocks/block_vert.png"),
pygame.image.load("Blocks/block_bleu.png"),
pygame.image.load("Blocks/block_rouge.png"),
pygame.image.load("Blocks/block_blanc.png"),
pygame.image.load("Blocks/block_crystal.png"),
pygame.image.load("Blocks/block_niveau.png")
]
blocks_fissure = [
pygame.image.load("Blocks/block.png"),
pygame.image.load("Blocks/block1.png"),
pygame.image.load("Blocks/block2.png"),
pygame.image.load("Blocks/block3.png"),
pygame.image.load("Blocks/block4.png"),
pygame.image.load("Blocks/block5.png")
]
image_drill_left = pygame.image.load("Driller/drill_left.png")
image_drill_right = pygame.image.load("Driller/drill_right.png")
image_drill_down = pygame.image.load("Driller/drill_down.png")
oxy_display = pygame.image.load("Blocks/oxy_display.png")
capsule = pygame.image.load("Blocks/capsule_oxygene.png")
dead_crash = pygame.image.load("Driller/ecraser.png")
dead_air = pygame.image.load("Driller/asph.png")
ange = pygame.image.load("Driller/ange.png")
depth_display = pygame.image.load("Blocks/depth.png")
score_display = pygame.image.load("Blocks/score.png")
level_display = pygame.image.load("Blocks/level.png")
air_display = pygame.image.load("Blocks/air.png")
air_support_display=pygame.image.load("Blocks/air_support.png")
air_pourcent_display = pygame.image.load("Blocks/pourcent.png")
lives_display = pygame.image.load("Blocks/lives.png")
# Variables Globales
drill_left = False
drill_right = False
compteur_drill = 0
temps_recuperer = 0
cologne = 12
ligne = 35
game_over = False
surface = pygame.display.set_mode( (width,height) )
pygame.display.set_caption("Mr Driller")
obstacles = [[None]*cologne for l in range(ligne) ]
x = 100
y = 5
gravity = 5
left = False
right = False
walkCount = 0
fallCount = 0
pourcentage = 100
points = 0
profondeur = 0
GameOver = False
Death = 0
death_depth = []
CountDeath = 3
Capsule_Air = 10
name_list = []
# SP
def saisie():
global name_list
running = True
play = False
while running:
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == KEYDOWN:
if event.key == K_RETURN:
running = False
play = True
if event.type == pygame.KEYDOWN and len(name_list) != 30:
if event.key == pygame.K_a:
name_list.append("a")
elif event.key == pygame.K_b:
name_list.append("b")
elif event.key == pygame.K_c:
name_list.append("c")
elif event.key == pygame.K_d:
name_list.append("d")
elif event.key == pygame.K_e:
name_list.append("e")
elif event.key == pygame.K_f:
name_list.append("f")
elif event.key == pygame.K_g:
name_list.append("g")
elif event.key == pygame.K_h:
name_list.append("h")
elif event.key == pygame.K_i:
name_list.append("i")
elif event.key == pygame.K_j:
name_list.append("j")
elif event.key == pygame.K_k:
name_list.append("k")
elif event.key == pygame.K_l:
name_list.append("l")
elif event.key == pygame.K_m:
name_list.append("m")
elif event.key == pygame.K_n:
name_list.append("n")
elif event.key == pygame.K_o:
name_list.append("o")
elif event.key == pygame.K_p:
name_list.append("p")
elif event.key == pygame.K_q:
name_list.append("q")
elif event.key == pygame.K_r:
name_list.append("r")
elif event.key == pygame.K_s:
name_list.append("s")
elif event.key == pygame.K_t:
name_list.append("t")
elif event.key == pygame.K_u:
name_list.append("u")
elif event.key == pygame.K_v:
name_list.append("v")
elif event.key == pygame.K_w:
name_list.append("w")
elif event.key == pygame.K_x:
name_list.append("x")
elif event.key == pygame.K_y:
name_list.append("y")
elif event.key == pygame.K_z:
name_list.append("z")
elif event.key == pygame.K_SPACE:
name_list.append(" ")
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_BACKSPACE and len(name_list) > 0:
name_list.pop(-1)
#surface.fill( (0,0,0) )
ecran_saisie = pygame.image.load("Screens/EnterNameBetter.png")
ecran_saisie = pygame.transform.scale(ecran_saisie, (width, height))
surface.blit(ecran_saisie,(0,0))
string = ''.join(name_list)
font = pygame.font.Font("Screens/monospace.ttf" , 40)
texte = font.render(string , True , (0,0,0))
rectangle = texte.get_rect()
rectangle.topleft = (150,130)
surface.blit(texte,rectangle)
pygame.display.update()
clock.tick(60)
return play , string
def air():
global pourcentage , GameOver , Death , x , death_depth
pos_x = 620
pos_y = 300
font = pygame.font.Font("freesansbold.ttf", 30)
if pourcentage <= 0:
GameOver = True
Death = 1
if pourcentage > 100:
pourcentage = 100
text_temps = font.render(str(pourcentage), True, white)
list_rotato = [oxy_display for loop in range(pourcentage)]
surface.blit(text_temps, (pos_x+80, pos_y+40))
surface.blit(air_display,(pos_x-20,pos_y-50))
surface.blit(air_support_display,(pos_x-8,pos_y-3))
surface.blit(air_pourcent_display,(pos_x+135,pos_y+40 ))
longueur_barre = 0
for k in list_rotato:
surface.blit(k, (pos_x + longueur_barre, pos_y))
longueur_barre += 1.5
def score(points):
pos_x = 620
pos_y = 150
font = pygame.font.Font("freesansbold.ttf", 30)
pygame.draw.circle(surface,rose,(pos_x,pos_y+20),10,0)
pygame.draw.circle(surface,rose,(pos_x+30,pos_y+20),10,0)
text_score = font.render(str(points), True, white)
text = font.render("PTS", True, rose)
surface.blit(text_score, (pos_x+80, pos_y+30))
surface.blit(text, (pos_x+100, pos_y+60))
surface.blit(score_display,(pos_x-20,pos_y-30))
def depth(profondeur):
pos_x = 620
pos_y = 50
font = pygame.font.Font("freesansbold.ttf", 30)
pygame.draw.circle(surface, yellow, (pos_x, pos_y), 10, 0)
pygame.draw.circle(surface, yellow, (pos_x + 30, pos_y), 10, 0)
text_score = font.render(str(profondeur), True, white)
text = font.render("FT", True, yellow)
surface.blit(text_score, (pos_x + 80, pos_y))
surface.blit(text, (pos_x + 100, pos_y + 30))
surface.blit(depth_display,(600,0))
def lives(DeathCount):
pos_x = 560
pos_y = 400
font = pygame.font.Font("freesansbold.ttf", 30)
text_score = font.render(str(DeathCount), True, white)
text = font.render("x", True, red)
surface.blit(text_score, (pos_x + 180, pos_y+32))
surface.blit(text, (pos_x + 150, pos_y+30))
surface.blit(ange,(pos_x + 80, pos_y+5))
surface.blit(lives_display,(600,pos_y-25))
def levels():
pos_x=600
pos_y= 480
font = pygame.font.Font("freesansbold.ttf", 30)
text_level = font.render(str(level), True, white)
surface.blit(text_level, (pos_x+50 , pos_y+50))
surface.blit(level_display,(pos_x,pos_y))
def chrono(seconds):
time.sleep(1)
return (seconds + 1)
def intro():
pygame.mixer.music.load("Intro/intro_music.mp3") # je rapporte la musique
pygame.display.flip()
font = pygame.font.Font(None, 24)
clock = pygame.time.Clock()
seconds = 0
nextimg = 1
''' Chargement des images et choix de la premiere image'''
images = [
pygame.image.load("Intro/Start_screen1.png"),
pygame.image.load("Intro/Start_screen2.png"),
pygame.image.load("Intro/Start_screen3.png"),
pygame.image.load("Intro/Start_screen4.png"),
pygame.image.load("Intro/Start_screen5.png"),
pygame.image.load("Intro/Start_screen6.png"),
pygame.image.load("Intro/Start_screen7.png"),
pygame.image.load("Intro/Start_screen8.png")
]
pygame.mixer.music.play(0) # On lance la musique
running = True
play = False
while running:
seconds = chrono(seconds) # on lance le chrono
if seconds > 0 and seconds % 3 == 0: # tout les trois secondes on change d'images
nextimg += 1
if nextimg <= len(images):
choix_image = images[nextimg-1]
choix_image = pygame.transform.scale(choix_image, (width, height))
text_temps = font.render(str(seconds) + " seconds since start", 1,(255, 255, 255)) # petite indicateur de temps
surface.blit(choix_image, (0, 0))
surface.blit(text_temps, (0, 0))
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if event.type == KEYDOWN:
if event.key == K_SPACE:
running = False
play = True
pygame.display.update()
clock.tick(60)
return play
def initialise():
    """Fill the global ``obstacles`` grid for a new level.

    Rows 0..ligne-6 get random blocks (type 1-4 colored, 5 breakable with
    crack state, 7 crystal with hit counter, 8 oxygen capsule while the
    Capsule_Air budget lasts), rerolled so no 4-in-a-row of the same type
    forms horizontally or vertically. The last 5 rows (placed 400 px lower)
    are type-9 level-exit blocks.
    """
    global obstacles
    x_cube = 0
    hauteur = y+200
    caps = Capsule_Air
    for i in range(0,ligne-5):
        for j in range(cologne):
            # Type 8 (capsule) only rolls while capsules remain in budget.
            if caps != 0:
                square_type = randint(1,8)
            else:
                square_type = randint(1,7)
            if square_type == 8:
                caps -= 1
            square = pygame.Rect(x_cube, hauteur, 50, 50)
            # Extra list slots: type 5 -> [rect, type, crack, ?],
            # type 7 -> [rect, type, hits], others -> [rect, type].
            if square_type == 5:
                obstacles[i][j] = [square, square_type,0,0]
            elif square_type == 7:
                obstacles[i][j] = [square, square_type,0]
            else:
                obstacles[i][j] = [square, square_type]
            # Reroll while 3+ same-typed neighbors exist to the left/above
            # (would auto-pop as a chain of 4 on spawn).
            cpt = 3
            while cpt >= 3:
                cpt = 0
                for k in range(j - 1, j - 4, -1):
                    if k >= 0:
                        if obstacles[i][k] != None:
                            if (obstacles[i][k])[1] == (obstacles[i][j])[1]:
                                cpt += 1
                for l in range(i - 1, i - 4, -1):
                    if l >= 0:
                        if obstacles[l][j] != None:
                            if (obstacles[l][j])[1] == (obstacles[i][j])[1]:
                                cpt += 1
                if cpt >= 3:
                    # Note: the reroll never produces a capsule (1..7).
                    square_type = randint(1, 7)
                    if square_type == 5:
                        obstacles[i][j] = [square, square_type, 0, 0]
                    elif square_type == 7:
                        obstacles[i][j] = [square, square_type, 0]
                    else:
                        obstacles[i][j] = [square, square_type]
            x_cube += 50
        x_cube = 0
        hauteur += 50
    # Gap of 400 px, then 5 rows of type-9 next-level blocks.
    hauteur += 400
    for i in range(ligne-5 , ligne):
        for j in range(cologne):
            square = pygame.Rect(x_cube, hauteur, 50, 50)
            obstacles[i][j] = [square , 9]
            x_cube += 50
        x_cube = 0
        hauteur += 50
def draw():
    """Redraw the frame background, HUD separators and every grid block."""
    global collision_vertical, x, y, obstacles
    surface.fill(black)
    # HUD frame on the right-hand side of the play field.
    pygame.draw.line(surface, white, (600, 0), (600, height))
    for sep_y in (125, 250, 375, 500):
        pygame.draw.line(surface, rose, (600, sep_y), (width, sep_y))
    # Block type -> sprite dispatch table (type 5 is handled separately
    # because its sprite depends on the crack state stored in the cell;
    # any other unknown type falls back to the level block sprite).
    sprite_for_type = {
        1: blocks[3],  # red
        2: blocks[2],  # blue
        3: blocks[0],  # yellow
        4: blocks[1],  # green
        6: blocks[4],  # white
        7: blocks[5],  # crystal
        8: capsule,    # oxygen capsule
    }
    for row in obstacles:
        for cell in row:
            if cell is None:
                continue
            rect, block_type = cell[0], cell[1]
            if block_type == 5:
                surface.blit(blocks_fissure[cell[2]], rect)
            else:
                surface.blit(sprite_for_type.get(block_type, blocks[6]), rect)
def move():
    """Per-frame update: draw the driller sprite (dead / falling / drilling
    / walking / idle) and advance every falling block in ``liste_blocks``,
    killing the player if a falling non-capsule block lands on him.

    NOTE(review): ``second_death``, ``liste_blocks`` and ``driller`` are
    module globals assigned elsewhere in the file, and ``destruction_block``
    is defined further down — confirm against the full file.
    """
    global walkCount , fallCount ,x, y , liste_blocks , compteur_drill , GameOver , Death , second_death , obstacles \
        , death_depth
    # Wrap the walk/fall animation counters (9 walk frames x 3 ticks,
    # 2 fall frames x 3 ticks).
    if walkCount + 1 >= 27:
        walkCount = 0
    if fallCount+1 == 6:
        fallCount = 0
    if Death == 2:
        # Crushed by a block; after 100 ticks switch to the angel sprite.
        if second_death >= 100:
            image_ange = ange
            image_ange = pygame.transform.scale(image_ange, (55, 55))
            surface.blit(image_ange, (x - 10, y - 10))
        else:
            image_death = dead_crash
            image_death = pygame.transform.scale(image_death, (55, 55))
            surface.blit(image_death, (x - 10, y - 10))
    elif Death == 1:
        # Asphyxiated (air ran out); same angel transition.
        if second_death >= 100:
            image_ange = ange
            image_ange = pygame.transform.scale(image_ange, (55, 55))
            surface.blit(image_ange, (x - 10, y - 10))
        else:
            image_air = dead_air
            image_air = pygame.transform.scale(image_air, (55, 55))
            surface.blit(image_air, (x - 10, y - 10))
    elif not collision_horizontal:
        # Nothing under the player: play the fall animation and apply gravity.
        image_fall = pygame.transform.scale(fall[fallCount // 3], (55, 55))
        surface.blit(image_fall, (x - 10, y - 10))
        fallCount += 1
        y += gravity
    elif compteur_drill != 0:
        # Mid-drill: show the drill sprite facing the drilled direction.
        if drill_right and not drill_left:
            image_d_right = image_drill_right
            image_d_right = pygame.transform.scale(image_d_right, (55, 55))
            surface.blit(image_d_right, (x - 10, y - 10))
        elif not drill_right and drill_left:
            image_d_left = image_drill_left
            image_d_left = pygame.transform.scale(image_d_left, (55, 55))
            surface.blit(image_d_left, (x - 10, y - 10))
        else:
            image_d_down = image_drill_down
            image_d_down = pygame.transform.scale(image_d_down, (55, 55))
            surface.blit(image_d_down, (x - 10, y - 10))
        compteur_drill -= 1
    else:
        # Grounded: walking left/right or idle.
        if left == True:
            image_left = walkLeft[walkCount//3]
            image_left = pygame.transform.scale(image_left, (55, 55))
            surface.blit(image_left , (x-10,y-10))
            walkCount += 1
        elif right == True:
            image_right = walkRight[walkCount // 3]
            image_right = pygame.transform.scale(image_right, (55, 55))
            surface.blit(image_right , (x-10,y-10))
            walkCount += 1
        else:
            image_centre = pygame.transform.scale(centre, (55, 55))
            surface.blit(image_centre, (x - 10, y - 10))
    # Advance falling blocks. Element layout (from gravity_blocks):
    # [rect, fall_distance, wobble_timer, column j, wobble_dx, row i].
    # NOTE(review): liste_blocks is mutated while being iterated; works in
    # practice because at most one element is removed per pass, but fragile.
    for element in liste_blocks:
        square = element[0]
        compteur = element[1]
        seconds_gravity = element[2]
        if compteur == 50:
            # Block has fallen a full cell: commit it back into the grid.
            i,j = element[5] , element[3]
            destruction_block(i,j)
            liste_blocks.remove(element)
        else:
            if compteur == 0:
                if seconds_gravity == 100:
                    # Wobble finished: snap to the column and start falling.
                    square.x = element[3]*50
                    square.y += gravity
                    element[1] += gravity
                else:
                    # Pre-fall wobble: flip the +/-2 px offset every 5 ticks.
                    if seconds_gravity % 5 == 0:
                        if element[4] == -2:
                            element[4] = 2
                        else:
                            element[4] = -2
                    square.x += element[4]
            else:
                square.y += gravity
                element[1] += gravity
            i,j = element[5] , element[3]
            if obstacles[i][j] != None:
                if (obstacles[i][j])[1] != 8:
                    # Falling non-capsule block overlapping the player: crush.
                    if (square.bottom-5 > driller.top and ( square.left-5 < driller.left < square.right-5 or
                                                            square.left+5 < driller.right < square.right+5) ):
                        GameOver = True
                        Death = 2
                        death_depth = [i,j]
def events():
    """Keyboard handling for horizontal movement and jumping.

    Movement is disabled while a drill animation is playing
    (compteur_drill != 0) or after game over; x is clamped to the play
    field (5..560). SPACE jumps one cell (55 px) when ``jump`` was set by
    collisions_player().
    """
    global left , right , x , y , walkCount , collision_vertical_right , collision_vertical_left , drill_right , drill_left
    keys = pygame.key.get_pressed()
    if compteur_drill == 0:
        if not GameOver:
            if keys[pygame.K_LEFT] and x > 5:
                # Only move if no block is flush against the left side.
                if not collision_vertical_left:
                    x -= 5
                left = True
                right = False
                drill_right = False
                drill_left = False
            elif keys[pygame.K_RIGHT] and x < 560:
                if not collision_vertical_right:
                    x += 5
                drill_right = False
                drill_left = False
                right = True
                left = False
            else:
                # No movement key: reset to the idle pose.
                right = False
                left = False
                drill_left = False
                drill_right = False
                walkCount = 0
    if jump == True:
        if not GameOver:
            if keys[pygame.K_SPACE]:
                y -= 55
def collisions_player():
    """Resolve all player/block collisions for this frame.

    First pass over the grid: flag ground contact (sets
    collision_horizontal, records the supporting cell(s) in ``liste`` and
    updates ``profondeur``), flag side contact (collision_vertical_left /
    _right), handle side-drilling with A (left) / E (right), capsule
    pickup (+1 point, +20% air) and the jump permission. Second pass over
    the supporting cells: drilling below (and diagonally) with Z.

    Cell layout reminder: obstacles[i][j] == [rect, type, ...]; type 7
    (crystal) carries a hit counter at index 2, type 8 is an air capsule.

    NOTE(review): ``driller``, ``level`` and ``drill_ticker`` are module
    globals assigned outside this excerpt — confirm against the full file.
    """
    global collision_vertical_right , collision_vertical_left , collision_horizontal , x,y , jump , obstacles ,\
        drill_ticker , drill_right , drill_left , compteur_drill , pourcentage , points , profondeur , death_depth
    keys = pygame.key.get_pressed()
    liste = []
    for i in range(ligne):
        for j in range(cologne):
            if obstacles[i][j] != None:
                square = (obstacles[i][j])[0]
                if driller.colliderect(square):
                    if y == square.y - 45:
                        # Standing exactly on top of this block.
                        collision_horizontal = True
                        liste.append((i,j))
                        profondeur = ( ligne*(level-1) ) + i
                    else:
                        jmp = False
                        if x == square.x + 45:
                            # Block flush against the player's left side.
                            if (obstacles[i][j])[1] != 8:
                                collision_vertical_left = True
                                # First contact cracks a crystal block.
                                if (obstacles[i][j])[1] == 7:
                                    if (obstacles[i][j])[2] == 0:
                                        (obstacles[i][j])[2] += 1
                                if not GameOver:
                                    # A drills the block to the left.
                                    if keys[pygame.K_a] and drill_ticker == 0:
                                        collisions_blocks(i, j)
                                        drill_left = True
                                        drill_right = False
                                        compteur_drill = 20
                                        drill_ticker = 20
                                if i != 0:
                                    # Jump only if the cell above (and above-
                                    # right) is free or a capsule.
                                    if (obstacles[i - 1][j]) != None:
                                        if (obstacles[i - 1][j ])[1] != 8:
                                            if (obstacles[i - 1][j])[0].bottom + 5 == driller.top:
                                                jmp = True
                                    if (obstacles[i - 1][j+1]) != None:
                                        if (obstacles[i - 1][j+1])[1] != 8:
                                            if (obstacles[i - 1][j+1])[0].bottom + 5 == driller.top:
                                                jmp = True
                                    if jmp == False:
                                        jump = True
                                else:
                                    jump = True
                            else:
                                # Touching a capsule: collect it.
                                obstacles[i][j] = None
                                points += 1
                                pourcentage += 20
                        if x == square.x - 35:
                            # Block flush against the player's right side.
                            if (obstacles[i][j])[1] != 8:
                                collision_vertical_right = True
                                if (obstacles[i][j])[1] == 7:
                                    if (obstacles[i][j])[2] == 0:
                                        (obstacles[i][j])[2] += 1
                                if not GameOver:
                                    # E drills the block to the right.
                                    if keys[pygame.K_e] and drill_ticker == 0:
                                        drill_ticker = 20
                                        collisions_blocks(i, j)
                                        drill_right = True
                                        drill_left = False
                                        compteur_drill = 20
                                if i != 0:
                                    if (obstacles[i-1][j]) != None:
                                        if (obstacles[i - 1][j])[1] != 8:
                                            if (obstacles[i-1][j])[0].bottom+5 == driller.top:
                                                jmp = True
                                    if (obstacles[i-1][j-1]) != None:
                                        if (obstacles[i - 1][j - 1])[1] != 8:
                                            if (obstacles[i-1][j-1])[0].bottom+5 == driller.top:
                                                jmp = True
                                    if jmp == False:
                                        jump = True
                                else:
                                    jump = True
                            else:
                                obstacles[i][j] = None
                                points += 1
                                pourcentage += 20
    # Second pass: drilling into the cell(s) the player stands on.
    for element in liste:
        i = element[0]
        j = element[1]
        if obstacles[i][j] != None:
            square = (obstacles[i][j])[0]
            if len(liste) == 2:
                # Straddling two blocks: Z drills the one under the
                # overhanging half and nudges the player off it.
                if square.x+15 == x:
                    if Death == 1:
                        death_depth = [i,j]
                    x -= 5
                    if (obstacles[i][j])[1] != 8:
                        if (obstacles[i][j])[1] == 7:
                            if (obstacles[i][j])[2] == 0:
                                (obstacles[i][j])[2] += 1
                        if not GameOver:
                            if keys[pygame.K_z]: # Right
                                if obstacles[i][j] != None:
                                    collisions_blocks(i, j)
                                    x -= 5
                                    drill_ticker = 20
                                    drill_right = True
                                    drill_left = False
                                    compteur_drill = 20
                    else:
                        obstacles[i][j] = None
                        points += 1
                        pourcentage += 20
                elif square.x-5 == x:
                    if Death == 1:
                        death_depth = [i, j]
                    x += 5
                    if (obstacles[i][j])[1] != 8:
                        if (obstacles[i][j])[1] == 7:
                            if (obstacles[i][j])[2] == 0:
                                (obstacles[i][j])[2] += 1
                        if not GameOver:
                            if keys[pygame.K_z]: # Left
                                if obstacles[i][j] != None:
                                    collisions_blocks(i, j)
                                    x += 5
                                    drill_right = False
                                    drill_right = True
                                    drill_ticker = 20
                                    compteur_drill = 20
                    else:
                        obstacles[i][j] = None
                        points += 1
                        pourcentage += 20
            else:
                # Standing squarely on one block: Z drills straight down.
                if Death == 1:
                    death_depth = [i, j]
                if (obstacles[i][j])[1] != 8:
                    if (obstacles[i][j])[1] == 7:
                        if (obstacles[i][j])[2] == 0:
                            (obstacles[i][j])[2] += 1
                    if not GameOver:
                        if keys[pygame.K_z]: # Down
                            if obstacles[i][j] != None and drill_ticker == 0:
                                drill_ticker = 20
                                collisions_blocks(i, j)
                                drill_right = False
                                drill_right = False
                                compteur_drill = 20
                else:
                    obstacles[i][j] = None
                    points += 1
                    pourcentage += 20
def gravity_blocks():
    """Make unsupported blocks fall one row per call.

    A candidate block (one with an empty cell directly below it) falls unless
    the checked same-colored neighbor on row i_1 is still supported, in which
    case the whole merge group stays put.
    """
    global obstacles , gravity , liste_blocks
    liste = []
    # Collect (row_of_block, empty_row_below, column) for every block with a hole under it.
    for i in range(1 , ligne):
        for j in range(0,cologne):
            if obstacles[i][j] == None and obstacles[i-1][j] != None:
                liste.append( (i-1 , i , j) )
    for element in liste:
        i = element[1]
        i_1 = element[0]
        j = element[2]
        j_sup = j+1
        if (obstacles[i_1][j])[1] != 6:
            # Right-hand neighbor check (skipped for block type 6): if it is the
            # same color and supported from below, this block hangs onto it.
            continue_sup = False
            while j_sup < cologne and i_1+1 < ligne:
                if obstacles[i_1][j_sup] != None:
                    if (obstacles[i_1][j])[1] == (obstacles[i_1][j_sup])[1]:
                        if obstacles[i_1+1][j_sup] != None:
                            continue_sup = True
                            break
                        else:
                            break
                    else:
                        break
                else:
                    break
                j_sup += 1  # NOTE(review): unreachable — every branch above breaks, so only the immediate neighbor is checked
            if continue_sup:
                continue
        j_inf = j-1
        continue_inf = False
        # Same check for the left-hand neighbor.
        # BUGFIX: the condition used to be "j_inf < cologne", which is always true
        # while j_inf counts down, so j_inf == -1 silently read the rightmost
        # column through Python's negative indexing.  Stop at the left edge.
        while j_inf >= 0 and i_1 + 1 < ligne:
            if obstacles[i_1][j_inf] != None:
                if (obstacles[i_1][j])[1] == (obstacles[i_1][j_inf])[1]:
                    if obstacles[i_1 + 1][j_inf] != None:
                        continue_inf = True
                        break
                    else:
                        break
                else:
                    break
            else:
                break
            j_inf -= 1  # NOTE(review): unreachable for the same reason as above
        if continue_inf:
            continue
        # Drop the block one row and register it in the falling-animation list.
        obstacles[i][j] = obstacles[i_1][j]
        obstacles[i_1][j] = None
        liste_blocks.append( [ (obstacles[i][j])[0] , 0 , 0 , j , 2, i ] )
def collisions_blocks(i,j):
    """Handle the driller hitting the block at grid cell (i, j).

    Block type 9 triggers the next level, type 8 is indestructible; any other
    type flood-fills the group of same-typed 4-neighbors and pops it: 4-field
    blocks get their crack counter bumped, the rest are destroyed for points.
    """
    global obstacles , points , NextLevel
    if (obstacles[i][j])[1] == 9:
        NextLevel = True
    elif (obstacles[i][j])[1] != 8:
        # Flood fill: `liste` grows while the for-loop iterates it, so newly
        # appended neighbors are visited in the same pass; `compteur` re-runs
        # the pass until no new cell is added.
        liste = [ (i,j) ]
        compteur = 1
        while compteur != 0:
            compteur = 0
            for element in liste:
                position_i = element[0]
                position_j = element[1]
                i_sup = position_i + 1
                i_inf = position_i - 1
                j_sup = position_j + 1
                j_inf = position_j - 1
                # Below neighbor (same block type, not yet collected).
                if i_sup < ligne and obstacles[i_sup][position_j] != None:
                    if (i_sup , position_j) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[i_sup][position_j])[1]:
                            liste.append((i_sup, position_j))
                            compteur += 1
                # Above neighbor.
                if i_inf >= 0 and obstacles[i_inf][position_j] != None:
                    if (i_inf , position_j) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[i_inf][position_j])[1]:
                            liste.append((i_inf, position_j))
                            compteur += 1
                # Right neighbor.
                if j_sup < cologne and obstacles[position_i][j_sup] != None:
                    if (position_i,j_sup) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_sup])[1]:
                            liste.append((position_i, j_sup))
                            compteur += 1
                # Left neighbor.
                if j_inf >= 0 and obstacles[position_i][j_inf] != None:
                    if (position_i,j_inf) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_inf])[1]:
                            liste.append((position_i, j_inf))
                            compteur += 1
        pop_block.play()
        for element in liste:
            i = element[0]
            j = element[1]
            # 4-field blocks are "cracked" up to 5 hits instead of destroyed outright.
            if len(obstacles[i][j]) == 4:
                if (obstacles[i][j])[2] < 5:
                    (obstacles[i][j])[2] += 1
            else:
                obstacles[i][j] = None
                points += 1
def destruction_block(i,j):
    """Destroy the merge group containing (i, j) when it has at least 4 blocks.

    Flood-fills the group of same-typed 4-neighbors (like collisions_blocks)
    while counting its size in cpt_global; only groups of 4+ are popped.
    NOTE(review): `merge_blocks` and `pourcentage` are declared global but
    never used here — presumably leftovers; confirm before removing.
    """
    global obstacles , merge_blocks , pourcentage , points
    liste = [(i, j)]
    compteur = 1
    cpt_global = 1
    # Flood fill; `liste` is appended to while being iterated (Python list
    # iteration picks up the new tail), `compteur` repeats until stable.
    while compteur != 0:
        compteur = 0
        for element in liste:
            position_i = element[0]
            position_j = element[1]
            i_sup = position_i + 1
            i_inf = position_i - 1
            j_sup = position_j + 1
            j_inf = position_j - 1
            if obstacles[position_i][position_j] != None:
                if i_sup < ligne and obstacles[i_sup][position_j] != None:
                    if (i_sup, position_j) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[i_sup][position_j])[1]:
                            liste.append((i_sup, position_j))
                            compteur += 1
                            cpt_global += 1
                if i_inf >= 0 and obstacles[i_inf][position_j] != None:
                    if (i_inf, position_j) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[i_inf][position_j])[1]:
                            liste.append((i_inf, position_j))
                            compteur += 1
                            cpt_global += 1
                if j_sup < cologne and obstacles[position_i][j_sup] != None:
                    if (position_i, j_sup) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_sup])[1]:
                            liste.append((position_i, j_sup))
                            compteur += 1
                            cpt_global += 1
                if j_inf >= 0 and obstacles[position_i][j_inf] != None:
                    if (position_i, j_inf) not in liste:
                        if (obstacles[position_i][position_j])[1] == (obstacles[position_i][j_inf])[1]:
                            liste.append((position_i, j_inf))
                            compteur += 1
                            cpt_global += 1
    # Only groups of 4 or more blocks are destroyed.
    if cpt_global >= 4:
        pop_block.play()
        for element in liste:
            i1 = element[0]
            j1 = element[1]
            points += 1
            if len(obstacles[i1][j1]) == 4:
                # Cracked block: draw the final fissure frame before removal.
                surface.blit(blocks_fissure[5], (obstacles[i1][j1])[0])
                obstacles[i1][j1] = None
            else:
                obstacles[i1][j1] = None
def save():
    """Persist the player's score to Save/sauvegarde.txt.

    If `username` already appears in the file, its score is replaced only when
    the new `points` total is higher (the file is rewritten in place).
    Otherwise a new "name : score" entry is appended.  All file handles are
    now closed deterministically via context managers (the old version leaked
    several open handles).
    """
    # Make sure the save file exists before reading it.
    open("Save/sauvegarde.txt", "a").close()
    with open("Save/sauvegarde.txt", "r") as fichier:
        lines = fichier.readlines()
    # Parse "name : score" lines: split at the ':' (the char before it is a space).
    list_name = []
    for user in lines:
        for i in range(len(user)):
            if user[i] == ':':
                list_name.append([user[0:i-1] , int(user[i+1:])])
    ecraser = False
    for element in list_name:
        if username == element[0]:
            ecraser = True
            if points > element[1]:
                # Higher score: replace the old entry (moved to the list tail).
                list_name.remove(element)
                list_name.append([username, points])
            # Rewrite the whole file with the (possibly updated) entries.
            with open("Save/sauvegarde.txt", "w") as fic:
                for entry in list_name:
                    fic.write(entry[0] + ' : ' + str(entry[1]) + '\n')
    if not ecraser:
        # New player: append, prefixing a newline unless the file is empty.
        with open("Save/sauvegarde.txt", "r") as fic:
            first_line = fic.readline()
        with open("Save/sauvegarde.txt", "a") as fichier:
            if len(first_line) != 0:
                fichier.write("\n" + username + " : " + str(points))
            else:
                fichier.write(username + " : " + str(points))
def load():
    """Render the high-score screen from Save/sauvegarde.txt, highest score first."""
    global game_over
    with open("Save/sauvegarde.txt", "r") as fichier:
        lignes = fichier.readlines()
    # Parse "name : score" lines (name keeps its trailing space, as before).
    liste_trie = []
    for ligne in lignes:
        for i in range(len(ligne)):
            if ligne[i] == ':':
                liste_trie.append( [ligne[0:i],int(ligne[i+1:])] )
    # Stable descending sort — replaces the old hand-rolled bubble sort.
    liste_trie.sort(key=lambda entry: entry[1], reverse=True)
    # Keep servicing window events so the app stays closable on this screen.
    for event in pygame.event.get():
        if event.type == QUIT:
            game_over = True
    pos_x , pos_y = 250,150
    surface.fill(black)
    img = pygame.image.load("Screens/HighScore.png")
    surface.blit(img , (250,-50))
    font = pygame.font.Font("Screens/monospace.ttf",30)
    texte , texte1 = font.render("Name" , True , green) , font.render("Score" , True , green)
    rectangle , rectangle1 = texte.get_rect() , texte1.get_rect()
    rectangle.topleft = (pos_x , pos_y)
    rectangle1.topleft = (pos_x+250,pos_y)
    surface.blit(texte , rectangle)
    surface.blit(texte1,rectangle1)
    pos_y +=50
    # One table row per saved entry.
    for element in liste_trie:
        texte = font.render(element[0] ,True,white)
        texte1 = font.render(str(element[1]),True,white)
        rectangle = texte.get_rect()
        rectangle1 = texte1.get_rect()
        rectangle.topleft = (pos_x,pos_y)
        rectangle1.topleft = (pos_x+250,pos_y)
        surface.blit(texte , rectangle)
        surface.blit(texte1,rectangle1)
        pos_y += 50
def pause():
    """When Paused == 1, block the game in a modal loop until P is pressed again
    (Paused cycles back to 0) or the window is closed."""
    global Paused , game_over
    if Paused == 1:
        paused = True
        font = pygame.font.Font("freesansbold.ttf",40)
        texte = font.render("Paused",True,white)
        rectangle = texte.get_rect()
        rectangle.topleft = (200,50)
        # Modal loop: keep redrawing the "Paused" banner until unpaused.
        while paused:
            pygame.draw.rect(surface,black,rectangle,0)
            surface.blit(texte,rectangle)
            for event in pygame.event.get():
                if event.type == QUIT:
                    paused = False
                    game_over = True
                if event.type == KEYDOWN:
                    if event.key == K_p:
                        paused = False
                        # Paused counter: 0 = running, 1 = paused, 2 wraps to 0.
                        Paused += 1
                        if Paused == 2:
                            Paused = 0
            pygame.display.update()
def lose(second):
    """Drive the death animation, stepped once per frame with the tick count `second`."""
    global y , GameOver , pourcentage , Death , CountDeath
    if second == 80:
        # Clear the whole column of blocks above the cell where the player died.
        if death_depth:
            i, j = death_depth
            for row in range(i, -1, -1):
                obstacles[row][j] = None
    if second >= 100:
        # Let the driller fall off-screen.
        y -= gravity
    if second == 200:
        # Animation finished: reset the round state and consume one life.
        GameOver = False
        pourcentage = 100
        Death = 0
        CountDeath -= 1
def gameover(death_screen):
    """Draw the game-over screen; start the game-over music on the first frame only."""
    global game_over
    # Keep servicing window events so the app stays closable.
    for event in pygame.event.get():
        if event.type == QUIT:
            game_over = True
    if death_screen == 0:
        # First frame: swap the soundtrack for the game-over jingle.
        pygame.mixer.music.stop()
        pygame.mixer.music.load("Music/GameOver.wav")
        pygame.mixer.music.play(0)
    background = pygame.image.load("Screens/GameOver.jpg")
    background = pygame.transform.scale(background, (width, height))
    surface.blit(background, (0, 0))
def win(win_screen):
    """Draw the victory screen; start the win music and clear the screen on the first frame."""
    global game_over
    # Keep servicing window events so the app stays closable.
    for event in pygame.event.get():
        if event.type == QUIT:
            game_over = True
    if win_screen == 0:
        pygame.mixer.music.stop()
        pygame.mixer.music.load("Music/WinTheme.wav")
        pygame.mixer.music.play(0)
        surface.fill(black)
    background = pygame.transform.scale(pygame.image.load("Screens/Win.jpg"), (width, height-200))
    surface.blit(background, (0, 200))
    font = pygame.font.Font("freesansbold.ttf", 50)
    # Two banner lines above the artwork.
    for text, top in (("Congratulations !!", 50), ("Game Completed", 120)):
        rendered = font.render(text, True, white)
        box = rendered.get_rect()
        box.topleft = (200, top)
        surface.blit(rendered, box)
def main():
    """Top-level game loop: runs at 60 FPS until the window closes.

    Each frame either advances gameplay (when lives remain and level < 11) or
    shows the end-of-game screens (game over / win / high scores).
    """
    global game_over , collision_horizontal , collision_vertical_left , collision_vertical_right , jump , driller,\
        x, y , obstacles , drill_ticker , liste_blocks , merge_blocks , pourcentage , points , second_death , CountDeath , Capsule_Air , NextLevel , level , Paused
    drill_ticker , second = 0 , 0
    liste_blocks = []
    merge_blocks = []
    second_death = 0
    driller , level = 1 , 1
    pygame.mixer.music.load("Music/main.mp3")
    pygame.mixer.music.play(-1)
    NextLevel = False
    cpt_save , Paused= 0 , 0
    death_screen , win_screen = 0,0
    while not game_over:
        clock.tick(60)
        if CountDeath != 0 and level != 11:
            # Reset per-frame collision flags before they are recomputed.
            collision_horizontal = False
            collision_vertical_left = False
            collision_vertical_right = False
            jump = False
            if NextLevel:
                # Level transition: rebuild the grid and grant bonuses.
                initialise()
                level += 1
                pourcentage = 100
                points += 10
                Capsule_Air -= 1
                NextLevel = False
            driller = pygame.Rect(x, y, 40, 50)
            for event in pygame.event.get():
                if event.type == QUIT:
                    game_over = True
                if event.type == KEYDOWN:
                    if event.key == K_p:
                        Paused += 1
            if drill_ticker > 0:
                drill_ticker -= 1
            if not GameOver:
                second_death = 0
                second += 1
                # Air drains 1% per 60 frames (one real second).
                if second == 60:
                    second = 0
                    pourcentage -= 1
                # Vertical camera scroll: keep the driller between y 100 and 300
                # by shifting the whole grid instead of the player.
                if y >= 300:
                    y -= 50
                    for i in range(ligne):
                        for j in range(cologne):
                            if obstacles[i][j] != None:
                                (obstacles[i][j])[0].y -= 50
                if y <= 100:
                    y += 50
                    for i in range(ligne):
                        for j in range(cologne):
                            if obstacles[i][j] != None:
                                (obstacles[i][j])[0].y += 50
            else:
                # Player is dying: advance the death animation instead.
                second_death += 1
                lose(second_death)
            for element in liste_blocks:
                element[2] += 1
            # Per-block timers: fully-cracked 4-field blocks expire after 20
            # frames; triggered type-7 blocks pop after 500 frames.
            for i in range(ligne):
                for j in range(cologne):
                    if obstacles[i][j] != None:
                        if len(obstacles[i][j]) == 4:
                            if (obstacles[i][j])[2] == 5:
                                (obstacles[i][j])[3] += 1
                                if (obstacles[i][j])[3] == 20:
                                    obstacles[i][j] = None
                                    pourcentage -= 20
                                    points += 1
                    if obstacles[i][j] != None:
                        if (obstacles[i][j])[1] == 7:
                            if (obstacles[i][j])[2] >= 1:
                                (obstacles[i][j])[2] += 1
                                if (obstacles[i][j])[2] == 500:
                                    obstacles[i][j] = None
                                    pop_block.play()
            # Frame pipeline: draw, physics, input, then HUD.
            draw()
            air()
            collisions_player()
            gravity_blocks()
            move()
            events()
            score(points)
            depth(profondeur)
            lives(CountDeath)
            levels()
            pause()
        else:
            # End of game: save once, then show game-over / win for 100 frames,
            # then the high-score screen.
            if cpt_save == 0:
                save()
                cpt_save += 1
            if CountDeath == 0 and death_screen < 100:
                gameover(death_screen)
                death_screen += 1
            elif level == 11 and win_screen < 100:
                win(win_screen)
                win_screen += 1
            else:
                load()
        pygame.display.update()
    pygame.quit()
# Lancement (launch):
launch, username = saisie()
# Truthiness instead of the old "== True" comparisons (PEP 8 / E712).
if launch:
    play = intro()
    if play:
        pygame.mixer.music.stop()
        initialise()
        main()
    else:
        pygame.quit()
else:
    pygame.quit()
f703edfd294b009350efb017aa9f635fff7cb725 | 30,396 | py | Python | treetopper/stand.py | zacharybeebe/treetopper | 9302d9c482eb2209c516c79100be98614666f8c1 | [
"MIT"
] | null | null | null | treetopper/stand.py | zacharybeebe/treetopper | 9302d9c482eb2209c516c79100be98614666f8c1 | [
"MIT"
] | null | null | null | treetopper/stand.py | zacharybeebe/treetopper | 9302d9c482eb2209c516c79100be98614666f8c1 | [
"MIT"
] | null | null | null | from os import (
startfile,
getcwd
)
from os.path import join
from io import BytesIO
from csv import (
writer,
excel
)
from openpyxl import (
Workbook,
load_workbook
)
from statistics import (
mean,
variance,
stdev
)
from treetopper.plot import Plot
from treetopper.timber import (
TimberQuick,
TimberFull
)
from treetopper.log import Log
from treetopper.thin import (
ThinTPA,
ThinBA,
ThinRD
)
from treetopper._exceptions import TargetDensityError
from treetopper.fvs import FVS
from treetopper._constants import (
math,
ALL_SPECIES_NAMES,
GRADE_SORT,
LOG_LENGTHS,
SORTED_HEADS
)
from treetopper._utils import (
format_comma,
format_pct,
extension_check,
reorder_dict,
check_date,
add_logs_to_table_heads
)
from treetopper._import_from_sheets import import_from_sheet
from treetopper._print_console import (
print_stand_species,
print_stand_logs,
print_stand_stats
)
from treetopper._print_pdf import PDF
class Stand(object):
"""The Stand Class represents a stand of timber that has had an inventory conducted on it. It should made up of plots (Plot Class)
which contain trees (Timber Classes).
The Stand class will run calculations and statistics of the current stand conditions and it will run calculations of the log
merchantabilty for three metrics: logs per acre, log board feet per acre, and log cubic feet per acre, based on log grades,
log length ranges and species.
"""
def __init__(self, name: str, plot_factor: float, acres: float = None, inventory_date: str = None):
self.name = name.upper()
self.plot_factor = plot_factor
self.plots = []
self.plot_count = 0
self.tpa = 0
self.ba_ac = 0
self.qmd = 0
self.rd_ac = 0
self.bf_ac = 0
self.cf_ac = 0
self.avg_hgt = 0
self.hdr = 0
self.vbar = 0
self.tpa_stats = {}
self.ba_ac_stats = {}
self.rd_ac_stats = {}
self.bf_ac_stats = {}
self.cf_ac_stats = {}
self.species = {}
self.species_gross = {}
self.species_stats = {}
self.logs = {}
self.table_data = []
self.summary_stand = []
self.summary_logs = {}
self.summary_stats = []
self.metrics = ['tpa', 'ba_ac', 'rd_ac', 'bf_ac', 'cf_ac']
self.attrs = ['_gross', '_stats', '']
self.acres = acres
if inventory_date:
self.inv_date = check_date(inventory_date)
else:
self.inv_date = inventory_date
def __getitem__(self, attribute: str):
return self.__dict__[attribute]
def get_stand_table_text(self):
"""Returns a console-formatted string of current stand conditions"""
return print_stand_species(self.summary_stand)
def get_logs_table_text(self):
"""Returns a console-formatted string of stand logs data"""
return print_stand_logs(self.summary_logs)
    def get_stats_table_text(self):
        """Returns a console-formatted string of stand statistics"""
        return print_stand_stats(self.summary_stats)
def get_console_report_text(self):
"""Returns a console-formatted string of the complete stand report"""
return self._compile_report_text()
def console_report(self):
"""Prints a console-formatted string of the complete stand report"""
print(self._compile_report_text())
    def get_pdf_report_bytes_io(self):
        """Return the complete stand-report PDF as an in-memory BytesIO buffer."""
        pdf = self._compile_pdf_report()
        # fpdf's output(dest='S') returns a latin-1 string; re-encode to raw bytes.
        return BytesIO(pdf.output(dest='S').encode('latin-1'))
def pdf_report(self, filename: str, directory: str = None, start_file_upon_creation: bool = False):
"""Exports a pdf of the complete stand report to a user specified directory or if directory is None,
to the current working directory. Will open the created pdf report if start_file_upon_creation is True"""
check = extension_check(filename, '.pdf')
if directory:
file = join(directory, check)
else:
file = join(getcwd(), check)
pdf = self._compile_pdf_report()
pdf.output(file, 'F')
if start_file_upon_creation:
startfile(file)
def add_plot(self, plot: Plot):
"""Adds a plot to the stand's plots list and re-runs the calculations and statistics of the stand.
plot argument needs to be the a Plot Class"""
self.plots.append(plot)
self.plot_count += 1
for met in self.metrics:
self._update_metrics(met)
self.qmd = math.sqrt((self.ba_ac / self.tpa) / .005454)
self.vbar = self.bf_ac / self.ba_ac
self._update_species(plot)
self._update_logs(plot)
self.table_data = self._update_table_data()
self.summary_stand = self._update_summary_stand()
self.summary_logs = self._update_summary_logs()
self.summary_stats = self._update_summary_stats()
def import_sheet_quick(self, file_path: str):
"""Imports tree and plot data from a CSV or XLSX file for a quick cruise and adds that data to the stand"""
plots = import_from_sheet(file_path, self.name, 'q')
for plot_num in plots:
plot = Plot()
for tree in plots[plot_num]:
plot.add_tree(TimberQuick(self.plot_factor, *tree))
self.add_plot(plot)
def import_sheet_full(self, file_path: str):
"""Imports tree and plot data from a CSV or XLSX file for a full cruise and adds that data to the stand"""
plots = import_from_sheet(file_path, self.name, 'f')
for plot_num in plots:
plot = Plot()
for tree_data in plots[plot_num]:
args = tree_data[: -1]
logs = tree_data[-1]
tree = TimberFull(self.plot_factor, *args)
for log in logs:
tree.add_log(*log)
plot.add_tree(tree)
self.add_plot(plot)
def table_to_csv(self, filename: str, directory: str = None):
"""Creates or appends a CSV file with tree data from self.table_data"""
check = extension_check(filename, '.csv')
if directory:
file = join(directory, check)
else:
file = join(getcwd(), check)
if isfile(file):
allow = 'a'
start = 1
else:
allow = 'w'
start = 0
with open(file, allow, newline='') as csv_file:
csv_write = writer(csv_file, dialect=excel)
for i in self.table_data[start:]:
csv_write.writerow(i)
def table_to_excel(self, filename: str, directory: str = None):
"""Creates or appends an Excel file with tree data from self.table_data"""
check = extension_check(filename, '.xlsx')
if directory:
file = join(directory, check)
else:
file = join(getcwd(), check)
if isfile(file):
wb = load_workbook(file)
ws = wb.active
for i in self.table_data[1:]:
ws.append(i)
wb.save(file)
else:
wb = Workbook()
ws = wb.active
for i in self.table_data:
ws.append(i)
wb.save(file)
def _update_metrics(self, metric: str):
"""Updates stand metrics based on the metric entered in the argument, used internally"""
metric_list = [plot[metric] for plot in self.plots]
stats = self._get_stats(metric_list)
setattr(self, metric, stats['mean'])
setattr(self, f'{metric}_stats', stats)
    def _update_species(self, plot):
        """Accumulate the new plot's per-species values and refresh species means/stats, used internally."""
        # These keys are derived after the per-metric stats, not accumulated per plot.
        update_after = ['qmd', 'vbar', 'avg_hgt', 'hdr']
        if self.plot_count == 0:
            return
        else:
            for species in plot.species:
                # First sighting of a species: seed species_gross (lists of raw
                # per-plot values) and species/species_stats (scalars) via self.attrs.
                if species not in self.species_gross:
                    for attr in self.attrs:
                        if attr == '_gross':
                            getattr(self, f'species{attr}')[species] = {met: [] for met in self.metrics}
                        else:
                            getattr(self, f'species{attr}')[species] = {met: 0 for met in self.metrics}
                for key in plot.species[species]:
                    if key not in update_after:
                        self.species_gross[species][key].append(plot.species[species][key])
            for species in self.species_gross:
                for key in self.species_gross[species]:
                    if key not in update_after:
                        data = self.species_gross[species][key]
                        # Pad with zeros for plots where this species was absent,
                        # so every series has plot_count entries.
                        if len(data) < self.plot_count:
                            data += ([0] * (self.plot_count - len(data)))
                        stats = self._get_stats(data)
                        self.species[species][key] = stats['mean']
                        self.species_stats[species][key] = stats
                # Derived metrics from the freshly updated means.
                self.species[species]['qmd'] = math.sqrt((self.species[species]['ba_ac'] / self.species[species]['tpa']) / 0.005454)
                self.species[species]['vbar'] = self.species[species]['bf_ac'] / self.species[species]['ba_ac']
                if species == 'totals_all':
                    self.species[species]['avg_hgt'] = mean([p.avg_hgt for p in self.plots])
                    self.species[species]['hdr'] = mean([p.hdr for p in self.plots])
                else:
                    # Average height and HDR over every tree of this species in the stand.
                    trees = []
                    for p in self.plots:
                        for t in p.trees:
                            trees.append(t)
                    self.species[species]['avg_hgt'] = mean([t.height for t in trees if t.species == species])
                    self.species[species]['hdr'] = mean([t.hdr for t in trees if t.species == species])
    def _update_logs(self, plot):
        """Accumulate the new plot's log metrics and refresh their per-plot means, used internally."""
        if self.plot_count == 0:
            return
        else:
            subs = ['lpa', 'bf_ac', 'cf_ac']
            for species in plot.logs:
                if species not in self.logs:
                    self.logs[species] = {}
                for grade in plot.logs[species]:
                    # First sighting of a grade: seed every length-range bucket plus
                    # the per-grade totals bucket with gross-value lists.
                    if grade not in self.logs[species]:
                        self.logs[species][grade] = {rng: {sub: {'gross': [], 'mean': 0} for sub in subs} for rng in LOG_LENGTHS}
                        self.logs[species][grade]['totals_by_grade'] = {sub: {'gross': [], 'mean': 0} for sub in subs}
                    for rng in plot.logs[species][grade]:
                        if rng != 'display':
                            for sub in subs:
                                self.logs[species][grade][rng][sub]['gross'].append(plot.logs[species][grade][rng][sub])
            for species in self.logs:
                for grade in self.logs[species]:
                    for rng in self.logs[species][grade]:
                        for sub in subs:
                            gross = self.logs[species][grade][rng][sub]['gross']
                            # Zero-pad for plots where this grade/range was absent
                            # so the mean is over all plots.
                            if len(gross) < self.plot_count:
                                gross += ([0] * (self.plot_count - len(gross)))
                            self.logs[species][grade][rng][sub]['mean'] = mean(gross)
    def _update_table_data(self):
        """Converts stand data to plot/tree inventory data table layout, used internally"""
        heads = ['Stand', 'Plot Number', 'Tree Number', 'Species', 'DBH', 'Height',
                 'Stump Height', 'Log 1 Length', 'Log 1 Grade', 'Log 1 Defect', 'Between Logs Feet']
        master = []
        max_logs = []
        for i, plot in enumerate(self.plots):
            for j, tree in enumerate(plot.trees):
                # Plot/tree numbers in the sheet are 1-based.
                temp = [self.name, i + 1, j + 1]
                for key in ['species', 'dbh', 'height']:
                    temp.append(tree[key])
                len_logs = len(tree.logs)
                max_logs.append(len_logs)
                for k, lnum in enumerate(tree.logs):
                    log = tree.logs[lnum]
                    if lnum == 1:
                        # Stump height = first log's stem height minus its length and 1 ft trim.
                        temp.append(log.stem_height - log.length - 1)
                    for lkey in ['length', 'grade', 'defect']:
                        temp.append(log[lkey])
                    if k < len(tree.logs) - 1:
                        # Gap between this log's top and the next log's base (clamped at 0).
                        between = tree.logs[lnum+1].stem_height - log.stem_height - tree.logs[lnum+1].length - 1
                        if between < 0:
                            temp.append(0)
                        else:
                            temp.append(between)
                master.append(temp)
        # NOTE(review): max() raises ValueError when the stand has no trees — confirm
        # whether an empty stand is possible here.
        heads += add_logs_to_table_heads(max(max_logs))
        len_heads = len(heads)
        # Right-pad every row to the header width so the table is rectangular.
        for i in master:
            len_i = len(i)
            if len_i < len_heads:
                i += ['' for j in range(len_heads - len_i)]
        master.insert(0, heads)
        return master
def _update_summary_stand(self):
"""Updates the current stand conditions list of stand.summary_stand, used internally"""
heads = ['SPECIES'] + [head[1] for head in SORTED_HEADS]
body_data = []
for key in self.species:
if key == 'totals_all':
show = 'TOTALS'
else:
show = key
temp = [str(show)] + [format_comma(self.species[key][i[0]]) for i in SORTED_HEADS]
body_data.append(temp)
body_data.append(body_data.pop(0))
body_data.insert(0, heads)
return body_data
    def _update_summary_logs(self):
        """Updates the stand logs summary dict, data-tables are broken down by metric type --> species, used internally.
        Example: self.summary_logs['BOARD FEET PER ACRE']['DF'] --> data table"""
        table_data = {}
        # One table set per log metric: (dict key in self.logs, display title).
        tables = [['bf_ac', 'BOARD FEET PER ACRE'], ['cf_ac', 'CUBIC FEET PER ACRE'], ['lpa', 'LOGS PER ACRE']]
        for table in tables:
            metric_key = table[0]
            key = table[1]
            table_data[key] = {}
            for species in self.logs:
                if species == 'totals_all':
                    show = 'TOTALS'
                else:
                    show = ALL_SPECIES_NAMES[species]
                # Header row: grade label, one column per length range, then totals.
                table_data[key][show] = [['LOG GRADES'] + [rng.upper() for rng in LOG_LENGTHS] + ['TOTALS']]
                grade_sort = []
                for grade in self.logs[species]:
                    values = [self.logs[species][grade][rng][metric_key]['mean'] for rng in self.logs[species][grade]]
                    # Drop all-zero grade rows from the display.
                    if sum(values) > 0:
                        if grade == 'totals_by_length':
                            col_text = 'TOTALS'
                        else:
                            col_text = grade
                        grade_sort.append([col_text] + [format_comma(z) for z in values])
                # Order rows by the canonical grade ranking.
                grade_sort = sorted(grade_sort, key=lambda x: GRADE_SORT[x[0]])
                for g in grade_sort:
                    table_data[key][show].append(g)
            table_data[key] = reorder_dict(table_data[key])
        return table_data
def _update_summary_stats(self):
"""Updates the stand statistics dict, stats-tables are broken down by species, used internally.
Example: self.summary_stats['DF'] --> stats-table"""
tables = {}
for spp in self.species_stats:
if spp == 'totals_all':
show = 'TOTALS'
else:
show = ALL_SPECIES_NAMES[spp]
tables[show] = [['METRIC'] + [head.upper() for head in self.species_stats[spp]['tpa'] if head != 'low_avg_high'] + ['LOW',
'AVERAGE',
'HIGH']]
for key in self.species_stats[spp]:
temp = [key.upper()]
not_enough_data = False
for sub in self.species_stats[spp][key]:
x = self.species_stats[spp][key][sub]
if not_enough_data:
if x == 'Not enough data':
if sub == 'low_avg_high':
for i in range(3):
temp.append('-')
else:
temp.append('-')
else:
if x == 'Not enough data':
temp.append(x)
not_enough_data = True
else:
if sub == 'low_avg_high':
for i in x:
temp.append(format_comma(i))
elif sub == 'stderr_pct':
temp.append(format_pct(x))
else:
temp.append(format_comma(x))
tables[show].append(temp)
return reorder_dict(tables)
def _get_stats(self, data):
"""Runs the statistical calculations on a set of the stand conditions data, returns an updated sub dict, used internally"""
m = mean(data)
if len(data) >= 2:
std = stdev(data)
ste = std / math.sqrt(self.plot_count)
low_avg_high = [max(round(m - ste, 1), 0), m, m + ste]
d = {'mean': m,
'variance': variance(data),
'stdev': std,
'stderr': ste,
'stderr_pct': (ste / m) * 100,
'low_avg_high': low_avg_high}
else:
d = {'mean': m,
'variance': 'Not enough data',
'stdev': 'Not enough data',
'stderr': 'Not enough data',
'stderr_pct': 'Not enough data',
'low_avg_high': 'Not enough data'}
return d
def _compile_report_text(self):
"""Compiles the console-formatted report of all stand data and stats, used internally"""
n = '\n' * 4
console_text = f'{print_stand_species(self.summary_stand)}{n}'
console_text += f'{print_stand_logs(self.summary_logs)}{n}'
console_text += f'{print_stand_stats(self.summary_stats)}'
return console_text
    def _compile_pdf_report(self):
        """Build and return the PDF object for the complete stand report, used internally."""
        pdf = PDF()
        pdf.alias_nb_pages()
        pdf.add_page()
        pdf.compile_stand_report(self)
        return pdf
if __name__ == '__main__':
import argparse
import traceback
import sys
from os import mkdir, getcwd
from os.path import join, isfile, isdir, expanduser
from treetopper._utils import get_desktop_path
def make_dir_and_subdir(workflow_num):
desktop = get_desktop_path()
tt_dir = join(desktop, 'treetopper_outputs')
if not isdir(tt_dir):
mkdir(tt_dir)
wf_dir = join(tt_dir, f'workflow_{workflow_num}')
if not isdir(wf_dir):
mkdir(wf_dir)
return wf_dir
def get_package_path(filename):
path = None
for i in sys.path:
if 'AppData' in i and i[-13:] == 'site-packages':
path = i
break
tt_path = join(path, 'treetopper')
sheet_path = join(tt_path, 'example_csv_and_xlsx')
final = join(sheet_path, filename)
return final
parser = argparse.ArgumentParser(description='treetopper Example Workflows')
parser.add_argument('workflow_number', help='Enter the number of the workflow to run.\n Valid workflow numbers: 1, 2, 3, 4, 5, 6)')
args = parser.parse_args()
wf = args.workflow_number
while True:
if wf not in ['1', '2', '3', '4', '5', '6']:
print('Please enter a workflow number 1, 2, 3, 4, 5, or 6')
wf = input('Workflow #: ')
else:
break
wf = int(wf)
    def workflow_1(workflow_number):
        """Demo: quick-cruise stand from hard-coded trees; console reports, CSV export, 80 TPA thin."""
        stand = Stand('WF1', -20)
        plot_factor = stand.plot_factor
        tree_data = [
            # Plot 1
            [TimberQuick(plot_factor, 'DF', 29.5, 119), TimberQuick(plot_factor, 'WH', 18.9, 102),
             TimberQuick(plot_factor, 'WH', 20.2, 101), TimberQuick(plot_factor, 'WH', 19.9, 100),
             TimberQuick(plot_factor, 'DF', 20.6, 112)],
            # Plot 2
            [TimberQuick(plot_factor, 'DF', 25.0, 117), TimberQuick(plot_factor, 'DF', 14.3, 105),
             TimberQuick(plot_factor, 'DF', 20.4, 119), TimberQuick(plot_factor, 'DF', 16.0, 108),
             TimberQuick(plot_factor, 'RC', 20.2, 124), TimberQuick(plot_factor, 'RC', 19.5, 116),
             TimberQuick(plot_factor, 'RC', 23.4, 121), TimberQuick(plot_factor, 'DF', 17.8, 116),
             TimberQuick(plot_factor, 'DF', 22.3, 125)]
        ]
        for trees in tree_data:
            plot = Plot()
            for tree in trees:
                plot.add_tree(tree)
            stand.add_plot(plot)
        path = make_dir_and_subdir(workflow_number)
        stand.console_report()
        stand.table_to_csv(join(path, 'example_csv_export.csv'))
        thin80tpa = ThinTPA(stand, 80)
        thin80tpa.console_report()
        end_message = """**WORKFLOW 1 created a QUICK CRUISE stand from manually entered tree data.
It then ran a thinning scenario with a target density of 80 Trees per Acre considering all species and diameter ranges.
Outputs:
        Stand console report in terminal [print(stand_class.console_report)] ^above^
        Thinning console report in terminal [print(thin_class.console_report))] ^above^
        Plot data .csv "example_csv_export.csv" in desktop/treetopper_outputs/workflow_1/
"""
        print(f'\n\n{end_message}')
    def workflow_2(workflow_number):
        """Demo: full-cruise stand from hard-coded trees/logs; console reports, XLSX export, 120 BA/ac thin of DF+WH."""
        stand = Stand('WF2', 33.3)
        plot_factor = stand.plot_factor
        tree_data = [
            # Plot 1
            [[TimberFull(plot_factor, 'DF', 29.5, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 0], [102, 18, 'S4', 10]]],
             [TimberFull(plot_factor, 'WH', 18.9, 102), [[42, 40, 'S2', 0], [79, 36, 'S4', 5]]],
             [TimberFull(plot_factor, 'WH', 20.2, 101), [[42, 40, 'S2', 5], [83, 40, 'S4', 0]]],
             [TimberFull(plot_factor, 'WH', 19.9, 100), [[42, 40, 'S2', 0], [83, 40, 'S4', 15]]],
             [TimberFull(plot_factor, 'DF', 20.6, 112), [[42, 40, 'S2', 0], [83, 40, 'S3', 5], [100, 16, 'UT', 10]]]],
            # Plot 2
            [[TimberFull(plot_factor, 'DF', 25.0, 117), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [100, 16, 'S4', 0]]],
             [TimberFull(plot_factor, 'DF', 14.3, 105), [[42, 40, 'S3', 0], [79, 36, 'S4', 0]]],
             [TimberFull(plot_factor, 'DF', 20.4, 119), [[42, 40, 'S2', 5], [83, 40, 'S3', 5], [100, 16, 'S4', 5]]],
             [TimberFull(plot_factor, 'DF', 16.0, 108), [[42, 40, 'S3', 5], [83, 40, 'S3', 10]]],
             [TimberFull(plot_factor, 'RC', 20.2, 124), [[42, 40, 'CR', 5], [83, 40, 'CR', 5], [104, 20, 'CR', 5]]],
             [TimberFull(plot_factor, 'RC', 19.5, 116), [[42, 40, 'CR', 10], [83, 40, 'CR', 5], [100, 16, 'CR', 0]]],
             [TimberFull(plot_factor, 'RC', 23.4, 121), [[42, 40, 'CR', 0], [83, 40, 'CR', 0], [106, 22, 'CR', 5]]],
             [TimberFull(plot_factor, 'DF', 17.8, 116), [[42, 40, 'S2', 0], [83, 40, 'S3', 0], [100, 16, 'S4', 10]]],
             [TimberFull(plot_factor, 'DF', 22.3, 125), [[42, 40, 'SM', 0], [83, 40, 'S3', 5], [108, 24, 'S4', 0]]]]
        ]
        for trees in tree_data:
            plot = Plot()
            # Each entry pairs a TimberFull with its list of (stem ht, length, grade, defect) logs.
            for tree, logs in trees:
                for log in logs:
                    tree.add_log(*log)
                plot.add_tree(tree)
            stand.add_plot(plot)
        path = make_dir_and_subdir(workflow_number)
        stand.console_report()
        stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
        thin120ba = ThinBA(stand, 120, species_to_cut=['DF', 'WH'])
        thin120ba.console_report()
        end_message = """**WORKFLOW 2 created a FULL CRUISE stand from manually entered tree data.
It then ran a thinning scenario with a target density of 120 Basal Area per Acre harvesting only DF and WH considering all diameter ranges.
Outputs:
        Stand console report in terminal [print(stand_class.console_report)] ^above^
        Thinning console report in terminal [print(thin_class.console_report))] ^above^
        Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_2/
"""
        print(f'\n\n{end_message}')
    def workflow_3(workflow_number):
        """Demo: quick-cruise stand imported from XLSX; 25 RD/ac thin (DF/WH, 10-18 in dbh) that cannot fully hit target."""
        path = make_dir_and_subdir(workflow_number)
        stand = Stand('EX4', -30)
        stand.import_sheet_quick(get_package_path('Example_Excel_quick.xlsx'))
        stand.console_report()
        stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
        thin25rd = ThinRD(stand, 25, species_to_cut=['DF', 'WH'], min_dbh_to_cut=10, max_dbh_to_cut=18)
        thin25rd.console_report()
        end_message = """**WORKFLOW 3 created a QUICK CRUISE stand from importing plot data from an excel sheet.
It then ran a thinning scenario with a target density of 25 Relative Density per Acre harvesting only DF and WH, with a
minimum dbh of 10 inches and a maximum dbh of 18 inches. ** Note this thinning density won't be able to be achieved
fully because our parameters don't allow for the needed harvest density, but this is to illustrate that the thinning
will let the user know how much density was taken and how much more is needed to achieve the desired density target
Outputs:
        Stand console report in terminal [print(stand_class.console_report)] ^above^
        Thinning console report in terminal [print(thin_class.console_report))] ^above^
        Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_3/
"""
        print(f'\n\n{end_message}')
    def workflow_4(workflow_number):
        """Demo: full-cruise stand imported from CSV; 100 TPA thin intentionally above stand density to show TargetDensityError."""
        path = make_dir_and_subdir(workflow_number)
        stand = Stand('OK2', 46.94)
        stand.import_sheet_full(get_package_path('Example_CSV_full.csv'))
        stand.console_report()
        stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
        try:
            # Target density exceeds current density, so this is expected to raise.
            thin100tpa = ThinTPA(stand, 100)
            thin100tpa.console_report()
        except TargetDensityError as e:
            print(traceback.format_exc())
        end_message = """**WORKFLOW 4 created a FULL CRUISE stand from importing plot data from an csv sheet.
It then ran a thinning scenario with a target density of 100 Trees per Acre considering all species and diameter ranges.
** Note this thinning density is greater than the current stand density and the Thin Class will throw a TargetDensityError exception
which will explain what went wrong.
Outputs:
        Stand console report in terminal [print(stand_class.console_report)] ^above^
        Thinning console report in terminal [print(thin_class.console_report))] ^above^
        Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_4/
"""
        print(f'\n\n{end_message}')
def workflow_5(workflow_number):
    """Demo: quick-cruise CSV import, PDF reports, and a basal-area thin.

    Thins to 140 BA/acre, cutting only DF, WH and RA up to 24" DBH, and
    writes PDF reports for both the stand and the thinning scenario.
    """
    path = make_dir_and_subdir(workflow_number)
    # Stand name and plot-factor argument per the example data set.
    stand = Stand('EX3', 33.3)
    stand.import_sheet_quick(get_package_path('Example_CSV_quick.csv'))
    stand.pdf_report(join(path, 'stand_report.pdf'))
    stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
    thin140ba = ThinBA(stand, 140, species_to_cut=['DF', 'WH', 'RA'], max_dbh_to_cut=24)
    thin140ba.pdf_report(join(path, 'thin_report.pdf'))
    end_message = """**WORKFLOW 5 created a QUICK CRUISE stand from importing plot data from an csv sheet.
    It then ran a thinning scenario with a target density of 140 Basal Area per Acre harvesting only DF, WH and RA with a maximum diameter of 24 inches.
    Outputs:
    Stand PDF report "stand_report.pdf" from [stand_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/
    Thinning PDF report "thin_report.pdf" from [thin_class.pdf_report()] in desktop/treetopper_outputs/workflow_5/
    Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_5/
    """
    print(f'\n\n{end_message}')
def workflow_6(workflow_number):
    """Demo: full-cruise Excel import and export to FVS-formatted databases.

    FVS is the US Forest Service's Forest Vegetation Simulator; this
    workflow writes Access, SQLite and Excel databases for it.

    NOTE(review): the stand is created with a plot factor of -30 —
    presumably a fixed-area-plot convention in treetopper; confirm against
    the Stand API before changing.
    """
    path = make_dir_and_subdir(workflow_number)
    stand = Stand('OK1', -30)
    stand.import_sheet_full(get_package_path('Example_Excel_full.xlsx'))
    stand.table_to_excel(join(path, 'example_xlsx_export.xlsx'))
    fvs = FVS()
    # Region/forest/location codes and site species/index for the FVS run.
    fvs.set_stand(stand, 'PN', 612, 6, 45, 'DF', 110)
    fvs.access_db('access_db', directory=path)
    fvs.sqlite_db('sqlite_db', directory=path)
    fvs.excel_db('excel_db', directory=path)
    end_message = """**WORKFLOW 6 created a FULL CRUISE stand from importing plot data from an excel sheet.
    It then ran the FVS module to create FVS formatted databases from the stand data. FVS is the US Forest Service's Forest Vegetation Simulator.
    Outputs:
    FVS Access database "access_db.db" from [fvs_class.access_db()] in desktop/treetopper_outputs/workflow_6/
    FVS Suppose file "Suppose.loc" in desktop/treetopper_outputs/workflow_6/. ** FVS Legacy needs a .loc file along with the database.
    FVS SQLite database "sqlite_db.db" from [fvs_class.sqlite_db()] in desktop/treetopper_outputs/workflow_6/
    FVS Excel database "excel_db.db" from [fvs_class.excel_db()] in desktop/treetopper_outputs/workflow_6/
    Plot data .xlsx "example_xlsx_export.xlsx" in desktop/treetopper_outputs/workflow_6/
    """
    print(f'\n\n{end_message}')
def main(workflow_number):
    """Dispatch to the requested example workflow (1-6)."""
    workflows = {
        1: workflow_1,
        2: workflow_2,
        3: workflow_3,
        4: workflow_4,
        5: workflow_5,
        6: workflow_6,
    }
    selected = workflows[workflow_number]
    selected(workflow_number)
# Run the workflow selected by the module-level `wf` value, framed by
# horizontal rules for readability in the terminal output.
print(f"\n\n{'-' * 200}\n\n")
main(wf)
print(f"\n\n{'-' * 200}\n\n")
| 41.46794 | 150 | 0.565469 |
f703ee65ebc49d049639276ee2bcc8f8f67095eb | 992 | py | Python | pyclopedia/p01_beginner/p03_data_structure/p02_list/p02_slice_operator.py | MacHu-GWU/pyclopedia-project | c6ee156eb40bc5a4ac5f51aa735b6fd004cb68ee | [
"MIT"
] | null | null | null | pyclopedia/p01_beginner/p03_data_structure/p02_list/p02_slice_operator.py | MacHu-GWU/pyclopedia-project | c6ee156eb40bc5a4ac5f51aa735b6fd004cb68ee | [
"MIT"
] | null | null | null | pyclopedia/p01_beginner/p03_data_structure/p02_list/p02_slice_operator.py | MacHu-GWU/pyclopedia-project | c6ee156eb40bc5a4ac5f51aa735b6fd004cb68ee | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
def example1():
    """Demonstrate forward slicing with a stride.

    seq[::stride]        -> every stride-th item from the start
    seq[low::stride]     -> every stride-th item starting at ``low``
    seq[:high:stride]    -> strided items up to (excluding) ``high``
    seq[low:high:stride] -> strided items within [low, high)
    """
    digits = list("01234567")
    assert digits[::2] == list("0246")    # stride 2, starting at index 0
    assert digits[1::2] == list("1357")   # stride 2, starting at index 1
    assert digits[:4:2] == list("02")     # stride 2, stop before index 4
    assert digits[2:6:2] == list("24")    # stride 2 within [2, 6)


example1()
def example2():
    """Demonstrate reversed slicing with a negative stride."""
    digits = list("01234567")
    assert digits[::-1] == list("76543210")   # full reversal
    assert digits[::-2] == list("7531")       # every 2nd item, from the end
    assert digits[-2::-2] == list("6420")     # every 2nd item, from index -2
    assert digits[:3:-2] == list("75")        # every 2nd item, down to index 3 (excl.)


example2()
| 29.176471 | 73 | 0.519153 |
f7040c6cca5a86749407c6d12a090a8e1288ff52 | 6,990 | py | Python | src/teleop_tools/mouse_teleop/scripts/mouse_teleop.py | aljanabim/svea | 37d27089237af3777456d7664473ffb811dabf33 | [
"MIT"
] | 5 | 2021-06-25T13:09:30.000Z | 2022-03-15T11:33:07.000Z | src/teleop_tools/mouse_teleop/scripts/mouse_teleop.py | aljanabim/svea | 37d27089237af3777456d7664473ffb811dabf33 | [
"MIT"
] | null | null | null | src/teleop_tools/mouse_teleop/scripts/mouse_teleop.py | aljanabim/svea | 37d27089237af3777456d7664473ffb811dabf33 | [
"MIT"
] | 17 | 2019-09-29T10:22:41.000Z | 2021-04-08T12:38:37.000Z | #! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Enrique Fernandez
# Released under the BSD License.
#
# Authors:
# * Enrique Fernandez
import Tkinter
import rospy
from geometry_msgs.msg import Twist, Vector3
import numpy
class MouseTeleop():
    """Tkinter GUI that converts mouse drags into ROS Twist messages.

    Dragging with button 1 sets angular (or, in holonomic mode, linear)
    velocity; the canvas draws the commanded velocity as a line and an
    arc.  NOTE(review): screen x/y appear deliberately swapped throughout
    (event.y feeds the x-channel) so that vertical drag maps to forward
    velocity — confirm before "fixing".
    """

    def __init__(self):
        # Retrieve params:
        self._frequency = rospy.get_param('~frequency', 0.0)
        self._scale = rospy.get_param('~scale', 1.0)
        self._holonomic = rospy.get_param('~holonomic', False)

        # Create twist publisher:
        self._pub_cmd = rospy.Publisher('mouse_vel', Twist, queue_size=100)

        # Initialize twist components to zero:
        self._v_x = 0.0
        self._v_y = 0.0
        self._w = 0.0

        # Initialize mouse position (x, y) to None (unknown); it's initialized
        # when the mouse button is pressed on the _start callback that handles
        # that event:
        self._x = None
        self._y = None

        # Create window:
        self._root = Tkinter.Tk()
        self._root.title('Mouse Teleop')

        # Make window non-resizable:
        self._root.resizable(0, 0)

        # Create canvas:
        self._canvas = Tkinter.Canvas(self._root, bg='white')

        # Create canvas objects:
        self._canvas.create_arc(0, 0, 0, 0, fill='red', outline='red',
                width=1, style=Tkinter.PIESLICE, start=90.0, tag='w')
        self._canvas.create_line(0, 0, 0, 0, fill='blue', width=4, tag='v_x')

        if self._holonomic:
            self._canvas.create_line(0, 0, 0, 0,
                    fill='blue', width=4, tag='v_y')

        # Create canvas text objects:
        self._text_v_x = Tkinter.StringVar()
        if self._holonomic:
            self._text_v_y = Tkinter.StringVar()
        self._text_w = Tkinter.StringVar()

        self._label_v_x = Tkinter.Label(self._root,
                anchor=Tkinter.W, textvariable=self._text_v_x)
        if self._holonomic:
            self._label_v_y = Tkinter.Label(self._root,
                    anchor=Tkinter.W, textvariable=self._text_v_y)
        self._label_w = Tkinter.Label(self._root,
                anchor=Tkinter.W, textvariable=self._text_w)

        if self._holonomic:
            self._text_v_x.set('v_x = %0.2f m/s' % self._v_x)
            self._text_v_y.set('v_y = %0.2f m/s' % self._v_y)
            self._text_w.set( 'w = %0.2f deg/s' % self._w)
        else:
            self._text_v_x.set('v = %0.2f m/s' % self._v_x)
            self._text_w.set( 'w = %0.2f deg/s' % self._w)

        self._label_v_x.pack()
        if self._holonomic:
            self._label_v_y.pack()
        self._label_w.pack()

        # Bind event handlers:
        self._canvas.bind('<Button-1>', self._start)
        self._canvas.bind('<ButtonRelease-1>', self._release)
        self._canvas.bind('<Configure>', self._configure)

        if self._holonomic:
            # Holonomic mode: plain drag = linear, Shift+drag = angular.
            self._canvas.bind('<B1-Motion>', self._mouse_motion_linear)
            self._canvas.bind('<Shift-B1-Motion>', self._mouse_motion_angular)
            self._root.bind('<Shift_L>', self._change_to_motion_angular)
            self._root.bind('<KeyRelease-Shift_L>',
                    self._change_to_motion_linear)
        else:
            self._canvas.bind('<B1-Motion>', self._mouse_motion_angular)

        self._canvas.pack()

        # If frequency is positive, use synchronous publishing mode:
        if self._frequency > 0.0:
            # Create timer for the given frequency to publish the twist:
            period = rospy.Duration(1.0 / self._frequency)
            self._timer = rospy.Timer(period, self._publish_twist)

        # Start window event manager main loop (blocks until window closes):
        self._root.mainloop()

    def __del__(self):
        # Stop periodic publishing (if enabled) and tear down the GUI loop.
        if self._frequency > 0.0:
            self._timer.shutdown()
        self._root.quit()

    def _start(self, event):
        # Button press: record the drag origin and zero all velocities.
        self._x, self._y = event.y, event.x
        self._y_linear = self._y_angular = 0
        self._v_x = self._v_y = self._w = 0.0

    def _release(self, event):
        # Button release: command zero velocity immediately.
        self._v_x = self._v_y = self._w = 0.0
        self._send_motion()

    def _configure(self, event):
        # Canvas resize: cache dimensions and derive centre / arc radius.
        self._width, self._height = event.height, event.width
        self._c_x = self._height / 2.0
        self._c_y = self._width / 2.0
        self._r = min(self._height, self._width) * 0.25

    def _mouse_motion_linear(self, event):
        # Drag in holonomic mode: both linear components follow the drag.
        self._v_x, self._v_y = self._relative_motion(event.y, event.x)
        self._send_motion()

    def _mouse_motion_angular(self, event):
        # Drag in default mode: vertical -> linear v_x, horizontal -> yaw w.
        self._v_x, self._w = self._relative_motion(event.y, event.x)
        self._send_motion()

    def _update_coords(self, tag, x0, y0, x1, y1):
        # Translate coordinates from the canvas centre and move the item.
        x0 += self._c_x
        y0 += self._c_y
        x1 += self._c_x
        y1 += self._c_y
        self._canvas.coords(tag, (x0, y0, x1, y1))

    def _draw_v_x(self, v):
        # Draw the forward-velocity indicator as a vertical line.
        x = -v * float(self._width)
        self._update_coords('v_x', 0, 0, 0, x)

    def _draw_v_y(self, v):
        # Draw the lateral-velocity indicator as a horizontal line.
        y = -v * float(self._height)
        self._update_coords('v_y', 0, 0, y, 0)

    def _draw_w(self, w):
        # Draw the yaw indicator as a pie-slice arc spanning the yaw angle.
        x0 = y0 = -self._r
        x1 = y1 = self._r
        self._update_coords('w', x0, y0, x1, y1)
        yaw = w * numpy.rad2deg(self._scale)
        self._canvas.itemconfig('w', extent=yaw)

    def _send_motion(self):
        # Scale the raw drag fractions into velocities, refresh the canvas
        # and labels, then publish the Twist.
        v_x = self._v_x * self._scale
        v_y = self._v_y * self._scale
        w = self._w * self._scale

        linear = Vector3(v_x, v_y, 0.0)
        angular = Vector3(0.0, 0.0, w)

        self._draw_v_x(self._v_x)
        if self._holonomic:
            self._draw_v_y(self._v_y)
        self._draw_w(self._w)

        if self._holonomic:
            self._text_v_x.set('v_x = %0.2f m/s' % self._v_x)
            self._text_v_y.set('v_y = %0.2f m/s' % self._v_y)
            self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w))
        else:
            self._text_v_x.set('v = %0.2f m/s' % self._v_x)
            self._text_w.set( 'w = %0.2f deg/s' % numpy.rad2deg(self._w))

        twist = Twist(linear, angular)
        self._pub_cmd.publish(twist)

    def _publish_twist(self, event):
        # Timer callback (synchronous mode): republish the current motion.
        self._send_motion()

    def _relative_motion(self, x, y):
        # Return drag offsets normalized to [-1, 1] of the canvas size.
        dx = self._x - x
        dy = self._y - y

        dx /= float(self._width)
        dy /= float(self._height)

        dx = max(-1.0, min(dx, 1.0))
        dy = max(-1.0, min(dy, 1.0))

        return dx, dy

    def _change_to_motion_linear(self, event):
        # Shift released mid-drag: rebase the origin so the linear motion
        # continues smoothly from the current pointer position.
        if self._y is not None:
            y = event.x
            self._y_angular = self._y - y
            self._y = self._y_linear + y

    def _change_to_motion_angular(self, event):
        # Shift pressed mid-drag: rebase the origin for angular motion.
        if self._y is not None:
            y = event.x
            self._y_linear = self._y - y
            self._y = self._y_angular + y
def main():
    """Initialise the ROS node and run the teleop GUI until it closes."""
    rospy.init_node('mouse_teleop')

    MouseTeleop()
if __name__ == '__main__':
    try:
        main()
    except rospy.ROSInterruptException:
        # Normal shutdown path when ROS interrupts the node; exit quietly.
        pass
| 28.884298 | 78 | 0.572246 |
f704533cb05012bfc523241ab664a84ebc5b8dad | 7,054 | py | Python | obsolete/reports/pipeline_capseq/trackers/macs_replicated_intervals.py | kevinrue/cgat-flow | 02b5a1867253c2f6fd6b4f3763e0299115378913 | [
"MIT"
] | 11 | 2018-09-07T11:33:23.000Z | 2022-01-07T12:16:11.000Z | obsolete/reports/pipeline_capseq/trackers/macs_replicated_intervals.py | kevinrue/cgat-flow | 02b5a1867253c2f6fd6b4f3763e0299115378913 | [
"MIT"
] | 102 | 2018-03-22T15:35:26.000Z | 2022-03-23T17:46:16.000Z | obsolete/reports/pipeline_capseq/trackers/macs_replicated_intervals.py | kevinrue/cgat-flow | 02b5a1867253c2f6fd6b4f3763e0299115378913 | [
"MIT"
] | 7 | 2018-06-11T15:01:41.000Z | 2020-03-31T09:29:33.000Z | import os
import sys
import re
import types
import itertools
import matplotlib.pyplot as plt
import numpy
import scipy.stats
import numpy.ma
import Stats
import Histogram
from cgatReport.Tracker import *
from cpgReport import *
##########################################################################
class replicatedIntervalSummary(cpgTracker):
    """Per-track summary statistics (interval count, mean length and mean
    read count) for intervals called by the peak finder."""
    mPattern = "_replicated_intervals$"

    def __call__(self, track, slice=None):
        return self.getRow(
            "SELECT COUNT(*) as Intervals, round(AVG(length),0) as Mean_length, round(AVG(nprobes),0) as Mean_reads FROM %(track)s_replicated_intervals" % locals())
##########################################################################
class replicatedIntervalLengths(cpgTracker):
    """Distribution of interval lengths for the replicated intervals."""
    mPattern = "_replicated_intervals$"

    def __call__(self, track, slice=None):
        return self.getAll(
            "SELECT length FROM %(track)s_replicated_intervals" % locals())
##########################################################################
class replicatedIntervalPeakValues(cpgTracker):
    """Distribution of maximum interval coverage (read count at the peak)."""
    mPattern = "_replicated_intervals$"

    def __call__(self, track, slice=None):
        return self.getAll(
            "SELECT peakval FROM %(track)s_replicated_intervals" % locals())
##########################################################################
class replicatedIntervalAverageValues(cpgTracker):
    """Distribution of mean coverage (average read count per interval)."""
    mPattern = "_replicated_intervals$"

    def __call__(self, track, slice=None):
        return self.getAll(
            "SELECT avgval FROM %(track)s_replicated_intervals" % locals())
##########################################################################
class replicatedIntervalFoldChange(cpgTracker):
    """Fold-change values for all replicated intervals."""
    mPattern = "_replicated_intervals$"

    def __call__(self, track, slice=None):
        return self.getAll(
            "SELECT fold FROM %(track)s_replicated_intervals" % locals())
##########################################################################
##########################################################################
##########################################################################
class replicatedIntervalPeakLocation(cpgTracker):
    """Relative position of the peak centre inside each interval, measured
    from both ends (values centred on 0 = interval midpoint)."""
    mPattern = "_replicated_intervals$"

    def __call__(self, track, slice=None):
        from_start = self.getValues(
            "SELECT (PeakCenter - start) / CAST( Length as FLOAT) - 0.5 FROM %(track)s_replicated_intervals" % locals())
        from_end = self.getValues(
            "SELECT (end - PeakCenter) / CAST( Length as FLOAT) - 0.5 FROM %(track)s_replicated_intervals" % locals())
        return {"distance": from_start + from_end}
##########################################################################
class replicatedIntervalPeakDistance(cpgTracker):
    """Absolute distance (bp) of the peak centre from each interval end."""
    mPattern = "_replicated_intervals$"

    def __call__(self, track, slice=None):
        from_start = self.getValues(
            "SELECT PeakCenter - start FROM %(track)s_replicated_intervals" % locals())
        from_end = self.getValues(
            "SELECT end - PeakCenter FROM %(track)s_replicated_intervals" % locals())
        return {"distance": from_start + from_end}
##########################################################################
##########################################################################
##########################################################################
class replicatedIntervalCpGDensity(cpgTracker):
    """CpG density (pCpG) of replicated intervals versus the matched
    control and the 5'/3' flanking regions.

    Returns an ordered mapping: series label -> list of pCpG values.
    """
    pattern = "(.*)_replicated_composition"

    def __call__(self, track, slice=None):
        data1 = self.getValues(
            "SELECT pCpG FROM %(track)s_replicated_composition" % locals())
        data2 = self.getValues(
            "SELECT pCpG FROM %(track)s_replicated_composition_control" % locals())
        data3 = self.getValues(
            "SELECT pCpG FROM %(track)s_replicated_composition_flanking5" % locals())
        data4 = self.getValues(
            "SELECT pCpG FROM %(track)s_replicated_composition_flanking3" % locals())
        # zip() pairs labels with value lists; the mapping constructor
        # consumes the iterable directly, so no list() materialization is
        # needed (the old list(zip(...)) built a throwaway list).
        return odict(zip(("CAPseq composition", "Control composition",
                          "5` Flank Composition", "3` Flank Composition"),
                         (data1, data2, data3, data4)))
##########################################################################
class replicatedIntervalCpGObsExp1(cpgTracker):
    """CpG observed/expected ratio (method 1, CpG_ObsExp1) of replicated
    intervals versus control and 5'/3' flanking regions.

    Returns an ordered mapping: series label -> list of ratio values.
    """
    pattern = "(.*)_replicated_composition"

    def __call__(self, track, slice=None):
        data1 = self.getValues(
            "SELECT CpG_ObsExp1 FROM %(track)s_replicated_composition" % locals())
        data2 = self.getValues(
            "SELECT CpG_ObsExp1 FROM %(track)s_replicated_composition_control" % locals())
        data3 = self.getValues(
            "SELECT CpG_ObsExp1 FROM %(track)s_replicated_composition_flanking5" % locals())
        data4 = self.getValues(
            "SELECT CpG_ObsExp1 FROM %(track)s_replicated_composition_flanking3" % locals())
        # The mapping constructor consumes zip() directly; the previous
        # list(zip(...)) built an unnecessary intermediate list.
        return odict(zip(("CAPseq composition", "Control composition",
                          "5` Flank Composition", "3` Flank Composition"),
                         (data1, data2, data3, data4)))
##########################################################################
class replicatedIntervalCpGObsExp2(cpgTracker):
    """CpG observed/expected ratio (CpG_ObsExp) of replicated intervals
    versus control and 5'/3' flanking regions.

    Returns an ordered mapping: series label -> list of ratio values.
    """
    pattern = "(.*)_replicated_composition"

    def __call__(self, track, slice=None):
        data1 = self.getValues(
            "SELECT CpG_ObsExp FROM %(track)s_replicated_composition" % locals())
        data2 = self.getValues(
            "SELECT CpG_ObsExp FROM %(track)s_replicated_composition_control" % locals())
        data3 = self.getValues(
            "SELECT CpG_ObsExp FROM %(track)s_replicated_composition_flanking5" % locals())
        data4 = self.getValues(
            "SELECT CpG_ObsExp FROM %(track)s_replicated_composition_flanking3" % locals())
        # The mapping constructor consumes zip() directly; the previous
        # list(zip(...)) built an unnecessary intermediate list.
        return odict(zip(("CAPseq composition", "Control composition",
                          "5` Flank Composition", "3` Flank Composition"),
                         (data1, data2, data3, data4)))
##########################################################################
class replicatedIntervalGCContent(cpgTracker):
    """GC content (pGC) of replicated intervals versus control and 5'/3'
    flanking regions.

    Returns an ordered mapping: series label -> list of pGC values.
    """
    pattern = "(.*)_replicated_composition"

    def __call__(self, track, slice=None):
        data1 = self.getValues(
            "SELECT pGC FROM %(track)s_replicated_composition" % locals())
        data2 = self.getValues(
            "SELECT pGC FROM %(track)s_replicated_composition_control" % locals())
        data3 = self.getValues(
            "SELECT pGC FROM %(track)s_replicated_composition_flanking5" % locals())
        data4 = self.getValues(
            "SELECT pGC FROM %(track)s_replicated_composition_flanking3" % locals())
        # The mapping constructor consumes zip() directly; the previous
        # list(zip(...)) built an unnecessary intermediate list.
        return odict(zip(("CAPseq composition", "Control composition",
                          "5` Flank Composition", "3` Flank Composition"),
                         (data1, data2, data3, data4)))
| 38.546448 | 164 | 0.568897 |
f7046f8691fd007d37014cd2211e82ad6141f5c5 | 1,700 | py | Python | src/Products/PluginIndexes/util.py | icemac/Products.ZCatalog | 697719cc0f1a016ab7b874237271d3e11693e78b | [
"ZPL-2.1"
] | null | null | null | src/Products/PluginIndexes/util.py | icemac/Products.ZCatalog | 697719cc0f1a016ab7b874237271d3e11693e78b | [
"ZPL-2.1"
] | 1 | 2021-02-10T16:05:38.000Z | 2021-02-10T16:05:38.000Z | src/Products/PluginIndexes/util.py | icemac/Products.ZCatalog | 697719cc0f1a016ab7b874237271d3e11693e78b | [
"ZPL-2.1"
] | 1 | 2021-02-10T15:34:58.000Z | 2021-02-10T15:34:58.000Z | ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
from datetime import datetime
from DateTime.DateTime import DateTime
import six
# Largest signed 32-bit integer; default clamp range for date-to-minutes
# conversion below.
MAX32 = int(2 ** 31 - 1)
def safe_callable(ob):
    """Return whether *ob* is callable, tolerating ExtensionClasses and
    Acquisition wrappers whose attribute access may raise."""
    try:
        ob.__class__
    except AttributeError:
        # No usable __class__: fall back to the builtin check.
        return callable(ob)
    try:
        call = ob.__call__
    except AttributeError:
        # No __call__ slot: only (old/new-style) classes remain callable.
        return isinstance(ob, six.class_types)
    return bool(call)
def datetime_to_minutes(value, precision=1,
                        max_value=MAX32, min_value=-MAX32):
    """Convert a date/time-ish value into integer minutes.

    Strings and ``datetime`` objects are first coerced via ``DateTime``;
    ``DateTime`` values are flattened to minutes since the epoch.  The
    result is snapped down to *precision* minutes and must fit within
    [min_value, max_value] (default: signed 32-bit range), otherwise an
    ``OverflowError`` is raised.  ``None`` passes through unchanged.
    """
    if value is None:
        return value

    if isinstance(value, (str, datetime)):
        value = DateTime(value)
    if isinstance(value, DateTime):
        # flatten to minutes
        value = value.millis() / 1000 / 60

    if precision > 1:
        # snap down to the requested precision bucket
        value -= value % precision

    value = int(value)
    if not (min_value <= value <= max_value):
        # value must be an integer fitting in the range (default 32 bit)
        raise OverflowError(
            '{0} is not within the range of dates allowed.'.format(value))
    return value
| 28.813559 | 78 | 0.606471 |
f704acb27652fac4c53df6d424b7bb033879e704 | 15,975 | py | Python | CollisionAvoidanceMonitor/main.py | GustavLero/EPICS-inst_servers | 4bcdd6a80f1d9e074de3f0f7c66968d506981988 | [
"BSD-3-Clause"
] | null | null | null | CollisionAvoidanceMonitor/main.py | GustavLero/EPICS-inst_servers | 4bcdd6a80f1d9e074de3f0f7c66968d506981988 | [
"BSD-3-Clause"
] | null | null | null | CollisionAvoidanceMonitor/main.py | GustavLero/EPICS-inst_servers | 4bcdd6a80f1d9e074de3f0f7c66968d506981988 | [
"BSD-3-Clause"
] | null | null | null | import sys
import os
import ode
import logging
import threading
from time import sleep, time
from genie_python.genie_startup import *
import pv_server
import render
from configurations import config_zoom as config
from collide import collide, CollisionDetector
from geometry import GeometryBox
from move import move_all
sys.path.insert(0, os.path.abspath(os.environ["MYDIRCD"]))
from monitor import Monitor
from server_common.loggers.isis_logger import IsisLogger
# Route all module logging to the root handler with timestamps and the
# originating thread name (several worker threads log concurrently).
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s (%(threadName)-2s) %(message)s',
                    )
def auto_seek(start_step_size, start_values, end_value, geometries, moves, axis_index, ignore, fine_step=None):
    """Step one axis from its current value towards ``end_value`` and return
    the last collision-free value reached (the dynamic soft limit).

    The step size is shrunk adaptively if any geometry vertex moves further
    than ``start_step_size`` per step; on first collision the search either
    stops at the previous value or, when ``fine_step`` is given, recurses
    with the finer step to refine the limit.  Raises ValueError if no limit
    could be determined.  NOTE: mutates ``start_values`` when recursing.
    """
    limit = end_value
    current_value = start_values[axis_index]
    if current_value == end_value:
        return end_value
    values = start_values[:]
    last_value = None
    old_points = None
    step_checked = False
    if current_value < end_value:
        # Going up
        def comp(a, b):
            return a < b
        step_size = abs(start_step_size)
    else:
        # Going down
        def comp(a, b):
            return a > b
        step_size = -abs(start_step_size)
    while last_value is None or comp(last_value, end_value):
        # Move if we need to
        if last_value is not None:
            current_value += step_size
            # print "Using step size of %f" % step_size
        else:
            current_value = start_values[axis_index]
        if not comp(current_value, end_value):
            current_value = end_value
        values[axis_index] = current_value
        move_all(geometries, moves, values=values[:])
        # Check nothing moved too far
        if step_checked is False:
            new_points = [g.get_vertices() for g in geometries]
            if old_points is not None:
                delta = max_delta(geometries, new_points, old_points)
                if delta > start_step_size:
                    # Work out a new step size
                    step_size *= start_step_size/delta
                    last_value = None
                    continue
                step_checked = True
        # Check for collisions
        collisions = collide(geometries, ignore)
        if any(collisions):
            if current_value == start_values[axis_index]:
                # There was already a collision
                limit = current_value
                break
            elif fine_step and fine_step < step_size:
                start_values[axis_index] = last_value
                limit = auto_seek(fine_step, start_values, current_value, geometries, moves, axis_index, ignore)
            else:
                limit = last_value
            break
        old_points = new_points[:]
        last_value = current_value
    # print "Found limits for axis %d using step size of %f" % (axis_index, step_size)
    if limit is None:
        raise ValueError("Null limit")
    return limit
def max_delta(geometries, new_points, old_points):
    """Return the largest Euclidean vertex displacement between two vertex
    snapshots (``old_points``/``new_points``), one vertex list per geometry.

    NOTE(review): ``n - o`` presumably yields a 3-component vector type —
    each per-vertex delta is unpacked as exactly (x, y, z); confirm against
    GeometryBox.get_vertices.
    """
    # Calculate the greatest position deltas
    delta = 0
    for j in range(len(geometries)):
        old = old_points[j]
        new = new_points[j]
        deltas = [map(float, n - o) for n, o in zip(new, old)]
        for i, (x, y, z) in enumerate(deltas):
            # Compare squared magnitudes; the sqrt is taken once at the end.
            mag = float(x) ** 2 + float(y) ** 2 + float(z) ** 2
            if mag > delta:
                delta = mag
                # print "New max delta of %f (%f, %f, %f) for body %d at %s from %s" % \
                # (mag ** 0.5, x, y, z, j, new[i], old[i])
    delta = float(delta) ** 0.5
    return delta
def compare(sign):
    """Return a two-argument comparison predicate for a travel direction:
    strictly-greater for positive *sign*, strictly-less otherwise."""
    if sign > 0:
        def predicate(a, b):
            return a > b
    else:
        def predicate(a, b):
            return a < b
    return predicate
def auto_seek_limits(geometries, ignore, moves, values, limits, coarse=1.0, fine=0.1):
    """Find the collision-free travel range for every axis by scanning
    outwards from the current position towards each configured hard limit.

    Returns a list of [lower, upper] pairs, one per axis.
    """
    found = []
    for axis, _ in enumerate(values):
        logging.debug("Seeking for axis %d" % axis)
        axis_limits = limits[axis]
        lower = auto_seek(coarse, values[:], min(axis_limits), geometries, moves, axis, ignore, fine)
        upper = auto_seek(coarse, values[:], max(axis_limits), geometries, moves, axis, ignore, fine)
        found.append([lower, upper])
        logging.debug("Found limits for axis %d at %s, %s" % (axis, upper, lower))
    return found
def look_ahead(start_values, pvs, is_moving, geometries, moves, ignore, max_movement=1.0, max_time=10., time_step=0.1):
    """Simulate the next ``max_time`` seconds of motion for all currently
    moving axes and predict whether a collision will occur.

    Reads each moving axis's set point (.DVAL) and speed (.VELO) from EPICS,
    advances all axes together in ``time_step`` increments (shrinking the
    step if geometry moves more than ``max_movement`` per step), and returns
    a (message, safe_time, safe) tuple where ``safe_time`` is the last
    collision-free simulated time.
    """
    # Get the indices of the axes currently moving
    moving = [i for i, m in enumerate(is_moving) if m == 0]  # DMOV = 0 when motors not moving
    msg = "No collisions predicted in the next %fs" % max_time
    safe_time = max_time
    safe = True
    # Only worth calculating if more than one axis is moving
    if len(moving) > 1:
        set_points = [None] * len(pvs)
        speeds = [None] * len(pvs)
        directions = [None] * len(pvs)
        # Assume everything has finished moving
        move_complete = [True] * len(pvs)
        # Get some settings:
        for i in moving:
            pv = pvs[i]
            set_point = get_pv(pv + '.DVAL')
            speed = get_pv(pv + '.VELO')
            direction = 0.
            move = set_point - start_values[i]
            if move > 0:
                direction = 1.
            if move < 0:
                direction = -1.
            set_points[i] = set_point
            speeds[i] = speed
            directions[i] = direction
            # This axis has not finished moving!
            move_complete[i] = False
        current_time = 0.
        values = start_values[:]
        old_points = None
        step_checked = False
        last_time = None
        while current_time < max_time:
            # last_time is None signals a (re)start of the simulation.
            if last_time is None:
                values = start_values[:]
                current_time = 0.
                old_points = None
            else:
                current_time += time_step
            for i in moving:
                if move_complete[i] is False:
                    # Constant-velocity extrapolation, clamped at the set point.
                    values[i] = start_values[i] + (directions[i] * speeds[i] * current_time)
                    comp = compare(directions[i])(values[i], set_points[i])
                    if comp:
                        values[i] = set_points[i]
            # Move the bodies
            move_all(geometries, moves, values=values)
            if step_checked is False:
                new_points = [g.get_vertices() for g in geometries]
                if old_points is not None:
                    delta = max_delta(geometries, new_points, old_points)
                    if delta > max_movement:
                        # Reduce the size of the time step
                        time_step *= max_movement/delta
                        # Reset to starting point
                        last_time = None
                        old_points = None
                        continue
                    step_checked = True
            # Check for collisions
            collisions = collide(geometries, ignore)
            if any(collisions):
                if last_time is None:
                    msg = "There is already a collision"
                    safe_time = 0.
                else:
                    msg = "Collision expected in %.1fs - %.1fs" % (last_time, current_time)
                    safe_time = last_time
                safe = False
                break
            old_points = new_points[:]
            last_time = current_time
    return msg, safe_time, safe
# Push the calculated soft limits out to each motor's dial-limit fields.
def set_limits(limits, pvs):
    """Write each [low, high] pair to the motor's .DLLM/.DHLM fields."""
    for axis_limits, pv in zip(limits, pvs):
        low, high = axis_limits[0], axis_limits[1]
        set_pv(pv + '.DLLM', low)
        set_pv(pv + '.DHLM', high)
# Contains operating mode events
class OperatingMode(object):
    """Thread-shared flags controlling the monitor's behaviour.

    close       -- request shutdown (triggered by the render thread)
    set_limits  -- apply dynamically calculated limits to the motors
    auto_stop   -- stop the motors when a collision occurs
    calc_limits -- force a limit recalculation on demand
    """

    def __init__(self):
        self.close = threading.Event()
        self.set_limits = threading.Event()
        self.auto_stop = threading.Event()
        self.calc_limits = threading.Event()

    def get_operation_mode(self):
        """Return the current (auto_stop, set_limits, close) flag states."""
        return self.auto_stop.is_set(), self.set_limits.is_set(), self.close.is_set()

    def set_operation_mode(self, auto_stop, set_limits, close):
        """Set or clear each flag according to the matching boolean."""
        for event, requested in ((self.auto_stop, auto_stop),
                                 (self.set_limits, set_limits),
                                 (self.close, close)):
            if requested:
                event.set()
            else:
                event.clear()
# The main routine to execute
# The main routine to execute
def main():
    """Run the collision-avoidance monitor.

    Builds ODE collision spaces from the configuration, monitors motor PVs,
    predicts collisions ahead of time, recalculates dynamic soft limits and
    (depending on the operating mode) writes them back to the motors.
    Runs until the shared close event is set, then restores the configured
    hard limits.  Pass 'blind' on the command line to skip the 3D renderer;
    'return' to exit after one loop iteration.
    """
    # Load config:
    colors = config.colors
    moves = config.moves
    ignore = config.ignore
    pvs = config.pvs
    config_limits = config.hardlimits
    old_limits = config_limits[:]

    # Create space objects for the live and rendered world
    space = ode.Space()
    render_space = ode.Space()
    collision_space = ode.Space()

    # Create and populate lists of geometries
    geometries = []
    render_geometries = []
    collision_geometries = []
    for i, geometry in enumerate(config.geometries):
        geometries.append(GeometryBox(space, oversize=config.oversize, **geometry))
        render_geometries.append(GeometryBox(render_space, **geometry))
        collision_geometries.append(GeometryBox(collision_space, oversize=config.oversize, **geometry))

    # Create and populate two lists of monitors
    monitors = []
    is_moving = []
    for pv in pvs:
        m = Monitor(pv + ".DRBV")
        m.start()
        monitors.append(m)

        any_moving = Monitor(pv + ".DMOV")
        any_moving.start()
        is_moving.append(any_moving)

    # Create a shared operating mode object to control the main thread
    op_mode = OperatingMode()
    # Set the default behaviour to set_limits as calculated, and auto_stop on collision
    op_mode.set_limits.set()
    op_mode.auto_stop.set()

    # Start a logger
    logger = IsisLogger()

    # Create a shared render parameter object to update the render thread
    parameters = render.RenderParams()

    if 'blind' not in sys.argv:
        # Initialise the render thread, and set it to daemon - won't prevent the main thread from exiting
        renderer = render.Renderer(parameters, render_geometries, colors, monitors, pvs, moves, op_mode)
        renderer.daemon = True

    # Need to know if this is the first execution of the main loop
    op_mode.calc_limits.set()

    # Initialise the pv server
    # Loop over the pvdb and update the counts based on the number of aves/bodies
    for pv in pv_server.pvdb:
        for key, val in pv_server.pvdb[pv].items():
            if key == 'count':
                if val is pv_server.axis_count:
                    pv_server.pvdb[pv]['count'] = len(config.pvs)
                if val is pv_server.body_count:
                    pv_server.pvdb[pv]['count'] = len(config.geometries)

    driver = pv_server.start_thread(config.control_pv, op_mode)

    driver.setParam('OVERSIZE', config.oversize)
    driver.setParam('COARSE', config.coarse)
    driver.setParam('FINE', config.fine)
    driver.setParam('NAMES', [g['name'] for g in config.geometries])

    # Only report for new collisions
    collision_detector = CollisionDetector(driver, collision_geometries, config.moves, monitors, config.ignore,
                                           is_moving, logger, op_mode, config.pvs)
    collision_detector.start()

    # Main loop
    while True:
        # Freeze the positions of our current monitors by creating some dummies
        # This stops the threads from trying to reading each monitor sequentially, and holding each other up
        frozen = [m.value() for m in monitors]

        # Execute the move
        move_all(geometries, moves, values=frozen)

        # Check if the oversize has been changed, ahead of any collision calcs
        if driver.new_data.isSet():
            for geometry, collision_geometry in zip(geometries, collision_geometries):
                geometry.set_size(oversize=driver.getParam('OVERSIZE'))
                collision_geometry.set_size(oversize=driver.getParam('OVERSIZE'))
            driver.new_data.clear()
            op_mode.calc_limits.set()

        if driver.getParam("CALC") != 0:
            op_mode.calc_limits.set()

        collisions = collision_detector.collisions[:]
        collision_message = collision_detector.message[:]

        # Check if there have been any changes to the .MOVN monitors
        fresh = any([m.fresh() for m in is_moving])

        # Check if any of the motors monitors are moving
        moving = [not m.value() for m in is_moving]  # Invert because DMOV is inverted from MOVN
        any_moving = any(moving)

        new_limits = []

        if fresh or any_moving or op_mode.calc_limits.isSet():
            # Look ahead some time to see if any collisions are going to happen in the future
            msg, safe_time, safe = look_ahead(frozen, config.pvs, moving, geometries, moves, ignore,
                                              max_movement=driver.getParam('COARSE'))

            if not safe and not any(collisions):
                logger.write_to_log(msg, "MAJOR", "COLLIDE")
                driver.setParam('MSG', msg)
            else:
                driver.setParam('MSG', collision_message)

            logging.info(msg)

            # Start timing for diagnostics
            time_passed = time()

            # Seek the correct limit values
            dynamic_limits = auto_seek_limits(geometries, ignore, moves, frozen, config_limits,
                                              coarse=driver.getParam('COARSE'), fine=driver.getParam('FINE'))

            # Calculate and log the time taken to calculate
            time_passed = (time() - time_passed) * 1000

            # Log the new limits
            logging.info("New limits calculated in %dms, are %s" % (time_passed, dynamic_limits))

            # Set the limits according to the set_limits operating mode
            if op_mode.set_limits.is_set():
                # Apply the calculated limits
                new_limits = dynamic_limits[:]
            else:
                # Restore the configuration limits
                new_limits = config_limits[:]

            # Update the render thread parameters
            parameters.update_params(dynamic_limits, collisions, time_passed)

            # # Update the PVs
            driver.setParam('TIME', time_passed)
            driver.setParam('HI_LIM', [l[1] for l in dynamic_limits])
            driver.setParam('LO_LIM', [l[0] for l in dynamic_limits])
            driver.setParam('TRAVEL', [min([l[0] - m, l[1] - m], key=abs)
                                       for l, m in zip(dynamic_limits, frozen)])
            driver.setParam('TRAV_F', [l[1] - m for l, m in zip(dynamic_limits, frozen)])
            driver.setParam('TRAV_R', [l[0] - m for l, m in zip(dynamic_limits, frozen)])

            driver.updatePVs()

            if 'blind' not in sys.argv:
                # On the first run, start the renderer
                if renderer.is_alive() is False:
                    renderer.start()

            op_mode.calc_limits.clear()
            driver.setParam("CALC", False)
        else:
            # Restore the configuration limits
            if op_mode.set_limits.is_set() is False:
                new_limits = config_limits[:]

        # Stop us overloading the limits
        if not new_limits == old_limits:
            threading.Thread(target=set_limits, args=(new_limits, pvs)).start()

        old_limits = new_limits[:]

        # Exit the program
        if op_mode.close.is_set():
            # Restore the configuration limits
            set_limits(config_limits, pvs)
            return

        # Give the CPU a break
        sleep(0.01)

        if 'return' in sys.argv:
            return
# Execute main: the monitor starts immediately when this module is run.
main()
| 33.560924 | 119 | 0.587856 |
f704c9482c2b74b28c4faceee71e9f4dcabea3a3 | 316 | py | Python | johann/__init__.py | lobotmcj/johann | c188c6f31446907a5d6a237191540856f02a91a0 | [
"BSD-3-Clause"
] | 11 | 2020-08-27T18:33:09.000Z | 2022-03-18T03:09:03.000Z | johann/__init__.py | johannsdg/johann | c188c6f31446907a5d6a237191540856f02a91a0 | [
"BSD-3-Clause"
] | null | null | null | johann/__init__.py | johannsdg/johann | c188c6f31446907a5d6a237191540856f02a91a0 | [
"BSD-3-Clause"
] | 2 | 2020-09-04T03:07:35.000Z | 2020-11-06T19:08:03.000Z | # Copyright (c) 2019-present, The Johann Authors. All Rights Reserved.
# Use of this source code is governed by a BSD-3-clause license that can
# be found in the LICENSE file. See the AUTHORS file for names of contributors.
"""Johann, lightweight and flexible scenario orchestration"""
__version__ = "0.3.0-alpha"
| 39.5 | 79 | 0.759494 |
f704ccdab8daddc07843c80260a004c3a4b58cc3 | 40,273 | py | Python | tests/unit_test/action/action_test.py | Anitej/kairon | 61d6bd7f230a744303abab42e3b54b0381fee7da | [
"Apache-2.0"
] | null | null | null | tests/unit_test/action/action_test.py | Anitej/kairon | 61d6bd7f230a744303abab42e3b54b0381fee7da | [
"Apache-2.0"
] | null | null | null | tests/unit_test/action/action_test.py | Anitej/kairon | 61d6bd7f230a744303abab42e3b54b0381fee7da | [
"Apache-2.0"
] | null | null | null | import json
import os
os.environ["system_file"] = "./tests/testing_data/system.yaml"
from typing import Dict, Text, Any, List
import pytest
import responses
from mongoengine import connect, disconnect
from rasa_sdk import Tracker
from rasa_sdk.executor import CollectingDispatcher
from kairon.action_server.data_objects import HttpActionRequestBody, HttpActionConfig, HttpActionLog
from kairon.action_server.actions import ActionUtility, HttpAction
from kairon.action_server.exception import HttpActionFailure
from kairon.utils import Utility
def pytest_configure():
    """Seed the shared pytest namespace with an (initially empty) DB URL slot."""
    return dict(db_url=None)
class TestActions:
    @pytest.fixture(autouse=True)
    def setup(self):
        """Point the app at the test config and connect MongoDB before every test.

        ``autouse=True`` makes this run for each test in the class; the resolved
        DB URL is stashed on the ``pytest`` module so helpers can reach it.
        """
        os.environ["system_file"] = "./tests/testing_data/system.yaml"
        # NOTE(review): 'load_evironment' spelling matches the project API as imported here.
        Utility.load_evironment()
        db_url = Utility.environment['database']["url"]
        pytest.db_url = db_url
        # mongoengine global connection used implicitly by the document models below
        connect(host=db_url)
    @pytest.fixture
    def mock_get_http_action_exception(self, monkeypatch):
        """Patch ``ActionUtility.get_http_action_config`` so every lookup raises
        ``HttpActionFailure`` — used to drive the action-not-found error path."""
        def _raise_excep(*arge, **kwargs):
            raise HttpActionFailure("No HTTP action found for bot and action")
        monkeypatch.setattr(ActionUtility, "get_http_action_config", _raise_excep)
@responses.activate
def test_execute_http_request_getWith_auth_token(self):
http_url = 'http://localhost:8080/mock'
# file deepcode ignore HardcodedNonCryptoSecret: Random string for testing
auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
responses.add(
method=responses.GET,
url=http_url,
json={'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]},
status=200
)
response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
request_method=responses.GET)
assert response
assert response['data'] == 'test_data'
assert len(response['test_class']) == 2
assert response['test_class'][1]['key2'] == 'value2'
assert responses.calls[0].request.headers['Authorization'] == auth_token
@responses.activate
def test_execute_http_request_get_no_auth_token(self):
http_url = 'http://localhost:8080/mock'
responses.add(
method=responses.GET,
url=http_url,
json={'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]},
status=200
)
response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
request_method=responses.GET)
assert response
assert response['data'] == 'test_data'
assert len(response['test_class']) == 2
assert response['test_class'][1]['key2'] == 'value2'
assert 'Authorization' not in responses.calls[0].request.headers
@responses.activate
def test_execute_http_request_post_with_auth_token(self):
http_url = 'http://localhost:8080/mock'
auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
resp_msg = "Data added successfully"
request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
responses.add(
method=responses.POST,
url=http_url,
body=resp_msg,
status=200,
match=[responses.json_params_matcher(request_params)]
)
response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
request_method=responses.POST, request_body=request_params)
assert response
assert response == resp_msg
assert responses.calls[0].request.headers['Authorization'] == auth_token
@responses.activate
def test_execute_http_request_post_no_auth_token(self):
http_url = 'http://localhost:8080/mock'
resp_msg = "Data added successfully"
request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
responses.add(
method=responses.POST,
url=http_url,
body=resp_msg,
status=200,
match=[responses.json_params_matcher(request_params)]
)
response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
request_method=responses.POST, request_body=request_params)
assert response
assert response == resp_msg
assert 'Authorization' not in responses.calls[0].request.headers
@responses.activate
def test_execute_http_request_put_with_auth_token(self):
http_url = 'http://localhost:8080/mock'
auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
resp_msg = "Data updated successfully"
request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
responses.add(
method=responses.PUT,
url=http_url,
body=resp_msg,
status=200,
match=[responses.json_params_matcher(request_params)]
)
response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
request_method=responses.PUT, request_body=request_params)
assert response
assert response == resp_msg
assert responses.calls[0].request.headers['Authorization'] == auth_token
@responses.activate
def test_execute_http_request_put_no_auth_token(self):
http_url = 'http://localhost:8080/mock'
resp_msg = "Data updated successfully"
request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
responses.add(
method=responses.PUT,
url=http_url,
body=resp_msg,
status=200,
match=[responses.json_params_matcher(request_params)]
)
response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
request_method=responses.PUT, request_body=request_params)
assert response
assert response == resp_msg
assert 'Authorization' not in responses.calls[0].request.headers
@responses.activate
def test_execute_http_request_delete_with_request_body_auth_token(self):
http_url = 'http://localhost:8080/mock'
auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
resp_msg = "Data deleted successfully"
request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
responses.add(
method=responses.DELETE,
url=http_url,
body=resp_msg,
status=200,
match=[responses.json_params_matcher(request_params)]
)
response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
request_method=responses.DELETE, request_body=request_params)
assert response
assert response == resp_msg
assert responses.calls[0].request.headers['Authorization'] == auth_token
@responses.activate
def test_execute_http_request_delete_with_auth_token_no_request_body(self):
http_url = 'http://localhost:8080/mock'
auth_token = "bearer jkhfhkujsfsfslfhjsfhkjsfhskhfksj"
resp_msg = "Data deleted successfully"
responses.add(
method=responses.DELETE,
url=http_url,
body=resp_msg,
status=200,
)
response = ActionUtility.execute_http_request(auth_token=auth_token, http_url=http_url,
request_method=responses.DELETE, request_body=None)
assert response
assert response == resp_msg
assert responses.calls[0].request.headers['Authorization'] == auth_token
@responses.activate
def test_execute_http_request_delete_no_auth_token(self):
http_url = 'http://localhost:8080/mock'
resp_msg = "Data updated successfully"
request_params = {'data': 'test_data', 'test_class': [{'key': 'value'}, {'key2': 'value2'}]}
responses.add(
method=responses.DELETE,
url=http_url,
body=resp_msg,
status=200,
match=[
responses.json_params_matcher(request_params)
]
)
response = ActionUtility.execute_http_request(auth_token=None, http_url=http_url,
request_method=responses.DELETE, request_body=request_params)
assert response
assert response == resp_msg
assert 'Authorization' not in responses.calls[0].request.headers
def test_get_http_action_config(self):
http_params = [HttpActionRequestBody(key="key1", value="value1", parameter_type="slot"),
HttpActionRequestBody(key="key2", value="value2")]
expected = HttpActionConfig(
auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
action_name="http_action",
response="json",
http_url="http://test.com",
request_method="GET",
params_list=http_params,
bot="bot",
user="user"
).save().to_mongo().to_dict()
actual = ActionUtility.get_http_action_config("bot", "http_action")
assert actual is not None
assert expected['auth_token'] == actual['auth_token']
assert expected['action_name'] == actual['action_name']
assert expected['response'] == actual['response']
assert expected['http_url'] == actual['http_url']
assert expected['request_method'] == actual['request_method']
assert expected['params_list'] is not None
assert expected['params_list'][0]['key'] == actual['params_list'][0]['key']
assert expected['params_list'][0]['value'] == actual['params_list'][0]['value']
assert expected['params_list'][0]['parameter_type'] == actual['params_list'][0]['parameter_type']
assert expected['params_list'][1]['key'] == actual['params_list'][1]['key']
assert expected['params_list'][1]['value'] == actual['params_list'][1]['value']
assert expected['params_list'][1]['parameter_type'] == actual['params_list'][1]['parameter_type']
assert actual['status']
def test_get_http_action_config_deleted_action(self):
http_params = [HttpActionRequestBody(key="key1", value="value1", parameter_type="slot"),
HttpActionRequestBody(key="key2", value="value2")]
HttpActionConfig(
auth_token="",
action_name="test_get_http_action_config_deleted_action",
response="${RESPONSE}",
http_url="http://www.digite.com",
request_method="POST",
params_list=http_params,
bot="bot",
user="user",
status=False
).save().to_mongo().to_dict()
expected = HttpActionConfig(
auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
action_name="test_get_http_action_config_deleted_action",
response="json",
http_url="http://test.com",
request_method="GET",
params_list=http_params,
bot="bot",
user="user"
).save().to_mongo().to_dict()
actual = ActionUtility.get_http_action_config("bot", "test_get_http_action_config_deleted_action")
assert actual is not None
assert expected['auth_token'] == actual['auth_token']
assert expected['action_name'] == actual['action_name']
assert expected['response'] == actual['response']
assert expected['http_url'] == actual['http_url']
assert expected['request_method'] == actual['request_method']
assert expected['params_list'] is not None
assert expected['params_list'][0]['key'] == actual['params_list'][0]['key']
assert expected['params_list'][0]['value'] == actual['params_list'][0]['value']
assert expected['params_list'][0]['parameter_type'] == actual['params_list'][0]['parameter_type']
assert expected['params_list'][1]['key'] == actual['params_list'][1]['key']
assert expected['params_list'][1]['value'] == actual['params_list'][1]['value']
assert expected['params_list'][1]['parameter_type'] == actual['params_list'][1]['parameter_type']
assert actual['status']
def test_get_http_action_no_bot(self):
try:
ActionUtility.get_http_action_config(bot=None, action_name="http_action")
assert False
except HttpActionFailure as ex:
assert str(ex) == "Bot name and action name are required"
def test_get_http_action_no_http_action(self):
try:
ActionUtility.get_http_action_config(bot="bot", action_name=None)
assert False
except HttpActionFailure as ex:
assert str(ex) == "Bot name and action name are required"
def test_get_http_action_invalid_bot(self):
http_params = [HttpActionRequestBody(key="key1", value="value1", parameter_type="slot"),
HttpActionRequestBody(key="key2", value="value2")]
HttpActionConfig(
auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
action_name="http_action",
response="json",
http_url="http://test.com",
request_method="GET",
params_list=http_params,
bot="bot",
user="user"
).save().to_mongo().to_dict()
try:
ActionUtility.get_http_action_config("bot1", "http_action")
assert False
except HttpActionFailure as ex:
assert str(ex).__contains__("No HTTP action found for bot")
def test_get_http_action_invalid_http_action(self):
http_params = [HttpActionRequestBody(key="key1", value="value1", parameter_type="slot"),
HttpActionRequestBody(key="key2", value="value2")]
HttpActionConfig(
auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
action_name="http_action",
response="json",
http_url="http://test.com",
request_method="GET",
params_list=http_params,
bot="bot",
user="user"
).save().to_mongo().to_dict()
try:
ActionUtility.get_http_action_config("bot", "http_action1")
assert False
except HttpActionFailure as ex:
assert str(ex).__contains__("No HTTP action found for bot")
def test_get_http_action_no_request_body(self):
http_params = []
HttpActionConfig(
auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
action_name="http_action",
response="json",
http_url="http://test.com",
request_method="GET",
params_list=http_params,
bot="bot",
user="user"
).save().to_mongo().to_dict()
try:
ActionUtility.get_http_action_config("bot", "http_action1")
assert False
except HttpActionFailure as ex:
assert str(ex).__contains__("No HTTP action found for bot")
def test_prepare_request(self):
slots = {"bot": "demo_bot", "http_action_config": "http_action_name", "slot_name": "param2value"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
http_action_config_params = [HttpActionRequestBody(key="param1", value="value1"),
HttpActionRequestBody(key="param2", value="slot_name", parameter_type="slot")]
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=None,
followup_action=None, active_loop=None, latest_action_name=None)
actual_request_body = ActionUtility.prepare_request(tracker=tracker,
http_action_config_params=http_action_config_params)
assert actual_request_body
assert actual_request_body['param1'] == 'value1'
assert actual_request_body['param2'] == 'param2value'
def test_prepare_request_empty_slot(self):
slots = {"bot": "demo_bot", "http_action_config": "http_action_name", "param2": "param2value"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
http_action_config_params = [HttpActionRequestBody(key="param1", value="value1"),
HttpActionRequestBody(key="param3", value="", parameter_type="slot")]
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=None,
followup_action=None, active_loop=None, latest_action_name=None)
request_params = ActionUtility.prepare_request(tracker=tracker, http_action_config_params=http_action_config_params)
assert request_params['param1'] == "value1"
assert not request_params['param3']
def test_prepare_request_sender_id(self):
slots = {"bot": "demo_bot", "http_action_config": "http_action_name", "param2": "param2value"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
http_action_config_params = [HttpActionRequestBody(key="param1", value="value1"),
HttpActionRequestBody(key="user_id", value="", parameter_type="sender_id")]
tracker = Tracker(sender_id="kairon_user@digite.com", slots=slots, events=events, paused=False, latest_message=None,
followup_action=None, active_loop=None, latest_action_name=None)
request_params = ActionUtility.prepare_request(tracker=tracker, http_action_config_params=http_action_config_params)
assert request_params['param1'] == "value1"
assert request_params['user_id'] == "kairon_user@digite.com"
def test_prepare_request_no_request_params(self):
slots = {"bot": "demo_bot", "http_action_config": "http_action_name", "param2": "param2value"}
events: List[Dict] = None
http_action_config_params: List[HttpActionRequestBody] = None
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=None,
followup_action=None, active_loop=None, latest_action_name=None)
actual_request_body = ActionUtility.prepare_request(tracker=tracker,
http_action_config_params=http_action_config_params)
# deepcode ignore C1801: empty request body for http request with no request body params
assert len(actual_request_body) == 0
@pytest.mark.asyncio
async def test_name(self):
assert await HttpAction().name() == "kairon_http_action"
def test_is_empty(self):
assert ActionUtility.is_empty("")
assert ActionUtility.is_empty(" ")
assert ActionUtility.is_empty(None)
assert not ActionUtility.is_empty("None")
def test_prepare_response(self):
json1 = json.dumps({
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
})
response = ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.c}", json1)
assert response == 'The value of 2 in red is []'
json2 = json.dumps({
"data": [
{"a": {
"b": {
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}}},
{"a": {
"b": {
"43": 5,
"c": [1, 2],
"d": ['buggy', 'bumpers'],
}}}
]
})
response = ActionUtility.prepare_response("The value of ${data.0.a} in ${data.0.a.b} is ${data.0.a.b.d}", json2)
assert response == 'The value of {"b": {"43": 30, "c": [], "d": ["red", "buggy", "bumpers"]}} in {"43": 30, "c": [], "d": ["red", "buggy", "bumpers"]} is [\'red\', \'buggy\', \'bumpers\']'
def test_prepare_response_key_not_present(self):
json1 = json.dumps({
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
})
try:
ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.e}", json1)
assert False
except HttpActionFailure:
assert True
def test_prepare_response_string_response(self):
json1 = json.dumps({
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
})
response = ActionUtility.prepare_response("The value of red is 0", json1)
assert response == "The value of red is 0"
def test_prepare_response_string_empty_response_string(self):
json1 = json.dumps({
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
})
response = ActionUtility.prepare_response("", json1)
assert response == '{"a": {"b": {"3": 2, "43": 30, "c": [], "d": ["red", "buggy", "bumpers"]}}}'
def test_prepare_response_string_empty_request_output(self):
json1 = json.dumps("{}")
try:
ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.e}", json1)
assert False
except HttpActionFailure:
assert True
def test_prepare_response_invalid_response_json(self):
json_as_string = "Not a json string"
try:
ActionUtility.prepare_response("The value of ${a.b.3} in ${a.b.d.0} is ${a.b.c}", json_as_string)
assert False
except HttpActionFailure as e:
assert str(e) == 'Could not find value for keys in response'
def test_prepare_response_as_json_and_expected_as_plain_string(self):
json_as_string = "Not a json string"
response = ActionUtility.prepare_response("The value of 2 in red is []", json_as_string)
assert response == 'The value of 2 in red is []'
def test_prepare_response_as_string_and_expected_as_none(self):
response = ActionUtility.prepare_response("The value of 2 in red is []", None)
assert response == 'The value of 2 in red is []'
@pytest.mark.asyncio
async def test_run_invalid_http_action(self, mock_get_http_action_exception):
slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_http_action": "test_run_invalid_http_action",
"param2": "param2value"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'http_action'}]}
HttpActionConfig(
auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
action_name="test_run_invalid_http_action1",
response="json",
http_url="http://www.google.com",
request_method="GET",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
).save()
dispatcher: CollectingDispatcher = CollectingDispatcher()
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
await HttpAction().run(dispatcher, tracker, domain)
str(dispatcher.messages[0]['text']).__contains__(
"I have failed to process your request: No HTTP action found for bot")
log = HttpActionLog.objects(sender="sender1",
bot="5f50fd0a56b698ca10d35d2e",
status="FAILURE").get()
assert log['exception'].__contains__('No HTTP action found for bot')
@pytest.mark.asyncio
async def test_run_no_bot(self):
slots = {"bot": None, "http_action_config_http_action": "new_http_action", "param2": "param2value"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'http_action'}]}
tracker = Tracker(sender_id="sender2", slots=slots, events=events, paused=False, latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
assert actual is not None
assert str(actual[0]['name']) == 'KAIRON_ACTION_RESPONSE'
assert str(actual[0]['value']) == 'I have failed to process your request'
log = HttpActionLog.objects(sender="sender2",
status="FAILURE").get()
assert log['exception'] == 'Bot id and HTTP action configuration name not found in slot'
@pytest.mark.asyncio
async def test_run_no_http_action(self):
slots = {"bot": "jhgfsjgfausyfgus", "http_action_config_http_action": None, "param2": "param2value"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'http_action'}]}
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
assert actual is not None
assert str(actual[0]['name']) == 'KAIRON_ACTION_RESPONSE'
assert str(actual[0]['value']) == 'I have failed to process your request'
@pytest.mark.asyncio
async def test_run(self, monkeypatch):
action = HttpActionConfig(
auth_token="bearer kjflksjflksajfljsdflinlsufisnflisjbjsdalibvs",
action_name="http_action",
response="This should be response",
http_url="http://www.google.com",
request_method="GET",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict()
monkeypatch.setattr(ActionUtility, "get_http_action_config", _get_action)
slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "http_action",
"param2": "param2value"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]}
tracker = Tracker(sender_id="sender_test_run", slots=slots, events=events, paused=False,
latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
action.save().to_mongo().to_dict()
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
assert actual is not None
assert str(actual[0]['name']) == 'KAIRON_ACTION_RESPONSE'
assert str(actual[0]['value']) == 'This should be response'
log = HttpActionLog.objects(sender="sender_test_run",
status="SUCCESS").get()
assert not log['exception']
assert log['timestamp']
assert log['intent']
assert log['action']
assert log['bot_response']
assert log['api_response']
@pytest.mark.asyncio
async def test_run_with_post(self, monkeypatch):
action = HttpActionConfig(
auth_token="",
action_name="test_run_with_post",
response="Data added successfully, id:${RESPONSE}",
http_url="http://localhost:8080/mock",
request_method="POST",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict()
monkeypatch.setattr(ActionUtility, "get_http_action_config", _get_action)
http_url = 'http://localhost:8080/mock'
resp_msg = "5000"
responses.start()
responses.add(
method=responses.POST,
url=http_url,
body=resp_msg,
status=200,
)
slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "test_run_with_post"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]}
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
action.save().to_mongo().to_dict()
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
assert actual is not None
assert actual[0]['name'] == 'KAIRON_ACTION_RESPONSE'
assert actual[0]['value'] == 'Data added successfully, id:5000'
@pytest.mark.asyncio
async def test_run_with_post_and_parameters(self, monkeypatch):
request_params = [HttpActionRequestBody(key='key1', value="value1"),
HttpActionRequestBody(key='key2', value="value2")]
action = HttpActionConfig(
auth_token="",
action_name="test_run_with_post",
response="Data added successfully, id:${RESPONSE}",
http_url="http://localhost:8080/mock",
request_method="POST",
params_list=request_params,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict()
monkeypatch.setattr(ActionUtility, "get_http_action_config", _get_action)
http_url = 'http://localhost:8080/mock'
resp_msg = "5000"
responses.start()
responses.add(
method=responses.POST,
url=http_url,
body=resp_msg,
status=200,
)
slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "test_run_with_post"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]}
tracker = Tracker(sender_id="sender_test_run_with_post", slots=slots, events=events, paused=False,
latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
action.save().to_mongo().to_dict()
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
responses.stop()
assert actual is not None
assert str(actual[0]['name']) == 'KAIRON_ACTION_RESPONSE'
assert str(actual[0]['value']) == 'Data added successfully, id:5000'
log = HttpActionLog.objects(sender="sender_test_run_with_post",
action="test_run_with_post",
status="SUCCESS").get()
assert not log['exception']
assert log['timestamp']
assert log['intent'] == "test_run"
assert log['action'] == "test_run_with_post"
assert log['request_params'] == {"key1": "value1", "key2": "value2"}
assert log['api_response'] == '5000'
assert log['bot_response'] == 'Data added successfully, id:5000'
@pytest.mark.asyncio
async def test_run_with_get(self, monkeypatch):
action = HttpActionConfig(
auth_token="",
action_name="test_run_with_get",
response="The value of ${a.b.3} in ${a.b.d.0} is ${a.b.d}",
http_url="http://localhost:8081/mock",
request_method="GET",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict()
monkeypatch.setattr(ActionUtility, "get_http_action_config", _get_action)
http_url = 'http://localhost:8081/mock'
resp_msg = json.dumps({
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
})
responses.start()
responses.add(
method=responses.GET,
url=http_url,
body=resp_msg,
status=200,
)
slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "test_run_with_post"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]}
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
action.save().to_mongo().to_dict()
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
responses.stop()
assert actual is not None
assert str(actual[0]['name']) == 'KAIRON_ACTION_RESPONSE'
assert str(actual[0]['value']) == 'The value of 2 in red is [\'red\', \'buggy\', \'bumpers\']'
@pytest.mark.asyncio
async def test_run_no_connection(self, monkeypatch):
action = HttpActionConfig(
auth_token="",
action_name="test_run_with_post",
response="This should be response",
http_url="http://localhost:8085/mock",
request_method="GET",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict()
monkeypatch.setattr(ActionUtility, "get_http_action_config", _get_action)
slots = {"bot": "5f50fd0a56b698ca10d35d2e", "http_action_config_test_run": "test_run_with_post"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]}
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
action.save()
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
assert actual is not None
assert str(actual[0]['name']) == 'KAIRON_ACTION_RESPONSE'
assert str(actual[0]['value']).__contains__('I have failed to process your request')
@pytest.mark.asyncio
async def test_run_with_get_placeholder_vs_string_response(self, monkeypatch):
action = HttpActionConfig(
auth_token="",
action_name="test_run_with_get_string_http_response_placeholder_required",
response="The value of ${a.b.3} in ${a.b.d.0} is ${a.b.d}",
http_url="http://localhost:8080/mock",
request_method="GET",
params_list=None,
bot="5f50fd0a56b698ca10d35d2e",
user="user"
)
def _get_action(*arge, **kwargs):
return action.to_mongo().to_dict()
monkeypatch.setattr(ActionUtility, "get_http_action_config", _get_action)
http_url = 'http://localhost:8082/mock'
resp_msg = "This is string http response"
responses.start()
responses.add(
method=responses.GET,
url=http_url,
body=resp_msg,
status=200,
)
slots = {"bot": "5f50fd0a56b698ca10d35d2e",
"http_action_config_test_run": "test_run_with_get_string_http_response_placeholder_required"}
events = [{"event1": "hello"}, {"event2": "how are you"}]
dispatcher: CollectingDispatcher = CollectingDispatcher()
latest_message = {'text': 'get intents', 'intent_ranking': [{'name': 'test_run'}]}
tracker = Tracker(sender_id="sender1", slots=slots, events=events, paused=False, latest_message=latest_message,
followup_action=None, active_loop=None, latest_action_name=None)
domain: Dict[Text, Any] = None
action.save().to_mongo().to_dict()
actual: List[Dict[Text, Any]] = await HttpAction().run(dispatcher, tracker, domain)
responses.stop()
assert actual is not None
assert str(actual[0]['name']) == 'KAIRON_ACTION_RESPONSE'
assert str(
actual[0]['value']) == 'I have failed to process your request'
    def test_attach_response_no_placeholder(self):
        """Templates without ${RESPONSE} are returned unchanged."""
        output = ActionUtility.attach_response("This has no placeholder", {"a": "b"})
        assert output == "This has no placeholder"
    def test_attach_response(self):
        """${RESPONSE} is replaced with the str() rendering of a dict payload;
        the literal leading '$' in the template is preserved."""
        output = ActionUtility.attach_response("I want $${RESPONSE}", {"dollars": "51"})
        assert output == 'I want ${\'dollars\': \'51\'}'
    def test_attach_response_int(self):
        """${RESPONSE} substitution also works for non-dict payloads such as ints."""
        output = ActionUtility.attach_response("I want $${RESPONSE}", 51)
        assert output == 'I want $51'
def test_retrieve_value_from_response(self):
keys = ["a.b.3", 'a.b']
resp_msg = {
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
}
key_values = ActionUtility.retrieve_value_from_response(keys, resp_msg)
assert key_values is not None
assert key_values['${a.b.3}'] == 2
assert key_values['${a.b}'] is not None
assert key_values['${a.b}']['3'] == 2
assert key_values['${a.b}']['d'][0] == 'red'
def test_retrieve_value_from_response_invalid_key(self):
keys = ["d.e.f", 'g.h']
resp_msg = {
"a": {
"b": {
"3": 2,
"43": 30,
"c": [],
"d": ['red', 'buggy', 'bumpers'],
}
}
}
try:
ActionUtility.retrieve_value_from_response(keys, resp_msg)
assert False
except HttpActionFailure as e:
assert str(e) == 'Unable to retrieve value for key from HTTP response: \'d\''
| 45.098544 | 196 | 0.596653 |
f704f458109bda2f6012d3166ddc6ff6686bf0f4 | 12,306 | py | Python | ABLIRC/bin/Basic/Distance2XXX/reads_or_peaks_distribution_relative2xxx.py | ablifedev/ABLIRC | 875278b748a8e22ada2c76c3c76dbf970be4a6a4 | [
"MIT"
] | 1 | 2020-02-25T13:08:20.000Z | 2020-02-25T13:08:20.000Z | ABLIRC/bin/Basic/Distance2XXX/reads_or_peaks_distribution_relative2xxx.py | ablifedev/ABLIRC | 875278b748a8e22ada2c76c3c76dbf970be4a6a4 | [
"MIT"
] | 1 | 2020-02-25T13:16:03.000Z | 2020-02-25T13:16:03.000Z | ABLIRC/bin/Basic/Distance2XXX/reads_or_peaks_distribution_relative2xxx.py | ablifedev/ABLIRC | 875278b748a8e22ada2c76c3c76dbf970be4a6a4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
####################################################################################
### Copyright (C) 2015-2019 by ABLIFE
####################################################################################
####################################################################################
####################################################################################
# Date Version Author ChangeLog
#
#
#
#####################################################################################
"""
程序功能说明:
1.统计reads or peaks 相对于TTS,TSS,STARTCODON,STOPCODON的分布
程序设计思路:
利用gffutils和HTSeq包进行统计
"""
import re, os, sys, logging, time, datetime
from optparse import OptionParser, OptionGroup
reload(sys)
sys.setdefaultencoding('utf-8')
import subprocess
import threading
import gffutils
import HTSeq
import numpy
import multiprocessing
from matplotlib import pyplot
sys.path.insert(1, os.path.split(os.path.realpath(__file__))[0] + "/../../")
from ablib.utils.tools import *
from ablib.utils.distribution import *
if sys.version_info < (2, 7):
print("Python Version error: please use phthon2.7")
sys.exit(-1)
_version = 'v0.1'
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def configOpt():
    """Build and parse the command-line options for this script.

    Returns:
        (parser, opt, args): the OptionParser instance, the parsed option
        object and any remaining positional arguments.

    Exits with the usage message when no arguments are given; option
    validation beyond parsing (e.g. the mandatory -p) is done by the caller.
    """
    usage = 'Usage: %prog [option] [-h]'
    p = OptionParser(usage)
    ##basic options
    p.add_option(
        '-g', '--gff', dest='gff', action='store',
        type='string', help='gff file,do not have to provide it if db is exited')
    p.add_option(
        '-d', '--db', dest='db', default='gffdb', action='store',
        type='string', help='the gff database file to create or use')
    p.add_option(
        '-b', '--bamorbed', dest='bamorbed', action='store',
        type='string', help='bam or bed file, Important: the bamfile\'s suffix must be ".bam"')
    p.add_option(
        '-w', '--halfwinwidth', dest='halfwinwidth', default=1000, action='store',
        type='int', help='halfwinwidth,default is 1000')
    p.add_option(
        '-p', '--postype', dest='postype', action='store',
        type='string', help='gene position type:tss,tts,startcodon,stopcodon,intronstart,intronend')
    p.add_option(
        '-o', '--outfile', dest='outfile', default="distance2xxx_reads_density.txt", action='store',
        # Fixed help text: this is the reads-density output table, not a gene expression file.
        type='string', help='output file for the reads density table')
    p.add_option(
        '-n', '--samplename', dest='samplename', default='', action='store',
        type='string', help='sample name,default is ""')
    group = OptionGroup(p, "Preset options")
    ##preset options
    group.add_option(
        '-O', '--outDir', dest='outDir', default='./', action='store',
        type='string', help='output directory', metavar="DIR")
    group.add_option(
        '-L', '--logDir', dest='logDir', default='', action='store',
        type='string', help='log dir ,default is same as outDir')
    group.add_option(
        '-P', '--logPrefix', dest='logPrefix', default='', action='store',
        type='string', help='log file prefix')
    group.add_option(
        '-E', '--email', dest='email', default='none', action='store',
        type='string', help='email address, if you want get a email when this job is finished,default is no email',
        metavar="EMAIL")
    # NOTE(review): default=True with action='store_true' means passing -Q can
    # never change the value; console logging is always suppressed. Changing the
    # default would alter behavior, so it is only flagged here — confirm intent.
    group.add_option(
        '-Q', '--quiet', dest='quiet', default=True, action='store_true',
        help='do not print messages to stdout')
    group.add_option(
        '-K', '--keepTemp', dest='keepTemp', default=False, action='store_true',
        help='keep temp dir')
    group.add_option(
        '-T', '--test', dest='isTest', default=False, action='store_true',
        help='run this program for test')
    p.add_option_group(group)
    if len(sys.argv) == 1:
        # No arguments at all: show usage and bail out.
        p.print_help()
        sys.exit(1)
    opt, args = p.parse_args()
    return (p, opt, args)
def listToString(x):
    """Join the items of *x* into a single string, each followed by a space.

    Returns '' for an empty sequence; a non-empty result always carries a
    trailing space (preserves the historical output format used in logs).
    """
    # ''.join over a generator avoids the quadratic cost of repeated +=.
    return ''.join(item + ' ' for item in x)
# Parse the command line once at import time; everything below mutates `opt`.
opt_parser, opt, args = configOpt()
if not opt.postype:
    opt_parser.error('Option -p must be assigned.\n')
if opt.logDir == "":
    # Default the log directory to a log/ folder inside the output directory.
    opt.logDir = opt.outDir + '/log/'
sample = ""
if opt.samplename != "":
    sample = opt.samplename + '_'
if opt.outfile == 'distance2xxx_reads_density.txt':
    # User kept the placeholder default: derive a name from sample + postype.
    opt.outfile = sample + 'distance2' + opt.postype + '_reads_density.txt'
intype = "bam"
# Input format is decided purely by the ".bam" suffix; anything else is bed.
match = re.search(r'\.bam$', opt.bamorbed)
if not match:
    intype = "bed"
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
scriptPath = os.path.abspath(os.path.dirname(__file__))  # absolute script path
binPath = "/".join(scriptPath.split("/")[0:-2])  # absolute bin path (two levels up)
outPath = os.path.abspath(opt.outDir)  # absolute output path
#os.mkdir(outPath) if not os.path.isdir(outPath) else None
os.system('mkdir -p ' + outPath)
logPath = os.path.abspath(opt.logDir)
#os.mkdir(logPath) if not os.path.isdir(logPath) else None
os.system('mkdir -p ' + logPath)
tempPath = outPath + '/temp/'  # scratch dir, deleted at exit unless -K is given
# os.mkdir(tempPath) if not os.path.isdir(tempPath) else None
resultPath = outPath + '/result/'
# os.mkdir(resultPath) if not os.path.isdir(resultPath) else None
def initLogging(logFilename):
    """Configure the root logger: DEBUG to *logFilename*, optionally INFO to stderr.

    :param logFilename: path of the log file (opened in 'w' mode, truncating).
    """
    logging.basicConfig(
        level=logging.DEBUG,
        format='[%(asctime)s : %(levelname)s] %(message)s',
        datefmt='%y-%m-%d %H:%M',
        filename=logFilename,
        filemode='w')
    # NOTE(review): opt.quiet defaults to True and -Q can only set it to True,
    # so this console branch is effectively never taken — confirm intent.
    if not opt.quiet:
        # define a Handler which writes INFO messages or higher to the sys.stderr
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        # set a format which is simpler for console use
        formatter = logging.Formatter('[%(asctime)s : %(levelname)s] %(message)s', datefmt='%y-%m-%d %H:%M')
        # tell the handler to use this format
        console.setFormatter(formatter)
        logging.getLogger('').addHandler(console)
# Timestamped log file name, e.g. log.20240101.120000.123456.txt, then start logging.
dt = datetime.datetime.now()
logFile = logPath + '/' + opt.logPrefix + 'log.' + str(dt.strftime('%Y%m%d.%H%M%S.%f')) + '.txt'
initLogging(logFile)
logging.debug(sys.modules[__name__].__doc__)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
logging.debug('Program version: %s' % _version)
logging.debug('Start the program with [%s]\n', listToString(sys.argv))
# Record the start time so the teardown code can report total runtime.
startTime = datetime.datetime.now()
logging.debug("计时器:Program start at %s" % startTime)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
### S
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
### E
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
def main():
    """Count reads/peaks per position relative to the chosen feature (tss/tts/...)
    across all chromosomes in parallel, write the density table and plot it with R."""
    print("Main procedure start...")
    if opt.gff:
        # Rebuild the gffutils database from scratch (force=True overwrites opt.db).
        db = gffutils.create_db(opt.gff, opt.db, merge_strategy="create_unique", verbose=False, force=True)
    db = gffutils.FeatureDB(opt.db)
    # Watcher() presumably installs a signal handler so Ctrl-C kills the pool — TODO confirm.
    Watcher()
    pool = multiprocessing.Pool(processes=15)
    # Manager dict shared across workers: chromosome -> per-position counts.
    server = multiprocessing.Manager()
    dis = server.dict()
    for chr in db.seqids():
        # if chr != "chr1":
        #     continue
        if intype == "bam":
            # Skip chromosomes absent from the BAM header (no alignments possible).
            chr_dict = readBamHeader(opt.bamorbed)
            if not chr in chr_dict:
                continue
        # print(chr)
        # One counter slot per position in the [-halfwinwidth, +halfwinwidth) window.
        dis[chr] = [0 for x in range(2 * opt.halfwinwidth)]
        pool.apply_async(distributionToOnePointByChr,
                         args=(chr, opt.bamorbed, opt.db, opt.outfile, opt.postype, opt.halfwinwidth, dis))
    pool.close()
    pool.join()
    # Snapshot the shared dict into a plain dict before shutting the manager down.
    d = dict(dis).copy()
    server.shutdown()
    # Sum the per-chromosome count vectors into one genome-wide profile.
    profile = numpy.zeros(2 * opt.halfwinwidth, dtype='i')
    for chr in sorted(d.keys()):
        wincvg = numpy.fromiter(d[chr], dtype='i', count=2 * opt.halfwinwidth)
        profile += wincvg
    # pyplot.plot( numpy.arange( -opt.halfwinwidth, opt.halfwinwidth ), profile )
    # pyplot.show()
    os.chdir(opt.outDir)
    # Two-column table: signed distance to the feature point, read density.
    fout = open(opt.outfile, 'w')
    fout.writelines(
        "+distance\tdensity\n")
    n = 0
    for i in range(-opt.halfwinwidth, opt.halfwinwidth):
        fout.writelines(str(i) + '\t' + str(profile[n]) + '\n')
        n += 1
    fout.close()
    #cmd = "cd " + outPath + "&& R --slave < /users/ablife/ablife-R/Line_single_ggplot2.r --args " + opt.outfile + " " + sample + 'distance2' + opt.postype + '_reads_density ./ \n'
    # Render the density curve with the project's ggplot2 wrapper script.
    cmd = "cd " + outPath + "&& Rscript " + binPath + "/plot/Line_single_ggplot2.r -f " + opt.outfile + " -t " + sample + 'distance2' + opt.postype + '_reads_density -n ' + sample + 'distance2' + opt.postype + '_reads_density -o ./'
    os.system(cmd)
if __name__ == '__main__':
    main()
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# Clean up the scratch directory unless the user asked to keep it (-K).
if not opt.keepTemp:
    os.system('rm -rf ' + tempPath)
    logging.debug("Temp folder is deleted..")
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
logging.debug("Program ended")
# Report wall-clock runtime as HH:MM:SS in the log.
currentTime = datetime.datetime.now()
runningTime = (currentTime - startTime).seconds  # in seconds
logging.debug("计时器:Program start at %s" % startTime)
logging.debug("计时器:Program end at %s" % currentTime)
logging.debug("计时器:Program ran %.2d:%.2d:%.2d" % (runningTime / 3600, (runningTime % 3600) / 60, runningTime % 60))
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
# Optional completion notification (-E): email the run command and output path.
if opt.email != "none":
    run_cmd = listToString(sys.argv)
    sendEmail(opt.email, str(startTime), str(currentTime), run_cmd, outPath)
    logging.info("发送邮件通知到 %s" % opt.email)
# -----------------------------------------------------------------------------------
# -----------------------------------------------------------------------------------
| 33.715068 | 232 | 0.42719 |
f7050c7044587c7a03a7e83f1453f954e296f2ad | 49,770 | py | Python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_scale_set_vms_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2021-09-07T18:39:05.000Z | 2021-09-07T18:39:05.000Z | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_scale_set_vms_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2017_03_30/aio/operations/_virtual_machine_scale_set_vms_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-04T06:21:56.000Z | 2022-03-04T06:21:56.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._virtual_machine_scale_set_vms_operations import build_deallocate_request_initial, build_delete_request_initial, build_get_instance_view_request, build_get_request, build_list_request, build_power_off_request_initial, build_reimage_all_request_initial, build_reimage_request_initial, build_restart_request_initial, build_start_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualMachineScaleSetVMsOperations:
"""VirtualMachineScaleSetVMsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2017_03_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Thin wiring only: request building, (de)serialization and pipeline
        # execution are all delegated to these injected collaborators.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def _reimage_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial reimage request; begin_reimage wraps this in an LRO poller.

        Returns a deserialized OperationStatusResponse on 200, None on 202
        (operation accepted, still running).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_reimage_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._reimage_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        # Only a 200 carries a body; 202 means the operation was merely accepted.
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _reimage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'}  # type: ignore
    @distributed_trace_async
    async def begin_reimage(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Reimages (upgrade the operating system) a specific virtual machine in a VM scale set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # No saved state: issue the initial request before constructing the poller.
        if cont_token is None:
            raw_result = await self._reimage_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)  # consumed by the initial call; not valid for polling
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once polling finishes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Final state is read from the Azure-AsyncOperation header for this LRO.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_reimage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage'}  # type: ignore
    async def _reimage_all_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial reimage-all (OS + data disks) request; wrapped by begin_reimage_all.

        Returns a deserialized OperationStatusResponse on 200, None on 202.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_reimage_all_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._reimage_all_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        # Only a 200 carries a body; 202 means the operation was merely accepted.
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _reimage_all_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'}  # type: ignore
    @distributed_trace_async
    async def begin_reimage_all(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Allows you to re-image all the disks ( including data disks ) in the a VM scale set instance.
        This operation is only supported for managed disks.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # No saved state: issue the initial request before constructing the poller.
        if cont_token is None:
            raw_result = await self._reimage_all_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)  # consumed by the initial call; not valid for polling
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once polling finishes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Final state is read from the Azure-AsyncOperation header for this LRO.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_reimage_all.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall'}  # type: ignore
    async def _deallocate_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial deallocate request; begin_deallocate wraps this in an LRO poller.

        Returns a deserialized OperationStatusResponse on 200, None on 202.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_deallocate_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._deallocate_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        # Only a 200 carries a body; 202 means the operation was merely accepted.
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _deallocate_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'}  # type: ignore
    @distributed_trace_async
    async def begin_deallocate(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Deallocates a specific virtual machine in a VM scale set. Shuts down the virtual machine and
        releases the compute resources it uses. You are not billed for the compute resources of this
        virtual machine once it is deallocated.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # No saved state: issue the initial request before constructing the poller.
        if cont_token is None:
            raw_result = await self._deallocate_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)  # consumed by the initial call; not valid for polling
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once polling finishes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Final state is read from the Azure-AsyncOperation header for this LRO.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_deallocate.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate'}  # type: ignore
    async def _delete_initial(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> Optional["_models.OperationStatusResponse"]:
        """Send the initial delete request; begin_delete wraps this in an LRO poller.

        Accepts 200/202/204 (204 = instance already gone); only 200 carries a body.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.OperationStatusResponse"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        request = build_delete_request_initial(
            resource_group_name=resource_group_name,
            vm_scale_set_name=vm_scale_set_name,
            instance_id=instance_id,
            subscription_id=self._config.subscription_id,
            template_url=self._delete_initial.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore
    @distributed_trace_async
    async def begin_delete(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Deletes a virtual machine from a VM scale set.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
         result of cls(response)
        :rtype:
         ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # No saved state: issue the initial request before constructing the poller.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,  # keep the raw pipeline response for the poller
                **kwargs
            )
        kwargs.pop('error_map', None)  # consumed by the initial call; not valid for polling
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response once polling finishes.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # NOTE: unlike reimage/deallocate, delete uses the default final-state
        # resolution (no 'final-state-via' option) — location-header based.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'}  # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> "_models.VirtualMachineScaleSetVM":
"""Gets a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVM, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVM
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVM"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineScaleSetVM', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}'} # type: ignore
@distributed_trace_async
async def get_instance_view(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> "_models.VirtualMachineScaleSetVMInstanceView":
"""Gets the status of a virtual machine from a VM scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param vm_scale_set_name: The name of the VM scale set.
:type vm_scale_set_name: str
:param instance_id: The instance ID of the virtual machine.
:type instance_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineScaleSetVMInstanceView, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVMInstanceView
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineScaleSetVMInstanceView"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_instance_view_request(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self.get_instance_view.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineScaleSetVMInstanceView', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_instance_view.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView'} # type: ignore
    @distributed_trace
    def list(
        self,
        resource_group_name: str,
        virtual_machine_scale_set_name: str,
        filter: Optional[str] = None,
        select: Optional[str] = None,
        expand: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncIterable["_models.VirtualMachineScaleSetVMListResult"]:
        """Gets a list of all virtual machines in a VM scale sets.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_machine_scale_set_name: The name of the VM scale set.
        :type virtual_machine_scale_set_name: str
        :param filter: The filter to apply to the operation. Allowed values are
        'startswith(instanceView/statuses/code, 'PowerState') eq true', 'properties/latestModelApplied
        eq true', 'properties/latestModelApplied eq false'.
        :type filter: str
        :param select: The list parameters. Allowed values are 'instanceView', 'instanceView/statuses'.
        :type select: str
        :param expand: The expand expression to apply to the operation. Allowed values are
        'instanceView'.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VirtualMachineScaleSetVMListResult or the result
        of cls(response)
        :rtype:
        ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2017_03_30.models.VirtualMachineScaleSetVMListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Optional caller hook applied to each page's element list.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualMachineScaleSetVMListResult"]
        # Default status-code -> exception mapping; caller overrides win.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # Build the request for the first page (from the operation's
            # template URL) or for a continuation page (from next_link).
            if not next_link:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    virtual_machine_scale_set_name=virtual_machine_scale_set_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    select=select,
                    expand=expand,
                    template_url=self.list.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    virtual_machine_scale_set_name=virtual_machine_scale_set_name,
                    subscription_id=self._config.subscription_id,
                    filter=filter,
                    select=select,
                    expand=expand,
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                # next_link is a complete URL; the HTTP verb is not carried in
                # the link itself, so force GET for continuation pages.
                request.method = "GET"
            return request
        async def extract_data(pipeline_response):
            # Turn one HTTP response into (link-to-next-page, page-items).
            deserialized = self._deserialize("VirtualMachineScaleSetVMListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page, surfacing HTTP errors via error_map.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        # AsyncItemPaged drives get_next/extract_data lazily as the caller
        # iterates, so no request is issued until iteration begins.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines'}  # type: ignore
async def _power_off_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_power_off_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._power_off_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_power_off_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'} # type: ignore
    @distributed_trace_async
    async def begin_power_off(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Power off (stop) a virtual machine in a VM scale set. Note that resources are still attached
        and you are getting charged for the resources. Instead, use deallocate to release resources and
        avoid charges.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
        this operation to not poll, or pass in your own initialized polling object for a personal
        polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
        Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
        result of cls(response)
        :rtype:
        ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # polling: True -> default AsyncARMPolling, False -> AsyncNoPolling,
        # anything else is treated as a caller-provided polling method.
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        # Delay between polls when no Retry-After header is returned.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: issue the initial power-off request.
            # cls=lambda x,y,z: x keeps the raw pipeline response for the poller.
            raw_result = await self._power_off_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map only applies to the initial call; drop it so it is not
        # forwarded to the polling pipeline via **kwargs below.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO and apply the
            # caller's cls hook, if one was supplied.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_power_off.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff'}  # type: ignore
async def _restart_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_restart_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._restart_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_restart_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'} # type: ignore
    @distributed_trace_async
    async def begin_restart(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Restarts a virtual machine in a VM scale set.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
        this operation to not poll, or pass in your own initialized polling object for a personal
        polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
        Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
        result of cls(response)
        :rtype:
        ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # polling: True -> default AsyncARMPolling, False -> AsyncNoPolling,
        # anything else is treated as a caller-provided polling method.
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        # Delay between polls when no Retry-After header is returned.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: issue the initial restart request.
            # cls=lambda x,y,z: x keeps the raw pipeline response for the poller.
            raw_result = await self._restart_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map only applies to the initial call; drop it so it is not
        # forwarded to the polling pipeline via **kwargs below.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO and apply the
            # caller's cls hook, if one was supplied.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_restart.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart'}  # type: ignore
async def _start_initial(
self,
resource_group_name: str,
vm_scale_set_name: str,
instance_id: str,
**kwargs: Any
) -> Optional["_models.OperationStatusResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OperationStatusResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_start_request_initial(
resource_group_name=resource_group_name,
vm_scale_set_name=vm_scale_set_name,
instance_id=instance_id,
subscription_id=self._config.subscription_id,
template_url=self._start_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'} # type: ignore
    @distributed_trace_async
    async def begin_start(
        self,
        resource_group_name: str,
        vm_scale_set_name: str,
        instance_id: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.OperationStatusResponse"]:
        """Starts a virtual machine in a VM scale set.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param vm_scale_set_name: The name of the VM scale set.
        :type vm_scale_set_name: str
        :param instance_id: The instance ID of the virtual machine.
        :type instance_id: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
        this operation to not poll, or pass in your own initialized polling object for a personal
        polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
        Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either OperationStatusResponse or the
        result of cls(response)
        :rtype:
        ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2017_03_30.models.OperationStatusResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # polling: True -> default AsyncARMPolling, False -> AsyncNoPolling,
        # anything else is treated as a caller-provided polling method.
        polling = kwargs.pop('polling', True)  # type: Union[bool, azure.core.polling.AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationStatusResponse"]
        # Delay between polls when no Retry-After header is returned.
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved poller state: issue the initial start request.
            # cls=lambda x,y,z: x keeps the raw pipeline response for the poller.
            raw_result = await self._start_initial(
                resource_group_name=resource_group_name,
                vm_scale_set_name=vm_scale_set_name,
                instance_id=instance_id,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # error_map only applies to the initial call; drop it so it is not
        # forwarded to the polling pipeline via **kwargs below.
        kwargs.pop('error_map', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response of the LRO and apply the
            # caller's cls hook, if one was supplied.
            response = pipeline_response.http_response
            deserialized = self._deserialize('OperationStatusResponse', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start'}  # type: ignore
| 47.445186 | 361 | 0.678802 |
f705116959898fd81eae2168f0c9e139ab6337b3 | 7,603 | py | Python | cogs/calculator.py | MerciDvor/modbot | f1a11eaa8e88d297ee19ca37aacc41489c0d0350 | [
"MIT"
] | 11 | 2019-01-10T22:09:31.000Z | 2021-12-14T05:26:10.000Z | cogs/calculator.py | MerciDvor/modbot | f1a11eaa8e88d297ee19ca37aacc41489c0d0350 | [
"MIT"
] | 5 | 2019-01-10T07:20:19.000Z | 2021-04-22T00:57:34.000Z | cogs/calculator.py | MerciDvor/modbot | f1a11eaa8e88d297ee19ca37aacc41489c0d0350 | [
"MIT"
] | 34 | 2019-01-10T05:49:29.000Z | 2022-02-11T14:04:54.000Z | from __future__ import division
import discord, math, operator
from discord.ext import commands
from pyparsing import (Literal,CaselessLiteral,Word,Combine,Group,Optional,
ZeroOrMore,Forward,nums,alphas,oneOf)
__author__='Paul McGuire'
__version__ = '$Revision: 0.0 $'
__date__ = '$Date: 2009-03-20 $'
__source__ = """http://pyparsing.wikispaces.com/file/view/fourFn.py
http://pyparsing.wikispaces.com/message/view/home/15549426
"""
__note__ = """
This is a re-wrap of Paul McGuire's fourFn.py as a class, so it can
be used easily in other places of the code. Most of the work wad done
by corpnewt, all I did was clean it and create the results in embeds.
Also, the messages are deleted after, except for the correct answer.
"""
class NumericStringParserForPython3(object):
    """
    Arithmetic expression parser/evaluator.

    Most of this code comes from the fourFn.py pyparsing example.
    Parsing pushes tokens onto ``self.exprStack`` (RPN order);
    :meth:`evaluateStack` then reduces that stack to a number.
    """
    def pushFirst(self, strg, loc, toks):
        # pyparsing parse action: push the matched token onto the eval stack.
        self.exprStack.append(toks[0])
    def pushUMinus(self, strg, loc, toks):
        # pyparsing parse action: record a unary minus for later negation.
        if toks and toks[0] == '-':
            self.exprStack.append('unary -')
    def __init__(self):
        """
        Build the grammar.

        Please use any of the following symbols:
        expop   :: '^'
        multop  :: '*' | '/'
        addop   :: '+' | '-'
        integer :: ['+' | '-'] '0'..'9'+
        """
        point = Literal(".")
        e = CaselessLiteral("E")
        fnumber = Combine(Word("+-" + nums, nums) +
                          Optional(point + Optional(Word(nums))) +
                          Optional(e + Word("+-" + nums, nums)))
        ident = Word(alphas, alphas + nums + "_$")
        plus = Literal("+")
        minus = Literal("-")
        mult = Literal("*")
        div = Literal("/")
        lpar = Literal("(").suppress()
        rpar = Literal(")").suppress()
        addop = plus | minus
        multop = mult | div
        expop = Literal("^")
        pi = CaselessLiteral("PI")
        expr = Forward()
        atom = ((Optional(oneOf("- +")) +
                 (pi | e | fnumber | ident + lpar + expr + rpar).setParseAction(self.pushFirst))
                | Optional(oneOf("- +")) + Group(lpar + expr + rpar)
                ).setParseAction(self.pushUMinus)
        # By defining exponentiation as "atom [ ^ factor ]..." instead of
        # "atom [ ^ atom ]...", we get right-to-left exponents:
        # 2^3^2 = 2^(3^2), not (2^3)^2.
        factor = Forward()
        factor << atom + ZeroOrMore((expop + factor).setParseAction(self.pushFirst))
        term = factor + ZeroOrMore((multop + factor).setParseAction(self.pushFirst))
        expr << term + ZeroOrMore((addop + term).setParseAction(self.pushFirst))
        self.bnf = expr
        # Map operator symbols to their corresponding arithmetic operations.
        epsilon = 1e-12
        self.opn = {
            "+": operator.add,
            "-": operator.sub,
            "*": operator.mul,
            "/": operator.truediv,
            "^": operator.pow}
        # BUGFIX: the original "sgn" used cmp(a, 0), but the builtin cmp()
        # was removed in Python 3; (a > 0) - (a < 0) is the documented
        # replacement and yields the same -1/0/1 result.
        self.fn = {
            "sin": math.sin,
            "cos": math.cos,
            "tan": math.tan,
            "abs": abs,
            "trunc": lambda a: int(a),
            "round": round,
            "sgn": lambda a: ((a > 0) - (a < 0)) if abs(a) > epsilon else 0}
    def evaluateStack(self, s):
        """Recursively reduce the RPN stack *s* to a single numeric value."""
        op = s.pop()
        if op == 'unary -':
            return -self.evaluateStack(s)
        if op in "+-*/^":
            # Operands come off the stack in reverse order.
            op2 = self.evaluateStack(s)
            op1 = self.evaluateStack(s)
            return self.opn[op](op1, op2)
        elif op == "PI":
            return math.pi  # 3.1415926535
        elif op == "E":
            return math.e  # 2.718281828
        elif op in self.fn:
            return self.fn[op](self.evaluateStack(s))
        elif op[0].isalpha():
            # Unknown identifiers silently evaluate to 0 (original behavior).
            return 0
        else:
            return float(op)
    def eval(self, num_string, parseAll=True):
        """Parse *num_string* and return its numeric value as a float."""
        self.exprStack = []
        results = self.bnf.parseString(num_string, parseAll)
        val = self.evaluateStack(self.exprStack[:])
        return val
class Calculator:
    """Cog exposing a small scientific-calculator command."""

    # Init with the bot reference, and a reference to the settings var
    def __init__(self, bot):
        self.bot = bot
        self.nsp = NumericStringParserForPython3()
        self.user_color = discord.Colour(0xed791d)  ## orange
        self.mod_color = discord.Colour(0x7289da)  ## blurple

    @commands.command(description='Scientific calculator', aliases=['calculate', 'maths'])
    async def calc(self, ctx, *, formula = None):
        """ ✔ Do some math
        thanks to Paul McGuire's fourFn.py. """
        person = ctx.message.author
        # BUGFIX: check for a missing formula *before* touching it. The
        # original called formula.replace(...) first, which raised
        # AttributeError on None instead of showing the usage hint.
        if formula is None:
            msg = f'\u200BUsage: `{ctx.prefix}{ctx.invoked_with} [any maths formula]`'
            e = discord.Embed(color=self.user_color)
            e.description = msg
            try:
                await ctx.send(embed=e, delete_after=23)
            except discord.HTTPException:
                # Fall back to plain text when embeds are not permitted.
                await ctx.send(msg, delete_after=23)
            return

        # Normalize verbose and unicode operators into the parser's grammar.
        formula = formula.replace('x', '*').replace(' minus ', '-').replace(' plus ', '+').replace(' into ', '/') \
            .replace(' sub ', '-').replace(' pi ', 'PI').replace(' divide ', '/').replace(' multiply ', '*') \
            .replace(' add ', '+').replace(' div ', '/').replace(' multi ', '*').replace(' mul ', '*') \
            .replace('π', 'PI').replace('÷', '/')

        try:
            answer = self.nsp.eval(formula)
        except Exception:
            # Parsing/evaluation failed: show examples instead of crashing.
            # (Narrowed from a bare except so Ctrl-C etc. still propagate.)
            msg = f'\N{THINKING FACE} wrong `{formula}` input.\n\nTry any of these:'
            e = discord.Embed(color=self.user_color)
            e.description = f'\u200B{msg}'
            e.add_field(name='multiply', value='`2 * 3 x 5 multiply 7`')
            e.add_field(name='divide', value='`91 / 5 divide 3 into 2 ÷ 4`')
            e.add_field(name='add', value='`1 + 4 plus 8 add 23`')
            e.add_field(name='substract', value='`91 - 35 minus 3 sub 12`')
            e.add_field(name='exponential', value="`7 ^ 5`")
            e.add_field(name='Supported formulas',
                        value='```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```')
            try:
                await ctx.send(embed=e, delete_after=23)
            except discord.HTTPException:
                error = f'\N{THINKING FACE} wrong `{formula}` input.\n\n ' \
                        f'Try any of these:```py\nround((cos(45) + (3+7^2)*2 + tan(369.18)) / π - 3)```'
                await ctx.send(error, delete_after=23)
            return

        # Correct input prints correct answer.
        # (The original computed `distance = self.bot or self.bot.message`;
        # self.bot is always truthy, so this is equivalent.)
        duration = f'Calculated in {self.bot.ws.latency * 1000:.2f} ms'
        success = round(answer, 2)
        e = discord.Embed(color=self.user_color)
        e.add_field(name='Input:', value=f'```py\n{formula}```', inline=True)
        e.add_field(name='Result:', value=f'```css\n{success}```', inline=True)
        e.set_footer(text=duration)
        try:
            await ctx.send(embed=e)
        except discord.Forbidden:  # FORBIDDEN (status code: 403): Missing Permissions
            await ctx.send(f'```rust\n>Input: {formula}\nResult: {success}```')
def setup(bot):
    # discord.py extension entry point: register the Calculator cog.
    bot.add_cog(Calculator(bot))
| 41.320652 | 123 | 0.544916 |
f7053ab01509c24ced03b369dae6ac255d9ca094 | 4,288 | py | Python | dp_conceptual_search/api/search/routes.py | flaxandteal/dp-conceptual-search | 16c6383a61ba5b7069337c2626a0dc243bfe9d35 | [
"MIT"
] | null | null | null | dp_conceptual_search/api/search/routes.py | flaxandteal/dp-conceptual-search | 16c6383a61ba5b7069337c2626a0dc243bfe9d35 | [
"MIT"
] | null | null | null | dp_conceptual_search/api/search/routes.py | flaxandteal/dp-conceptual-search | 16c6383a61ba5b7069337c2626a0dc243bfe9d35 | [
"MIT"
] | null | null | null | """
This file contains all routes for the /search API
"""
from sanic import Blueprint
from sanic.response import HTTPResponse
from dp4py_sanic.api.response.json_response import json
from dp_conceptual_search.config import CONFIG
from dp_conceptual_search.api.request import ONSRequest
from dp_conceptual_search.ons.search.index import Index
from dp_conceptual_search.ons.search.client.search_engine import SearchEngine
from dp_conceptual_search.ons.search.response.search_result import SearchResult
from dp_conceptual_search.api.search.sanic_search_engine import SanicSearchEngine
from dp_conceptual_search.api.search.conceptual import routes as conceptual_routes
search_blueprint = Blueprint('search', url_prefix='/search')
@search_blueprint.route('/departments', methods=['GET'], strict_slashes=True)
async def ons_departments_query(request: ONSRequest) -> HTTPResponse:
"""
Handles departments queries to the departments index
:param request:
:return:
"""
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.DEPARTMENTS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.departments_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/', methods=['GET', 'POST'], strict_slashes=False)
async def search(request: ONSRequest) -> HTTPResponse:
"""
API which combines the content, counts and featured result queries into one
:param request:
:return:
"""
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.search(request)
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
result = await sanic_search_engine.search(request)
return json(request, result, 200)
@search_blueprint.route('/content', methods=['GET', 'POST'], strict_slashes=True)
async def ons_content_query(request: ONSRequest) -> HTTPResponse:
"""
Handles content queries to the API.
:param request:
:return:
"""
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.conceptual_content_query(request)
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.content_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/counts', methods=['GET', 'POST'], strict_slashes=True)
async def ons_counts_query(request: ONSRequest) -> HTTPResponse:
"""
Handles type counts queries to the API.
:param request:
:return:
"""
if CONFIG.API.redirect_conceptual_search:
return await conceptual_routes.conceptual_counts_query(request)
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.type_counts_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/featured', methods=['GET'], strict_slashes=True)
async def ons_featured_result_query(request: ONSRequest) -> HTTPResponse:
"""
Handles featured result queries (i.e product and home page census pages)
:param request:
:return:
"""
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.featured_result_query(request)
return json(request, search_result.to_dict(), 200)
@search_blueprint.route('/uri/', methods=['GET', 'POST'])
@search_blueprint.route('/uri/<path:path>', methods=['GET', 'POST'])
async def search_by_uri(request: ONSRequest, path: str):
"""
Search for a page by it's uri
:param request:
:param path:
:return:
"""
# Initialise the search engine
sanic_search_engine = SanicSearchEngine(request.app, SearchEngine, Index.ONS)
# Perform the request
search_result: SearchResult = await sanic_search_engine.search_by_uri(request, path)
return json(request, search_result.to_dict(), 200)
| 34.304 | 90 | 0.752332 |
f7053cb96e078cbb641803774c57e3d6c47395cc | 5,903 | py | Python | src/_repobee/disthelpers.py | tohanss/repobee | cf5eb1e83e62c20bbca00c8ad9f798a612e1664f | [
"MIT"
] | null | null | null | src/_repobee/disthelpers.py | tohanss/repobee | cf5eb1e83e62c20bbca00c8ad9f798a612e1664f | [
"MIT"
] | null | null | null | src/_repobee/disthelpers.py | tohanss/repobee | cf5eb1e83e62c20bbca00c8ad9f798a612e1664f | [
"MIT"
] | null | null | null | """Helper functions for the distribution."""
import importlib
import json
import pathlib
import subprocess
import sys
import types
import os
from typing import Optional, List
import requests
import repobee_plug as plug
import _repobee.ext
from _repobee import distinfo
from _repobee import plugin
class DependencyResolutionError(plug.PlugError):
    """Raised when pip's dependency resolution fails during a plugin install."""
def get_installed_plugins_path() -> pathlib.Path:
    """Return the path to the ``installed_plugins.json`` file."""
    # The install dir must have been set by the distribution bootstrap.
    assert distinfo.INSTALL_DIR
    return distinfo.INSTALL_DIR.joinpath("installed_plugins.json")
def get_installed_plugins(
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> dict:
    """Return the public content of the installed_plugins.json file,
    i.e. with the private ``_metainfo`` section stripped out.
    """
    plugins = _get_installed_plugins(installed_plugins_path)
    plugins.pop("_metainfo", None)
    return plugins
def _get_installed_plugins(
installed_plugins_path: Optional[pathlib.Path] = None,
):
"""Return the content of the installed_plugins.json file, with metainfo."""
return json.loads(
(installed_plugins_path or get_installed_plugins_path()).read_text(
"utf8"
)
)
def write_installed_plugins(
    installed_plugins: dict,
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
    """Write the installed_plugins.json file, merging the existing on-disk
    ``_metainfo`` with any metainfo supplied in ``installed_plugins``.
    """
    target = installed_plugins_path or get_installed_plugins_path()
    # Merge: start from the metainfo already on disk, overlay the new one.
    merged_meta = _get_installed_plugins(target).get("_metainfo") or {}
    merged_meta.update(installed_plugins.get("_metainfo") or {})
    to_write = dict(installed_plugins)
    to_write["_metainfo"] = merged_meta
    target.write_text(json.dumps(to_write, indent=4), encoding="utf8")
def get_active_plugins(
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> List[str]:
    """Return the list of active plugins recorded in installed_plugins.json."""
    meta = _get_installed_plugins(installed_plugins_path).get("_metainfo") or {}
    return meta.get("active_plugins") or []
def write_active_plugins(
    active_plugins: List[str],
    installed_plugins_path: Optional[pathlib.Path] = None,
) -> None:
    """Persist the list of active plugins to installed_plugins.json."""
    installed = _get_installed_plugins(installed_plugins_path)
    # Record the active plugins in the private metainfo section.
    installed.setdefault("_metainfo", {})["active_plugins"] = active_plugins
    write_installed_plugins(installed, installed_plugins_path)
def get_pip_path() -> pathlib.Path:
    """Return the path to the pip binary inside the distribution's venv."""
    # The install dir must have been set by the distribution bootstrap.
    assert distinfo.INSTALL_DIR
    return distinfo.INSTALL_DIR.joinpath("env", "bin", "pip")
def get_plugins_json(url: str = "https://repobee.org/plugins.json") -> dict:
    """Fetch and parse the plugins.json file.

    Args:
        url: URL to the plugins.json file.
    Returns:
        A dictionary with the contents of the plugins.json file.
    Raises:
        plug.PlugError: If the file cannot be fetched.
    """
    response = requests.get(url)
    if response.status_code == 200:
        return response.json()
    # Log the raw body to aid debugging before failing.
    plug.log.error(response.content.decode("utf8"))
    raise plug.PlugError(f"could not fetch plugins.json from '{url}'")
def get_builtin_plugins(ext_pkg: types.ModuleType = _repobee.ext) -> dict:
    """Return a dictionary of builtin plugins on the same form as the
    plugins.json dict.
    """

    def _describe(plugin_name: str) -> str:
        # A builtin plugin may declare PLUGIN_DESCRIPTION; fall back to "-".
        module = importlib.import_module(f"{ext_pkg.__name__}.{plugin_name}")
        return module.__dict__.get("PLUGIN_DESCRIPTION") or "-"

    builtins = {}
    for plugin_name in plugin.get_module_names(ext_pkg):
        builtins[plugin_name] = {
            "description": _describe(plugin_name),
            "url": (
                "https://repobee.readthedocs.io/"
                f"en/stable/builtins.html#{plugin_name}"
            ),
            "versions": {"N/A": {}},
            "builtin": True,
        }
    return builtins
def pip(command: str, *args, **kwargs) -> subprocess.CompletedProcess:
    """Thin wrapper around the ``pip`` executable in the distribution's virtual
    environment.

    Args:
        command: The command to execute (e.g. "install" or "list").
        args: Positional arguments to ``pip``, passed in order. Flags should
            also be passed here (e.g. `--pre`)
        kwargs: Keyword arguments to ``pip``, passed as ``--key=value`` to the
            CLI. If the value is ``True``, the argument is passed as a flag,
            i.e. as ``--key``.
    Returns:
        The completed ``pip`` subprocess; check ``returncode`` for success.
    Raises:
        DependencyResolutionError: If the 2020-resolver fails to
            resolve dependencies.
    """
    # Translate keyword arguments into pip CLI options.
    cli_kwargs = [
        f"--{key.replace('_', '-')}"
        # True is interpreted as a flag
        + (f"={val}" if val is not True else "")
        for key, val in kwargs.items()
    ]
    env = dict(os.environ)
    if command == "install":
        # the resolver allows us to avoid installing plugins that are
        # incompatible with the current version of RepoBee
        cli_kwargs.append("--use-feature=2020-resolver")
        # REPOBEE_INSTALL_DIR must be available when upgrading RepoBee,
        # or the dist plugins aren't activated
        env["REPOBEE_INSTALL_DIR"] = str(distinfo.INSTALL_DIR)
        # due to the hack in setup.py to edit the distinfo, we must build
        # RepoBee from source
        cli_kwargs.append("--no-binary=repobee")
    cmd = [str(get_pip_path()), command, *args, *cli_kwargs]
    # Capture both streams so failures can be logged below.
    proc = subprocess.run(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
    )
    if proc.returncode != 0:
        stderr = proc.stderr.decode(sys.getdefaultencoding())
        plug.log.error(stderr)
        # pip reports unresolvable dependency sets with this marker string.
        if "ResolutionImpossible" in stderr:
            raise DependencyResolutionError()
    return proc
| 32.256831 | 79 | 0.671015 |
f705402b1b08aa6730ba341cc50c68502d1b99d6 | 17,851 | py | Python | tests/test_scheduler.py | atlas555/pyspider | 8f71e0e8d67f03a728cd5ea48fa931f6415e1e10 | [
"Apache-2.0"
] | 5 | 2015-03-31T13:25:25.000Z | 2016-03-14T11:17:02.000Z | tests/test_scheduler.py | e-dorigatti/pyspider | 8f71e0e8d67f03a728cd5ea48fa931f6415e1e10 | [
"Apache-2.0"
] | null | null | null | tests/test_scheduler.py | e-dorigatti/pyspider | 8f71e0e8d67f03a728cd5ea48fa931f6415e1e10 | [
"Apache-2.0"
] | 1 | 2016-02-17T23:12:47.000Z | 2016-02-17T23:12:47.000Z | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-02-08 22:37:13
import os
import time
import shutil
import unittest2 as unittest
import logging
import logging.config
logging.config.fileConfig("pyspider/logging.conf")
from pyspider.scheduler.task_queue import TaskQueue
class TestTaskQueue(unittest.TestCase):
    """Tests for TaskQueue.

    NOTE: the test methods are order-dependent (named test_10, test_20, ...)
    and share one TaskQueue instance created in setUpClass.
    """
    @classmethod
    def setUpClass(self):
        # High rate/burst so rate limiting never blocks these tests.
        self.task_queue = TaskQueue()
        self.task_queue.rate = 100000
        self.task_queue.burst = 100000
        self.task_queue.processing_timeout = 0.5
    def test_10_put(self):
        # a3 and a4 are scheduled in the future; a2 and a1 are immediate.
        self.task_queue.put('a3', 0, time.time() + 0.5)
        self.task_queue.put('a4', 3, time.time() + 0.2)
        self.task_queue.put('a2', 0)
        self.task_queue.put('a1', 1)
        self.assertEqual(self.task_queue.size(), 4)
    def test_20_update(self):
        # Re-putting an existing task updates it rather than adding a new one.
        self.task_queue.put('a2', 4)
        self.assertEqual(self.task_queue.size(), 4)
        self.task_queue.put('a3', 2, 0)
        self.assertEqual(self.task_queue.size(), 4)
    def test_30_get_from_priority_queue(self):
        # a2 now has the highest priority (4) after the update above.
        self.assertEqual(self.task_queue.get(), 'a2')
        self.assertEqual(self.task_queue.size(), 4)
    def test_40_time_queue_1(self):
        self.task_queue.check_update()
        self.assertEqual(self.task_queue.get(), 'a3')
        self.assertEqual(self.task_queue.size(), 4)
    def test_50_time_queue_2(self):
        # After 0.3s a4's scheduled time has passed, so it becomes available.
        time.sleep(0.3)
        self.task_queue.check_update()
        self.assertEqual(self.task_queue.get(), 'a4')
        self.assertEqual(self.task_queue.get(), 'a1')
        self.assertEqual(self.task_queue.size(), 4)
    def test_60_processing_queue(self):
        # After processing_timeout (0.5s) un-done tasks are handed out again.
        time.sleep(0.5)
        self.task_queue.check_update()
        self.assertEqual(self.task_queue.get(), 'a2')
        self.assertEqual(len(self.task_queue), 4)
        self.assertEqual(self.task_queue.get(), 'a4')
        self.assertEqual(self.task_queue.get(), 'a3')
        self.assertEqual(self.task_queue.get(), 'a1')
        self.assertEqual(len(self.task_queue), 4)
    def test_70_done(self):
        # Marking tasks done removes them from the processing queue.
        self.assertTrue(self.task_queue.done('a2'))
        self.assertTrue(self.task_queue.done('a1'))
        self.assertEqual(len(self.task_queue), 2)
        self.assertTrue(self.task_queue.done('a4'))
        self.assertTrue(self.task_queue.done('a3'))
        self.assertEqual(len(self.task_queue), 0)
from pyspider.scheduler.token_bucket import Bucket
class TestBucket(unittest.TestCase):
    """Tests for the token bucket rate limiter."""
    def test_bucket(self):
        """A full bucket stays at burst size and refills at ~rate tokens/second."""
        token_bucket = Bucket(100, 1000)
        # A full bucket never exceeds its burst size, even as time passes.
        self.assertEqual(token_bucket.get(), 1000)
        time.sleep(0.1)
        self.assertEqual(token_bucket.get(), 1000)
        # Consume 100 tokens, then watch the bucket refill at ~100 tokens/s.
        token_bucket.desc(100)
        self.assertEqual(token_bucket.get(), 900)
        time.sleep(0.1)
        self.assertAlmostEqual(token_bucket.get(), 910, delta=2)
        time.sleep(0.1)
        self.assertAlmostEqual(token_bucket.get(), 920, delta=2)
try:
from six.moves import xmlrpc_client
except ImportError:
import xmlrpclib as xmlrpc_client
from pyspider.scheduler.scheduler import Scheduler
from pyspider.database.sqlite import taskdb, projectdb, resultdb
from pyspider.libs.multiprocessing_queue import Queue
from pyspider.libs.utils import run_in_thread
class TestScheduler(unittest.TestCase):
    """Integration tests for the Scheduler.

    A real Scheduler is started in a background thread (with an XML-RPC
    control interface) in setUpClass and shared by every test; test methods
    are order-dependent (named test_10 ... test_z20) and communicate with
    the scheduler via the newtask/status/scheduler2fetcher queues.
    """
    # Paths to throwaway sqlite databases used by the scheduler under test.
    taskdb_path = './data/tests/task.db'
    projectdb_path = './data/tests/project.db'
    resultdb_path = './data/tests/result.db'
    check_project_time = 1
    scheduler_xmlrpc_port = 23333
    @classmethod
    def setUpClass(self):
        # Start from a clean data directory.
        shutil.rmtree('./data/tests', ignore_errors=True)
        os.makedirs('./data/tests')
        # Factory functions so the scheduler thread gets its own DB handles.
        def get_taskdb():
            return taskdb.TaskDB(self.taskdb_path)
        self.taskdb = get_taskdb()
        def get_projectdb():
            return projectdb.ProjectDB(self.projectdb_path)
        self.projectdb = get_projectdb()
        def get_resultdb():
            return resultdb.ResultDB(self.resultdb_path)
        self.resultdb = get_resultdb()
        self.newtask_queue = Queue(10)
        self.status_queue = Queue(10)
        self.scheduler2fetcher = Queue(10)
        self.rpc = xmlrpc_client.ServerProxy('http://localhost:%d' % self.scheduler_xmlrpc_port)
        def run_scheduler():
            scheduler = Scheduler(taskdb=get_taskdb(), projectdb=get_projectdb(),
                                  newtask_queue=self.newtask_queue, status_queue=self.status_queue,
                                  out_queue=self.scheduler2fetcher, data_path="./data/tests/",
                                  resultdb=get_resultdb())
            # Shrink the intervals/limits so the tests run quickly.
            scheduler.UPDATE_PROJECT_INTERVAL = 0.1
            scheduler.LOOP_INTERVAL = 0.1
            scheduler.INQUEUE_LIMIT = 10
            scheduler.DELETE_TIME = 0
            scheduler.DEFAULT_RETRY_DELAY = {'': 5}
            scheduler._last_tick = int(time.time()) # not dispatch cronjob
            run_in_thread(scheduler.xmlrpc_run, port=self.scheduler_xmlrpc_port)
            scheduler.run()
        self.process = run_in_thread(run_scheduler)
        # Give the scheduler thread time to come up before the tests start.
        time.sleep(1)
    @classmethod
    def tearDownClass(self):
        if self.process.is_alive():
            self.rpc._quit()
            self.process.join(5)
        assert not self.process.is_alive()
        shutil.rmtree('./data/tests', ignore_errors=True)
        time.sleep(1)
    def test_10_new_task_ignore(self):
        # Tasks for an unknown project are dropped.
        self.newtask_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url'
        })
        self.assertEqual(self.rpc.size(), 0)
        self.assertEqual(len(self.rpc.get_active_tasks()), 0)
    def test_20_new_project(self):
        self.projectdb.insert('test_project', {
            'name': 'test_project',
            'group': 'group',
            'status': 'TODO',
            'script': 'import time\nprint(time.time())',
            'comments': 'test project',
            'rate': 1.0,
            'burst': 10,
        })
    def test_30_update_project(self):
        from six.moves import queue as Queue
        # Nothing should be dispatched while the project is still TODO.
        with self.assertRaises(Queue.Empty):
            task = self.scheduler2fetcher.get(timeout=1)
        self.projectdb.update('test_project', status="DEBUG")
        time.sleep(0.1)
        self.rpc.update_project()
        # Switching to DEBUG triggers the _on_get_info bootstrap task.
        task = self.scheduler2fetcher.get(timeout=10)
        self.assertIsNotNone(task)
        self.assertEqual(task['url'], 'data:,_on_get_info')
    def test_34_new_not_used_project(self):
        self.projectdb.insert('test_project_not_started', {
            'name': 'test_project_not_started',
            'group': 'group',
            'status': 'RUNNING',
            'script': 'import time\nprint(time.time())',
            'comments': 'test project',
            'rate': 1.0,
            'burst': 10,
        })
        task = self.scheduler2fetcher.get(timeout=1)
        self.assertEqual(task['taskid'], '_on_get_info')
    def test_35_new_task(self):
        time.sleep(0.2)
        self.newtask_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'fetch': {
                'data': 'abc',
            },
            'process': {
                'data': 'abc',
            },
            'schedule': {
                'age': 0,
            },
        })
        time.sleep(0.5)
        task = self.scheduler2fetcher.get(timeout=10)
        self.assertGreater(len(self.rpc.get_active_tasks()), 0)
        self.assertIsNotNone(task)
        self.assertEqual(task['project'], 'test_project')
        self.assertIn('schedule', task)
        self.assertIn('fetch', task)
        self.assertIn('process', task)
        self.assertIn('track', task)
        self.assertEqual(task['fetch']['data'], 'abc')
    def test_37_force_update_processing_task(self):
        self.newtask_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url_force_update',
            'schedule': {
                'age': 10,
                'force_update': True,
            },
        })
        time.sleep(0.2)
        # it should not block next
    def test_40_taskdone_error_no_project(self):
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'no_project',
            'url': 'url'
        })
        time.sleep(0.1)
        self.assertEqual(self.rpc.size(), 1)
    def test_50_taskdone_error_no_track(self):
        # Status reports without a 'track' section must not change queue size.
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url'
        })
        time.sleep(0.1)
        self.assertEqual(self.rpc.size(), 1)
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'track': {}
        })
        time.sleep(0.1)
        self.assertEqual(self.rpc.size(), 1)
    def test_60_taskdone_failed_retry(self):
        # A failed process step should be re-dispatched after the retry delay.
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'track': {
                'fetch': {
                    'ok': True
                },
                'process': {
                    'ok': False
                },
            }
        })
        from six.moves import queue as Queue
        with self.assertRaises(Queue.Empty):
            task = self.scheduler2fetcher.get(timeout=4)
        task = self.scheduler2fetcher.get(timeout=5)
        self.assertIsNotNone(task)
    def test_70_taskdone_ok(self):
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'track': {
                'fetch': {
                    'ok': True
                },
                'process': {
                    'ok': True
                },
            }
        })
        time.sleep(0.2)
        self.assertEqual(self.rpc.size(), 0)
    def test_80_newtask_age_ignore(self):
        # A recently-succeeded task within its 'age' window is not re-queued.
        self.newtask_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'fetch': {
                'data': 'abc',
            },
            'process': {
                'data': 'abc',
            },
            'schedule': {
                'age': 30,
            },
        })
        time.sleep(0.1)
        self.assertEqual(self.rpc.size(), 0)
    def test_82_newtask_via_rpc(self):
        self.rpc.newtask({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'fetch': {
                'data': 'abc',
            },
            'process': {
                'data': 'abc',
            },
            'schedule': {
                'age': 30,
            },
        })
        time.sleep(0.1)
        self.assertEqual(self.rpc.size(), 0)
    def test_90_newtask_with_itag(self):
        # A changed itag forces a re-crawl even inside the age window.
        time.sleep(0.1)
        self.newtask_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'fetch': {
                'data': 'abc',
            },
            'process': {
                'data': 'abc',
            },
            'schedule': {
                'itag': "abc",
                'retries': 1
            },
        })
        task = self.scheduler2fetcher.get(timeout=10)
        self.assertIsNotNone(task)
        self.test_70_taskdone_ok()
    def test_a10_newtask_restart_by_age(self):
        self.newtask_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'fetch': {
                'data': 'abc',
            },
            'process': {
                'data': 'abc',
            },
            'schedule': {
                'age': 0,
                'retries': 1
            },
        })
        task = self.scheduler2fetcher.get(timeout=10)
        self.assertIsNotNone(task)
    def test_a20_failed_retry(self):
        # First failure: one retry is granted and dispatched.
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'track': {
                'fetch': {
                    'ok': True
                },
                'process': {
                    'ok': False
                },
            }
        })
        task = self.scheduler2fetcher.get(timeout=5)
        self.assertIsNotNone(task)
        # Second failure: retries exhausted, nothing more is dispatched.
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'track': {
                'fetch': {
                    'ok': False
                },
                'process': {
                    'ok': False
                },
            }
        })
        from six.moves import queue as Queue
        with self.assertRaises(Queue.Empty):
            self.scheduler2fetcher.get(timeout=5)
    def test_a30_task_verify(self):
        # Tasks missing any of taskid/project/url, or naming an unknown
        # project, are rejected by newtask.
        self.assertFalse(self.rpc.newtask({
            #'taskid': 'taskid#',
            'project': 'test_project',
            'url': 'url',
        }))
        self.assertFalse(self.rpc.newtask({
            'taskid': 'taskid#',
            #'project': 'test_project',
            'url': 'url',
        }))
        self.assertFalse(self.rpc.newtask({
            'taskid': 'taskid#',
            'project': 'test_project',
            #'url': 'url',
        }))
        self.assertFalse(self.rpc.newtask({
            'taskid': 'taskid#',
            'project': 'not_exist_project',
            'url': 'url',
        }))
        self.assertTrue(self.rpc.newtask({
            'taskid': 'taskid#',
            'project': 'test_project',
            'url': 'url',
        }))
    def test_a40_success_recrawl(self):
        # auto_recrawl keeps re-dispatching the task even after success.
        self.newtask_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'fetch': {
                'data': 'abc',
            },
            'process': {
                'data': 'abc',
            },
            'schedule': {
                'age': 0,
                'retries': 1,
                'auto_recrawl': True,
            },
        })
        task = self.scheduler2fetcher.get(timeout=10)
        self.assertIsNotNone(task)
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'schedule': {
                'age': 0,
                'retries': 1,
                'auto_recrawl': True,
            },
            'track': {
                'fetch': {
                    'ok': True
                },
                'process': {
                    'ok': True
                },
            }
        })
        task = self.scheduler2fetcher.get(timeout=10)
        self.assertIsNotNone(task)
    def test_a50_failed_recrawl(self):
        # With auto_recrawl, failures are also re-dispatched each time.
        for i in range(3):
            self.status_queue.put({
                'taskid': 'taskid',
                'project': 'test_project',
                'url': 'url',
                'schedule': {
                    'age': 0,
                    'retries': 1,
                    'auto_recrawl': True,
                },
                'track': {
                    'fetch': {
                        'ok': True
                    },
                    'process': {
                        'ok': False
                    },
                }
            })
            task = self.scheduler2fetcher.get(timeout=10)
            self.assertIsNotNone(task)
    def test_a60_disable_recrawl(self):
        # Without auto_recrawl a successful task is not re-dispatched.
        self.status_queue.put({
            'taskid': 'taskid',
            'project': 'test_project',
            'url': 'url',
            'schedule': {
                'age': 0,
                'retries': 1,
            },
            'track': {
                'fetch': {
                    'ok': True
                },
                'process': {
                    'ok': True
                },
            }
        })
        from six.moves import queue as Queue
        with self.assertRaises(Queue.Empty):
            self.scheduler2fetcher.get(timeout=5)
    def test_x10_inqueue_limit(self):
        # Only INQUEUE_LIMIT (10) of the 20 submitted tasks may be queued.
        self.projectdb.insert('test_inqueue_project', {
            'name': 'test_inqueue_project',
            'group': 'group',
            'status': 'DEBUG',
            'script': 'import time\nprint(time.time())',
            'comments': 'test project',
            'rate': 0,
            'burst': 0,
        })
        time.sleep(0.1)
        pre_size = self.rpc.size()
        for i in range(20):
            self.newtask_queue.put({
                'taskid': 'taskid%d' % i,
                'project': 'test_inqueue_project',
                'url': 'url',
                'schedule': {
                    'age': 3000,
                    'force_update': True,
                },
            })
        time.sleep(1)
        self.assertEqual(self.rpc.size() - pre_size, 10)
    def test_x20_delete_project(self):
        # The 'delete' group marker causes the project and its tasks to be purged.
        self.assertIsNotNone(self.projectdb.get('test_inqueue_project'))
        #self.assertIsNotNone(self.taskdb.get_task('test_inqueue_project', 'taskid1'))
        self.projectdb.update('test_inqueue_project', status="STOP", group="lock,delete")
        time.sleep(1)
        self.assertIsNone(self.projectdb.get('test_inqueue_project'))
        self.taskdb._list_project()
        self.assertIsNone(self.taskdb.get_task('test_inqueue_project', 'taskid1'))
    def test_z10_startup(self):
        self.assertTrue(self.process.is_alive())
    def test_z20_quit(self):
        # _quit shuts down the scheduler thread; state must be persisted.
        self.rpc._quit()
        time.sleep(0.2)
        self.assertFalse(self.process.is_alive())
        self.assertEqual(
            self.taskdb.get_task('test_project', 'taskid')['status'],
            self.taskdb.SUCCESS
        )
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 30.671821 | 99 | 0.503893 |
f7059649cace577ed483d9e7b5ab728bae8e0607 | 12,268 | py | Python | VMBackup/main/PluginHost.py | jamvar/azure-linux-extensions | 66610daae2ef09f7920d9c4aa2e99a3035fe76a6 | [
"Apache-2.0"
] | 2 | 2021-11-02T00:16:29.000Z | 2022-02-17T12:08:42.000Z | VMBackup/main/PluginHost.py | jamvar/azure-linux-extensions | 66610daae2ef09f7920d9c4aa2e99a3035fe76a6 | [
"Apache-2.0"
] | 3 | 2019-07-29T20:25:09.000Z | 2019-08-13T00:00:45.000Z | VMBackup/main/PluginHost.py | ChrisCoe/azure-linux-extensions | 1ca6fce15eca3ddefc33651b094c9a4b4e52fa31 | [
"Apache-2.0"
] | 1 | 2017-07-17T18:52:10.000Z | 2017-07-17T18:52:10.000Z | import time
import sys
import os
import threading
try:
import ConfigParser as ConfigParsers
except ImportError:
import configparser as ConfigParsers
from common import CommonVariables
from pwd import getpwuid
from stat import *
import traceback
# [pre_post]
# "timeout" : (in seconds),
#
# .... other params ...
#
# "pluginName0" : "oracle_plugin", the python plugin file will have same name
# "pluginPath0" : "/abc/xyz/"
# "pluginConfigPath0" : "sdf/sdf/abcd.json"
#
#
# errorcode policy
# errorcode = 0 (CommonVariables.PrePost_PluginStatus_Successs), means success, script runs without error, warnings maybe possible
# errorcode = 5 (CommonVariables.PrePost_PluginStatus_Timeout), means timeout
# errorcode = 10 (CommonVariables.PrePost_PluginStatus_ConfigNotFound), config file not found
# errorcode = process return code, means bash script encountered some other error, like 127 for script not found
class PluginHostError(object):
def __init__(self, errorCode, pluginName):
self.errorCode = errorCode
self.pluginName = pluginName
def __str__(self):
return 'Plugin :- ', self.pluginName , ' ErrorCode :- ' + str(self.errorCode)
class PluginHostResult(object):
def __init__(self):
self.errors = []
self.anyScriptFailed = False
self.continueBackup = True
self.errorCode = 0
self.fileCode = []
self.filePath = []
def __str__(self):
errorStr = ''
for error in self.errors:
errorStr += (str(error)) + '\n'
errorStr += 'Final Error Code :- ' + str(self.errorCode) + '\n'
errorStr += 'Any script Failed :- ' + str(self.anyScriptFailed) + '\n'
errorStr += 'Continue Backup :- ' + str(self.continueBackup) + '\n'
return errorStr
class PluginHost(object):
""" description of class """
def __init__(self, logger):
self.logger = logger
self.modulesLoaded = False
self.configLocation = '/etc/azure/VMSnapshotPluginHost.conf'
self.timeoutInSeconds = 1800
self.plugins = []
self.pluginName = []
self.noOfPlugins = 0
self.preScriptCompleted = []
self.preScriptResult = []
self.postScriptCompleted = []
self.postScriptResult = []
def pre_check(self):
self.logger.log('Loading script modules now...',True,'Info')
errorCode = CommonVariables.PrePost_PluginStatus_Success
dobackup = True
fsFreeze_on = True
if not os.path.isfile(self.configLocation):
self.logger.log('Plugin host Config file does not exist in the location ' + self.configLocation, True)
self.configLocation = './main/VMSnapshotPluginHost.conf'
permissions = self.get_permissions(self.configLocation)
if not os.path.isfile(self.configLocation):
self.logger.log('Plugin host Config file does not exist in the location ' + self.configLocation, True)
errorCode =CommonVariables.FailedPrepostPluginhostConfigNotFound
elif not (int(permissions[1]) == 0 or int(permissions[1]) == 4) or not (int(permissions[2]) == 0 or int(permissions[2]) == 4):
self.logger.log('Plugin host Config file does not have desired permissions', True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginhostConfigPermissionError
elif not self.find_owner(self.configLocation) == 'root':
self.logger.log('The owner of the Plugin host Config file ' + self.configLocation + ' is ' + self.find_owner(self.configLocation) + ' but not root', True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginhostConfigPermissionError
else :
errorCode,dobackup,fsFreeze_on = self.load_modules()
return errorCode,dobackup,fsFreeze_on
def load_modules(self):
# Imports all plugin modules using the information in config.json
# and initializes basic class variables associated with each plugin
len = 0
errorCode = CommonVariables.PrePost_PluginStatus_Success
dobackup = True
fsFreeze_on = True
try:
self.logger.log('config file: '+str(self.configLocation),True,'Info')
config = ConfigParsers.ConfigParser()
config.read(self.configLocation)
if (config.has_option('pre_post', 'timeoutInSeconds')):
self.timeoutInSeconds = min(int(config.get('pre_post','timeoutInSeconds')),self.timeoutInSeconds)
if (config.has_option('pre_post', 'numberOfPlugins')):
len = int(config.get('pre_post','numberOfPlugins'))
self.logger.log('timeoutInSeconds: '+str(self.timeoutInSeconds),True,'Info')
self.logger.log('numberOfPlugins: '+str(len),True,'Info')
while len > 0:
pname = config.get('pre_post','pluginName'+str(self.noOfPlugins))
ppath = config.get('pre_post','pluginPath'+str(self.noOfPlugins))
pcpath = config.get('pre_post','pluginConfigPath'+str(self.noOfPlugins))
self.logger.log('Name of the Plugin is ' + pname, True)
self.logger.log('Plugin config path is ' + pcpath, True)
errorCode = CommonVariables.PrePost_PluginStatus_Success
dobackup = True
if os.path.isfile(pcpath):
permissions = self.get_permissions(pcpath)
if (int(permissions[0]) %2 == 1) or int(permissions[1]) > 0 or int(permissions[2]) > 0:
self.logger.log('Plugin Config file does not have desired permissions', True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginConfigPermissionError
if not self.find_owner(pcpath) == 'root':
self.logger.log('The owner of the Plugin Config file ' + pcpath + ' is ' + self.find_owner(pcpath) + ' but not root', True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginConfigPermissionError
else:
self.logger.log('Plugin host file does not exist in the location ' + pcpath, True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginConfigNotFound
if(errorCode == CommonVariables.PrePost_PluginStatus_Success):
sys.path.append(ppath)
plugin = __import__(pname)
self.plugins.append(plugin.ScriptRunner(logger=self.logger,name=pname,configPath=pcpath,maxTimeOut=self.timeoutInSeconds))
errorCode,dobackup,fsFreeze_on = self.plugins[self.noOfPlugins].validate_scripts()
self.noOfPlugins = self.noOfPlugins + 1
self.pluginName.append(pname)
self.preScriptCompleted.append(False)
self.preScriptResult.append(None)
self.postScriptCompleted.append(False)
self.postScriptResult.append(None)
len = len - 1
if self.noOfPlugins != 0:
self.modulesLoaded = True
except Exception as err:
errMsg = 'Error in reading PluginHost config file : %s, stack trace: %s' % (str(err), traceback.format_exc())
self.logger.log(errMsg, True, 'Error')
errorCode = CommonVariables.FailedPrepostPluginhostConfigParsing
return errorCode,dobackup,fsFreeze_on
def find_owner(self, filename):
file_owner = ''
try:
file_owner = getpwuid(os.stat(filename).st_uid).pw_name
except Exception as err:
errMsg = 'Error in fetching owner of the file : ' + filename + ': %s, stack trace: %s' % (str(err), traceback.format_exc())
self.logger.log(errMsg, True, 'Error')
return file_owner
def get_permissions(self, filename):
permissions = '777'
try:
permissions = oct(os.stat(filename)[ST_MODE])[-3:]
self.logger.log('Permisisons of the file ' + filename + ' are ' + permissions,True)
except Exception as err:
errMsg = 'Error in fetching permissions of the file : ' + filename + ': %s, stack trace: %s' % (str(err), traceback.format_exc())
self.logger.log(errMsg, True, 'Error')
return permissions
def pre_script(self):
# Runs pre_script() for all plugins and maintains a timer
result = PluginHostResult()
curr = 0
for plugin in self.plugins:
t1 = threading.Thread(target=plugin.pre_script, args=(curr, self.preScriptCompleted, self.preScriptResult))
t1.start()
curr = curr + 1
flag = True
for i in range(0,((self.timeoutInSeconds)/5)+2): #waiting 10 more seconds to escape race condition between Host and script timing out
time.sleep(5)
flag = True
for j in range(0,self.noOfPlugins):
flag = flag & self.preScriptCompleted[j]
if flag:
break
continueBackup = True
#Plugin timed out
if not flag:
ecode = CommonVariables.FailedPrepostPluginhostPreTimeout
result.anyScriptFailed = True
presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
result.errors.append(presult)
else:
for j in range(0,self.noOfPlugins):
ecode = CommonVariables.FailedPrepostPluginhostPreTimeout
continueBackup = continueBackup & self.preScriptResult[j].continueBackup
if self.preScriptCompleted[j]:
ecode = self.preScriptResult[j].errorCode
if ecode != CommonVariables.PrePost_PluginStatus_Success:
result.anyScriptFailed = True
presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
result.errors.append(presult)
result.continueBackup = continueBackup
self.logger.log('Finished prescript execution from PluginHost side. Continue Backup: '+str(continueBackup),True,'Info')
return result
def post_script(self):
# Runs post_script() for all plugins and maintains a timer
result = PluginHostResult()
if not self.modulesLoaded:
return result
self.logger.log('Starting postscript for all modules.',True,'Info')
curr = 0
for plugin in self.plugins:
t1 = threading.Thread(target=plugin.post_script, args=(curr, self.postScriptCompleted, self.postScriptResult))
t1.start()
curr = curr + 1
flag = True
for i in range(0,((self.timeoutInSeconds)/5)+2): #waiting 10 more seconds to escape race condition between Host and script timing out
time.sleep(5)
flag = True
for j in range(0,self.noOfPlugins):
flag = flag & self.postScriptCompleted[j]
if flag:
break
continueBackup = True
#Plugin timed out
if not flag:
ecode = CommonVariables.FailedPrepostPluginhostPostTimeout
result.anyScriptFailed = True
presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
result.errors.append(presult)
else:
for j in range(0,self.noOfPlugins):
ecode = CommonVariables.FailedPrepostPluginhostPostTimeout
continueBackup = continueBackup & self.postScriptResult[j].continueBackup
if self.postScriptCompleted[j]:
ecode = self.postScriptResult[j].errorCode
if ecode != CommonVariables.PrePost_PluginStatus_Success:
result.anyScriptFailed = True
presult = PluginHostError(errorCode = ecode, pluginName = self.pluginName[j])
result.errors.append(presult)
result.continueBackup = continueBackup
self.logger.log('Finished postscript execution from PluginHost side. Continue Backup: '+str(continueBackup),True,'Info')
return result
| 43.814286 | 175 | 0.623329 |
f705c09c479088d7f96725e5df722801a0715965 | 5,653 | py | Python | ibmsecurity/isam/base/network/felb/services/servers.py | ibm-enio/ibmsecurity | 81f989678642c3b6a49b2a3fbb5d9ca98804ef17 | [
"Apache-2.0"
] | 2 | 2019-12-05T13:51:10.000Z | 2019-12-20T08:02:35.000Z | ibmsecurity/isam/base/network/felb/services/servers.py | ibm-enio/ibmsecurity | 81f989678642c3b6a49b2a3fbb5d9ca98804ef17 | [
"Apache-2.0"
] | null | null | null | ibmsecurity/isam/base/network/felb/services/servers.py | ibm-enio/ibmsecurity | 81f989678642c3b6a49b2a3fbb5d9ca98804ef17 | [
"Apache-2.0"
] | 1 | 2020-04-03T09:30:01.000Z | 2020-04-03T09:30:01.000Z | import ibmsecurity.utilities.tools
import logging
logger = logging.getLogger(__name__)
module_uri = "/isam/felb/configuration/services/"
requires_modulers = None
requires_version = None
def add(isamAppliance, service_name, address, active, port, weight, secure, ssllabel, check_mode=False, force=False):
    """
    Creating a server

    Idempotent: the server is only created when it does not already exist
    under the given service, unless force is True.
    """
    # NOTE: the original passed port= to _check_exist (which accepts no such
    # parameter, raising TypeError) and created the server only when it
    # *already* existed; both defects are fixed here.
    change_required = not _check_exist(isamAppliance, service_name, address)
    if force is True or change_required is True:
        if check_mode is True:
            return isamAppliance.create_return_object(changed=True)
        else:
            return isamAppliance.invoke_post("Creating a server",
                                             "{0}{1}/servers".format(module_uri, service_name),
                                             {
                                                 "active": active,
                                                 "address": address,
                                                 "port": port,
                                                 "weight": weight,
                                                 "secure": secure,
                                                 "ssllabel": ssllabel
                                             },
                                             requires_version=requires_version, requires_modules=requires_modulers)
    else:
        return isamAppliance.create_return_object()
def delete(isamAppliance, service_name, address, check_mode=False, force=False):
    """
    Deletes a server from the specified service (idempotent: only when it exists).
    """
    # Nothing to do when the server is absent and force is not requested.
    if force is not True and _check_exist(isamAppliance, service_name, address) is not True:
        return isamAppliance.create_return_object()
    if check_mode is True:
        return isamAppliance.create_return_object(changed=True)
    return isamAppliance.invoke_delete("Deleting a server",
                                       "{0}{1}/servers/{2}".format(module_uri, service_name, address),
                                       requires_version=requires_version, requires_modules=requires_modulers)
def get(isamAppliance, service_name, address, check_mode=False, force=False):
    """
    Retrieves a single server from the specified service.
    """
    return isamAppliance.invoke_get("Retrieving a server",
                                    "{0}{1}/servers/{2}".format(module_uri, service_name, address),
                                    requires_version=requires_version, requires_modules=requires_modulers)
def get_all(isamAppliance, service_name, check_mode=False, force=False):
    """
    Retrieves the list of servers under the specified service.
    """
    uri = "{0}{1}/servers".format(module_uri, service_name)
    return isamAppliance.invoke_get("Retrieving servers for a service", uri,
                                    requires_version=requires_version, requires_modules=requires_modulers)
def update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False, ssllabel=None,
           check_mode=False,
           force=False):
    """
    Updates a server under the specified service (idempotent: only when the
    stored settings differ from the requested ones, or force is True).
    """
    needs_update = _check_update(isamAppliance, service_name, address, active, new_address, new_port,
                                 weight, secure, ssllabel)
    if not (force is True or needs_update is True):
        return isamAppliance.create_return_object()
    if check_mode is True:
        return isamAppliance.create_return_object(changed=True)
    return isamAppliance.invoke_put("Updating a server",
                                    "{0}{1}/servers/{2}".format(module_uri, service_name, address),
                                    {
                                        "address": new_address,
                                        "active": active,
                                        "port": new_port,
                                        "weight": weight,
                                        "secure": secure,
                                        "ssllabel": ssllabel
                                    },
                                    requires_modules=requires_modulers,
                                    requires_version=requires_version)
def _check_update(isamAppliance, service_name, address, active, new_address, new_port, weight, secure=False,
                  ssllabel=None):
    """Idempotency test: True when the stored server differs from the
    requested configuration in any field, False otherwise.
    """
    current = get(isamAppliance, service_name, address)['data']
    desired = [
        ('address', new_address),
        ('active', active),
        ('port', new_port),
        ('weight', weight),
        ('secure', secure),
        ('ssllabel', ssllabel),
    ]
    # Short-circuits on the first mismatching field, in the same order as
    # the appliance record is compared.
    return any(current[field] != value for field, value in desired)
def _check_exist(isamAppliance, service_name, address):
    """Idempotency test for the delete function.

    Returns True when the server exists under the given service name,
    False when the lookup fails.
    """
    # Check whether the address with corresponding server exists.
    # Catch Exception rather than using a bare "except:", which would also
    # swallow SystemExit/KeyboardInterrupt; only genuine lookup failures
    # should map to "not found".
    try:
        get(isamAppliance, service_name, address)
    except Exception:
        return False
    return True
def compare(isamAppliance1, isamAppliance2):
    """
    Compare cluster configuration between two appliances
    """
    # NOTE(review): get() in this module requires service_name and address
    # arguments, so these calls will raise TypeError at runtime. This
    # probably should collect the server configuration another way (e.g.
    # get_all with a service name) - TODO confirm intended behaviour.
    ret_obj1 = get(isamAppliance1)
    ret_obj2 = get(isamAppliance2)
    return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
| 38.195946 | 120 | 0.552096 |
f705e1ac415f1541823e90e5a27d3686709724a5 | 370 | py | Python | cryptoxlib/clients/aax/exceptions.py | PetrZufan/cryptoxlib-aio | 8fbb817ee7a7a88693804e24877863370d1d53c7 | [
"MIT"
] | 90 | 2020-04-09T18:34:49.000Z | 2022-03-09T14:29:32.000Z | cryptoxlib/clients/aax/exceptions.py | PetrZufan/cryptoxlib-aio | 8fbb817ee7a7a88693804e24877863370d1d53c7 | [
"MIT"
] | 44 | 2020-04-03T17:02:20.000Z | 2022-01-29T14:51:51.000Z | cryptoxlib/clients/aax/exceptions.py | PetrZufan/cryptoxlib-aio | 8fbb817ee7a7a88693804e24877863370d1d53c7 | [
"MIT"
] | 28 | 2020-04-25T21:34:53.000Z | 2022-03-31T07:20:07.000Z | from typing import Optional
from cryptoxlib.exceptions import CryptoXLibException
class AAXException(CryptoXLibException):
    """Base class for all exceptions raised by the AAX client."""
    pass
class AAXRestException(AAXException):
    """Raised when an AAX REST API call returns an error response."""

    def __init__(self, status_code: int, body: Optional[dict]):
        # Keep the raw response details available to callers before
        # building the human-readable message.
        self.status_code = status_code
        self.body = body
        message = f"Rest API exception: status [{status_code}], response [{body}]"
        super().__init__(message)
f705f177770c12c0106c8231abaa004e1def0c23 | 1,728 | py | Python | src/simulator/workload/schemes.py | pskopnik/htc-cache-simulator | ee502db3f1c2b99ffe05ee609a18069b583798da | [
"MIT"
] | 1 | 2020-12-15T16:09:31.000Z | 2020-12-15T16:09:31.000Z | src/simulator/workload/schemes.py | pskopnik/htc-cache-system-simulator | ee502db3f1c2b99ffe05ee609a18069b583798da | [
"MIT"
] | null | null | null | src/simulator/workload/schemes.py | pskopnik/htc-cache-system-simulator | ee502db3f1c2b99ffe05ee609a18069b583798da | [
"MIT"
] | null | null | null | from typing import List
from . import AccessRequest, FileID, PartSpec, PartsGenerator
class NonCorrelatedSchemesGenerator(object):
    """Generates byte-range part specifications for ``number`` overlapping
    access schemes.

    A file is split into one part per subset of schemes; a part is
    identified by a bit mask of width ``number`` over the scheme indices.
    Part sizes follow the formula fraction**k * (1 - fraction)**(n - k)
    where k is the number of schemes containing the part.
    """

    def __init__(self, number: int, fraction: float) -> None:
        # Number of schemes and the per-scheme fraction used in the size
        # formula below.
        self._number: int = number
        self._fraction: float = fraction
        # One part per subset of schemes (bit mask of width ``number``).
        self._parts_number: int = 2 ** number

    @property
    def number(self) -> int:
        """Number of schemes this generator models."""
        return self._number

    @property
    def fraction(self) -> float:
        """Per-scheme fraction parameter used to size parts."""
        return self._fraction

    def parts(self, index: int, total_bytes: int) -> List[PartSpec]:
        """Returns the part specs belonging to scheme ``index`` for a file
        of ``total_bytes`` bytes.

        Exactly half of all parts belong to a given scheme: those whose
        bit mask has bit ``index`` set.
        """
        scheme_parts_number = 2 ** (self._number - 1)
        parts: List[PartSpec] = []

        for i in range(scheme_parts_number):
            # Insert a 1 bit at position ``index`` into the binary
            # representation of i, yielding the part's bit mask.
            part_index = (((i << 1 >> index) | 1) << index) | (i & ((1 << index) - 1))
            # Number of schemes whose bit is set in the mask.
            containing_schemes = bin(part_index).count('1')
            # Part size from the fraction formula (see class docstring).
            part_bytes = round(total_bytes * (
                self._fraction ** containing_schemes
                *
                (1 - self._fraction) ** (self._number - containing_schemes)
            ))
            parts.append((part_index, part_bytes))

        return parts

    def access_request(self, index: int, file: FileID, total_bytes: int) -> AccessRequest:
        """Builds an AccessRequest covering scheme ``index``'s parts of
        ``file``.
        """
        return AccessRequest(file, self.parts(index, total_bytes))

    class WithIndex(PartsGenerator):
        """Adapter binding a generator to a fixed scheme index so it can be
        used through the PartsGenerator interface.
        """

        def __init__(self, generator: 'NonCorrelatedSchemesGenerator', index: int) -> None:
            self._generator: NonCorrelatedSchemesGenerator = generator
            self._index: int = index

        def parts(self, total_bytes: int) -> List[PartSpec]:
            return self._generator.parts(self._index, total_bytes)

        def access_request(self, file: FileID, total_bytes: int) -> AccessRequest:
            return self._generator.access_request(self._index, file, total_bytes)

    def with_index(self, index: int) -> WithIndex:
        """Returns a PartsGenerator view of this generator for one scheme."""
        return self.WithIndex(self, index)
f705f391ab810c9189b100ed39229631d36e2a29 | 213,437 | py | Python | core/domain/exp_domain_test.py | yashdusing/oppia | c0218e13ba29f9bc25cc5ec6f7f13108ee4fdb9a | [
"Apache-2.0"
] | null | null | null | core/domain/exp_domain_test.py | yashdusing/oppia | c0218e13ba29f9bc25cc5ec6f7f13108ee4fdb9a | [
"Apache-2.0"
] | null | null | null | core/domain/exp_domain_test.py | yashdusing/oppia | c0218e13ba29f9bc25cc5ec6f7f13108ee4fdb9a | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for exploration domain objects and methods defined on them."""
import copy
import os
import re
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import exp_services
from core.domain import html_validation_service
from core.domain import param_domain
from core.domain import state_domain
from core.platform import models
from core.tests import test_utils
import feconf
import utils
(exp_models,) = models.Registry.import_models([models.NAMES.exploration])
def mock_get_filename_with_dimensions(filename, unused_exp_id):
    """Mock that regenerates an image filename with fixed 490x120
    dimensions, ignoring the exploration id.
    """
    regenerated_filename = (
        html_validation_service.regenerate_image_filename_using_dimensions(
            filename, 490, 120))
    return regenerated_filename
class ExplorationChangeTests(test_utils.GenericTestBase):
    """Tests for the ExplorationChange domain object: validation of change
    dicts and construction for every supported command.
    """

    def test_exp_change_object_with_missing_cmd(self):
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Missing cmd key in change dict'):
            exp_domain.ExplorationChange({'invalid': 'data'})

    def test_exp_change_object_with_invalid_cmd(self):
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Command invalid is not allowed'):
            exp_domain.ExplorationChange({'cmd': 'invalid'})

    def test_exp_change_object_with_missing_attribute_in_cmd(self):
        # edit_state_property also requires a new_value attribute.
        with self.assertRaisesRegexp(
            utils.ValidationError, (
                'The following required attributes are missing: '
                'new_value')):
            exp_domain.ExplorationChange({
                'cmd': 'edit_state_property',
                'property_name': 'content',
                'old_value': 'old_value'
            })

    def test_exp_change_object_with_extra_attribute_in_cmd(self):
        with self.assertRaisesRegexp(
            utils.ValidationError, (
                'The following extra attributes are present: invalid')):
            exp_domain.ExplorationChange({
                'cmd': 'rename_state',
                'old_state_name': 'old_state_name',
                'new_state_name': 'new_state_name',
                'invalid': 'invalid'
            })

    def test_exp_change_object_with_invalid_exploration_property(self):
        with self.assertRaisesRegexp(
            utils.ValidationError, (
                'Value for property_name in cmd edit_exploration_property: '
                'invalid is not allowed')):
            exp_domain.ExplorationChange({
                'cmd': 'edit_exploration_property',
                'property_name': 'invalid',
                'old_value': 'old_value',
                'new_value': 'new_value',
            })

    def test_exp_change_object_with_invalid_state_property(self):
        with self.assertRaisesRegexp(
            utils.ValidationError, (
                'Value for property_name in cmd edit_state_property: '
                'invalid is not allowed')):
            exp_domain.ExplorationChange({
                'cmd': 'edit_state_property',
                'state_name': 'state_name',
                'property_name': 'invalid',
                'old_value': 'old_value',
                'new_value': 'new_value',
            })

    def test_exp_change_object_with_create_new(self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': 'create_new',
            'category': 'category',
            'title': 'title'
        })

        self.assertEqual(exp_change_object.cmd, 'create_new')
        self.assertEqual(exp_change_object.category, 'category')
        self.assertEqual(exp_change_object.title, 'title')

    def test_exp_change_object_with_add_state(self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': 'add_state',
            'state_name': 'state_name',
        })

        self.assertEqual(exp_change_object.cmd, 'add_state')
        self.assertEqual(exp_change_object.state_name, 'state_name')

    def test_exp_change_object_with_rename_state(self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': 'rename_state',
            'old_state_name': 'old_state_name',
            'new_state_name': 'new_state_name'
        })

        self.assertEqual(exp_change_object.cmd, 'rename_state')
        self.assertEqual(exp_change_object.old_state_name, 'old_state_name')
        self.assertEqual(exp_change_object.new_state_name, 'new_state_name')

    def test_exp_change_object_with_delete_state(self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': 'delete_state',
            'state_name': 'state_name',
        })

        self.assertEqual(exp_change_object.cmd, 'delete_state')
        self.assertEqual(exp_change_object.state_name, 'state_name')

    def test_exp_change_object_with_edit_state_property(self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': 'edit_state_property',
            'state_name': 'state_name',
            'property_name': 'content',
            'new_value': 'new_value',
            'old_value': 'old_value'
        })

        self.assertEqual(exp_change_object.cmd, 'edit_state_property')
        self.assertEqual(exp_change_object.state_name, 'state_name')
        self.assertEqual(exp_change_object.property_name, 'content')
        self.assertEqual(exp_change_object.new_value, 'new_value')
        self.assertEqual(exp_change_object.old_value, 'old_value')

    def test_exp_change_object_with_edit_exploration_property(self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': 'edit_exploration_property',
            'property_name': 'title',
            'new_value': 'new_value',
            'old_value': 'old_value'
        })

        self.assertEqual(exp_change_object.cmd, 'edit_exploration_property')
        self.assertEqual(exp_change_object.property_name, 'title')
        self.assertEqual(exp_change_object.new_value, 'new_value')
        self.assertEqual(exp_change_object.old_value, 'old_value')

    def test_exp_change_object_with_migrate_states_schema_to_latest_version(
            self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': 'migrate_states_schema_to_latest_version',
            'from_version': 'from_version',
            'to_version': 'to_version',
        })

        self.assertEqual(
            exp_change_object.cmd, 'migrate_states_schema_to_latest_version')
        self.assertEqual(exp_change_object.from_version, 'from_version')
        self.assertEqual(exp_change_object.to_version, 'to_version')

    def test_exp_change_object_with_revert_commit(self):
        exp_change_object = exp_domain.ExplorationChange({
            'cmd': exp_models.ExplorationModel.CMD_REVERT_COMMIT,
            'version_number': 'version_number'
        })

        self.assertEqual(
            exp_change_object.cmd,
            exp_models.ExplorationModel.CMD_REVERT_COMMIT)
        self.assertEqual(exp_change_object.version_number, 'version_number')

    def test_to_dict(self):
        # Round-trip: the change dict used for construction is returned
        # unchanged by to_dict().
        exp_change_dict = {
            'cmd': 'create_new',
            'title': 'title',
            'category': 'category'
        }
        exp_change_object = exp_domain.ExplorationChange(exp_change_dict)
        self.assertEqual(exp_change_object.to_dict(), exp_change_dict)
class ExplorationVersionsDiffDomainUnitTests(test_utils.GenericTestBase):
    """Test the exploration versions difference domain object."""

    def setUp(self):
        """Loads the string-classifier test exploration used by each test."""
        super(ExplorationVersionsDiffDomainUnitTests, self).setUp()
        self.exp_id = 'exp_id1'
        test_exp_filepath = os.path.join(
            feconf.TESTS_DATA_DIR, 'string_classifier_test.yaml')
        yaml_content = utils.get_file_contents(test_exp_filepath)
        assets_list = []
        exp_services.save_new_exploration_from_yaml_and_assets(
            feconf.SYSTEM_COMMITTER_ID, yaml_content, self.exp_id,
            assets_list)
        self.exploration = exp_fetchers.get_exploration_by_id(self.exp_id)

    def test_correct_creation_of_version_diffs(self):
        """Checks added/deleted/renamed state bookkeeping across a sequence
        of change lists.
        """
        # Rename a state.
        self.exploration.rename_state('Home', 'Renamed state')
        change_list = [exp_domain.ExplorationChange({
            'cmd': 'rename_state',
            'old_state_name': 'Home',
            'new_state_name': 'Renamed state'
        })]

        exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)

        self.assertEqual(exp_versions_diff.added_state_names, [])
        self.assertEqual(exp_versions_diff.deleted_state_names, [])
        self.assertEqual(
            exp_versions_diff.old_to_new_state_names, {
                'Home': 'Renamed state'
            })
        self.exploration.version += 1

        # Add a state.
        self.exploration.add_states(['New state'])
        self.exploration.states['New state'] = copy.deepcopy(
            self.exploration.states['Renamed state'])
        change_list = [exp_domain.ExplorationChange({
            'cmd': 'add_state',
            'state_name': 'New state',
        })]

        exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)

        self.assertEqual(exp_versions_diff.added_state_names, ['New state'])
        self.assertEqual(exp_versions_diff.deleted_state_names, [])
        self.assertEqual(exp_versions_diff.old_to_new_state_names, {})
        self.exploration.version += 1

        # Delete state.
        self.exploration.delete_state('New state')
        change_list = [exp_domain.ExplorationChange({
            'cmd': 'delete_state',
            'state_name': 'New state'
        })]

        exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)

        self.assertEqual(exp_versions_diff.added_state_names, [])
        self.assertEqual(exp_versions_diff.deleted_state_names, ['New state'])
        self.assertEqual(exp_versions_diff.old_to_new_state_names, {})
        self.exploration.version += 1

        # Test addition and multiple renames.
        self.exploration.add_states(['New state'])
        self.exploration.states['New state'] = copy.deepcopy(
            self.exploration.states['Renamed state'])
        self.exploration.rename_state('New state', 'New state2')
        self.exploration.rename_state('New state2', 'New state3')
        change_list = [exp_domain.ExplorationChange({
            'cmd': 'add_state',
            'state_name': 'New state',
        }), exp_domain.ExplorationChange({
            'cmd': 'rename_state',
            'old_state_name': 'New state',
            'new_state_name': 'New state2'
        }), exp_domain.ExplorationChange({
            'cmd': 'rename_state',
            'old_state_name': 'New state2',
            'new_state_name': 'New state3'
        })]

        exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)

        # A state added then renamed counts only as added (final name).
        self.assertEqual(exp_versions_diff.added_state_names, ['New state3'])
        self.assertEqual(exp_versions_diff.deleted_state_names, [])
        self.assertEqual(exp_versions_diff.old_to_new_state_names, {})
        self.exploration.version += 1

        # Test addition, rename and deletion.
        self.exploration.add_states(['New state 2'])
        self.exploration.rename_state('New state 2', 'Renamed state 2')
        self.exploration.delete_state('Renamed state 2')
        change_list = [exp_domain.ExplorationChange({
            'cmd': 'add_state',
            'state_name': 'New state 2'
        }), exp_domain.ExplorationChange({
            'cmd': 'rename_state',
            'old_state_name': 'New state 2',
            'new_state_name': 'Renamed state 2'
        }), exp_domain.ExplorationChange({
            'cmd': 'delete_state',
            'state_name': 'Renamed state 2'
        })]

        exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)

        # A state created and destroyed in the same change list nets out to
        # no difference.
        self.assertEqual(exp_versions_diff.added_state_names, [])
        self.assertEqual(exp_versions_diff.deleted_state_names, [])
        self.assertEqual(exp_versions_diff.old_to_new_state_names, {})
        self.exploration.version += 1

        # Test multiple renames and deletion.
        self.exploration.rename_state('New state3', 'Renamed state 3')
        self.exploration.rename_state('Renamed state 3', 'Renamed state 4')
        self.exploration.delete_state('Renamed state 4')
        change_list = [exp_domain.ExplorationChange({
            'cmd': 'rename_state',
            'old_state_name': 'New state3',
            'new_state_name': 'Renamed state 3'
        }), exp_domain.ExplorationChange({
            'cmd': 'rename_state',
            'old_state_name': 'Renamed state 3',
            'new_state_name': 'Renamed state 4'
        }), exp_domain.ExplorationChange({
            'cmd': 'delete_state',
            'state_name': 'Renamed state 4'
        })]

        exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)

        # Deletion is reported under the state's original (pre-rename) name.
        self.assertEqual(exp_versions_diff.added_state_names, [])
        self.assertEqual(
            exp_versions_diff.deleted_state_names, ['New state3'])
        self.assertEqual(exp_versions_diff.old_to_new_state_names, {})
        self.exploration.version += 1

    def test_cannot_create_exploration_change_with_invalid_change_dict(self):
        with self.assertRaisesRegexp(
            Exception, 'Missing cmd key in change dict'):
            exp_domain.ExplorationChange({
                'invalid_cmd': 'invalid'
            })

    def test_cannot_create_exploration_change_with_invalid_cmd(self):
        with self.assertRaisesRegexp(
            Exception, 'Command invalid_cmd is not allowed'):
            exp_domain.ExplorationChange({
                'cmd': 'invalid_cmd'
            })

    def test_cannot_create_exploration_change_with_invalid_state_property(self):
        exp_change = exp_domain.ExplorationChange({
            'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
            'property_name': exp_domain.STATE_PROPERTY_INTERACTION_ID,
            'state_name': '',
            'new_value': ''
        })
        self.assertTrue(isinstance(exp_change, exp_domain.ExplorationChange))

        with self.assertRaisesRegexp(
            Exception,
            'Value for property_name in cmd edit_state_property: '
            'invalid_property is not allowed'):
            exp_domain.ExplorationChange({
                'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
                'property_name': 'invalid_property',
                'state_name': '',
                'new_value': ''
            })

    def test_cannot_create_exploration_change_with_invalid_exploration_property(
            self):
        exp_change = exp_domain.ExplorationChange({
            'cmd': exp_domain.CMD_EDIT_EXPLORATION_PROPERTY,
            'property_name': 'title',
            'new_value': ''
        })
        self.assertTrue(isinstance(exp_change, exp_domain.ExplorationChange))

        with self.assertRaisesRegexp(
            Exception,
            'Value for property_name in cmd edit_exploration_property: '
            'invalid_property is not allowed'):
            exp_domain.ExplorationChange({
                'cmd': exp_domain.CMD_EDIT_EXPLORATION_PROPERTY,
                'property_name': 'invalid_property',
                'new_value': ''
            })

    def test_revert_exploration_commit(self):
        exp_change = exp_domain.ExplorationChange({
            'cmd': exp_models.ExplorationModel.CMD_REVERT_COMMIT,
            'version_number': 1
        })

        self.assertEqual(exp_change.version_number, 1)

        exp_change = exp_domain.ExplorationChange({
            'cmd': exp_models.ExplorationModel.CMD_REVERT_COMMIT,
            'version_number': 2
        })

        self.assertEqual(exp_change.version_number, 2)
class ExpVersionReferenceTests(test_utils.GenericTestBase):
    """Tests for the ExpVersionReference domain object."""

    def test_create_exp_version_reference_object(self):
        exp_version_reference = exp_domain.ExpVersionReference('exp_id', 1)

        self.assertEqual(
            exp_version_reference.to_dict(), {
                'exp_id': 'exp_id',
                'version': 1
            })

    def test_validate_exp_version(self):
        with self.assertRaisesRegexp(
            Exception,
            'Expected version to be an int, received invalid_version'):
            exp_domain.ExpVersionReference('exp_id', 'invalid_version')

    def test_validate_exp_id(self):
        with self.assertRaisesRegexp(
            Exception, 'Expected exp_id to be a str, received 0'):
            exp_domain.ExpVersionReference(0, 1)
class ExplorationDomainUnitTests(test_utils.GenericTestBase):
"""Test the exploration domain object."""
# TODO(bhenning): The validation tests below should be split into separate
# unit tests. Also, all validation errors should be covered in the tests.
    def test_validation(self):
        """Test validation of explorations.

        Walks a default exploration through a long sequence of mutations,
        asserting after each step that validate() rejects the specific
        invalid configuration just introduced and accepts the restored
        valid one.
        """
        exploration = exp_domain.Exploration.create_default_exploration('eid')
        exploration.init_state_name = ''
        exploration.states = {}

        exploration.title = 'Hello #'
        self._assert_validation_error(exploration, 'Invalid character #')

        exploration.title = 'Title'
        exploration.category = 'Category'

        # Note: If '/' ever becomes a valid state name, ensure that the rule
        # editor frontend template is fixed -- it currently uses '/' as a
        # sentinel for an invalid state name.
        bad_state = state_domain.State.create_default_state('/')
        exploration.states = {'/': bad_state}
        self._assert_validation_error(
            exploration, 'Invalid character / in a state name')

        new_state = state_domain.State.create_default_state('ABC')
        new_state.update_interaction_id('TextInput')

        # The 'states' property must be a non-empty dict of states.
        exploration.states = {}
        self._assert_validation_error(
            exploration, 'exploration has no states')
        exploration.states = {'A string #': new_state}
        self._assert_validation_error(
            exploration, 'Invalid character # in a state name')
        exploration.states = {'A string _': new_state}
        self._assert_validation_error(
            exploration, 'Invalid character _ in a state name')

        exploration.states = {'ABC': new_state}
        self._assert_validation_error(
            exploration, 'has no initial state name')

        exploration.init_state_name = 'initname'
        self._assert_validation_error(
            exploration,
            r'There is no state in \[\'ABC\'\] corresponding to '
            'the exploration\'s initial state name initname.')

        # Test whether a default outcome to a non-existing state is invalid.
        exploration.states = {exploration.init_state_name: new_state}
        self._assert_validation_error(
            exploration, 'destination ABC is not a valid')

        # Restore a valid exploration.
        init_state = exploration.states[exploration.init_state_name]
        default_outcome_dict = init_state.interaction.default_outcome.to_dict()
        default_outcome_dict['dest'] = exploration.init_state_name
        init_state.update_interaction_default_outcome(default_outcome_dict)
        exploration.validate()

        # Ensure an invalid destination can also be detected for answer groups.
        # Note: The state must keep its default_outcome, otherwise it will
        # trigger a validation error for non-terminal states needing to have a
        # default outcome. To validate the outcome of the answer group, this
        # default outcome must point to a valid state.
        init_state = exploration.states[exploration.init_state_name]
        default_outcome = init_state.interaction.default_outcome
        default_outcome.dest = exploration.init_state_name
        old_answer_groups = copy.deepcopy(init_state.interaction.answer_groups)
        old_answer_groups.append({
            'outcome': {
                'dest': exploration.init_state_name,
                'feedback': {
                    'content_id': 'feedback_1',
                    'html': '<p>Feedback</p>'
                },
                'labelled_as_correct': False,
                'param_changes': [],
                'refresher_exploration_id': None,
                'missing_prerequisite_skill_id': None
            },
            'rule_specs': [{
                'inputs': {
                    'x': 'Test'
                },
                'rule_type': 'Contains'
            }],
            'training_data': [],
            'tagged_skill_misconception_id': None
        })

        init_state.update_interaction_answer_groups(old_answer_groups)

        exploration.validate()

        interaction = init_state.interaction
        answer_groups = interaction.answer_groups
        answer_group = answer_groups[0]
        answer_group.outcome.dest = 'DEF'
        self._assert_validation_error(
            exploration, 'destination DEF is not a valid')

        # Restore a valid exploration.
        exploration.states[exploration.init_state_name].update_interaction_id(
            'TextInput')
        answer_group.outcome.dest = exploration.init_state_name
        exploration.validate()

        # Validate RuleSpec.
        rule_spec = answer_group.rule_specs[0]
        rule_spec.inputs = {}
        self._assert_validation_error(
            exploration, 'RuleSpec \'Contains\' is missing inputs')

        rule_spec.inputs = 'Inputs string'
        self._assert_validation_error(
            exploration, 'Expected inputs to be a dict')

        rule_spec.inputs = {'x': 'Test'}
        rule_spec.rule_type = 'FakeRuleType'
        self._assert_validation_error(exploration, 'Unrecognized rule type')

        rule_spec.inputs = {'x': 15}
        rule_spec.rule_type = 'Contains'
        with self.assertRaisesRegexp(
            Exception, 'Expected unicode string, received 15'
            ):
            exploration.validate()

        rule_spec.inputs = {'x': '{{ExampleParam}}'}
        self._assert_validation_error(
            exploration,
            'RuleSpec \'Contains\' has an input with name \'x\' which refers '
            'to an unknown parameter within the exploration: ExampleParam')

        # Restore a valid exploration.
        exploration.param_specs['ExampleParam'] = param_domain.ParamSpec(
            'UnicodeString')
        exploration.validate()

        # Validate Outcome.
        outcome = answer_group.outcome
        destination = exploration.init_state_name
        outcome.dest = None
        self._assert_validation_error(
            exploration, 'Every outcome should have a destination.')

        # Try setting the outcome destination to something other than a string.
        outcome.dest = 15
        self._assert_validation_error(
            exploration, 'Expected outcome dest to be a string')

        outcome.dest = destination

        outcome.feedback = state_domain.SubtitledHtml('feedback_1', '')
        exploration.validate()

        outcome.labelled_as_correct = 'hello'
        self._assert_validation_error(
            exploration, 'The "labelled_as_correct" field should be a boolean')

        # Test that labelled_as_correct must be False for self-loops, and that
        # this causes a strict validation failure but not a normal validation
        # failure.
        outcome.labelled_as_correct = True
        with self.assertRaisesRegexp(
            Exception, 'is labelled correct but is a self-loop.'
            ):
            exploration.validate(strict=True)
        exploration.validate()

        outcome.labelled_as_correct = False
        exploration.validate()

        outcome.param_changes = 'Changes'
        self._assert_validation_error(
            exploration, 'Expected outcome param_changes to be a list')

        outcome.param_changes = [param_domain.ParamChange(
            0, 'generator_id', {})]
        self._assert_validation_error(
            exploration,
            'Expected param_change name to be a string, received 0')

        outcome.param_changes = []
        exploration.validate()

        outcome.refresher_exploration_id = 12345
        self._assert_validation_error(
            exploration,
            'Expected outcome refresher_exploration_id to be a string')

        outcome.refresher_exploration_id = None
        exploration.validate()

        outcome.refresher_exploration_id = 'valid_string'
        exploration.validate()

        outcome.missing_prerequisite_skill_id = 12345
        self._assert_validation_error(
            exploration,
            'Expected outcome missing_prerequisite_skill_id to be a string')

        outcome.missing_prerequisite_skill_id = None
        exploration.validate()

        outcome.missing_prerequisite_skill_id = 'valid_string'
        exploration.validate()

        # Test that refresher_exploration_id must be None for non-self-loops.
        new_state_name = 'New state'
        exploration.add_states([new_state_name])

        outcome.dest = new_state_name
        outcome.refresher_exploration_id = 'another_string'
        self._assert_validation_error(
            exploration,
            'has a refresher exploration ID, but is not a self-loop')

        outcome.refresher_exploration_id = None
        exploration.validate()
        exploration.delete_state(new_state_name)

        # Validate InteractionInstance.
        interaction.id = 15
        self._assert_validation_error(
            exploration, 'Expected interaction id to be a string')

        interaction.id = 'SomeInteractionTypeThatDoesNotExist'
        self._assert_validation_error(exploration, 'Invalid interaction id')

        interaction.id = 'TextInput'
        exploration.validate()

        interaction.customization_args = []
        self._assert_validation_error(
            exploration, 'Expected customization args to be a dict')

        interaction.customization_args = {15: ''}
        self._assert_validation_error(
            exploration, 'Invalid customization arg name')

        interaction.customization_args = {'placeholder': ''}
        exploration.validate()

        interaction.answer_groups = {}
        self._assert_validation_error(
            exploration, 'Expected answer groups to be a list')

        interaction.answer_groups = answer_groups
        interaction.id = 'EndExploration'
        self._assert_validation_error(
            exploration,
            'Terminal interactions must not have a default outcome.')

        interaction.id = 'TextInput'
        init_state.update_interaction_default_outcome(None)
        self._assert_validation_error(
            exploration,
            'Non-terminal interactions must have a default outcome.')

        interaction.id = 'EndExploration'
        self._assert_validation_error(
            exploration,
            'Terminal interactions must not have any answer groups.')

        # A terminal interaction without a default outcome or answer group is
        # valid. This resets the exploration back to a valid state.
        init_state.update_interaction_answer_groups([])
        exploration.validate()

        # Restore a valid exploration.
        interaction.id = 'TextInput'
        answer_groups_list = [
            answer_group.to_dict() for answer_group in answer_groups]
        init_state.update_interaction_answer_groups(answer_groups_list)
        init_state.update_interaction_default_outcome(default_outcome.to_dict())
        exploration.validate()

        # A solution without any hints is invalid.
        init_state.update_interaction_solution({
            'answer_is_exclusive': True,
            'correct_answer': 'hello_world!',
            'explanation': {
                'content_id': 'solution',
                'html': 'hello_world is a string'
            }
        })
        self._assert_validation_error(
            exploration,
            re.escape('Hint(s) must be specified if solution is specified'))

        init_state.update_interaction_solution(None)
        interaction.hints = {}
        self._assert_validation_error(
            exploration, 'Expected hints to be a list')
        interaction.hints = []

        # Validate AnswerGroup.
        answer_groups_dict = {
            'outcome': {
                'dest': exploration.init_state_name,
                'feedback': {
                    'content_id': 'feedback_1',
                    'html': 'Feedback'
                },
                'labelled_as_correct': False,
                'param_changes': [],
                'refresher_exploration_id': None,
                'missing_prerequisite_skill_id': None
            },
            'rule_specs': [{
                'inputs': {
                    'x': 'Test'
                },
                'rule_type': 'Contains'
            }],
            'training_data': [],
            'tagged_skill_misconception_id': 1
        }
        init_state.update_interaction_answer_groups([answer_groups_dict])

        self._assert_validation_error(
            exploration,
            'Expected tagged skill misconception id to be a str, received 1')

        answer_groups_dict = {
            'outcome': {
                'dest': exploration.init_state_name,
                'feedback': {
                    'content_id': 'feedback_1',
                    'html': 'Feedback'
                },
                'labelled_as_correct': False,
                'param_changes': [],
                'refresher_exploration_id': None,
                'missing_prerequisite_skill_id': None
            },
            'rule_specs': [{
                'inputs': {
                    'x': 'Test'
                },
                'rule_type': 'Contains'
            }],
            'training_data': [],
            'tagged_skill_misconception_id':
                'invalid_tagged_skill_misconception_id'
        }
        init_state.update_interaction_answer_groups([answer_groups_dict])

        self._assert_validation_error(
            exploration,
            'Expected the format of tagged skill misconception id '
            'to be <skill_id>-<misconception_id>, received '
            'invalid_tagged_skill_misconception_id')

        init_state.interaction.answer_groups[0].rule_specs = {}
        self._assert_validation_error(
            exploration, 'Expected answer group rules to be a list')

        first_answer_group = init_state.interaction.answer_groups[0]
        first_answer_group.tagged_skill_misconception_id = None
        first_answer_group.rule_specs = []
        self._assert_validation_error(
            exploration,
            'There must be at least one rule or training data for each'
            ' answer group.')

        exploration.states = {
            exploration.init_state_name: (
                state_domain.State.create_default_state(
                    exploration.init_state_name))
        }
        exploration.states[exploration.init_state_name].update_interaction_id(
            'TextInput')
        exploration.validate()

        exploration.language_code = 'fake_code'
        self._assert_validation_error(exploration, 'Invalid language_code')
        exploration.language_code = 'English'
        self._assert_validation_error(exploration, 'Invalid language_code')
        exploration.language_code = 'en'
        exploration.validate()

        exploration.param_specs = 'A string'
        self._assert_validation_error(exploration, 'param_specs to be a dict')

        exploration.param_specs = {
            '@': param_domain.ParamSpec.from_dict({
                'obj_type': 'UnicodeString'
            })
        }
        self._assert_validation_error(
            exploration, 'Only parameter names with characters')

        exploration.param_specs = {
            'notAParamSpec': param_domain.ParamSpec.from_dict(
                {'obj_type': 'UnicodeString'})
        }
        exploration.validate()
    def test_tag_validation(self):
        """Test validation of exploration tags.

        Tags must be a list of non-empty, lowercase, duplicate-free strings
        without leading/trailing or adjacent whitespace.
        """
        exploration = exp_domain.Exploration.create_default_exploration('eid')
        exploration.objective = 'Objective'
        init_state = exploration.states[exploration.init_state_name]
        init_state.update_interaction_id('EndExploration')
        init_state.update_interaction_default_outcome(None)
        exploration.validate()

        exploration.tags = 'this should be a list'
        self._assert_validation_error(
            exploration, 'Expected \'tags\' to be a list')

        exploration.tags = [123]
        self._assert_validation_error(exploration, 'to be a string')
        exploration.tags = ['abc', 123]
        self._assert_validation_error(exploration, 'to be a string')

        exploration.tags = ['']
        self._assert_validation_error(exploration, 'Tags should be non-empty')

        exploration.tags = ['123']
        self._assert_validation_error(
            exploration, 'should only contain lowercase letters and spaces')
        exploration.tags = ['ABC']
        self._assert_validation_error(
            exploration, 'should only contain lowercase letters and spaces')

        exploration.tags = [' a b']
        self._assert_validation_error(
            exploration, 'Tags should not start or end with whitespace')
        exploration.tags = ['a b ']
        self._assert_validation_error(
            exploration, 'Tags should not start or end with whitespace')

        exploration.tags = ['a b']
        self._assert_validation_error(
            exploration, 'Adjacent whitespace in tags should be collapsed')

        exploration.tags = ['abc', 'abc']
        self._assert_validation_error(
            exploration, 'Some tags duplicate each other')

        exploration.tags = ['computer science', 'analysis', 'a b c']
        exploration.validate()
def test_title_category_and_objective_validation(self):
    """Test that titles, categories and objectives are validated only in
    'strict' mode.
    """
    self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration = exp_fetchers.get_exploration_by_id('exp_id')
    # Empty title/category/objective pass non-strict validation...
    exploration.validate()
    # ...but strict validation requires each of them in turn.
    with self.assertRaisesRegexp(
        utils.ValidationError, 'title must be specified'
    ):
        exploration.validate(strict=True)
    exploration.title = 'A title'
    with self.assertRaisesRegexp(
        utils.ValidationError, 'category must be specified'
    ):
        exploration.validate(strict=True)
    exploration.category = 'A category'
    with self.assertRaisesRegexp(
        utils.ValidationError, 'objective must be specified'
    ):
        exploration.validate(strict=True)
    exploration.objective = 'An objective'
    # With all three populated, strict validation passes.
    exploration.validate(strict=True)
def test_get_trainable_states_dict(self):
    """Test the get_trainable_states_dict() method.

    Exercises each kind of state mutation (rename, answer-group edit,
    state addition, state deletion, add-then-multiple-renames) and checks
    which state names get reported as having changed vs. unchanged answer
    groups relative to the saved version.
    """
    exp_id = 'exp_id1'
    test_exp_filepath = os.path.join(
        feconf.TESTS_DATA_DIR, 'string_classifier_test.yaml')
    yaml_content = utils.get_file_contents(test_exp_filepath)
    assets_list = []
    exp_services.save_new_exploration_from_yaml_and_assets(
        feconf.SYSTEM_COMMITTER_ID, yaml_content, exp_id,
        assets_list)
    # Snapshot the states of the saved version; all comparisons below are
    # made against this baseline.
    exploration_model = exp_models.ExplorationModel.get(
        exp_id, strict=False)
    old_states = exp_fetchers.get_exploration_from_model(
        exploration_model).states
    exploration = exp_fetchers.get_exploration_by_id(exp_id)
    # Rename a state to add it in unchanged answer group.
    exploration.rename_state('Home', 'Renamed state')
    change_list = [exp_domain.ExplorationChange({
        'cmd': 'rename_state',
        'old_state_name': 'Home',
        'new_state_name': 'Renamed state'
    })]
    expected_dict = {
        'state_names_with_changed_answer_groups': [],
        'state_names_with_unchanged_answer_groups': ['Renamed state']
    }
    exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)
    actual_dict = exploration.get_trainable_states_dict(
        old_states, exp_versions_diff)
    self.assertEqual(actual_dict, expected_dict)
    # Modify answer groups to trigger change in answer groups.
    state = exploration.states['Renamed state']
    exploration.states['Renamed state'].interaction.answer_groups.insert(
        3, state.interaction.answer_groups[3])
    answer_groups = []
    for answer_group in state.interaction.answer_groups:
        answer_groups.append(answer_group.to_dict())
    change_list = [exp_domain.ExplorationChange({
        'cmd': 'edit_state_property',
        'state_name': 'Renamed state',
        'property_name': 'answer_groups',
        'new_value': answer_groups
    })]
    expected_dict = {
        'state_names_with_changed_answer_groups': ['Renamed state'],
        'state_names_with_unchanged_answer_groups': []
    }
    exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)
    actual_dict = exploration.get_trainable_states_dict(
        old_states, exp_versions_diff)
    self.assertEqual(actual_dict, expected_dict)
    # Add new state to trigger change in answer groups.
    exploration.add_states(['New state'])
    exploration.states['New state'] = copy.deepcopy(
        exploration.states['Renamed state'])
    change_list = [exp_domain.ExplorationChange({
        'cmd': 'add_state',
        'state_name': 'New state',
    })]
    expected_dict = {
        'state_names_with_changed_answer_groups': [
            'New state', 'Renamed state'],
        'state_names_with_unchanged_answer_groups': []
    }
    exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)
    actual_dict = exploration.get_trainable_states_dict(
        old_states, exp_versions_diff)
    self.assertEqual(actual_dict, expected_dict)
    # Delete state.
    exploration.delete_state('New state')
    change_list = [exp_domain.ExplorationChange({
        'cmd': 'delete_state',
        'state_name': 'New state'
    })]
    expected_dict = {
        'state_names_with_changed_answer_groups': ['Renamed state'],
        'state_names_with_unchanged_answer_groups': []
    }
    exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)
    actual_dict = exploration.get_trainable_states_dict(
        old_states, exp_versions_diff)
    self.assertEqual(actual_dict, expected_dict)
    # Test addition and multiple renames.
    exploration.add_states(['New state'])
    exploration.states['New state'] = copy.deepcopy(
        exploration.states['Renamed state'])
    exploration.rename_state('New state', 'New state2')
    exploration.rename_state('New state2', 'New state3')
    change_list = [exp_domain.ExplorationChange({
        'cmd': 'add_state',
        'state_name': 'New state',
    }), exp_domain.ExplorationChange({
        'cmd': 'rename_state',
        'old_state_name': 'New state',
        'new_state_name': 'New state2'
    }), exp_domain.ExplorationChange({
        'cmd': 'rename_state',
        'old_state_name': 'New state2',
        'new_state_name': 'New state3'
    })]
    expected_dict = {
        'state_names_with_changed_answer_groups': [
            'Renamed state', 'New state3'],
        'state_names_with_unchanged_answer_groups': []
    }
    exp_versions_diff = exp_domain.ExplorationVersionsDiff(change_list)
    actual_dict = exploration.get_trainable_states_dict(
        old_states, exp_versions_diff)
    self.assertEqual(actual_dict, expected_dict)
def test_is_demo_property(self):
    """Test the is_demo property."""
    # '0' is a demo exploration id; 'a' and 'abcd' are not.
    cases = [('0', True), ('a', False), ('abcd', False)]
    for exp_id, expected_is_demo in cases:
        exploration = exp_domain.Exploration.create_default_exploration(
            exp_id)
        self.assertEqual(exploration.is_demo, expected_is_demo)
def test_has_state_name(self):
    """Test for has_state_name.

    A freshly created default exploration has a single 'Introduction'
    state, so has_state_name() is True only for that name.
    """
    demo = exp_domain.Exploration.create_default_exploration('0')
    # Materialize the keys: under Python 3, dict.keys() returns a view
    # object that never compares equal to a list, so the assertion below
    # would fail without the list() call. list() is a no-op change under
    # Python 2.
    state_names = list(demo.states.keys())
    self.assertEqual(state_names, ['Introduction'])
    self.assertEqual(demo.has_state_name('Introduction'), True)
    self.assertEqual(demo.has_state_name('Fake state name'), False)
def test_exploration_export_import(self):
    """Test that to_dict and from_dict preserve all data within an
    exploration.
    """
    original = exp_domain.Exploration.create_default_exploration('0')
    exported_dict = original.to_dict()
    round_tripped = exp_domain.Exploration.from_dict(exported_dict)
    # Re-serializing the reconstructed exploration must reproduce the
    # exported dict exactly.
    self.assertEqual(round_tripped.to_dict(), exported_dict)
def test_interaction_with_none_id_is_not_terminal(self):
    """Test that an interaction with an id of None leads to is_terminal
    being false.
    """
    # Default exploration has a default interaction with an ID of None.
    exploration = exp_domain.Exploration.create_default_exploration('0')
    initial_state = exploration.states[feconf.DEFAULT_INIT_STATE_NAME]
    self.assertFalse(initial_state.interaction.is_terminal)
def test_cannot_create_demo_exp_with_invalid_param_changes(self):
    """Exploration.from_dict() must reject a dict in which a state uses a
    parameter that is not declared in the exploration-level param_specs.
    """
    demo_exp = exp_domain.Exploration.create_default_exploration('0')
    demo_dict = demo_exp.to_dict()
    new_state = state_domain.State.create_default_state('new_state_name')
    new_state.param_changes = [param_domain.ParamChange.from_dict({
        'customization_args': {
            'list_of_values': ['1', '2'], 'parse_with_jinja': False
        },
        'name': 'myParam',
        'generator_id': 'RandomSelector'
    })]
    demo_dict['states']['new_state_name'] = new_state.to_dict()
    # 'myParam' is deliberately absent from param_specs, which only
    # declares 'ParamSpec'.
    demo_dict['param_specs'] = {
        'ParamSpec': {'obj_type': 'UnicodeString'}
    }
    with self.assertRaisesRegexp(
        Exception,
        'Parameter myParam was used in a state but not '
        'declared in the exploration param_specs.'):
        exp_domain.Exploration.from_dict(demo_dict)
def test_validate_exploration_category(self):
    """A non-string category fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.category = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected category to be a string, received 1'):
        exploration.validate()

def test_validate_exploration_objective(self):
    """A non-string objective fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.objective = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected objective to be a string, received 1'):
        exploration.validate()

def test_validate_exploration_blurb(self):
    """A non-string blurb fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.blurb = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected blurb to be a string, received 1'):
        exploration.validate()

def test_validate_exploration_language_code(self):
    """A non-string language_code fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.language_code = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected language_code to be a string, received 1'):
        exploration.validate()

def test_validate_exploration_author_notes(self):
    """Non-string author_notes fail validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.author_notes = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected author_notes to be a string, received 1'):
        exploration.validate()

def test_validate_exploration_states(self):
    """A non-dict states attribute fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.states = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected states to be a dict, received 1'):
        exploration.validate()
def test_validate_exploration_outcome_dest(self):
    """An outcome with no destination fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.init_state.interaction.default_outcome.dest = None
    with self.assertRaisesRegexp(
        Exception, 'Every outcome should have a destination.'):
        exploration.validate()

def test_validate_exploration_outcome_dest_type(self):
    """A non-string outcome destination fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.init_state.interaction.default_outcome.dest = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected outcome dest to be a string, received 1'):
        exploration.validate()

def test_validate_exploration_states_schema_version(self):
    """A missing states schema version fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.states_schema_version = None
    with self.assertRaisesRegexp(
        Exception, 'This exploration has no states schema version.'):
        exploration.validate()

def test_validate_exploration_auto_tts_enabled(self):
    """A non-bool auto_tts_enabled flag fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.auto_tts_enabled = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected auto_tts_enabled to be a bool, received 1'):
        exploration.validate()

def test_validate_exploration_correctness_feedback_enabled(self):
    """A non-bool correctness_feedback_enabled flag fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.correctness_feedback_enabled = 1
    with self.assertRaisesRegexp(
        Exception,
        'Expected correctness_feedback_enabled to be a bool, received 1'):
        exploration.validate()

def test_validate_exploration_param_specs(self):
    """Param spec names must be strings."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.param_specs = {
        1: param_domain.ParamSpec.from_dict(
            {'obj_type': 'UnicodeString'})
    }
    with self.assertRaisesRegexp(
        Exception, 'Expected parameter name to be a string, received 1'):
        exploration.validate()

def test_validate_exploration_param_changes_type(self):
    """A non-list param_changes attribute fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.param_changes = 1
    with self.assertRaisesRegexp(
        Exception, 'Expected param_changes to be a list, received 1'):
        exploration.validate()
def test_validate_exploration_param_name(self):
    """A param change referring to an undeclared parameter fails
    validation.
    """
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    # 'invalid' is not declared in the exploration's param_specs.
    exploration.param_changes = [param_domain.ParamChange.from_dict({
        'customization_args': {
            'list_of_values': ['1', '2'], 'parse_with_jinja': False
        },
        'name': 'invalid',
        'generator_id': 'RandomSelector'
    })]
    with self.assertRaisesRegexp(
        Exception,
        'No parameter named \'invalid\' exists in this '
        'exploration'):
        exploration.validate()

def test_validate_exploration_reserved_param_name(self):
    """A param change using a reserved parameter name fails validation."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    # 'all' is a reserved parameter name.
    exploration.param_changes = [param_domain.ParamChange.from_dict({
        'customization_args': {
            'list_of_values': ['1', '2'], 'parse_with_jinja': False
        },
        'name': 'all',
        'generator_id': 'RandomSelector'
    })]
    with self.assertRaisesRegexp(
        Exception,
        'The exploration-level parameter with name \'all\' is '
        'reserved. Please choose a different name.'):
        exploration.validate()

def test_validate_exploration_is_non_self_loop(self):
    """A refresher exploration ID is only allowed on self-loop outcomes."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    exploration.add_states(['DEF'])
    # The outcome routes to a different state ('DEF'), i.e. not a
    # self-loop, yet carries a refresher_exploration_id.
    default_outcome_dict = {
        'dest': 'DEF',
        'feedback': {
            'content_id': 'default_outcome',
            'html': '<p>Default outcome for state1</p>'
        },
        'param_changes': [],
        'labelled_as_correct': False,
        'refresher_exploration_id': 'refresher_exploration_id',
        'missing_prerequisite_skill_id': None
    }
    exploration.init_state.update_interaction_default_outcome(
        default_outcome_dict)
    with self.assertRaisesRegexp(
        Exception,
        'The default outcome for state Introduction has a refresher '
        'exploration ID, but is not a self-loop.'):
        exploration.validate()
def test_validate_exploration_answer_group_parameter(self):
    """An answer-group outcome that uses an undeclared parameter fails
    validation.
    """
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='', category='',
        objective='', end_state_name='End')
    exploration.validate()
    # 'ParamChange' is not declared in the exploration's param_specs.
    param_changes = [{
        'customization_args': {
            'list_of_values': ['1', '2'], 'parse_with_jinja': False
        },
        'name': 'ParamChange',
        'generator_id': 'RandomSelector'
    }]
    answer_groups = [{
        'outcome': {
            'dest': exploration.init_state_name,
            'feedback': {
                'content_id': 'feedback_1',
                'html': 'Feedback'
            },
            'labelled_as_correct': False,
            'param_changes': param_changes,
            'refresher_exploration_id': None,
            'missing_prerequisite_skill_id': None
        },
        'rule_specs': [{
            'inputs': {
                'x': 'Test'
            },
            'rule_type': 'Contains'
        }],
        'training_data': [],
        'tagged_skill_misconception_id': None
    }]
    exploration.init_state.update_interaction_answer_groups(answer_groups)
    with self.assertRaisesRegexp(
        Exception,
        'The parameter ParamChange was used in an answer group, '
        'but it does not exist in this exploration'):
        exploration.validate()
def test_verify_all_states_reachable(self):
    """Strict validation reports states that are unreachable from the
    initial state and states from which the exploration cannot be
    completed.
    """
    exploration = self.save_new_valid_exploration(
        'exp_id', 'owner_id')
    exploration.validate()
    # Add a terminal 'End' state that nothing routes to: it is then
    # unreachable, and 'Introduction' has no path to a terminal state.
    exploration.add_states(['End'])
    end_state = exploration.states['End']
    end_state.update_interaction_id('EndExploration')
    end_state.update_interaction_default_outcome(None)
    with self.assertRaisesRegexp(
        Exception,
        'Please fix the following issues before saving this exploration: '
        '1. The following states are not reachable from the initial state: '
        'End 2. It is impossible to complete the exploration from the '
        'following states: Introduction'):
        exploration.validate(strict=True)
def test_update_init_state_name_with_invalid_state(self):
    """update_init_state_name accepts existing state names only."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='title', category='category',
        objective='objective', end_state_name='End')
    exploration.update_init_state_name('End')
    self.assertEqual(exploration.init_state_name, 'End')
    with self.assertRaisesRegexp(
        Exception,
        'Invalid new initial state name: invalid_state;'):
        exploration.update_init_state_name('invalid_state')

def test_rename_state_with_invalid_state(self):
    """rename_state moves an existing state and rejects unknown names."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='title', category='category',
        objective='objective', end_state_name='End')
    self.assertTrue(exploration.states.get('End'))
    self.assertFalse(exploration.states.get('new state name'))
    exploration.rename_state('End', 'new state name')
    # After the rename, the old key is gone and the new key exists.
    self.assertFalse(exploration.states.get('End'))
    self.assertTrue(exploration.states.get('new state name'))
    with self.assertRaisesRegexp(
        Exception, 'State invalid_state does not exist'):
        exploration.rename_state('invalid_state', 'new state name')

def test_default_outcome_is_labelled_incorrect_for_self_loop(self):
    """A self-loop default outcome must not be labelled correct."""
    exploration = self.save_new_valid_exploration(
        'exp_id', 'user@example.com', title='title', category='category',
        objective='objective', end_state_name='End')
    exploration.validate(strict=True)
    # Make the default outcome a self-loop that is labelled correct,
    # which strict validation must reject.
    (exploration.init_state.interaction.default_outcome
     .labelled_as_correct) = True
    (exploration.init_state.interaction.default_outcome
     .dest) = exploration.init_state_name
    with self.assertRaisesRegexp(
        Exception,
        'The default outcome for state Introduction is labelled '
        'correct but is a self-loop'):
        exploration.validate(strict=True)
class ExplorationSummaryTests(test_utils.GenericTestBase):
    """Validation tests for the ExplorationSummary domain object.

    Each test corrupts one attribute of a freshly fetched summary and
    checks that validate() raises with the expected message.
    """

    def setUp(self):
        """Create an owner, save a default exploration and fetch its
        summary for use by the individual tests.
        """
        super(ExplorationSummaryTests, self).setUp()
        self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
        owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
        exploration = exp_domain.Exploration.create_default_exploration('eid')
        exp_services.save_new_exploration(owner_id, exploration)
        self.exp_summary = exp_fetchers.get_exploration_summary_by_id('eid')

    def test_validation_passes_with_valid_properties(self):
        self.exp_summary.validate()

    def test_validation_fails_with_invalid_title(self):
        self.exp_summary.title = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected title to be a string, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_category(self):
        self.exp_summary.category = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected category to be a string, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_objective(self):
        self.exp_summary.objective = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected objective to be a string, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_language_code(self):
        self.exp_summary.language_code = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected language_code to be a string, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_unallowed_language_code(self):
        self.exp_summary.language_code = 'invalid'
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Invalid language_code: invalid'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_tags(self):
        self.exp_summary.tags = 'tags'
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected \'tags\' to be a list, received tags'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_tag_in_tags(self):
        self.exp_summary.tags = ['tag', 2]
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected each tag in \'tags\' to be a string, received \'2\''):
            self.exp_summary.validate()

    def test_validation_fails_with_empty_tag_in_tags(self):
        self.exp_summary.tags = ['', 'abc']
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Tags should be non-empty'):
            self.exp_summary.validate()

    def test_validation_fails_with_unallowed_characters_in_tag(self):
        self.exp_summary.tags = ['123', 'abc']
        with self.assertRaisesRegexp(
            utils.ValidationError, (
                'Tags should only contain lowercase '
                'letters and spaces, received \'123\'')):
            self.exp_summary.validate()

    def test_validation_fails_with_whitespace_in_tag_start(self):
        self.exp_summary.tags = [' ab', 'abc']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Tags should not start or end with whitespace, received \' ab\''):
            self.exp_summary.validate()

    def test_validation_fails_with_whitespace_in_tag_end(self):
        self.exp_summary.tags = ['ab ', 'abc']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Tags should not start or end with whitespace, received \'ab \''):
            self.exp_summary.validate()

    def test_validation_fails_with_adjacent_whitespace_in_tag(self):
        # NOTE(review): the tag was presumably 'a   b' (multiple internal
        # spaces) originally so the 'adjacent whitespace' error fires; the
        # spaces look collapsed in this copy of the file — verify upstream.
        self.exp_summary.tags = ['a b', 'abc']
        with self.assertRaisesRegexp(
            utils.ValidationError, (
                'Adjacent whitespace in tags should '
                'be collapsed, received \'a b\'')):
            self.exp_summary.validate()

    def test_validation_fails_with_duplicate_tags(self):
        self.exp_summary.tags = ['abc', 'abc', 'ab']
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Some tags duplicate each other'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_rating_type(self):
        self.exp_summary.ratings = 0
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Expected ratings to be a dict, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_rating_keys(self):
        # Rating dicts must have exactly the keys '1'..'5'.
        self.exp_summary.ratings = {'1': 0, '10': 1}
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected ratings to have keys: 1, 2, 3, 4, 5, received 1, 10'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_value_type_for_ratings(self):
        self.exp_summary.ratings = {'1': 0, '2': 'one', '3': 0, '4': 0, '5': 0}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Expected value to be int, received one'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_value_for_ratings(self):
        self.exp_summary.ratings = {'1': 0, '2': -1, '3': 0, '4': 0, '5': 0}
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected value to be non-negative, received -1'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_scaled_average_rating(self):
        self.exp_summary.scaled_average_rating = 'one'
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected scaled_average_rating to be float, received one'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_status(self):
        self.exp_summary.status = 0
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Expected status to be string, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_community_owned(self):
        self.exp_summary.community_owned = '1'
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected community_owned to be bool, received 1'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_contributors_summary(self):
        self.exp_summary.contributors_summary = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected contributors_summary to be dict, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_owner_ids_type(self):
        self.exp_summary.owner_ids = 0
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Expected owner_ids to be list, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_owner_id_in_owner_ids(self):
        self.exp_summary.owner_ids = ['1', 2, '3']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected each id in owner_ids to be string, received 2'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_editor_ids_type(self):
        self.exp_summary.editor_ids = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected editor_ids to be list, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_editor_id_in_editor_ids(self):
        self.exp_summary.editor_ids = ['1', 2, '3']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected each id in editor_ids to be string, received 2'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_voice_artist_ids_type(self):
        self.exp_summary.voice_artist_ids = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected voice_artist_ids to be list, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_voice_artist_id_in_voice_artists_ids(
            self):
        self.exp_summary.voice_artist_ids = ['1', 2, '3']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected each id in voice_artist_ids to be string, received 2'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_viewer_ids_type(self):
        self.exp_summary.viewer_ids = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected viewer_ids to be list, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_viewer_id_in_viewer_ids(self):
        self.exp_summary.viewer_ids = ['1', 2, '3']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected each id in viewer_ids to be string, received 2'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_contributor_ids_type(self):
        self.exp_summary.contributor_ids = 0
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected contributor_ids to be list, received 0'):
            self.exp_summary.validate()

    def test_validation_fails_with_invalid_contributor_id_in_contributor_ids(
            self):
        self.exp_summary.contributor_ids = ['1', 2, '3']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Expected each id in contributor_ids to be string, received 2'):
            self.exp_summary.validate()
class YamlCreationUnitTests(test_utils.GenericTestBase):
    """Test creation of explorations from YAML files."""

    EXP_ID = 'An exploration_id'

    def test_yaml_import_and_export(self):
        """Test the from_yaml() and to_yaml() methods."""
        exploration = exp_domain.Exploration.create_default_exploration(
            self.EXP_ID, title='Title', category='Category')
        exploration.add_states(['New state'])
        self.assertEqual(len(exploration.states), 2)

        exploration.validate()

        yaml_content = exploration.to_yaml()
        self.assertEqual(yaml_content, self.SAMPLE_YAML_CONTENT)

        # Round trip: importing the exported YAML must reproduce it.
        exploration2 = exp_domain.Exploration.from_yaml('exp2', yaml_content)
        self.assertEqual(len(exploration2.states), 2)
        yaml_content_2 = exploration2.to_yaml()
        self.assertEqual(yaml_content_2, yaml_content)

        # Verify SAMPLE_UNTITLED_YAML_CONTENT can be converted to an exploration
        # without error.
        exp_domain.Exploration.from_untitled_yaml(
            'exp4', 'Title', 'Category', self.SAMPLE_UNTITLED_YAML_CONTENT)

        # Malformed or structurally invalid YAML must raise.
        with self.assertRaises(Exception):
            exp_domain.Exploration.from_yaml('exp3', 'No_initial_state_name')

        with self.assertRaises(Exception):
            exp_domain.Exploration.from_yaml(
                'exp4', 'Invalid\ninit_state_name:\nMore stuff')

        with self.assertRaises(Exception):
            exp_domain.Exploration.from_yaml(
                'exp4', 'State1:\n(\nInvalid yaml')

        # from_yaml only accepts schema versions >= 10, while
        # from_untitled_yaml only accepts schema versions <= 9.
        with self.assertRaisesRegexp(
            Exception, 'Expected a YAML version >= 10, received: 9'
        ):
            exp_domain.Exploration.from_yaml(
                'exp4', self.SAMPLE_UNTITLED_YAML_CONTENT)

        with self.assertRaisesRegexp(
            Exception, 'Expected a YAML version <= 9'
        ):
            exp_domain.Exploration.from_untitled_yaml(
                'exp4', 'Title', 'Category', self.SAMPLE_YAML_CONTENT)
class SchemaMigrationMethodsUnitTests(test_utils.GenericTestBase):
    """Tests the presence of appropriate schema migration methods in the
    Exploration domain object class.
    """

    def test_correct_states_schema_conversion_methods_exist(self):
        """Test that the right states schema conversion methods exist."""
        latest_version = feconf.CURRENT_STATE_SCHEMA_VERSION
        # A converter must exist for every version below the current one...
        for old_version in range(latest_version):
            method_name = '_convert_states_v%s_dict_to_v%s_dict' % (
                old_version, old_version + 1)
            self.assertTrue(hasattr(exp_domain.Exploration, method_name))
        # ...but there must be none leading past the current version.
        self.assertFalse(hasattr(
            exp_domain.Exploration,
            '_convert_states_v%s_dict_to_v%s_dict' % (
                latest_version, latest_version + 1)))

    def test_correct_exploration_schema_conversion_methods_exist(self):
        """Test that the right exploration schema conversion methods exist."""
        latest_version = exp_domain.Exploration.CURRENT_EXP_SCHEMA_VERSION
        # Exploration schema versions start at 1, not 0.
        for old_version in range(1, latest_version):
            method_name = '_convert_v%s_dict_to_v%s_dict' % (
                old_version, old_version + 1)
            self.assertTrue(hasattr(exp_domain.Exploration, method_name))
        self.assertFalse(hasattr(
            exp_domain.Exploration,
            '_convert_v%s_dict_to_v%s_dict' % (
                latest_version, latest_version + 1)))
class SchemaMigrationUnitTests(test_utils.GenericTestBase):
"""Test migration methods for yaml content."""
YAML_CONTENT_V1 = ("""default_skin: conversation_v1
param_changes: []
param_specs: {}
schema_version: 1
states:
- content:
- type: text
value: ''
name: (untitled state)
param_changes: []
widget:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
- content:
- type: text
value: ''
name: New state
param_changes: []
widget:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
""")
YAML_CONTENT_V2 = ("""default_skin: conversation_v1
init_state_name: (untitled state)
param_changes: []
param_specs: {}
schema_version: 2
states:
(untitled state):
content:
- type: text
value: ''
param_changes: []
widget:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
New state:
content:
- type: text
value: ''
param_changes: []
widget:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
""")
YAML_CONTENT_V3 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 3
skill_tags: []
states:
(untitled state):
content:
- type: text
value: ''
param_changes: []
widget:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
New state:
content:
- type: text
value: ''
param_changes: []
widget:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
""")
YAML_CONTENT_V4 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 4
skill_tags: []
states:
(untitled state):
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
id: TextInput
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: TextInput
param_changes: []
""")
YAML_CONTENT_V5 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 5
skin_customizations:
panels_contents: {}
states:
(untitled state):
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
id: TextInput
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: TextInput
param_changes: []
widget:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
customization_args:
recommendedExplorationIds:
value: []
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: EndExploration
triggers: []
param_changes: []
tags: []
""")
YAML_CONTENT_V6 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
panels_contents: {}
states:
(untitled state):
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
id: TextInput
triggers: []
param_changes: []
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
customization_args:
recommendedExplorationIds:
value: []
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: EndExploration
triggers: []
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: TextInput
triggers: []
param_changes: []
states_schema_version: 3
tags: []
""")
YAML_CONTENT_V7 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 7
skin_customizations:
panels_contents: {}
states:
(untitled state):
content:
- type: text
value: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
id: TextInput
triggers: []
param_changes: []
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
answer_groups: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
id: EndExploration
triggers: []
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
answer_groups: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
id: TextInput
triggers: []
param_changes: []
states_schema_version: 4
tags: []
""")
YAML_CONTENT_V8 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 8
skin_customizations:
panels_contents: {}
states:
(untitled state):
content:
- type: text
value: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
answer_groups: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
id: EndExploration
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
answer_groups: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
states_schema_version: 5
tags: []
""")
YAML_CONTENT_V9 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 9
skin_customizations:
panels_contents: {}
states:
(untitled state):
content:
- type: text
value: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
id: EndExploration
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
language:
value: ''
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks: []
id: CodeRepl
param_changes: []
states_schema_version: 6
tags: []
""")
YAML_CONTENT_V10 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 10
skin_customizations:
panels_contents:
bottom: []
states:
(untitled state):
content:
- type: text
value: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
id: EndExploration
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
states_schema_version: 7
tags: []
title: Title
""")
YAML_CONTENT_V11 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 11
skin_customizations:
panels_contents:
bottom: []
states:
(untitled state):
classifier_model_id: null
content:
- type: text
value: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
END:
classifier_model_id: null
content:
- type: text
value: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
id: EndExploration
param_changes: []
New state:
classifier_model_id: null
content:
- type: text
value: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
states_schema_version: 8
tags: []
title: Title
""")
YAML_CONTENT_V12 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 12
skin_customizations:
panels_contents:
bottom: []
states:
(untitled state):
classifier_model_id: null
content:
- type: text
value: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
id: TextInput
param_changes: []
END:
classifier_model_id: null
content:
- type: text
value: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
id: EndExploration
param_changes: []
New state:
classifier_model_id: null
content:
- type: text
value: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks:
- outcome:
dest: END
feedback:
- Correct!
id: TextInput
param_changes: []
states_schema_version: 9
tags: []
title: Title
""")
YAML_CONTENT_V13 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 13
skin_customizations:
panels_contents:
bottom: []
states:
(untitled state):
classifier_model_id: null
content:
- type: text
value: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
hints: []
id: TextInput
solution: {}
param_changes: []
END:
classifier_model_id: null
content:
- type: text
value: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
hints: []
id: EndExploration
solution: {}
param_changes: []
New state:
classifier_model_id: null
content:
- type: text
value: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks: []
hints: []
id: TextInput
solution: {}
param_changes: []
states_schema_version: 10
tags: []
title: Title
""")
YAML_CONTENT_V14 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 14
skin_customizations:
panels_contents:
bottom: []
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: []
html: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
hints: []
id: TextInput
solution: {}
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: []
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
hints: []
id: EndExploration
solution: {}
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: []
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks: []
hints: []
id: TextInput
solution: {}
param_changes: []
states_schema_version: 11
tags: []
title: Title
""")
YAML_CONTENT_V15 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 15
skin_customizations:
panels_contents:
bottom: []
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
fallbacks: []
hints: []
id: TextInput
solution: {}
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
fallbacks: []
hints: []
id: EndExploration
solution: {}
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
fallbacks: []
hints: []
id: TextInput
solution: {}
param_changes: []
states_schema_version: 12
tags: []
title: Title
""")
YAML_CONTENT_V16 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 16
skin_customizations:
panels_contents:
bottom: []
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 13
tags: []
title: Title
""")
YAML_CONTENT_V17 = ("""author_notes: ''
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 17
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 13
tags: []
title: Title
""")
YAML_CONTENT_V18 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 18
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
- Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback: []
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback: []
param_changes: []
hints:
- hint_text: ''
id: TextInput
solution:
explanation: ''
answer_is_exclusive: False
correct_answer: Answer
param_changes: []
states_schema_version: 13
tags: []
title: Title
""")
YAML_CONTENT_V19 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 19
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- correct: false
outcome:
dest: END
feedback:
audio_translations: {}
html: Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
audio_translations: {}
html: ''
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
audio_translations: {}
html: ''
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 14
tags: []
title: Title
""")
YAML_CONTENT_V20 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 20
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- labelled_as_correct: false
outcome:
dest: END
feedback:
audio_translations: {}
html: Correct!
param_changes: []
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
audio_translations: {}
html: ''
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
audio_translations: {}
html: ''
param_changes: []
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 15
tags: []
title: Title
""")
YAML_CONTENT_V21 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 21
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- labelled_as_correct: false
outcome:
dest: END
feedback:
audio_translations: {}
html: Correct!
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
audio_translations: {}
html: ''
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
audio_translations: {}
html: ''
param_changes: []
refresher_exploration_id: null
hints: []
id: FractionInput
solution: null
param_changes: []
states_schema_version: 16
tags: []
title: Title
""")
YAML_CONTENT_V22 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 22
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
audio_translations: {}
html: Correct!
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 17
tags: []
title: Title
""")
YAML_CONTENT_V23 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 23
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
audio_translations: {}
html: Correct!
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 18
tags: []
title: Title
""")
YAML_CONTENT_V24 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 24
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
audio_translations: {}
html: Correct!
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 19
tags: []
title: Title
""")
YAML_CONTENT_V25 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 25
states:
(untitled state):
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
audio_translations: {}
html: Correct!
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
audio_translations: {}
html: Congratulations, you have finished!
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
audio_translations: {}
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
audio_translations: {}
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 20
tags: []
title: Title
""")
YAML_CONTENT_V26 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 26
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: Correct!
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
content_id: content
html: Congratulations, you have finished!
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 21
tags: []
title: Title
""")
YAML_CONTENT_V27 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 27
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 22
tags: []
title: Title
""")
YAML_CONTENT_V28 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 28
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 23
tags: []
title: Title
""")
YAML_CONTENT_V29 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 29
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
imageAndRegions:
value:
imagePath: s1ImagePath.png
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: ImageClickInput
solution: null
param_changes: []
states_schema_version: 24
tags: []
title: Title
""")
YAML_CONTENT_V30 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 30
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 25
tags: []
title: Title
""")
YAML_CONTENT_V31 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 31
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
new_content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 26
tags: []
title: Title
""")
YAML_CONTENT_V32 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 32
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
written_translations:
translations_mapping:
content: {}
default_outcome: {}
states_schema_version: 27
tags: []
title: Title
""")
YAML_CONTENT_V33 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 33
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
written_translations:
translations_mapping:
content: {}
default_outcome: {}
states_schema_version: 28
tags: []
title: Title
""")
YAML_CONTENT_V34 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 34
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
states_schema_version: 29
tags: []
title: Title
""")
YAML_CONTENT_V35 = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
states_schema_version: 30
tags: []
title: Title
""")
_LATEST_YAML_CONTENT = YAML_CONTENT_V35
def test_load_from_v1(self):
"""Test direct loading from a v1 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V1)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v2(self):
"""Test direct loading from a v2 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V2)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v3(self):
"""Test direct loading from a v3 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V3)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v4(self):
"""Test direct loading from a v4 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V4)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v5(self):
"""Test direct loading from a v5 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V5)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v6(self):
"""Test direct loading from a v6 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V6)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
    def test_cannot_load_from_v6_with_invalid_handler_name(self):
        """A v6 YAML file whose handler is not named 'submit' is rejected
        during the states v3-to-v4 conversion step.
        """
        # Same as a valid v6 fixture except that 'New state' declares a
        # handler named 'invalid_handler_name' instead of 'submit'.
        invalid_yaml_content_v6 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
  panels_contents: {}
states:
  (untitled state):
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            inputs:
              x: InputString
            name: Equals
            rule_type: atomic
          dest: END
          feedback:
          - Correct!
          param_changes: []
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
  END:
    content:
    - type: text
      value: Congratulations, you have finished!
    interaction:
      customization_args:
        recommendedExplorationIds:
          value: []
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: EndExploration
      triggers: []
    param_changes: []
  New state:
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: invalid_handler_name
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
states_schema_version: 3
tags: []
""")
        with self.assertRaisesRegexp(
            Exception,
            'Error: Can only convert rules with a name '
            '\'submit\' in states v3 to v4 conversion process. '):
            exp_domain.Exploration.from_untitled_yaml(
                'eid', 'Title', 'Category', invalid_yaml_content_v6)
    def test_cannot_load_from_v6_with_invalid_rule(self):
        """A v6 YAML file containing a rule whose type is neither 'default'
        nor 'atomic' is rejected during the states v3-to-v4 conversion step.
        """
        # Same as a valid v6 fixture except the first rule of the initial
        # state uses 'rule_type: invalid_rule'.
        invalid_yaml_content_v6 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
  panels_contents: {}
states:
  (untitled state):
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            inputs:
              x: InputString
            name: Equals
            rule_type: invalid_rule
          dest: END
          feedback:
          - Correct!
          param_changes: []
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
  END:
    content:
    - type: text
      value: Congratulations, you have finished!
    interaction:
      customization_args:
        recommendedExplorationIds:
          value: []
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: EndExploration
      triggers: []
    param_changes: []
  New state:
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
states_schema_version: 3
tags: []
""")
        with self.assertRaisesRegexp(
            Exception,
            'Error: Can only convert default and atomic '
            'rules in states v3 to v4 conversion process.'):
            exp_domain.Exploration.from_untitled_yaml(
                'eid', 'Title', 'Category', invalid_yaml_content_v6)
    def test_cannot_load_from_v6_with_invalid_subject(self):
        """A v6 YAML file whose rule subject is not 'answer' is rejected
        during the states v3-to-v4 conversion step.
        """
        # Same as a valid v6 fixture except that the default rule of
        # 'New state' carries 'subject: invalid_subject'.
        invalid_yaml_content_v6 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
  panels_contents: {}
states:
  (untitled state):
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            inputs:
              x: InputString
            name: Equals
            rule_type: atomic
          dest: END
          feedback:
          - Correct!
          param_changes: []
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
  END:
    content:
    - type: text
      value: Congratulations, you have finished!
    interaction:
      customization_args:
        recommendedExplorationIds:
          value: []
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: EndExploration
      triggers: []
    param_changes: []
  New state:
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
            subject: invalid_subject
          dest: END
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
states_schema_version: 3
tags: []
""")
        with self.assertRaisesRegexp(
            Exception,
            'Error: Can only convert rules with an \'answer\' '
            'subject in states v3 to v4 conversion process.'):
            exp_domain.Exploration.from_untitled_yaml(
                'eid', 'Title', 'Category', invalid_yaml_content_v6)
    def test_cannot_load_from_v6_with_invalid_interaction_id(self):
        """A v6 YAML file referencing a non-existent interaction ID is
        rejected during migration.
        """
        # Same as a valid v6 fixture except that 'New state' declares
        # 'id: invalid_id' for its interaction.
        invalid_yaml_content_v6 = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
  panels_contents: {}
states:
  (untitled state):
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            inputs:
              x: InputString
            name: Equals
            rule_type: atomic
          dest: END
          feedback:
          - Correct!
          param_changes: []
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
  END:
    content:
    - type: text
      value: Congratulations, you have finished!
    interaction:
      customization_args:
        recommendedExplorationIds:
          value: []
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: EndExploration
      triggers: []
    param_changes: []
  New state:
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: invalid_id
      triggers: []
    param_changes: []
states_schema_version: 3
tags: []
""")
        with self.assertRaisesRegexp(
            Exception,
            'Trying to migrate exploration containing non-existent '
            'interaction ID'):
            exp_domain.Exploration.from_untitled_yaml(
                'eid', 'Title', 'Category', invalid_yaml_content_v6)
def test_load_from_v7(self):
"""Test direct loading from a v7 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V7)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v8(self):
"""Test direct loading from a v8 yaml file."""
exploration = exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', self.YAML_CONTENT_V8)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
    def test_load_from_v9(self):
        """Test direct loading from a v9 yaml file."""
        # The expected output differs from _LATEST_YAML_CONTENT because the
        # v9 fixture's 'New state' uses a CodeRepl interaction (with
        # language/postCode/preCode customization args) rather than TextInput.
        latest_yaml_content = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
  (untitled state):
    classifier_model_id: null
    content:
      content_id: content
      html: ''
    interaction:
      answer_groups:
      - outcome:
          dest: END
          feedback:
            content_id: feedback_1
            html: <p>Correct!</p>
          labelled_as_correct: false
          missing_prerequisite_skill_id: null
          param_changes: []
          refresher_exploration_id: null
        rule_specs:
        - inputs:
            x: InputString
          rule_type: Equals
        tagged_skill_misconception_id: null
        training_data: []
      confirmed_unclassified_answers: []
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      default_outcome:
        dest: (untitled state)
        feedback:
          content_id: default_outcome
          html: ''
        labelled_as_correct: false
        missing_prerequisite_skill_id: null
        param_changes: []
        refresher_exploration_id: null
      hints: []
      id: TextInput
      solution: null
    param_changes: []
    recorded_voiceovers:
      voiceovers_mapping:
        content: {}
        default_outcome: {}
        feedback_1: {}
    solicit_answer_details: false
    written_translations:
      translations_mapping:
        content: {}
        default_outcome: {}
        feedback_1: {}
  END:
    classifier_model_id: null
    content:
      content_id: content
      html: <p>Congratulations, you have finished!</p>
    interaction:
      answer_groups: []
      confirmed_unclassified_answers: []
      customization_args:
        recommendedExplorationIds:
          value: []
      default_outcome: null
      hints: []
      id: EndExploration
      solution: null
    param_changes: []
    recorded_voiceovers:
      voiceovers_mapping:
        content: {}
    solicit_answer_details: false
    written_translations:
      translations_mapping:
        content: {}
  New state:
    classifier_model_id: null
    content:
      content_id: content
      html: ''
    interaction:
      answer_groups: []
      confirmed_unclassified_answers: []
      customization_args:
        language:
          value: python
        placeholder:
          value: ''
        postCode:
          value: ''
        preCode:
          value: ''
        rows:
          value: 1
      default_outcome:
        dest: END
        feedback:
          content_id: default_outcome
          html: ''
        labelled_as_correct: false
        missing_prerequisite_skill_id: null
        param_changes: []
        refresher_exploration_id: null
      hints: []
      id: CodeRepl
      solution: null
    param_changes: []
    recorded_voiceovers:
      voiceovers_mapping:
        content: {}
        default_outcome: {}
    solicit_answer_details: false
    written_translations:
      translations_mapping:
        content: {}
        default_outcome: {}
states_schema_version: 30
tags: []
title: Title
""")
        exploration = exp_domain.Exploration.from_untitled_yaml(
            'eid', 'Title', 'Category', self.YAML_CONTENT_V9)
        self.assertEqual(exploration.to_yaml(), latest_yaml_content)
def test_load_from_v10(self):
"""Test direct loading from a v10 yaml file."""
exploration = exp_domain.Exploration.from_yaml(
'eid', self.YAML_CONTENT_V10)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v11(self):
"""Test direct loading from a v11 yaml file."""
exploration = exp_domain.Exploration.from_yaml(
'eid', self.YAML_CONTENT_V11)
self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
    def test_load_from_v12(self):
        """Test direct loading from a v12 yaml file."""
        # The expected output differs from _LATEST_YAML_CONTENT because the
        # v12 fixture's 'New state' carries a hint (content id 'hint_1'),
        # which also appears in the voiceover/translation mappings.
        latest_yaml_content = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
  (untitled state):
    classifier_model_id: null
    content:
      content_id: content
      html: ''
    interaction:
      answer_groups:
      - outcome:
          dest: END
          feedback:
            content_id: feedback_1
            html: <p>Correct!</p>
          labelled_as_correct: false
          missing_prerequisite_skill_id: null
          param_changes: []
          refresher_exploration_id: null
        rule_specs:
        - inputs:
            x: InputString
          rule_type: Equals
        tagged_skill_misconception_id: null
        training_data: []
      confirmed_unclassified_answers: []
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      default_outcome:
        dest: (untitled state)
        feedback:
          content_id: default_outcome
          html: ''
        labelled_as_correct: false
        missing_prerequisite_skill_id: null
        param_changes: []
        refresher_exploration_id: null
      hints: []
      id: TextInput
      solution: null
    param_changes: []
    recorded_voiceovers:
      voiceovers_mapping:
        content: {}
        default_outcome: {}
        feedback_1: {}
    solicit_answer_details: false
    written_translations:
      translations_mapping:
        content: {}
        default_outcome: {}
        feedback_1: {}
  END:
    classifier_model_id: null
    content:
      content_id: content
      html: <p>Congratulations, you have finished!</p>
    interaction:
      answer_groups: []
      confirmed_unclassified_answers: []
      customization_args:
        recommendedExplorationIds:
          value: []
      default_outcome: null
      hints: []
      id: EndExploration
      solution: null
    param_changes: []
    recorded_voiceovers:
      voiceovers_mapping:
        content: {}
    solicit_answer_details: false
    written_translations:
      translations_mapping:
        content: {}
  New state:
    classifier_model_id: null
    content:
      content_id: content
      html: ''
    interaction:
      answer_groups: []
      confirmed_unclassified_answers: []
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      default_outcome:
        dest: END
        feedback:
          content_id: default_outcome
          html: ''
        labelled_as_correct: false
        missing_prerequisite_skill_id: null
        param_changes: []
        refresher_exploration_id: null
      hints:
      - hint_content:
          content_id: hint_1
          html: <p>Correct!</p>
      id: TextInput
      solution: null
    param_changes: []
    recorded_voiceovers:
      voiceovers_mapping:
        content: {}
        default_outcome: {}
        hint_1: {}
    solicit_answer_details: false
    written_translations:
      translations_mapping:
        content: {}
        default_outcome: {}
        hint_1: {}
states_schema_version: 30
tags: []
title: Title
""")
        exploration = exp_domain.Exploration.from_yaml(
            'eid', self.YAML_CONTENT_V12)
        self.assertEqual(exploration.to_yaml(), latest_yaml_content)
def test_load_from_v13(self):
    """Check that a v13 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V13)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v14(self):
    """Check that a v14 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V14)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v15(self):
    """Check that a v15 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V15)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v16(self):
    """Check that a v16 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V16)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v17(self):
    """Check that a v17 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V17)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v18(self):
"""Test direct loading from a v18 yaml file.

Uses a bespoke expected YAML instead of self._LATEST_YAML_CONTENT
because the v18 fixture's 'New state' carries a hint and a TextInput
solution, which survive into the migrated output below.
"""
# Expected YAML after migrating the v18 snapshot to schema_version 35.
latest_yaml_content = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints:
- hint_content:
content_id: hint_1
html: ''
id: TextInput
solution:
answer_is_exclusive: false
correct_answer: Answer
explanation:
content_id: solution
html: ''
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
hint_1: {}
solution: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
hint_1: {}
solution: {}
states_schema_version: 30
tags: []
title: Title
""")
exploration = exp_domain.Exploration.from_yaml(
'eid', self.YAML_CONTENT_V18)
self.assertEqual(exploration.to_yaml(), latest_yaml_content)
def test_load_from_v19(self):
    """Check that a v19 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V19)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v20(self):
    """Check that a v20 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V20)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v21(self):
"""Test direct loading from a v21 yaml file.

Uses a bespoke expected YAML instead of self._LATEST_YAML_CONTENT
because the v21 fixture's 'New state' uses a FractionInput
interaction, whose migrated customization_args (allowImproperFraction,
allowNonzeroIntegerPart, customPlaceholder, requireSimplestForm)
appear in the output below.
"""
# Expected YAML after migrating the v21 snapshot to schema_version 35.
latest_yaml_content = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
allowImproperFraction:
value: true
allowNonzeroIntegerPart:
value: true
customPlaceholder:
value: ''
placeholder:
value: ''
requireSimplestForm:
value: false
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: FractionInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
states_schema_version: 30
tags: []
title: Title
""")
exploration = exp_domain.Exploration.from_yaml(
'eid', self.YAML_CONTENT_V21)
self.assertEqual(exploration.to_yaml(), latest_yaml_content)
def test_load_from_v22(self):
    """Check that a v22 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V22)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v23(self):
    """Check that a v23 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V23)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v24(self):
    """Check that a v24 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V24)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v25(self):
    """Check that a v25 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V25)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v26(self):
    """Check that a v26 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V26)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v27(self):
    """Check that a v27 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V27)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v28(self):
    """Check that a v28 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V28)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v29(self):
"""Test direct loading from a v29 yaml file.

Uses a bespoke expected YAML instead of self._LATEST_YAML_CONTENT
because the v29 fixture's 'New state' uses an ImageClickInput
interaction; the migrated imagePath gains _height_..._width_...
dimensions (see the output below).
"""
# Expected YAML after migrating the v29 snapshot to schema_version 35.
latest_yaml_content = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
highlightRegionsOnHover:
value: false
imageAndRegions:
value:
imagePath: s1ImagePath_height_120_width_120.png
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: ImageClickInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
states_schema_version: 30
tags: []
title: Title
""")
exploration = exp_domain.Exploration.from_yaml(
'eid', self.YAML_CONTENT_V29)
self.assertEqual(exploration.to_yaml(), latest_yaml_content)
def test_load_from_v30(self):
    """Check that a v30 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V30)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v31(self):
    """Check that a v31 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V31)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v32(self):
    """Check that a v32 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V32)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_load_from_v33(self):
    """Check that a v33 YAML snapshot migrates to the latest schema."""
    migrated = exp_domain.Exploration.from_yaml(
        'eid', self.YAML_CONTENT_V33)
    self.assertEqual(migrated.to_yaml(), self._LATEST_YAML_CONTENT)
def test_cannot_load_from_yaml_with_no_schema_version(self):
"""Loading YAML that lacks a schema_version key raises an Exception
with the message 'Invalid YAML file: no schema version specified.'.
"""
# A v1-style (handlers/rule_specs) exploration YAML with the
# schema_version key deliberately omitted.
sample_yaml_content = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
skin_customizations:
panels_contents: {}
states:
(untitled state):
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
id: TextInput
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: TextInput
param_changes: []
widget:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
customization_args:
recommendedExplorationIds:
value: []
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: EndExploration
triggers: []
param_changes: []
tags: []
""")
# assertRaisesRegexp is the Python-2-era alias of assertRaisesRegex.
with self.assertRaisesRegexp(
Exception, 'Invalid YAML file: no schema version specified.'):
exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', sample_yaml_content)
def test_cannot_load_from_yaml_with_invalid_schema_version(self):
"""Loading YAML whose schema_version is outside the supported range
(here 0) raises an Exception naming the supported version span.
"""
# Same v1-style fixture as the no-schema test, but with an invalid
# schema_version of 0.
sample_yaml_content = ("""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 0
skin_customizations:
panels_contents: {}
states:
(untitled state):
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
inputs:
x: InputString
name: Equals
rule_type: atomic
dest: END
feedback:
- Correct!
param_changes: []
- definition:
rule_type: default
dest: (untitled state)
feedback: []
param_changes: []
id: TextInput
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: TextInput
param_changes: []
widget:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
sticky: false
widget_id: TextInput
END:
content:
- type: text
value: Congratulations, you have finished!
interaction:
customization_args:
recommendedExplorationIds:
value: []
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: END
feedback: []
param_changes: []
id: EndExploration
triggers: []
param_changes: []
tags: []
""")
# The error message embeds the current maximum supported schema version.
with self.assertRaisesRegexp(
Exception,
'Sorry, we can only process v1 to v%s exploration YAML files '
'at present.' % exp_domain.Exploration.CURRENT_EXP_SCHEMA_VERSION):
exp_domain.Exploration.from_untitled_yaml(
'eid', 'Title', 'Category', sample_yaml_content)
class HTMLMigrationUnitTests(test_utils.GenericTestBase):
"""Tests that rich-text HTML inside exploration snapshots is migrated:
TextAngular-era markup is normalized (e.g. <i> -> <em>, <b> -> <strong>)
and <oppia-noninteractive-image> tags gain captions and dimensioned
filepaths.
"""
# v26 snapshot whose HTML uses TextAngular-era markup (bare text,
# <i>/<b>/<span>/<div> tags) across content, feedback, hints, solutions
# and interaction customization args.
YAML_CONTENT_V26_TEXTANGULAR = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: category
correctness_feedback_enabled: false
init_state_name: Introduction
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 26
states:
Introduction:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args: {}
default_outcome:
dest: Introduction
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: null
solution: null
param_changes: []
state1:
classifier_model_id: null
content:
content_id: content
html: <blockquote><p>Hello, this is state1</p></blockquote>
content_ids_to_audio_translations:
content: {}
default_outcome: {}
solution: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: state2
feedback:
content_id: default_outcome
html: Default <p>outcome</p> for state1
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution:
answer_is_exclusive: true
correct_answer: Answer1
explanation:
content_id: solution
html: This is <i>solution</i> for state1
param_changes: []
state2:
classifier_model_id: null
content:
content_id: content
html: <p>Hello, </p>this <i>is </i>state2
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
feedback_2: {}
hint_1: {}
hint_2: {}
interaction:
answer_groups:
- outcome:
dest: state1
feedback:
content_id: feedback_1
html: <div>Outcome1 for state2</div>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: 0
rule_type: Equals
- inputs:
x: 1
rule_type: Equals
tagged_misconception_id: null
training_data: []
- outcome:
dest: state3
feedback:
content_id: feedback_2
html: <pre>Outcome2 <br>for state2</pre>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: 0
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
choices:
value:
- <p>This is </p>value1 <br>for MultipleChoice
- This is value2<span> for <br>MultipleChoice</span>
default_outcome:
dest: state2
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints:
- hint_content:
content_id: hint_1
html: <p>Hello, this is<div> html1<b> for </b></div>state2</p>
- hint_content:
content_id: hint_2
html: Here is link 2 <oppia-noninteractive-link
text-with-value="&quot;discussion forum&quot;"
url-with-value="&quot;https://groups.google.com/
forum/?fromgroups#!forum/oppia&quot;">
</oppia-noninteractive-link>
id: MultipleChoiceInput
solution: null
param_changes: []
state3:
classifier_model_id: null
content:
content_id: content
html: <p>Hello, this is state3</p>
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: state1
feedback:
content_id: feedback_1
html: Here is the image1 <i><oppia-noninteractive-image
caption-with-value="&quot;&quot;"
filepath-with-value="&quot;startBlue.png&quot;"
alt-with-value="&quot;&quot;">
</oppia-noninteractive-image></i>Here is the image2
<div><oppia-noninteractive-image caption-with-value="&quot;&quot;"
filepath-with-value="&quot;startBlue.png&quot;"
alt-with-value="&quot;&quot;">
</oppia-noninteractive-image></div>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x:
- This <span>is value1 for </span>ItemSelectionInput
rule_type: Equals
- inputs:
x:
- This is value3 for ItemSelectionInput
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
choices:
value:
- This <span>is value1 for </span>ItemSelection
- This <code>is value2</code> for ItemSelection
- This is value3 for ItemSelection
maxAllowableSelectionCount:
value: 1
minAllowableSelectionCount:
value: 1
default_outcome:
dest: state3
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: ItemSelectionInput
solution: null
param_changes: []
states_schema_version: 21
tags: []
title: title
""")
# pylint: disable=line-too-long
# Expected v35 form of the v26 snapshot above: HTML wrapped in <p> tags,
# <i>/<b> converted to <em>/<strong>, and image filepaths carrying
# _height_..._width_... dimensions (supplied by the mocked
# get_filename_with_dimensions — see the tests below).
YAML_CONTENT_V35_IMAGE_DIMENSIONS = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: category
correctness_feedback_enabled: false
init_state_name: Introduction
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
Introduction:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args: {}
default_outcome:
dest: Introduction
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: null
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
state1:
classifier_model_id: null
content:
content_id: content
html: <blockquote><p>Hello, this is state1</p></blockquote>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: state2
feedback:
content_id: default_outcome
html: <p>Default </p><p>outcome</p><p> for state1</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution:
answer_is_exclusive: true
correct_answer: Answer1
explanation:
content_id: solution
html: <p>This is <em>solution</em> for state1</p>
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
solution: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
solution: {}
state2:
classifier_model_id: null
content:
content_id: content
html: <p>Hello, </p><p>this <em>is </em>state2</p>
interaction:
answer_groups:
- outcome:
dest: state1
feedback:
content_id: feedback_1
html: <p>Outcome1 for state2</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: 0
rule_type: Equals
- inputs:
x: 1
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
- outcome:
dest: state3
feedback:
content_id: feedback_2
html: "<pre>Outcome2 \\nfor state2</pre>"
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: 0
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
choices:
value:
- <p>This is </p><p>value1 <br>for MultipleChoice</p>
- <p>This is value2 for <br>MultipleChoice</p>
default_outcome:
dest: state2
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints:
- hint_content:
content_id: hint_1
html: <p>Hello, this is</p><p> html1<strong> for </strong></p><p>state2</p>
- hint_content:
content_id: hint_2
html: <p>Here is link 2 <oppia-noninteractive-link text-with-value="&quot;discussion
forum&quot;" url-with-value="&quot;https://groups.google.com/
forum/?fromgroups#!forum/oppia&quot;"> </oppia-noninteractive-link></p>
id: MultipleChoiceInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
feedback_2: {}
hint_1: {}
hint_2: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
feedback_2: {}
hint_1: {}
hint_2: {}
state3:
classifier_model_id: null
content:
content_id: content
html: <p>Hello, this is state3</p>
interaction:
answer_groups:
- outcome:
dest: state1
feedback:
content_id: feedback_1
html: <p>Here is the image1 </p><oppia-noninteractive-image alt-with-value="&quot;&quot;"
caption-with-value="&quot;&quot;" filepath-with-value="&quot;startBlue_height_490_width_120.png&quot;">
</oppia-noninteractive-image><p>Here is the image2 </p><oppia-noninteractive-image
alt-with-value="&quot;&quot;" caption-with-value="&quot;&quot;"
filepath-with-value="&quot;startBlue_height_490_width_120.png&quot;">
</oppia-noninteractive-image>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x:
- <p>This is value1 for ItemSelectionInput</p>
rule_type: Equals
- inputs:
x:
- <p>This is value3 for ItemSelectionInput</p>
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
choices:
value:
- <p>This is value1 for ItemSelection</p>
- <p>This is value2 for ItemSelection</p>
- <p>This is value3 for ItemSelection</p>
maxAllowableSelectionCount:
value: 1
minAllowableSelectionCount:
value: 1
default_outcome:
dest: state3
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: ItemSelectionInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
states_schema_version: 30
tags: []
title: title
""")
# v27 snapshot containing an <oppia-noninteractive-image> inside a <p>
# tag and lacking the caption-with-value attribute.
YAML_CONTENT_V27_WITHOUT_IMAGE_CAPTION = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 27
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: <p><oppia-noninteractive-image filepath-with-value="&quot;random.png&quot;"></oppia-noninteractive-image>Hello this
is test case to check image tag inside p tag</p>
content_ids_to_audio_translations:
content: {}
default_outcome: {}
feedback_1: {}
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
content_ids_to_audio_translations:
content: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
New state:
classifier_model_id: null
content:
content_id: content
html: ''
content_ids_to_audio_translations:
content: {}
default_outcome: {}
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
states_schema_version: 22
tags: []
title: Title
""")
# Expected v35 form of the v27 snapshot above: the image is moved out of
# the <p> tag and gains an empty caption-with-value plus a dimensioned
# filepath.
YAML_CONTENT_V35_WITH_IMAGE_CAPTION = ("""author_notes: ''
auto_tts_enabled: true
blurb: ''
category: Category
correctness_feedback_enabled: false
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 35
states:
(untitled state):
classifier_model_id: null
content:
content_id: content
html: <oppia-noninteractive-image caption-with-value="&quot;&quot;"
filepath-with-value="&quot;random_height_490_width_120.png&quot;"></oppia-noninteractive-image><p>Hello
this is test case to check image tag inside p tag</p>
interaction:
answer_groups:
- outcome:
dest: END
feedback:
content_id: feedback_1
html: <p>Correct!</p>
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
rule_specs:
- inputs:
x: InputString
rule_type: Equals
tagged_skill_misconception_id: null
training_data: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: (untitled state)
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
feedback_1: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
feedback_1: {}
END:
classifier_model_id: null
content:
content_id: content
html: <p>Congratulations, you have finished!</p>
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
recommendedExplorationIds:
value: []
default_outcome: null
hints: []
id: EndExploration
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
New state:
classifier_model_id: null
content:
content_id: content
html: ''
interaction:
answer_groups: []
confirmed_unclassified_answers: []
customization_args:
placeholder:
value: ''
rows:
value: 1
default_outcome:
dest: END
feedback:
content_id: default_outcome
html: ''
labelled_as_correct: false
missing_prerequisite_skill_id: null
param_changes: []
refresher_exploration_id: null
hints: []
id: TextInput
solution: null
param_changes: []
recorded_voiceovers:
voiceovers_mapping:
content: {}
default_outcome: {}
solicit_answer_details: false
written_translations:
translations_mapping:
content: {}
default_outcome: {}
states_schema_version: 30
tags: []
title: Title
""")
# pylint: enable=line-too-long
def test_load_from_v26_textangular(self):
"""Test direct loading from a v26 yaml file.

get_filename_with_dimensions is swapped for a mock (defined earlier in
this file) so the migrated image filepaths are deterministic —
presumably it appends fixed _height_490_width_120 dimensions, matching
the expected YAML; confirm against the mock's definition.
"""
mock_get_filename_with_dimensions_context = self.swap(
html_validation_service, 'get_filename_with_dimensions',
mock_get_filename_with_dimensions)
with mock_get_filename_with_dimensions_context:
exploration = exp_domain.Exploration.from_yaml(
'eid', self.YAML_CONTENT_V26_TEXTANGULAR)
self.assertEqual(
exploration.to_yaml(), self.YAML_CONTENT_V35_IMAGE_DIMENSIONS)
def test_load_from_v27_without_image_caption(self):
"""Test direct loading from a v27 yaml file.

Uses the same mocked get_filename_with_dimensions as the v26 test so
image dimensions in the migrated output are deterministic.
"""
mock_get_filename_with_dimensions_context = self.swap(
html_validation_service, 'get_filename_with_dimensions',
mock_get_filename_with_dimensions)
with mock_get_filename_with_dimensions_context:
exploration = exp_domain.Exploration.from_yaml(
'eid', self.YAML_CONTENT_V27_WITHOUT_IMAGE_CAPTION)
self.assertEqual(
exploration.to_yaml(), self.YAML_CONTENT_V35_WITH_IMAGE_CAPTION)
class ConversionUnitTests(test_utils.GenericTestBase):
"""Test conversion methods."""
def test_convert_exploration_to_player_dict(self):
"""Checks to_player_dict() output for a freshly-created exploration
with one extra (default) state added.
"""
exp_title = 'Title'
second_state_name = 'first state'
exploration = exp_domain.Exploration.create_default_exploration(
'eid', title=exp_title, category='Category')
exploration.add_states([second_state_name])
def _get_default_state_dict(content_str, dest_name):
"""Builds the expected dict of a default state with the given
content HTML and default-outcome destination.
"""
return {
'classifier_model_id': None,
'content': {
'content_id': 'content',
'html': content_str,
},
'recorded_voiceovers': {
'voiceovers_mapping': {
'content': {},
'default_outcome': {}
}
},
'solicit_answer_details': False,
'written_translations': {
'translations_mapping': {
'content': {},
'default_outcome': {}
}
},
'interaction': {
'answer_groups': [],
'confirmed_unclassified_answers': [],
'customization_args': {},
# A default state's outcome loops back to the state itself.
'default_outcome': {
'dest': dest_name,
'feedback': {
'content_id': feconf.DEFAULT_OUTCOME_CONTENT_ID,
'html': ''
},
'labelled_as_correct': False,
'param_changes': [],
'refresher_exploration_id': None,
'missing_prerequisite_skill_id': None
},
'hints': [],
'id': None,
'solution': None,
},
'param_changes': [],
}
self.assertEqual(exploration.to_player_dict(), {
'init_state_name': feconf.DEFAULT_INIT_STATE_NAME,
'title': exp_title,
'objective': feconf.DEFAULT_EXPLORATION_OBJECTIVE,
'states': {
feconf.DEFAULT_INIT_STATE_NAME: _get_default_state_dict(
feconf.DEFAULT_INIT_STATE_CONTENT_STR,
feconf.DEFAULT_INIT_STATE_NAME),
second_state_name: _get_default_state_dict(
'', second_state_name),
},
'param_changes': [],
'param_specs': {},
'language_code': 'en',
'correctness_feedback_enabled': False,
})
class StateOperationsUnitTests(test_utils.GenericTestBase):
    """Test methods operating on states."""

    def test_delete_state(self):
        """Test deletion of states."""
        exp = exp_domain.Exploration.create_default_exploration('eid')
        exp.add_states(['first state'])

        # The initial state must never be deletable.
        with self.assertRaisesRegexp(
            ValueError, 'Cannot delete initial state'):
            exp.delete_state(exp.init_state_name)

        # A non-initial state can be added and then removed.
        exp.add_states(['second state'])
        exp.delete_state('second state')

        # Deleting an unknown state raises an error.
        with self.assertRaisesRegexp(ValueError, 'fake state does not exist'):
            exp.delete_state('fake state')
class HtmlCollectionTests(test_utils.GenericTestBase):
    """Test method to obtain all html strings."""

    def test_all_html_strings_are_collected(self):
        """get_all_html_content_strings() should collect html from content,
        outcomes, hints, solutions, customization args and rule inputs of
        every state, in a deterministic order.
        """

        # Build an exploration with four states, each using a different
        # interaction type.
        exploration = exp_domain.Exploration.create_default_exploration(
            'eid', title='title', category='category')
        exploration.add_states(['state1', 'state2', 'state3', 'state4'])
        state1 = exploration.states['state1']
        state2 = exploration.states['state2']
        state3 = exploration.states['state3']
        state4 = exploration.states['state4']

        # Per-state content html.
        content1_dict = {
            'content_id': 'content',
            'html': '<blockquote>Hello, this is state1</blockquote>'
        }
        content2_dict = {
            'content_id': 'content',
            'html': '<pre>Hello, this is state2</pre>'
        }
        content3_dict = {
            'content_id': 'content',
            'html': '<p>Hello, this is state3</p>'
        }
        content4_dict = {
            'content_id': 'content',
            'html': '<p>Hello, this is state4</p>'
        }
        state1.update_content(
            state_domain.SubtitledHtml.from_dict(content1_dict))
        state2.update_content(
            state_domain.SubtitledHtml.from_dict(content2_dict))
        state3.update_content(
            state_domain.SubtitledHtml.from_dict(content3_dict))
        state4.update_content(
            state_domain.SubtitledHtml.from_dict(content4_dict))

        state1.update_interaction_id('TextInput')
        state2.update_interaction_id('MultipleChoiceInput')
        state3.update_interaction_id('ItemSelectionInput')
        state4.update_interaction_id('DragAndDropSortInput')

        # Customization args; the choice lists contain html that must be
        # picked up by the collector.
        customization_args_dict1 = {
            'placeholder': {'value': ''},
            'rows': {'value': 1}
        }
        customization_args_dict2 = {
            'choices': {'value': [
                '<p>This is value1 for MultipleChoice</p>',
                '<p>This is value2 for MultipleChoice</p>'
            ]}
        }
        customization_args_dict3 = {
            'choices': {'value': [
                '<p>This is value1 for ItemSelection</p>',
                '<p>This is value2 for ItemSelection</p>',
                '<p>This is value3 for ItemSelection</p>'
            ]}
        }
        customization_args_dict4 = {
            'choices': {'value': [
                '<p>This is value1 for DragAndDropSortInput</p>',
                '<p>This is value2 for DragAndDropSortInput</p>',
            ]}
        }
        state1.update_interaction_customization_args(customization_args_dict1)
        state2.update_interaction_customization_args(customization_args_dict2)
        state3.update_interaction_customization_args(customization_args_dict3)
        state4.update_interaction_customization_args(customization_args_dict4)

        # Default outcome feedback html (state1 only).
        default_outcome_dict1 = {
            'dest': 'state2',
            'feedback': {
                'content_id': 'default_outcome',
                'html': '<p>Default outcome for state1</p>'
            },
            'param_changes': [],
            'labelled_as_correct': False,
            'refresher_exploration_id': None,
            'missing_prerequisite_skill_id': None
        }
        state1.update_interaction_default_outcome(default_outcome_dict1)

        # Hints html (state2 only).
        hint_list2 = [{
            'hint_content': {
                'content_id': 'hint_1',
                'html': '<p>Hello, this is html1 for state2</p>'
            }
        }, {
            'hint_content': {
                'content_id': 'hint_2',
                'html': '<p>Hello, this is html2 for state2</p>'
            }
        }]
        state2.update_interaction_hints(hint_list2)

        # Solution explanation html (state1 only).
        solution_dict1 = {
            'interaction_id': '',
            'answer_is_exclusive': True,
            'correct_answer': 'Answer1',
            'explanation': {
                'content_id': 'solution',
                'html': '<p>This is solution for state1</p>'
            }
        }
        state1.update_interaction_solution(solution_dict1)

        # Answer groups: outcome feedback html plus, for ItemSelectionInput,
        # html inside the rule inputs themselves.
        answer_group_list2 = [{
            'rule_specs': [{
                'rule_type': 'Equals',
                'inputs': {'x': 0}
            }, {
                'rule_type': 'Equals',
                'inputs': {'x': 1}
            }],
            'outcome': {
                'dest': 'state1',
                'feedback': {
                    'content_id': 'feedback_1',
                    'html': '<p>Outcome1 for state2</p>'
                },
                'param_changes': [],
                'labelled_as_correct': False,
                'refresher_exploration_id': None,
                'missing_prerequisite_skill_id': None
            },
            'training_data': [],
            'tagged_skill_misconception_id': None
        }, {
            'rule_specs': [{
                'rule_type': 'Equals',
                'inputs': {'x': 0}
            }],
            'outcome': {
                'dest': 'state3',
                'feedback': {
                    'content_id': 'feedback_2',
                    'html': '<p>Outcome2 for state2</p>'
                },
                'param_changes': [],
                'labelled_as_correct': False,
                'refresher_exploration_id': None,
                'missing_prerequisite_skill_id': None
            },
            'training_data': [],
            'tagged_skill_misconception_id': None
        }]
        answer_group_list3 = [{
            'rule_specs': [{
                'rule_type': 'Equals',
                'inputs': {'x': [
                    '<p>This is value1 for ItemSelectionInput</p>'
                ]}
            }, {
                'rule_type': 'Equals',
                'inputs': {'x': [
                    '<p>This is value3 for ItemSelectionInput</p>'
                ]}
            }],
            'outcome': {
                'dest': 'state1',
                'feedback': {
                    'content_id': 'feedback_1',
                    'html': '<p>Outcome for state3</p>'
                },
                'param_changes': [],
                'labelled_as_correct': False,
                'refresher_exploration_id': None,
                'missing_prerequisite_skill_id': None
            },
            'training_data': [],
            'tagged_skill_misconception_id': None
        }]
        state2.update_interaction_answer_groups(answer_group_list2)
        state3.update_interaction_answer_groups(answer_group_list3)

        # Expected strings in collection order; empty strings correspond to
        # html-less slots (e.g. empty feedback/content fields).
        expected_html_list = [
            '',
            '',
            '<pre>Hello, this is state2</pre>',
            '<p>Outcome1 for state2</p>',
            '<p>Outcome2 for state2</p>',
            '',
            '<p>Hello, this is html1 for state2</p>',
            '<p>Hello, this is html2 for state2</p>',
            '<p>This is value1 for MultipleChoice</p>',
            '<p>This is value2 for MultipleChoice</p>',
            '<blockquote>Hello, this is state1</blockquote>',
            '<p>Default outcome for state1</p>',
            '<p>This is solution for state1</p>',
            '<p>Hello, this is state3</p>',
            '<p>Outcome for state3</p>',
            '<p>This is value1 for ItemSelectionInput</p>',
            '<p>This is value3 for ItemSelectionInput</p>',
            '',
            '<p>This is value1 for ItemSelection</p>',
            '<p>This is value2 for ItemSelection</p>',
            '<p>This is value3 for ItemSelection</p>',
            '<p>Hello, this is state4</p>',
            '',
            '<p>This is value1 for DragAndDropSortInput</p>',
            '<p>This is value2 for DragAndDropSortInput</p>'
        ]

        actual_outcome_list = exploration.get_all_html_content_strings()
        self.assertEqual(actual_outcome_list, expected_html_list)
| 28.745724 | 135 | 0.603415 |
f7061506fa2cce695f12947d1688e170169e08f4 | 8,490 | py | Python | src/magql/filter.py | ruoyangW-dev/magql | a7b6a217495a785afcac12c350cb234ff60f265e | [
"BSD-3-Clause"
] | 25 | 2020-02-13T21:25:08.000Z | 2021-11-29T07:19:16.000Z | src/magql/filter.py | ruoyangW-dev/magql | a7b6a217495a785afcac12c350cb234ff60f265e | [
"BSD-3-Clause"
] | 40 | 2020-02-14T00:54:06.000Z | 2021-07-28T18:45:32.000Z | src/magql/filter.py | ruoyangW-dev/magql | a7b6a217495a785afcac12c350cb234ff60f265e | [
"BSD-3-Clause"
] | 2 | 2021-06-07T21:50:46.000Z | 2021-12-21T14:47:31.000Z | from __future__ import annotations
import typing as t
from functools import singledispatch
from inflection import underscore
from sqlalchemy import Date
from sqlalchemy import DateTime
from sqlalchemy import Text
from sqlalchemy import Time
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy.orm import RelationshipProperty
from sqlalchemy.types import Boolean
from sqlalchemy.types import Float
from sqlalchemy.types import Integer
from sqlalchemy.types import Numeric
from sqlalchemy.types import String
from sqlalchemy.types import VARCHAR
from sqlalchemy_utils import EmailType
from sqlalchemy_utils import get_mapper
from sqlalchemy_utils import JSONType
from sqlalchemy_utils import PhoneNumberType
from sqlalchemy_utils import URLType
from sqlalchemy_utils.types import ChoiceType
from .definitions import MagqlEnumType
from .definitions import MagqlInputField
from .definitions import MagqlInputObjectType
# GraphQL input object types used to express per-column filters. Each filter
# pairs an operator enum with a value field; the operator names must match the
# string constants checked in the comparator functions below.

# Filter for text-like columns: substring match, exact match, and
# (non-)existence tests.
StringFilter = MagqlInputObjectType(
    "StringFilter",
    {
        "operator": MagqlInputField(
            MagqlEnumType(
                "StringOperator",
                {
                    "INCLUDES": "INCLUDES",
                    "EQUALS": "EQUALS",
                    "EXISTS": "EXISTS",
                    "DOESNOTEXIST": "DOESNOTEXIST",
                },
            )
        ),
        "value": MagqlInputField("String"),
    },
)

# Filter for date/datetime columns; the value is an ISO-formatted string.
DateFilter = MagqlInputObjectType(
    "DateFilter",
    {
        "operator": MagqlInputField(
            MagqlEnumType(
                "DateOperator", {"BEFORE": "BEFORE", "ON": "ON", "AFTER": "AFTER"}
            )
        ),
        "value": MagqlInputField("String"),
    },
)

# Filter for integer columns with the six standard ordering comparisons.
IntFilter = MagqlInputObjectType(
    "IntFilter",
    {
        "operator": MagqlInputField(
            MagqlEnumType(
                "IntOperator",
                {
                    "lt": "lt",
                    "lte": "lte",
                    "eq": "eq",
                    "neq": "neq",
                    "gt": "gt",
                    "gte": "gte",
                },
            )
        ),
        "value": MagqlInputField("Int"),
    },
)

# Filter for float/numeric columns; same operator set as IntFilter.
FloatFilter = MagqlInputObjectType(
    "FloatFilter",
    {
        "operator": MagqlInputField(
            MagqlEnumType(
                "FloatOperator",
                {
                    "lt": "lt",
                    "lte": "lte",
                    "eq": "eq",
                    "neq": "neq",
                    "gt": "gt",
                    "gte": "gte",
                },
            )
        ),
        "value": MagqlInputField("Float"),
    },
)

# Filter for relationships; the value is the related row's integer id.
RelFilter = MagqlInputObjectType(
    "RelFilter",
    {
        "operator": MagqlInputField(
            MagqlEnumType("RelOperator", {"INCLUDES": "INCLUDES"})
        ),
        "value": MagqlInputField("Int"),
    },
)

# Filter for boolean columns.
BooleanFilter = MagqlInputObjectType(
    "BooleanFilter",
    {
        "operator": MagqlInputField(
            MagqlEnumType(
                "BooleanOperator", {"EQUALS": "EQUALS", "NOTEQUALS": "NOTEQUALS"}
            )
        ),
        "value": MagqlInputField("Boolean"),
    },
)

# Shared operator enum for enum-valued columns; reused by EnumFilter below.
EnumOperator = MagqlEnumType("EnumOperator", {"INCLUDES": "INCLUDES"})
def EnumFilter(base_type: t.Any) -> MagqlInputObjectType:
    """Build a filter input object type for the given Magql enum type.

    The generated type is named ``<EnumName>Filter`` and pairs the shared
    ``EnumOperator`` with a value field of the enum type itself.
    """
    return MagqlInputObjectType(
        base_type.name + "Filter",
        {
            "operator": MagqlInputField(EnumOperator),
            "value": MagqlInputField(base_type),
        },
    )
@singledispatch
def get_filter_comparator(type: t.Any) -> t.Any:
    """Return a comparator factory for the given SQLAlchemy column type.

    Concrete implementations are registered below with
    ``@get_filter_comparator.register``; this fallback fires only for
    unsupported column types.
    """
    raise TypeError(f"No comparator registered for {type.__class__.__name__!r}.")
@get_filter_comparator.register(RelationshipProperty)
def _get_relationship_comparator(rel: RelationshipProperty) -> t.Optional[t.Callable]:
    """Return a comparator for relationship filters.

    Returns ``None`` when the relationship direction is neither *-to-one
    nor *-to-many.
    """
    direction = rel.direction.name
    if "TOONE" in direction:

        def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
            # *-to-one: INCLUDES means the related object equals the value.
            if filter_operator == "INCLUDES":
                return field == filter_value
            return None

        return condition
    elif "TOMANY" in direction:

        def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
            if filter_operator == "INCLUDES":
                # NOTE(review): `field.any(field.contains(filter_value))` looks
                # suspect -- for *-to-many relationships `field.contains(obj)`
                # alone already produces the membership clause. Confirm the
                # intended semantics before relying on this branch.
                return field.any(field.contains(filter_value))
            return None

        return condition
    return None
@get_filter_comparator.register(DateTime)
@get_filter_comparator.register(Date)
def _get_date_comparator(_: t.Union[DateTime, Date]) -> t.Callable:
    """Return a comparator for date/datetime columns.

    The returned callable maps a ``DateOperator`` value (``BEFORE``, ``ON``,
    ``AFTER`` -- see ``DateFilter`` above) to the corresponding SQLAlchemy
    clause, and returns ``None`` for unknown operators.
    """

    def condition(
        filter_value: t.Union[DateTime, Date],
        filter_operator: str,
        field: t.Union[DateTime, Date],
    ) -> t.Any:
        if filter_operator == "BEFORE":
            return field < filter_value
        elif filter_operator == "ON":
            return field == filter_value
        # Bug fix: the enum value is "AFTER" (all caps). The previous
        # comparison against "After" could never match, so AFTER filters
        # silently produced no clause.
        elif filter_operator == "AFTER":
            return field > filter_value
        return None

    return condition
@get_filter_comparator.register(JSONType)
@get_filter_comparator.register(Text)
@get_filter_comparator.register(UnicodeText)
@get_filter_comparator.register(Unicode)
@get_filter_comparator.register(URLType)
@get_filter_comparator.register(PhoneNumberType)
@get_filter_comparator.register(EmailType)
@get_filter_comparator.register(Time)
@get_filter_comparator.register(String)
@get_filter_comparator.register(VARCHAR)
def _get_string_comparator(_: t.Any) -> t.Callable:
    """Return a comparator for text-like columns (StringOperator values)."""

    def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
        # Guard-clause style: each operator returns its clause directly;
        # unknown operators fall through and yield None.
        if filter_operator == "INCLUDES":
            return field.like(f"%{filter_value}%")
        if filter_operator == "EQUALS":
            return field == filter_value
        if filter_operator == "EXISTS":
            return field.like("%")
        if filter_operator == "DOESNOTEXIST":
            return field.is_(None)

    return condition
@get_filter_comparator.register(Float)
@get_filter_comparator.register(Numeric)
@get_filter_comparator.register(Integer)
def _get_number_comparator(_: t.Any) -> t.Callable:
    """Return a comparator for numeric columns (Int/FloatOperator values)."""

    def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
        # Dispatch table instead of an if/elif chain; lambdas are evaluated
        # lazily so only the selected comparison is built.
        comparisons = {
            "lt": lambda: field < filter_value,
            "lte": lambda: field <= filter_value,
            "eq": lambda: field == filter_value,
            "neq": lambda: field != filter_value,
            "gt": lambda: field > filter_value,
            "gte": lambda: field >= filter_value,
        }
        make_clause = comparisons.get(filter_operator)
        return make_clause() if make_clause is not None else None

    return condition
@get_filter_comparator.register(Boolean)
def _get_boolean_comparator(_: t.Any) -> t.Callable:
    """Return a comparator for boolean columns (BooleanOperator values)."""

    def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
        # Unknown operators fall through and yield None.
        if filter_operator == "EQUALS":
            return field == filter_value
        if filter_operator == "NOTEQUALS":
            return field != filter_value

    return condition
@get_filter_comparator.register(ChoiceType)
def _get_choice_comparator(_: t.Any) -> t.Callable:
    """Return a comparator for enum/choice columns (EnumOperator values)."""

    def condition(filter_value: t.Any, filter_operator: str, field: t.Any) -> t.Any:
        # Only INCLUDES is supported; anything else yields None.
        return field == filter_value if filter_operator == "INCLUDES" else None

    return condition
def generate_filters(table: t.Any, info: t.Any, *args: t.Any, **kwargs: t.Any) -> t.Any:
    """Translate the GraphQL ``filter`` argument into SQLAlchemy clauses.

    ``table`` is the SQLAlchemy table being queried; ``info`` is the GraphQL
    resolve info whose ``context`` is used as the DB session for relationship
    lookups. Returns a list of clause objects (empty when no filter is given).
    Raises ``KeyError`` for a filter name that is neither a column nor a
    relationship of the table.
    """
    sqla_filters = []
    if "filter" in kwargs and kwargs["filter"] is not None:
        mapper = get_mapper(table)
        gql_filters = kwargs["filter"]
        for filter_name, gql_filter in gql_filters.items():
            gql_filter_value = gql_filter["value"]
            # GraphQL uses camelCase field names; columns are snake_case.
            filter_name = underscore(filter_name)
            if filter_name in table.c:
                filter_type = table.c[filter_name].type
            elif filter_name in mapper.relationships:
                # For relationship filters the GraphQL value is an id; load
                # the related object so the comparator can compare objects.
                rel = mapper.relationships[filter_name]
                rel_mapper = get_mapper(rel.target)
                gql_filter_value = (
                    info.context.query(rel_mapper.class_)
                    .filter_by(id=gql_filter_value)
                    .one()
                )
                filter_type = rel
            else:
                raise KeyError(filter_name)
            # Dispatch on column/relationship type, then build the clause.
            sql_filter = get_filter_comparator(filter_type)(
                gql_filter_value,
                gql_filter["operator"],
                getattr(mapper.class_, filter_name),
            )
            sqla_filters.append(sql_filter)
    return sqla_filters
| 30.430108 | 88 | 0.610718 |
f70621467fab8e1d22e5d59550b5a4137d4f315b | 876 | py | Python | chapter-7/blog/asgi.py | PacktPublishing/Real-time-Django | 07480a089fc0880d752d4ee5740ae6587de93aee | [
"MIT"
] | null | null | null | chapter-7/blog/asgi.py | PacktPublishing/Real-time-Django | 07480a089fc0880d752d4ee5740ae6587de93aee | [
"MIT"
] | null | null | null | chapter-7/blog/asgi.py | PacktPublishing/Real-time-Django | 07480a089fc0880d752d4ee5740ae6587de93aee | [
"MIT"
] | null | null | null | # blog/asgi.py
import os

import django

# The settings module must be set before django.setup() and before any
# import that touches Django settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "blog.settings")
from django.conf import settings

# django.setup() must run before importing app code (consumers below),
# otherwise models/apps are not yet loaded.
django.setup()

from django.core.asgi import get_asgi_application
from channels.security.websocket import OriginValidator
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.urls import re_path
from app.website.consumers import ExampleConsumer

# ASGI entry point: routes plain HTTP to Django and WebSocket connections
# (origin-checked and authenticated) to the consumer.
application = ProtocolTypeRouter(
    {
        # Django's ASGI application to handle traditional HTTP requests
        "http": get_asgi_application(),
        # WebSocket handler
        "websocket": OriginValidator(AuthMiddlewareStack(
            URLRouter(
                [
                    re_path(r"^ws/example/$", ExampleConsumer.as_asgi()),
                ]
            )
        ), settings.ALLOWED_HOSTS)
    }
)
| 29.2 | 73 | 0.700913 |
f706325e5c227c033d4565561ec304b5bb74a652 | 8,144 | py | Python | gui/sfbrowser/sfbrowser.py | tjd2002/spikeforest2 | 2e393564b858b2995aa2ccccd9bd73065681b5de | [
"Apache-2.0"
] | null | null | null | gui/sfbrowser/sfbrowser.py | tjd2002/spikeforest2 | 2e393564b858b2995aa2ccccd9bd73065681b5de | [
"Apache-2.0"
] | null | null | null | gui/sfbrowser/sfbrowser.py | tjd2002/spikeforest2 | 2e393564b858b2995aa2ccccd9bd73065681b5de | [
"Apache-2.0"
] | null | null | null | import vdomr as vd
import spikeforest as sf
from cairio import client as ca
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
class AccuracyPlot(vd.components.Pyplot):
    """Scatter plot of per-unit accuracy versus SNR."""

    def __init__(self, snrs, accuracies):
        super(AccuracyPlot, self).__init__()
        # Parallel sequences: one point per true unit.
        self._snrs = snrs
        self._accuracies = accuracies

    def plot(self):
        """Draw the scatter plot (invoked by the Pyplot base component)."""
        plt.scatter(self._snrs, self._accuracies)
class StudySorterFigure(vd.Component):
    """Component showing the accuracy-vs-SNR plot for one (study, sorter)."""

    def __init__(self, sfdata):
        vd.Component.__init__(self)
        self._plot = None
        self._SF_data = sfdata
        self._study = None
        self._sorter = None

    def setStudySorter(self, *, study, sorter):
        """Select the study/sorter pair and rebuild the plot."""
        self._study = study
        self._sorter = sorter
        self._update_plot()

    def _update_plot(self):
        # Recompute per-unit accuracies for the selected study and sorter.
        study_obj = self._SF_data.study(self._study)
        results = _get_study_sorting_results(study_obj)
        sorter_results = results[self._sorter]
        snrs = sorter_results['true_unit_snrs']
        total = (
            sorter_results['num_matches']
            + sorter_results['num_false_positives']
            + sorter_results['num_false_negatives']
        )
        accuracies = sorter_results['num_matches'] / total
        self._plot = AccuracyPlot(snrs, accuracies)
        self.refresh()

    def render(self):
        if self._plot is None:
            return vd.div('Nothing')
        return vd.div(
            vd.div('test '+self._study+' '+self._sorter),
            self._plot
        )
class SFBrowser(vd.Component):
    """Top-level browser UI for a spikeforest results object.

    Loads the results identified by ``output_id`` from the cairio store and
    renders a per-study table of how many units each sorter recovered above
    an adjustable accuracy threshold.
    """

    def __init__(self, output_id):
        vd.Component.__init__(self)

        self._output_id = output_id

        # Fetch the results object; on failure the component is left
        # partially constructed (errors are only printed).
        a = ca.loadObject(
            key=dict(name='spikeforest_results'),
            subkey=output_id
        )
        if not a:
            print('ERROR: unable to open results: '+output_id)
            return

        if ('recordings' not in a) or ('studies' not in a) or ('sorting_results' not in a):
            print('ERROR: problem with output: '+output_id)
            return

        studies = a['studies']
        recordings = a['recordings']
        sorting_results = a['sorting_results']

        # Populate the SFData container used by all child components.
        SF = sf.SFData()
        SF.loadStudies(studies)
        SF.loadRecordings2(recordings)
        SF.loadSortingResults(sorting_results)

        # sorter_names=[]
        # for SR in sorting_results:
        #     sorter_names.append(SR['sorter']['name'])
        # sorter_names=list(set(sorter_names))
        # sorter_names.sort()

        self._SF_data = SF

        # UI widgets: threshold input, update button, and detail figure.
        self._accuracy_threshold_input = vd.components.LineEdit(
            value=0.8, dtype=float, style=dict(width='70px'))
        self._update_button = vd.components.Button(
            onclick=self._on_update, class_='button', label='Update')
        self._study_sorter_fig = StudySorterFigure(SF)
        self._study_sorter_table = vd.div()  # dummy

        vd.devel.loadBootstrap()

        self._update_accuracy_table()

    def _on_update(self):
        # Button callback: recompute the table with the current threshold.
        self._update_accuracy_table()

    def _update_accuracy_table(self):
        """Rebuild the accuracy table from the current threshold and refresh."""
        accuracy_threshold = self._accuracy_threshold_input.value()
        self._accuracy_table_data, self._sorters = self._get_accuracy_table_data(
            accuracy_threshold=accuracy_threshold)
        self._accuracy_table = self._to_table(
            self._accuracy_table_data, ['study']+self._sorters)
        print(self._accuracy_table_data)
        self.refresh()

    def _open_study_sorter_fig(self, *, sorter, study):
        # Cell-click callback: show the SNR/accuracy figure for this pair.
        self._study_sorter_fig.setStudySorter(study=study, sorter=sorter)

    def _get_accuracy_table_data(self, *, accuracy_threshold):
        """Return (rows, sorter_names); each row maps column name -> cell dict.

        A cell dict has 'text' and optionally a 'callback' that opens the
        detail figure for that study/sorter.
        """
        SF = self._SF_data
        accuracy_table = []
        sorters = set()
        for sname in SF.studyNames():
            print('STUDY: '+sname)
            study = SF.study(sname)
            b = _get_study_sorting_results(study)
            tmp = dict(
                study=dict(  # first column
                    text=sname
                )
            )
            for sorter in b:
                sorters.add(sorter)
                a = b[sorter]
                # Per-unit accuracy = matches / (matches + FP + FN).
                accuracies = a['num_matches'] / \
                    (a['num_matches']+a['num_false_positives'] +
                     a['num_false_negatives'])
                tmp[sorter] = dict(
                    text=str(np.count_nonzero(
                        accuracies >= accuracy_threshold)),
                    # Bind loop variables as defaults so each cell keeps its
                    # own sorter/study pair.
                    callback=lambda sorter=sorter, study=sname: self._open_study_sorter_fig(
                        sorter=sorter, study=study)
                )
            accuracy_table.append(tmp)
        sorters = list(sorters)
        sorters.sort()
        return accuracy_table, sorters

    def _to_table(self, X, column_names):
        """Render rows-of-cell-dicts into a Bootstrap-styled vdomr table."""
        rows = []
        rows.append(vd.tr([vd.th(cname) for cname in column_names]))
        for x in X:
            elmts = []
            for cname in column_names:
                tmp = x.get(cname)
                if tmp:
                    if 'callback' in tmp:
                        elmt = vd.a(tmp['text'], onclick=tmp['callback'])
                    else:
                        elmt = vd.span(tmp['text'])
                else:
                    elmt = vd.span('N/A')
                elmts.append(elmt)
            rows.append(vd.tr([vd.td(elmt) for elmt in elmts]))
        return vd.table(rows, class_='table')

    def render(self):
        return vd.div(
            vd.table(
                vd.tr(
                    vd.td('Accuracy threshold:'),
                    vd.td(self._accuracy_threshold_input),
                    vd.td(self._update_button)
                ),
                class_='table',
                style={'max-width': '200px'}
            ),
            vd.components.ScrollArea(
                self._accuracy_table,
                height=500
            ),
            self._study_sorter_fig,
            style=dict(padding='15px')
        )
def _get_study_sorting_results(study):
    """Aggregate comparison-with-truth results for every sorter in a study.

    For each recording in *study*, each sorting result's unit-level comparison
    is joined with the recording's true-unit info, then grouped by sorter.

    Returns a dict mapping sorter name to a dict with parallel entries:
    'true_unit_ids' (list) and numpy arrays 'true_unit_snrs',
    'true_unit_firing_rates', 'num_matches', 'num_false_positives',
    'num_false_negatives'. Sorters whose results cannot be assembled get
    empty arrays (with a printed warning).
    """
    results = []
    for rname in study.recordingNames():
        rec = study.recording(rname)
        true_units_info = rec.trueUnitsInfo(format='json')
        # Index true-unit info by unit id for the join below.
        true_units_info_by_id = dict()
        for true_unit in true_units_info:
            true_units_info_by_id[true_unit['unit_id']] = true_unit
        for srname in rec.sortingResultNames():
            a = rec.sortingResult(srname)
            res0 = dict(sorter=srname, recording=rname, study=study.name())
            tmp = a.comparisonWithTruth(format='json')
            for i in tmp:
                tmp[i]['true_unit_info'] = true_units_info_by_id[tmp[i]['unit_id']]
            res0['comparison_with_truth'] = tmp
            results.append(res0)

    sorters = list(set([a['sorter'] for a in results]))
    sorters.sort()

    # Collect all compared units per sorter, across recordings.
    units_by_sorter = dict()
    for sorter in sorters:
        units_by_sorter[sorter] = []

    for obj in results:
        sorter0 = obj['sorter']
        units = [obj['comparison_with_truth'][i]
                 for i in obj['comparison_with_truth']]
        units_by_sorter[sorter0] = units_by_sorter[sorter0]+units

    ret = dict()
    for sorter in sorters:
        units = units_by_sorter[sorter]
        try:
            ret[sorter] = dict(
                true_unit_ids=[unit['unit_id'] for unit in units],
                true_unit_snrs=np.array(
                    [unit['true_unit_info']['snr'] for unit in units]),
                true_unit_firing_rates=np.array(
                    [unit['true_unit_info']['firing_rate'] for unit in units]),
                num_matches=np.array([unit['num_matches'] for unit in units]),
                num_false_positives=np.array(
                    [unit['num_false_positives'] for unit in units]),
                num_false_negatives=np.array(
                    [unit['num_false_negatives'] for unit in units])
            )
        # Bug fix: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; narrow to Exception while keeping
        # the best-effort fallback behavior.
        except Exception:
            print('WARNING: Problem loading results for sorter: '+sorter)
            ret[sorter] = dict(
                true_unit_ids=[],
                true_unit_snrs=np.array([]),
                true_unit_firing_rates=np.array([]),
                num_matches=np.array([]),
                num_false_positives=np.array([]),
                num_false_negatives=np.array([])
            )
    return ret
| 33.792531 | 92 | 0.55943 |
f7063d183d1a4b14e905709671cb83a6a4ba78be | 964 | py | Python | app/user/views.py | anilbpoyraz/recipe-app-api | 947ff8c54b0abeb9a2a70825bd5bfe74944ccde3 | [
"MIT"
] | null | null | null | app/user/views.py | anilbpoyraz/recipe-app-api | 947ff8c54b0abeb9a2a70825bd5bfe74944ccde3 | [
"MIT"
] | null | null | null | app/user/views.py | anilbpoyraz/recipe-app-api | 947ff8c54b0abeb9a2a70825bd5bfe74944ccde3 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from rest_framework import generics, authentication, permissions
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.settings import api_settings
from user.serializers import UserSerializer, AuthTokenSerializer
class CreateUserView(generics.CreateAPIView):
"""Create a new user in the system"""
serializer_class = UserSerializer
class CreateTokenview(ObtainAuthToken):
"""Create a new auth token for user"""
serializer_class = AuthTokenSerializer
renderer_classes = api_settings.DEFAULT_RENDERER_CLASSES
class ManageUserView(generics.RetrieveUpdateAPIView):
"""Manage the authenticated user"""
serializer_class = UserSerializer
authentication_classes = (authentication.TokenAuthentication,)
permission_classes = (permissions.IsAuthenticated,)
def get_object(self):
"""Retrieve and return authentication user"""
return self.request.user
| 32.133333 | 66 | 0.790456 |
f7063f6725e9115c25a43134ff0307df025c8b2a | 14,639 | py | Python | lib/datasets/adas.py | LeftThink/pytorch-lighthead | 5f4bf1c87b9be77bf7242ad89900239a9d66914c | [
"MIT"
] | null | null | null | lib/datasets/adas.py | LeftThink/pytorch-lighthead | 5f4bf1c87b9be77bf7242ad89900239a9d66914c | [
"MIT"
] | null | null | null | lib/datasets/adas.py | LeftThink/pytorch-lighthead | 5f4bf1c87b9be77bf7242ad89900239a9d66914c | [
"MIT"
] | null | null | null | # coding: utf-8
# --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from __future__ import print_function
import xml.dom.minidom as minidom
import os
# import PIL
import numpy as np
import scipy.sparse
import subprocess
try:
import cPickle
except ImportError:
import pickle as cPickle
import math
import glob
import uuid
import scipy.io as sio
import xml.etree.ElementTree as ET
from .imdb import imdb
from .imdb import ROOT_DIR
from . import ds_utils
from .adas_eval import adas_eval
# TODO: make fast_rcnn irrelevant
# >>>> obsolete, because it depends on sth outside of this project
from model.utils.config import cfg
# <<<< obsolete
class adas(imdb):
    def __init__(self, image_set, year, devkit_path=None, sub_type='car'):
        """Initialize the ADAS dataset wrapper.

        image_set: split name, e.g. 'train'/'val'/'test'.
        year: devkit year string used in directory names.
        devkit_path: root of the ADAS devkit; defaults to the standard
            location under cfg.DATA_DIR.
        sub_type: 'car' (single foreground class) or 'tired'
            (classes 'o'/'s'/'w').
        """
        imdb.__init__(self, 'adas_' + year + '_' + image_set)
        self._year = year
        self._image_set = image_set
        self._devkit_path = self._get_default_path() if devkit_path is None \
            else devkit_path
        self._data_path = os.path.join(self._devkit_path, 'ADAS' + self._year)
        if sub_type == 'car':
            self._classes = ('__background__',  # always index 0
                             'car',)
        elif sub_type == 'tired':
            self._classes = ('__background__',  # always index 0
                             'o', 's', 'w')
        # Map class name -> contiguous index (background is 0).
        self._class_to_ind = dict(zip(self.classes, range(self.num_classes)))
        self._image_ext = '.jpg'
        self._image_index = self._load_image_set_index()
        # Default to roidb handler
        # self._roidb_handler = self.selective_search_roidb
        self._roidb_handler = self.gt_roidb
        # Random salt keeps per-run result files from colliding.
        self._salt = str(uuid.uuid4())
        self._comp_id = 'comp4'

        # PASCAL specific config options
        self.config = {'cleanup': True,
                       'use_salt': True,
                       'use_diff': False,
                       'matlab_eval': False,
                       'rpn_file': None,
                       'min_size': 2}

        assert os.path.exists(self._devkit_path), \
            'ADASdevkit path does not exist: {}'.format(self._devkit_path)
        assert os.path.exists(self._data_path), \
            'Path does not exist: {}'.format(self._data_path)
def image_path_at(self, i):
"""
Return the absolute path to image i in the image sequence.
"""
return self.image_path_from_index(self._image_index[i])
def image_id_at(self, i):
"""
Return the absolute path to image i in the image sequence.
"""
return i
def image_path_from_index(self, index):
"""
Construct an image path from the image's "index" identifier.
"""
image_path = os.path.join(self._data_path, 'JPEGImages',
index + self._image_ext)
assert os.path.exists(image_path), \
'Path does not exist: {}'.format(image_path)
return image_path
def _load_image_set_index(self):
"""
Load the indexes listed in this dataset's image set file.
"""
# Example path to image set file:
# self._devkit_path + /ADASdevkit2007/ADAS2007/ImageSets/Main/val.txt
image_set_file = os.path.join(self._data_path, 'ImageSets', 'Main',
self._image_set + '.txt')
assert os.path.exists(image_set_file), \
'Path does not exist: {}'.format(image_set_file)
with open(image_set_file) as f:
image_index = [x.strip() for x in f.readlines()]
return image_index
def _get_default_path(self):
"""
Return the default path where PASCAL ADAS is expected to be installed.
"""
return os.path.join(cfg.DATA_DIR, 'ADASdevkit' + self._year)
def gt_roidb(self):
"""
Return the database of ground-truth regions of interest.
This function loads/saves from/to a cache file to speed up future calls.
"""
cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
if os.path.exists(cache_file):
print(cache_file)
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print('{} gt roidb loaded from {}'.format(self.name, cache_file))
return roidb
gt_roidb = [self._load_pascal_annotation(index)
for index in self.image_index]
with open(cache_file, 'wb') as fid:
cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
print('wrote gt roidb to {}'.format(cache_file))
return gt_roidb
def selective_search_roidb(self):
"""
Return the database of selective search regions of interest.
Ground-truth ROIs are also included.
This function loads/saves from/to a cache file to speed up future calls.
"""
cache_file = os.path.join(self.cache_path,
self.name + '_selective_search_roidb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print('{} ss roidb loaded from {}'.format(self.name, cache_file))
return roidb
if int(self._year) == 2007 or self._image_set != 'test':
gt_roidb = self.gt_roidb()
ss_roidb = self._load_selective_search_roidb(gt_roidb)
roidb = imdb.merge_roidbs(gt_roidb, ss_roidb)
else:
roidb = self._load_selective_search_roidb(None)
with open(cache_file, 'wb') as fid:
cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL)
print('wrote ss roidb to {}'.format(cache_file))
return roidb
def rpn_roidb(self):
if int(self._year) == 2007 or self._image_set != 'test':
gt_roidb = self.gt_roidb()
rpn_roidb = self._load_rpn_roidb(gt_roidb)
roidb = imdb.merge_roidbs(gt_roidb, rpn_roidb)
else:
roidb = self._load_rpn_roidb(None)
return roidb
def _load_rpn_roidb(self, gt_roidb):
filename = self.config['rpn_file']
print('loading {}'.format(filename))
assert os.path.exists(filename), \
'rpn data not found at: {}'.format(filename)
with open(filename, 'rb') as f:
box_list = cPickle.load(f)
return self.create_roidb_from_box_list(box_list, gt_roidb)
def _load_selective_search_roidb(self, gt_roidb):
filename = os.path.abspath(os.path.join(cfg.DATA_DIR,
'selective_search_data',
self.name + '.mat'))
assert os.path.exists(filename), \
'Selective search data not found at: {}'.format(filename)
raw_data = sio.loadmat(filename)['boxes'].ravel()
box_list = []
for i in range(raw_data.shape[0]):
boxes = raw_data[i][:, (1, 0, 3, 2)] - 1
keep = ds_utils.unique_boxes(boxes)
boxes = boxes[keep, :]
keep = ds_utils.filter_small_boxes(boxes, self.config['min_size'])
boxes = boxes[keep, :]
box_list.append(boxes)
return self.create_roidb_from_box_list(box_list, gt_roidb)
    def _load_pascal_annotation(self, index):
        """
        Load image and bounding boxes info from XML file in the PASCAL ADAS
        format.

        Returns a roidb entry dict with keys 'boxes', 'gt_classes',
        'gt_ishard', 'gt_overlaps', 'flipped' and 'seg_areas'.
        """
        filename = os.path.join(self._data_path, 'Annotations', index + '.xml')
        tree = ET.parse(filename)
        objs = tree.findall('object')
        # if not self.config['use_diff']:
        #     # Exclude the samples labeled as difficult
        #     non_diff_objs = [
        #         obj for obj in objs if int(obj.find('difficult').text) == 0]
        #     # if len(non_diff_objs) != len(objs):
        #     #     print 'Removed {} difficult objects'.format(
        #     #         len(objs) - len(non_diff_objs))
        #     objs = non_diff_objs
        num_objs = len(objs)

        # boxes: (num_objs, 4) array of [x1, y1, x2, y2] (0-based pixels).
        boxes = np.zeros((num_objs, 4), dtype=np.uint16)
        gt_classes = np.zeros((num_objs), dtype=np.int32)
        # overlaps: one-hot (num_objs, num_classes) class-overlap matrix.
        overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32)
        # "Seg" area for pascal is just the box area
        seg_areas = np.zeros((num_objs), dtype=np.float32)
        # ishards: 1 where the object is marked 'difficult' in the XML.
        ishards = np.zeros((num_objs), dtype=np.int32)

        # Load object bounding boxes into a data frame.
        for ix, obj in enumerate(objs):
            bbox = obj.find('bndbox')
            # Make pixel indexes 0-based
            x1 = float(bbox.find('xmin').text) - 1
            y1 = float(bbox.find('ymin').text) - 1
            x2 = float(bbox.find('xmax').text) - 1
            y2 = float(bbox.find('ymax').text) - 1

            # 'difficult' tag may be absent; treat missing as not difficult.
            diffc = obj.find('difficult')
            difficult = 0 if diffc == None else int(diffc.text)
            ishards[ix] = difficult

            cls = self._class_to_ind[obj.find('name').text.lower().strip()]
            boxes[ix, :] = [x1, y1, x2, y2]
            gt_classes[ix] = cls
            overlaps[ix, cls] = 1.0
            seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1)

        # Sparse storage matches what downstream roidb code expects.
        overlaps = scipy.sparse.csr_matrix(overlaps)

        return {'boxes': boxes,
                'gt_classes': gt_classes,
                'gt_ishard': ishards,
                'gt_overlaps': overlaps,
                'flipped': False,
                'seg_areas': seg_areas}
def _get_comp_id(self):
comp_id = (self._comp_id + '_' + self._salt if self.config['use_salt']
else self._comp_id)
return comp_id
def _get_adas_results_file_template(self):
# ADASdevkit/results/ADAS2007/Main/<comp_id>_det_test_aeroplane.txt
filename = self._get_comp_id() + '_det_' + self._image_set + '_{:s}.txt'
filedir = os.path.join(self._devkit_path, 'results', 'ADAS' + self._year, 'Main')
if not os.path.exists(filedir):
os.makedirs(filedir)
path = os.path.join(filedir, filename)
return path
def _write_adas_results_file(self, all_boxes):
for cls_ind, cls in enumerate(self.classes):
if cls == '__background__':
continue
print('Writing {} ADAS results file'.format(cls))
filename = self._get_adas_results_file_template().format(cls)
with open(filename, 'wt') as f:
for im_ind, index in enumerate(self.image_index):
dets = all_boxes[cls_ind][im_ind]
if dets == []:
continue
# the ADASdevkit expects 1-based indices
for k in range(dets.shape[0]):
f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
format(index, dets[k, -1],
dets[k, 0] + 1, dets[k, 1] + 1,
dets[k, 2] + 1, dets[k, 3] + 1))
def _do_python_eval(self, output_dir='output'):
annopath = os.path.join(
self._devkit_path,
'ADAS' + self._year,
'Annotations',
'{:s}.xml')
imagesetfile = os.path.join(
self._devkit_path,
'ADAS' + self._year,
'ImageSets',
'Main',
self._image_set + '.txt')
cachedir = os.path.join(self._devkit_path, 'annotations_cache')
aps = []
if not os.path.isdir(output_dir):
os.mkdir(output_dir)
for i, cls in enumerate(self._classes):
if cls == '__background__':
continue
filename = self._get_adas_results_file_template().format(cls)
rec, prec, ap = adas_eval(
filename, annopath, imagesetfile, cls, cachedir, ovthresh=0.5)
aps += [ap]
print('AP for {} = {:.4f}'.format(cls, ap))
with open(os.path.join(output_dir, cls + '_pr.pkl'), 'w') as f:
cPickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f)
print('Mean AP = {:.4f}'.format(np.mean(aps)))
print('~~~~~~~~')
print('Results:')
for ap in aps:
print('{:.3f}'.format(ap))
print('{:.3f}'.format(np.mean(aps)))
print('~~~~~~~~')
print('')
print('--------------------------------------------------------------')
print('Results computed with the **unofficial** Python eval code.')
print('Results should be very close to the official MATLAB eval code.')
print('Recompute with `./tools/reval.py --matlab ...` for your paper.')
print('-- Thanks, The Management')
print('--------------------------------------------------------------')
def _do_matlab_eval(self, output_dir='output'):
print('-----------------------------------------------------')
print('Computing results with the official MATLAB eval code.')
print('-----------------------------------------------------')
path = os.path.join(cfg.ROOT_DIR, 'lib', 'datasets',
'ADASdevkit-matlab-wrapper')
cmd = 'cd {} && '.format(path)
cmd += '{:s} -nodisplay -nodesktop '.format(cfg.MATLAB)
cmd += '-r "dbstop if error; '
cmd += 'adas_eval(\'{:s}\',\'{:s}\',\'{:s}\',\'{:s}\'); quit;"' \
.format(self._devkit_path, self._get_comp_id(),
self._image_set, output_dir)
print('Running:\n{}'.format(cmd))
status = subprocess.call(cmd, shell=True)
def evaluate_detections(self, all_boxes, output_dir):
self._write_adas_results_file(all_boxes)
self._do_python_eval(output_dir)
if self.config['matlab_eval']:
self._do_matlab_eval(output_dir)
if self.config['cleanup']:
for cls in self._classes:
if cls == '__background__':
continue
filename = self._get_adas_results_file_template().format(cls)
os.remove(filename)
def competition_mode(self, on):
if on:
self.config['use_salt'] = False
self.config['cleanup'] = False
else:
self.config['use_salt'] = True
self.config['cleanup'] = True
if __name__ == '__main__':
    # Smoke test: build the trainval 2017 split, force the roidb to load,
    # then drop into an interactive IPython shell for manual inspection.
    d = adas('trainval', '2017')
    res = d.roidb
    from IPython import embed;
    embed()
| 38.523684 | 89 | 0.548398 |
f7065db5438bbe3dd6134673cfc14c67b2095dac | 29,176 | py | Python | tests/test_plugin.py | scartill/cmd2 | 1b4e1e25f84bcc800a5f369783c3c3448a42361e | [
"MIT"
] | 1 | 2021-07-06T23:59:46.000Z | 2021-07-06T23:59:46.000Z | tests/test_plugin.py | scartill/cmd2 | 1b4e1e25f84bcc800a5f369783c3c3448a42361e | [
"MIT"
] | null | null | null | tests/test_plugin.py | scartill/cmd2 | 1b4e1e25f84bcc800a5f369783c3c3448a42361e | [
"MIT"
] | null | null | null | # coding=utf-8
# flake8: noqa E302
"""
Test plugin infrastructure and hooks.
"""
import sys
import pytest
# Python 3.5 had some regressions in the unitest.mock module, so use 3rd party mock if available
try:
import mock
except ImportError:
from unittest import mock
import cmd2
from cmd2 import plugin
class Plugin:
    """A mixin class for testing hook registration and calling"""
    # NOTE: many hook methods below are deliberately malformed (wrong
    # parameter counts or wrong/missing type annotations).  cmd2 inspects a
    # hook's signature at registration time, and the tests assert that
    # registering a malformed hook raises TypeError.  Do not "fix" these
    # signatures.
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.reset_counters()
    def reset_counters(self):
        # Each counter is incremented by the corresponding hook methods; the
        # tests assert exact call counts after running commands.
        self.called_preparse = 0
        self.called_postparsing = 0
        self.called_precmd = 0
        self.called_postcmd = 0
        self.called_cmdfinalization = 0
    ###
    #
    # preloop and postloop hooks
    # which share the same signature and are thus interchangable
    #
    ###
    def prepost_hook_one(self) -> None:
        """Method used for preloop or postloop hooks"""
        self.poutput("one")
    def prepost_hook_two(self) -> None:
        """Another method used for preloop or postloop hooks"""
        self.poutput("two")
    def prepost_hook_too_many_parameters(self, param) -> None:
        """A preloop or postloop hook with too many parameters"""
        pass
    def prepost_hook_with_wrong_return_annotation(self) -> bool:
        """A preloop or postloop hook with incorrect return type"""
        pass
    ###
    #
    # preparse hook
    #
    ###
    def preparse(self, data: cmd2.plugin.PostparsingData) -> cmd2.plugin.PostparsingData:
        """Preparsing hook"""
        self.called_preparse += 1
        return data
    ###
    #
    # Postparsing hooks
    #
    ###
    def postparse_hook(self, data: cmd2.plugin.PostparsingData) -> cmd2.plugin.PostparsingData:
        """A postparsing hook"""
        self.called_postparsing += 1
        return data
    def postparse_hook_stop(self, data: cmd2.plugin.PostparsingData) -> cmd2.plugin.PostparsingData:
        """A postparsing hook which requests application exit"""
        self.called_postparsing += 1
        data.stop = True
        return data
    def postparse_hook_emptystatement(self, data: cmd2.plugin.PostparsingData) -> cmd2.plugin.PostparsingData:
        """A postparsing hook which raises an EmptyStatement exception"""
        self.called_postparsing += 1
        raise cmd2.EmptyStatement
    def postparse_hook_exception(self, data: cmd2.plugin.PostparsingData) -> cmd2.plugin.PostparsingData:
        """A postparsing hook which raises an exception"""
        self.called_postparsing += 1
        raise ValueError
    def postparse_hook_too_many_parameters(self, data1, data2) -> cmd2.plugin.PostparsingData:
        """A postparsing hook with too many parameters"""
        pass
    def postparse_hook_undeclared_parameter_annotation(self, data) -> cmd2.plugin.PostparsingData:
        """A postparsing hook with an undeclared parameter type"""
        pass
    def postparse_hook_wrong_parameter_annotation(self, data: str) -> cmd2.plugin.PostparsingData:
        """A postparsing hook with the wrong parameter type"""
        pass
    def postparse_hook_undeclared_return_annotation(self, data: cmd2.plugin.PostparsingData):
        """A postparsing hook with an undeclared return type"""
        pass
    def postparse_hook_wrong_return_annotation(self, data: cmd2.plugin.PostparsingData) -> str:
        """A postparsing hook with the wrong return type"""
        pass
    ###
    #
    # precommand hooks, some valid, some invalid
    #
    ###
    def precmd(self, statement: cmd2.Statement) -> cmd2.Statement:
        """Override cmd.Cmd method"""
        self.called_precmd += 1
        return statement
    def precmd_hook(self, data: plugin.PrecommandData) -> plugin.PrecommandData:
        """A precommand hook"""
        self.called_precmd += 1
        return data
    def precmd_hook_emptystatement(self, data: plugin.PrecommandData) -> plugin.PrecommandData:
        """A precommand hook which raises an EmptyStatement exception"""
        self.called_precmd += 1
        raise cmd2.EmptyStatement
    def precmd_hook_exception(self, data: plugin.PrecommandData) -> plugin.PrecommandData:
        """A precommand hook which raises an exception"""
        self.called_precmd += 1
        raise ValueError
    def precmd_hook_not_enough_parameters(self) -> plugin.PrecommandData:
        """A precommand hook with no parameters"""
        pass
    def precmd_hook_too_many_parameters(self, one: plugin.PrecommandData, two: str) -> plugin.PrecommandData:
        """A precommand hook with too many parameters"""
        return one
    def precmd_hook_no_parameter_annotation(self, data) -> plugin.PrecommandData:
        """A precommand hook with no type annotation on the parameter"""
        return data
    def precmd_hook_wrong_parameter_annotation(self, data: str) -> plugin.PrecommandData:
        """A precommand hook with the incorrect type annotation on the parameter"""
        return data
    def precmd_hook_no_return_annotation(self, data: plugin.PrecommandData):
        """A precommand hook with no type annotation on the return value"""
        return data
    def precmd_hook_wrong_return_annotation(self, data: plugin.PrecommandData) -> cmd2.Statement:
        """A precommand hook with the wrong return type annotation"""
        return self.statement_parser.parse('hi there')
    ###
    #
    # postcommand hooks, some valid, some invalid
    #
    ###
    def postcmd(self, stop: bool, statement: cmd2.Statement) -> bool:
        """Override cmd.Cmd method"""
        self.called_postcmd += 1
        return stop
    def postcmd_hook(self, data: plugin.PostcommandData) -> plugin.PostcommandData:
        """A postcommand hook"""
        self.called_postcmd += 1
        return data
    def postcmd_hook_exception(self, data: plugin.PostcommandData) -> plugin.PostcommandData:
        """A postcommand hook which raises an exception"""
        self.called_postcmd += 1
        raise ZeroDivisionError
    def postcmd_hook_not_enough_parameters(self) -> plugin.PostcommandData:
        """A postcommand hook with no parameters"""
        pass
    def postcmd_hook_too_many_parameters(self, one: plugin.PostcommandData, two: str) -> plugin.PostcommandData:
        """A postcommand hook with too many parameters"""
        return one
    def postcmd_hook_no_parameter_annotation(self, data) -> plugin.PostcommandData:
        """A postcommand hook with no type annotation on the parameter"""
        return data
    def postcmd_hook_wrong_parameter_annotation(self, data: str) -> plugin.PostcommandData:
        """A postcommand hook with the incorrect type annotation on the parameter"""
        return data
    def postcmd_hook_no_return_annotation(self, data: plugin.PostcommandData):
        """A postcommand hook with no type annotation on the return value"""
        return data
    def postcmd_hook_wrong_return_annotation(self, data: plugin.PostcommandData) -> cmd2.Statement:
        """A postcommand hook with the wrong return type annotation"""
        return self.statement_parser.parse('hi there')
    ###
    #
    # command finalization hooks, some valid, some invalid
    #
    ###
    def cmdfinalization_hook(self, data: plugin.CommandFinalizationData) -> plugin.CommandFinalizationData:
        """A command finalization hook."""
        self.called_cmdfinalization += 1
        return data
    def cmdfinalization_hook_stop(self, data: cmd2.plugin.CommandFinalizationData) -> cmd2.plugin.CommandFinalizationData:
        """A command finalization hook which requests application exit"""
        self.called_cmdfinalization += 1
        data.stop = True
        return data
    def cmdfinalization_hook_exception(self, data: cmd2.plugin.CommandFinalizationData) -> cmd2.plugin.CommandFinalizationData:
        """A command finalization hook which raises an exception"""
        self.called_cmdfinalization += 1
        raise ValueError
    def cmdfinalization_hook_not_enough_parameters(self) -> plugin.CommandFinalizationData:
        """A command finalization hook with no parameters."""
        pass
    def cmdfinalization_hook_too_many_parameters(self, one: plugin.CommandFinalizationData, two: str) -> plugin.CommandFinalizationData:
        """A command finalization hook with too many parameters."""
        return one
    def cmdfinalization_hook_no_parameter_annotation(self, data) -> plugin.CommandFinalizationData:
        """A command finalization hook with no type annotation on the parameter."""
        return data
    def cmdfinalization_hook_wrong_parameter_annotation(self, data: str) -> plugin.CommandFinalizationData:
        """A command finalization hook with the incorrect type annotation on the parameter."""
        return data
    def cmdfinalization_hook_no_return_annotation(self, data: plugin.CommandFinalizationData):
        """A command finalization hook with no type annotation on the return value."""
        return data
    def cmdfinalization_hook_wrong_return_annotation(self, data: plugin.CommandFinalizationData) -> cmd2.Statement:
        """A command finalization hook with the wrong return type annotation."""
        return self.statement_parser.parse('hi there')
class PluggedApp(Plugin, cmd2.Cmd):
    """A sample cmd2 application with the hook-counting Plugin mixed in.

    The redundant ``__init__`` that only forwarded ``*args, **kwargs`` to
    ``super().__init__`` has been removed; Python's MRO provides exactly
    that behavior by default.
    """

    def do_say(self, statement):
        """Repeat back the arguments"""
        self.poutput(statement)
###
#
# test pre and postloop hooks
#
###
def test_register_preloop_hook_too_many_parameters():
    """Preloop hooks must not accept any arguments besides self."""
    app = PluggedApp()
    bad_hook = app.prepost_hook_too_many_parameters
    with pytest.raises(TypeError):
        app.register_preloop_hook(bad_hook)

def test_register_preloop_hook_with_return_annotation():
    """Preloop hooks must be annotated to return None."""
    app = PluggedApp()
    bad_hook = app.prepost_hook_with_wrong_return_annotation
    with pytest.raises(TypeError):
        app.register_preloop_hook(bad_hook)
def test_preloop_hook(capsys):
    """A registered preloop hook runs once before the command loop starts."""
    # Patch sys.argv so cmd2 does not interpret the pytest arguments as
    # startup commands.
    fake_argv = ["prog", "say hello", 'quit']
    with mock.patch.object(sys, 'argv', fake_argv):
        app = PluggedApp()
        app.register_preloop_hook(app.prepost_hook_one)
        app.cmdloop()
    out, err = capsys.readouterr()
    assert out == 'one\nhello\n'
    assert err == ''

def test_preloop_hooks(capsys):
    """Multiple preloop hooks run in registration order before the loop."""
    fake_argv = ["prog", "say hello", 'quit']
    with mock.patch.object(sys, 'argv', fake_argv):
        app = PluggedApp()
        app.register_preloop_hook(app.prepost_hook_one)
        app.register_preloop_hook(app.prepost_hook_two)
        app.cmdloop()
    out, err = capsys.readouterr()
    assert out == 'one\ntwo\nhello\n'
    assert err == ''
def test_register_postloop_hook_too_many_parameters():
    """Postloop hooks must not accept any arguments besides self."""
    app = PluggedApp()
    bad_hook = app.prepost_hook_too_many_parameters
    with pytest.raises(TypeError):
        app.register_postloop_hook(bad_hook)

def test_register_postloop_hook_with_wrong_return_annotation():
    """Postloop hooks must be annotated to return None."""
    app = PluggedApp()
    bad_hook = app.prepost_hook_with_wrong_return_annotation
    with pytest.raises(TypeError):
        app.register_postloop_hook(bad_hook)
def test_postloop_hook(capsys):
    """A registered postloop hook runs once after the command loop ends."""
    # Patch sys.argv so cmd2 does not interpret the pytest arguments as
    # startup commands.
    fake_argv = ["prog", "say hello", 'quit']
    with mock.patch.object(sys, 'argv', fake_argv):
        app = PluggedApp()
        app.register_postloop_hook(app.prepost_hook_one)
        app.cmdloop()
    out, err = capsys.readouterr()
    assert out == 'hello\none\n'
    assert err == ''

def test_postloop_hooks(capsys):
    """Multiple postloop hooks run in registration order after the loop."""
    fake_argv = ["prog", "say hello", 'quit']
    with mock.patch.object(sys, 'argv', fake_argv):
        app = PluggedApp()
        app.register_postloop_hook(app.prepost_hook_one)
        app.register_postloop_hook(app.prepost_hook_two)
        app.cmdloop()
    out, err = capsys.readouterr()
    assert out == 'hello\none\ntwo\n'
    assert err == ''
###
#
# test preparse hook
#
###
def test_preparse(capsys):
    """The legacy preparse method works when registered as a postparsing hook."""
    app = PluggedApp()
    app.register_postparsing_hook(app.preparse)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_preparse == 1
###
#
# test postparsing hooks
#
###
def test_postparsing_hook_too_many_parameters():
    """Postparsing hooks must take exactly one data argument."""
    app = PluggedApp()
    bad_hook = app.postparse_hook_too_many_parameters
    with pytest.raises(TypeError):
        app.register_postparsing_hook(bad_hook)

def test_postparsing_hook_undeclared_parameter_annotation():
    """The data parameter must carry a type annotation."""
    app = PluggedApp()
    bad_hook = app.postparse_hook_undeclared_parameter_annotation
    with pytest.raises(TypeError):
        app.register_postparsing_hook(bad_hook)

def test_postparsing_hook_wrong_parameter_annotation():
    """The data parameter must be annotated as PostparsingData."""
    app = PluggedApp()
    bad_hook = app.postparse_hook_wrong_parameter_annotation
    with pytest.raises(TypeError):
        app.register_postparsing_hook(bad_hook)

def test_postparsing_hook_undeclared_return_annotation():
    """The hook must declare its return type."""
    app = PluggedApp()
    bad_hook = app.postparse_hook_undeclared_return_annotation
    with pytest.raises(TypeError):
        app.register_postparsing_hook(bad_hook)

def test_postparsing_hook_wrong_return_annotation():
    """The hook must be annotated to return PostparsingData."""
    app = PluggedApp()
    bad_hook = app.postparse_hook_wrong_return_annotation
    with pytest.raises(TypeError):
        app.register_postparsing_hook(bad_hook)
def test_postparsing_hook(capsys):
    """Postparsing hooks are called once per registration."""
    app = PluggedApp()

    # No hooks registered yet: the command runs, the counter stays at zero.
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_postparsing == 0

    # One registration -> one call.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_postparsing == 1

    # Registering the same hook a second time -> two calls.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_postparsing == 2
def test_postparsing_hook_stop_first(capsys):
    """A hook that sets stop ends processing; later hooks never run."""
    app = PluggedApp()
    app.register_postparsing_hook(app.postparse_hook_stop)
    stop = app.onecmd_plus_hooks('say hello')
    assert stop
    assert app.called_postparsing == 1

    # A hook registered after the stopping one is never reached.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook)
    stop = app.onecmd_plus_hooks('say hello')
    assert stop
    assert app.called_postparsing == 1

def test_postparsing_hook_stop_second(capsys):
    """A stop requested by a later hook still halts the chain."""
    app = PluggedApp()
    app.register_postparsing_hook(app.postparse_hook)
    stop = app.onecmd_plus_hooks('say hello')
    assert not stop
    assert app.called_postparsing == 1

    # Add a stopping hook after the first one; both run, stop is honored.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook_stop)
    stop = app.onecmd_plus_hooks('say hello')
    assert stop
    assert app.called_postparsing == 2

    # A third hook registered after the stopping one never runs.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook)
    stop = app.onecmd_plus_hooks('say hello')
    assert stop
    assert app.called_postparsing == 2
def test_postparsing_hook_emptystatement_first(capsys):
    """A postparsing hook raising EmptyStatement cancels the command."""
    app = PluggedApp()
    app.register_postparsing_hook(app.postparse_hook_emptystatement)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert not out
    assert not err
    assert app.called_postparsing == 1

    # register another function but it shouldn't be called
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook)
    # BUG FIX: 'stop' must be the return value of onecmd_plus_hooks, not of
    # the register call (which returns None and made the assert vacuous).
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert not out
    assert not err
    assert app.called_postparsing == 1
def test_postparsing_hook_emptystatement_second(capsys):
    """An EmptyStatement raised by a later hook cancels the command."""
    app = PluggedApp()
    app.register_postparsing_hook(app.postparse_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert err == ''
    assert app.called_postparsing == 1

    # The raising hook runs after the first one and cancels the command.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook_emptystatement)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == ''
    assert err == ''
    assert app.called_postparsing == 2

    # A third hook registered after the raising one is never invoked.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == ''
    assert err == ''
    assert app.called_postparsing == 2
def test_postparsing_hook_exception(capsys):
    """An exception from a postparsing hook is reported and stops the chain."""
    app = PluggedApp()
    app.register_postparsing_hook(app.postparse_hook_exception)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == ''
    assert err != ''
    assert app.called_postparsing == 1

    # A hook registered after the raising one is never invoked.
    app.reset_counters()
    app.register_postparsing_hook(app.postparse_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == ''
    assert err != ''
    assert app.called_postparsing == 1
###
#
# test precmd hooks
#
#####
def test_register_precmd_hook_parameter_count():
    """Precommand hooks must take exactly one data argument."""
    app = PluggedApp()
    for bad_hook in (app.precmd_hook_not_enough_parameters,
                     app.precmd_hook_too_many_parameters):
        with pytest.raises(TypeError):
            app.register_precmd_hook(bad_hook)

def test_register_precmd_hook_no_parameter_annotation():
    """The data parameter must carry a type annotation."""
    app = PluggedApp()
    bad_hook = app.precmd_hook_no_parameter_annotation
    with pytest.raises(TypeError):
        app.register_precmd_hook(bad_hook)

def test_register_precmd_hook_wrong_parameter_annotation():
    """The data parameter must be annotated as PrecommandData."""
    app = PluggedApp()
    bad_hook = app.precmd_hook_wrong_parameter_annotation
    with pytest.raises(TypeError):
        app.register_precmd_hook(bad_hook)

def test_register_precmd_hook_no_return_annotation():
    """The hook must declare its return type."""
    app = PluggedApp()
    bad_hook = app.precmd_hook_no_return_annotation
    with pytest.raises(TypeError):
        app.register_precmd_hook(bad_hook)

def test_register_precmd_hook_wrong_return_annotation():
    """The hook must be annotated to return PrecommandData."""
    app = PluggedApp()
    bad_hook = app.precmd_hook_wrong_return_annotation
    with pytest.raises(TypeError):
        app.register_precmd_hook(bad_hook)
def test_precmd_hook(capsys):
    """called_precmd counts precmd() plus every registered precommand hook."""
    app = PluggedApp()

    # No hooks: only the precmd() override itself is counted.
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_precmd == 1

    # One hook: precmd() plus the hook.
    app.reset_counters()
    app.register_precmd_hook(app.precmd_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_precmd == 2

    # Same hook registered twice: precmd() plus both registrations.
    app.reset_counters()
    app.register_precmd_hook(app.precmd_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_precmd == 3
def test_precmd_hook_emptystatement_first(capsys):
    """An EmptyStatement from the first precmd hook cancels the command."""
    app = PluggedApp()
    app.register_precmd_hook(app.precmd_hook_emptystatement)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert not out
    assert not err
    # since the registered hooks are called before precmd(), if a registered
    # hook throws an exception, precmd() is never called
    assert app.called_precmd == 1

    # register another function but it shouldn't be called
    app.reset_counters()
    app.register_precmd_hook(app.precmd_hook)
    # BUG FIX: 'stop' must be the return value of onecmd_plus_hooks, not of
    # the register call (which returns None and made the assert vacuous).
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert not out
    assert not err
    # the exception raised by the first hook should prevent the second
    # hook from being called, and it also prevents precmd() from being
    # called
    assert app.called_precmd == 1
def test_precmd_hook_emptystatement_second(capsys):
    """An EmptyStatement from a later precmd hook cancels the command."""
    app = PluggedApp()
    app.register_precmd_hook(app.precmd_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert err == ''
    # One hook plus the precmd() override.
    assert app.called_precmd == 2

    # The raising hook runs after the first one; hooks run before precmd(),
    # so the exception means precmd() itself never runs.
    app.reset_counters()
    app.register_precmd_hook(app.precmd_hook_emptystatement)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == ''
    assert err == ''
    assert app.called_precmd == 2

    # A third hook registered after the raising one is never invoked, and
    # precmd() still never runs.
    app.reset_counters()
    app.register_precmd_hook(app.precmd_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == ''
    assert err == ''
    assert app.called_precmd == 2
###
#
# test postcmd hooks
#
####
def test_register_postcmd_hook_parameter_count():
    """Postcommand hooks must take exactly one data argument."""
    app = PluggedApp()
    for bad_hook in (app.postcmd_hook_not_enough_parameters,
                     app.postcmd_hook_too_many_parameters):
        with pytest.raises(TypeError):
            app.register_postcmd_hook(bad_hook)

def test_register_postcmd_hook_no_parameter_annotation():
    """The data parameter must carry a type annotation."""
    app = PluggedApp()
    bad_hook = app.postcmd_hook_no_parameter_annotation
    with pytest.raises(TypeError):
        app.register_postcmd_hook(bad_hook)

def test_register_postcmd_hook_wrong_parameter_annotation():
    """The data parameter must be annotated as PostcommandData."""
    app = PluggedApp()
    bad_hook = app.postcmd_hook_wrong_parameter_annotation
    with pytest.raises(TypeError):
        app.register_postcmd_hook(bad_hook)

def test_register_postcmd_hook_no_return_annotation():
    """The hook must declare its return type."""
    app = PluggedApp()
    bad_hook = app.postcmd_hook_no_return_annotation
    with pytest.raises(TypeError):
        app.register_postcmd_hook(bad_hook)

def test_register_postcmd_hook_wrong_return_annotation():
    """The hook must be annotated to return PostcommandData."""
    app = PluggedApp()
    bad_hook = app.postcmd_hook_wrong_return_annotation
    with pytest.raises(TypeError):
        app.register_postcmd_hook(bad_hook)
def test_postcmd(capsys):
    """called_postcmd counts postcmd() plus every registered postcommand hook."""
    app = PluggedApp()

    # No hooks: only the postcmd() override itself is counted.
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_postcmd == 1

    # One hook: the hook plus postcmd().
    app.reset_counters()
    app.register_postcmd_hook(app.postcmd_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_postcmd == 2

    # Same hook registered twice: both registrations plus postcmd().
    app.reset_counters()
    app.register_postcmd_hook(app.postcmd_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_postcmd == 3
def test_postcmd_exception_first(capsys):
    """An exception from the first postcmd hook skips later hooks and postcmd()."""
    app = PluggedApp()
    app.register_postcmd_hook(app.postcmd_hook_exception)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert err
    # since the registered hooks are called before postcmd(), if a registered
    # hook throws an exception, postcmd() is never called. So we should have
    # a count of one because we called the hook that raised the exception
    assert app.called_postcmd == 1

    # register another function but it shouldn't be called
    app.reset_counters()
    app.register_postcmd_hook(app.postcmd_hook)
    # BUG FIX: 'stop' must be the return value of onecmd_plus_hooks, not of
    # the register call (which returns None and made the assert vacuous).
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert err
    # the exception raised by the first hook should prevent the second
    # hook from being called, and it also prevents postcmd() from being
    # called
    assert app.called_postcmd == 1
def test_postcmd_exception_second(capsys):
    """An exception from the second postcmd hook skips postcmd()."""
    app = PluggedApp()
    app.register_postcmd_hook(app.postcmd_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert not err
    # with one hook registered, we should get the hook and postcmd()
    assert app.called_postcmd == 2

    # register a raising hook, which should be called after the first hook
    app.reset_counters()
    app.register_postcmd_hook(app.postcmd_hook_exception)
    # BUG FIX: 'stop' must be the return value of onecmd_plus_hooks, not of
    # the register call (which returns None and made the assert vacuous).
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert err
    # the first hook ran, the second hook raised, and the exception prevents
    # postcmd() from being called
    assert app.called_postcmd == 2
##
#
# command finalization
#
###
def test_register_cmdfinalization_hook_parameter_count():
    """Command finalization hooks must take exactly one data argument."""
    app = PluggedApp()
    for bad_hook in (app.cmdfinalization_hook_not_enough_parameters,
                     app.cmdfinalization_hook_too_many_parameters):
        with pytest.raises(TypeError):
            app.register_cmdfinalization_hook(bad_hook)

def test_register_cmdfinalization_hook_no_parameter_annotation():
    """The data parameter must carry a type annotation."""
    app = PluggedApp()
    bad_hook = app.cmdfinalization_hook_no_parameter_annotation
    with pytest.raises(TypeError):
        app.register_cmdfinalization_hook(bad_hook)

def test_register_cmdfinalization_hook_wrong_parameter_annotation():
    """The data parameter must be annotated as CommandFinalizationData."""
    app = PluggedApp()
    bad_hook = app.cmdfinalization_hook_wrong_parameter_annotation
    with pytest.raises(TypeError):
        app.register_cmdfinalization_hook(bad_hook)

def test_register_cmdfinalization_hook_no_return_annotation():
    """The hook must declare its return type."""
    app = PluggedApp()
    bad_hook = app.cmdfinalization_hook_no_return_annotation
    with pytest.raises(TypeError):
        app.register_cmdfinalization_hook(bad_hook)

def test_register_cmdfinalization_hook_wrong_return_annotation():
    """The hook must be annotated to return CommandFinalizationData."""
    app = PluggedApp()
    bad_hook = app.cmdfinalization_hook_wrong_return_annotation
    with pytest.raises(TypeError):
        app.register_cmdfinalization_hook(bad_hook)
def test_cmdfinalization(capsys):
    """Command finalization hooks run once per registration."""
    app = PluggedApp()

    # No hooks registered: the counter stays at zero.
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_cmdfinalization == 0

    # One registration -> one call.
    app.register_cmdfinalization_hook(app.cmdfinalization_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_cmdfinalization == 1

    # Registering the same hook again doubles the call count.
    app.reset_counters()
    app.register_cmdfinalization_hook(app.cmdfinalization_hook)
    app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_cmdfinalization == 2
def test_cmdfinalization_stop_first(capsys):
    """A stop request from the first finalization hook is honored; both hooks run."""
    app = PluggedApp()
    app.register_cmdfinalization_hook(app.cmdfinalization_hook_stop)
    app.register_cmdfinalization_hook(app.cmdfinalization_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_cmdfinalization == 2
    assert stop

def test_cmdfinalization_stop_second(capsys):
    """A stop request from the second finalization hook is honored; both hooks run."""
    app = PluggedApp()
    app.register_cmdfinalization_hook(app.cmdfinalization_hook)
    app.register_cmdfinalization_hook(app.cmdfinalization_hook_stop)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert out == 'hello\n'
    assert err == ''
    assert app.called_cmdfinalization == 2
    assert stop
def test_cmdfinalization_hook_exception(capsys):
    """An exception from a finalization hook is reported; later hooks are skipped."""
    app = PluggedApp()
    app.register_cmdfinalization_hook(app.cmdfinalization_hook_exception)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert err != ''
    assert app.called_cmdfinalization == 1

    # A hook registered after the raising one is never invoked.
    app.reset_counters()
    app.register_cmdfinalization_hook(app.cmdfinalization_hook)
    stop = app.onecmd_plus_hooks('say hello')
    out, err = capsys.readouterr()
    assert not stop
    assert out == 'hello\n'
    assert err != ''
    assert app.called_cmdfinalization == 1
| 34.774732 | 136 | 0.717165 |
f70660ac38f411cd1d8a0396ef510a16bb61622b | 5,516 | py | Python | esperclient/models/inline_response2005.py | pallavigopi/esper-client-py | f7e71d3f25a5d91f35628b414e8abe9e6849d316 | [
"Apache-2.0"
] | null | null | null | esperclient/models/inline_response2005.py | pallavigopi/esper-client-py | f7e71d3f25a5d91f35628b414e8abe9e6849d316 | [
"Apache-2.0"
] | null | null | null | esperclient/models/inline_response2005.py | pallavigopi/esper-client-py | f7e71d3f25a5d91f35628b414e8abe9e6849d316 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
ESPER API REFERENCE
OpenAPI spec version: 1.0.0
Contact: developer@esper.io
---------------------------------------------------------
Copyright 2019 Shoonya Enterprises Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pprint
import re
import six
from esperclient.models.app_install import AppInstall
class InlineResponse2005(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'count': 'int',
'next': 'str',
'previous': 'str',
'results': 'list[AppInstall]'
}
attribute_map = {
'count': 'count',
'next': 'next',
'previous': 'previous',
'results': 'results'
}
def __init__(self, count=None, next=None, previous=None, results=None):
"""InlineResponse2005 - a model defined in Swagger"""
self._count = None
self._next = None
self._previous = None
self._results = None
self.discriminator = None
self.count = count
if next is not None:
self.next = next
if previous is not None:
self.previous = previous
self.results = results
@property
def count(self):
"""Gets the count of this InlineResponse2005.
:return: The count of this InlineResponse2005.
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this InlineResponse2005.
:param count: The count of this InlineResponse2005.
:type: int
"""
if count is None:
raise ValueError("Invalid value for `count`, must not be `None`")
self._count = count
@property
def next(self):
"""Gets the next of this InlineResponse2005.
:return: The next of this InlineResponse2005.
:rtype: str
"""
return self._next
@next.setter
def next(self, next):
"""Sets the next of this InlineResponse2005.
:param next: The next of this InlineResponse2005.
:type: str
"""
self._next = next
@property
def previous(self):
"""Gets the previous of this InlineResponse2005.
:return: The previous of this InlineResponse2005.
:rtype: str
"""
return self._previous
@previous.setter
def previous(self, previous):
"""Sets the previous of this InlineResponse2005.
:param previous: The previous of this InlineResponse2005.
:type: str
"""
self._previous = previous
@property
def results(self):
"""Gets the results of this InlineResponse2005.
:return: The results of this InlineResponse2005.
:rtype: list[AppInstall]
"""
return self._results
@results.setter
def results(self, results):
"""Sets the results of this InlineResponse2005.
:param results: The results of this InlineResponse2005.
:type: list[AppInstall]
"""
if results is None:
raise ValueError("Invalid value for `results`, must not be `None`")
self._results = results
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(InlineResponse2005, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InlineResponse2005):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 26.14218 | 80 | 0.581218 |
f706c0c3e8cc27ca3edb6a7b17a579cd796d7cc7 | 6,962 | py | Python | tests/test_integration.py | prototypefund/lazycluster | e6fbd69dbd73ec9bf101a502f25f7afdf0579f66 | [
"Apache-2.0"
] | 44 | 2019-08-07T12:01:07.000Z | 2021-09-02T16:50:51.000Z | tests/test_integration.py | prototypefund/lazycluster | e6fbd69dbd73ec9bf101a502f25f7afdf0579f66 | [
"Apache-2.0"
] | 9 | 2020-10-26T13:08:32.000Z | 2021-09-16T02:13:58.000Z | tests/test_integration.py | prototypefund/lazycluster | e6fbd69dbd73ec9bf101a502f25f7afdf0579f66 | [
"Apache-2.0"
] | 9 | 2019-09-18T07:52:09.000Z | 2022-02-11T13:48:19.000Z | import os
import re
import sys
import time
from subprocess import PIPE, run
from types import ModuleType
from typing import Union
import docker
import requests
import storm.__main__ as storm
from lazycluster import Runtime, RuntimeGroup, RuntimeManager, RuntimeTask
from .config import RUNTIME_DOCKER_IMAGE, RUNTIME_NAMES, WORKSPACE_PORT
def setup_module(module: ModuleType) -> None:
""" setup any state specific to the execution of the given module."""
docker_client = docker.from_env()
for runtime_name in RUNTIME_NAMES:
_start_runtime_container(runtime_name, docker_client)
# Sleep a moment to give all processes time to start within the Workspace containers
time.sleep(15)
for runtime_name in RUNTIME_NAMES:
_setup_ssh_connection_to_runtime(runtime_name)
def teardown_module(module: ModuleType) -> None:
"""teardown any state that was previously setup with a setup_module
method.
"""
_remove_runtimes()
class TestRuntime:
def test_setup(self) -> None:
for runtime_name in RUNTIME_NAMES:
completed_process = run(
f"ssh {runtime_name} 'echo $WORKSPACE_NAME'",
shell=True,
stdout=PIPE,
stderr=PIPE,
)
assert completed_process.stderr == b"", "The stderr is not emtpy"
stdout = completed_process.stdout.decode("UTF-8").replace("\n", "")
assert stdout == runtime_name, "Stdout is not equal to the runtime_name"
if not RUNTIME_NAMES:
raise RuntimeError("No runtime names in integration/config.py configured")
Runtime(RUNTIME_NAMES[0])
def test_echo(self) -> None:
runtime_name = RUNTIME_NAMES[len(RUNTIME_NAMES) - 1]
rt = Runtime(runtime_name)
msg = f"Hello Runtime {runtime_name}"
assert rt.echo(msg).rstrip("\n") == msg
def test_working(self) -> None:
runtime_name = RUNTIME_NAMES[0]
exp_working_dir = "/etc"
rt = Runtime(runtime_name, working_dir=exp_working_dir)
act_working_dir = rt.echo("${PWD}").rstrip("\n")
assert exp_working_dir == act_working_dir
task = RuntimeTask("get-working-dir").run_command("echo ${PWD}")
rt.execute_task(task, execute_async=False)
assert exp_working_dir == rt.execution_log(task.name)[0].rstrip("\n").rstrip(
"\r"
)
class TestRuntimeGroup:
def test_creation(self) -> None:
runtime_group = RuntimeGroup(hosts=RUNTIME_NAMES)
for runtime_name in RUNTIME_NAMES:
assert runtime_name in runtime_group._runtimes
assert isinstance(runtime_group._runtimes[runtime_name], Runtime)
class TestRuntimeManager:
def test_create_group(self) -> None:
runtime_group = RuntimeManager().create_group()
for runtime_name in RUNTIME_NAMES:
assert runtime_name in runtime_group._runtimes
assert isinstance(runtime_group._runtimes[runtime_name], Runtime)
# -------------------------------------------------------------------------
def _remove_runtimes() -> None:
docker_client = docker.from_env()
for runtime_name in RUNTIME_NAMES:
try:
runtime_container = docker_client.containers.get(runtime_name)
runtime_container.remove(force=True)
except docker.errors.NotFound:
# TODO: handle create a docker container if not running as containerized test
print(f"Conatiner {runtime_name} not found")
# Delete ssh config as well, because the ssh setup fails
# when testing against multiple python versions
storm.delete(runtime_name)
def _get_current_container_id() -> str:
return run(
"awk -F/ '{ print $NF }' /proc/1/cpuset",
shell=True,
stdout=PIPE,
stderr=PIPE,
encoding="UTF-8",
).stdout.rstrip("\n")
def _start_runtime_container(name: str, client: docker.DockerClient) -> None:
try:
container = client.containers.run(
RUNTIME_DOCKER_IMAGE,
name=name,
environment={"WORKSPACE_NAME": name},
detach=True,
)
except docker.errors.APIError:
_remove_runtimes()
raise
container.reload()
ip_address = container.attrs["NetworkSettings"]["Networks"]["bridge"]["IPAddress"]
os.environ[name] = ip_address
_wait_until_started(ip_address, WORKSPACE_PORT)
def _setup_ssh_connection_to_runtime(runtime_name: str) -> None:
runtime_host = os.getenv(runtime_name, "localhost")
response = requests.get(
f"http://{runtime_host}:{WORKSPACE_PORT}/tooling/ssh/setup-command?origin=http://{runtime_host}:{WORKSPACE_PORT}"
)
ssh_script_runner_regex = rf'^\/bin\/bash <\(curl -s --insecure "(http:\/\/{runtime_host}:{WORKSPACE_PORT}\/shared\/ssh\/setup\?token=[a-z0-9]+&host={runtime_host}&port={WORKSPACE_PORT})"\)$'
pattern = re.compile(ssh_script_runner_regex)
match = pattern.match(response.text)
assert match, "SSH setup script url not found"
# Execute the ssh setup script and automatically pass an ssh connection name to the script
script_url = match.groups()[0]
r = requests.get(script_url)
setup_script_path = "./setup-ssh.sh"
_remove_file_if_exists(setup_script_path)
with open(setup_script_path, "w") as file:
file.write(r.text)
# make the file executable for the user
os.chmod(setup_script_path, 0o744)
completed_process = run(
[f'/bin/bash -c "{setup_script_path}"'],
input=runtime_name,
encoding="ascii",
shell=True,
stdout=PIPE,
stderr=PIPE,
)
# child = pexpect.spawn(f"/bin/bash {setup_script_path}", encoding="UTF-8")
# child.expect("Provide a name .*")
# child.sendline(runtime_name)
# child.expect("remote_ikernel was detected .*")
# child.sendline("no")
# child.expect("Do you want to add this connection as mountable SFTP storage .*")
# child.sendline("no")
# child.close()
_remove_file_if_exists(setup_script_path)
assert completed_process.stderr == ""
assert "Connection successful!" in completed_process.stdout
def _wait_until_started(ip_address: str, workspace_port: Union[str, int]) -> None:
index = 0
health_url = f"http://{ip_address}:{str(workspace_port)}/healthy"
response = None
while response is None or (response.status_code != 200 and index < 15):
index += 1
time.sleep(1)
try:
response = requests.get(health_url, allow_redirects=False, timeout=2)
except requests.ConnectionError:
# Catch error that is raised when the workspace container is not reachable yet
pass
if index == 15:
print("The workspace did not start")
sys.exit(-1)
def _remove_file_if_exists(path: str) -> None:
try:
os.remove(path)
except OSError:
pass
| 32.685446 | 195 | 0.657282 |
f706dc272c114b0f5868e62d1d68cbe7a0866c42 | 2,676 | py | Python | mplotlab/graphics/Navigation.py | DedeKite/wxPlotLab | 808d457aeb897ceb37535bcd11d15b65a0a14cd1 | [
"MIT"
] | 6 | 2016-03-21T18:44:23.000Z | 2021-05-16T19:07:02.000Z | mplotlab/graphics/Navigation.py | DedeKite/wxPlotLab | 808d457aeb897ceb37535bcd11d15b65a0a14cd1 | [
"MIT"
] | 1 | 2018-05-15T14:47:03.000Z | 2018-05-15T14:47:03.000Z | mplotlab/graphics/Navigation.py | astyl/mplotlab | 808d457aeb897ceb37535bcd11d15b65a0a14cd1 | [
"MIT"
] | 1 | 2016-01-29T12:38:20.000Z | 2016-01-29T12:38:20.000Z | # -*-coding:Utf-8 -*
from mplotlab import App
from matplotlib.backend_bases import NavigationToolbar2
import wx
class Cursors:
# this class is only used as a simple namespace
HAND, POINTER, SELECT_REGION, MOVE = list(range(4))
cursors = Cursors()
cursord = {
cursors.MOVE : wx.CURSOR_HAND,
cursors.HAND : wx.CURSOR_HAND,
cursors.POINTER : wx.CURSOR_ARROW,
cursors.SELECT_REGION : wx.CURSOR_CROSS,
}
class Navigation(NavigationToolbar2):
def __init__(self,*a,**k):
NavigationToolbar2.__init__(self, *a,**k)
def _init_toolbar(self,*args,**kwargs):
pass
def set_message(self,s):
""" display in the status bar
the mouseover data (x,y)
"""
try:
App().mainWin.GetStatusBar().SetStatusText(s,0)
except:
pass
def set_cursor(self, cursor):
cursor =wx.StockCursor(cursord[cursor])
self.canvas.SetCursor( cursor )
def dynamic_update(self):
d = self._idle
self._idle = False
if d:
self.canvas.draw()
self._idle = True
def press(self, event):
if self._active == 'ZOOM':
self.wxoverlay = wx.Overlay()
def release(self, event):
if self._active == 'ZOOM':
# When the mouse is released we reset the overlay and it
# restores the former content to the window.
self.wxoverlay.Reset()
del self.wxoverlay
def draw_rubberband(self, event, x0, y0, x1, y1):
# Use an Overlay to draw a rubberband-like bounding box.
dc = wx.ClientDC(self.canvas)
odc = wx.DCOverlay(self.wxoverlay, dc)
odc.Clear()
# Mac's DC is already the same as a GCDC, and it causes
# problems with the overlay if we try to use an actual
# wx.GCDC so don't try it.
if 'wxMac' not in wx.PlatformInfo:
dc = wx.GCDC(dc)
height = self.canvas.figure.bbox.height
y1 = height - y1
y0 = height - y0
if y1<y0: y0, y1 = y1, y0
if x1<y0: x0, x1 = x1, x0
w = x1 - x0
h = y1 - y0
rect = wx.Rect(x0, y0, w, h)
rubberBandColor = '#C0C0FF' # or load from config?
# Set a pen for the border
color = wx.NamedColour(rubberBandColor)
dc.SetPen(wx.Pen(color, 1))
# use the same color, plus alpha for the brush
r, g, b = color.Get()
color.Set(r,g,b, 0x60)
dc.SetBrush(wx.Brush(color))
dc.DrawRectangleRect(rect)
| 28.774194 | 69 | 0.554933 |
f706f78145d12ecc2b8193f93c65c8642b4557a3 | 374 | py | Python | autovivification.py | godontop/python-work | ea22e0df8b0b17605f5a434e556a388d1f75aa47 | [
"MIT"
] | null | null | null | autovivification.py | godontop/python-work | ea22e0df8b0b17605f5a434e556a388d1f75aa47 | [
"MIT"
] | null | null | null | autovivification.py | godontop/python-work | ea22e0df8b0b17605f5a434e556a388d1f75aa47 | [
"MIT"
] | null | null | null | class AutoVivification(dict):
"""Implementation of perl's autovivification."""
def __missing__(self, key):
value = self[key] = type(self)()
return value
weather = AutoVivification()
weather['china']['guangdong']['shenzhen'] = 'sunny'
weather['china']['hubei']['wuhan'] = 'sunny'
weather['USA']['California']['Los Angeles'] = 'sunny'
print(weather)
| 28.769231 | 53 | 0.65508 |
f70710e622e29b88ff546c1cea16be884932d06d | 7,825 | py | Python | tests/test_serverless.py | jpvowen/troposphere | 6a9efa7717db75905b846a9f3aafd092f55c7925 | [
"BSD-2-Clause"
] | 1 | 2021-02-14T15:18:12.000Z | 2021-02-14T15:18:12.000Z | tests/test_serverless.py | jpvowen/troposphere | 6a9efa7717db75905b846a9f3aafd092f55c7925 | [
"BSD-2-Clause"
] | null | null | null | tests/test_serverless.py | jpvowen/troposphere | 6a9efa7717db75905b846a9f3aafd092f55c7925 | [
"BSD-2-Clause"
] | 5 | 2020-05-10T13:50:32.000Z | 2021-09-09T09:06:54.000Z | import unittest
from troposphere import Tags, Template
from troposphere.s3 import Filter, Rules, S3Key
from troposphere.serverless import (
Api, DeadLetterQueue, DeploymentPreference, Function, FunctionForPackaging,
LayerVersion, S3Event, S3Location, SimpleTable,
)
class TestServerless(unittest.TestCase):
def test_exactly_one_code(self):
serverless_func = Function(
"SomeHandler",
Handler="index.handler",
Runtime="nodejs",
CodeUri=S3Location(
Bucket="mybucket",
Key="mykey",
),
InlineCode="",
)
t = Template()
t.add_resource(serverless_func)
with self.assertRaises(ValueError):
t.to_json()
def test_s3_location(self):
serverless_func = Function(
"SomeHandler",
Handler="index.handler",
Runtime="nodejs",
CodeUri=S3Location(
Bucket="mybucket",
Key="mykey",
)
)
t = Template()
t.add_resource(serverless_func)
t.to_json()
def test_tags(self):
serverless_func = Function(
"SomeHandler",
Handler="index.handler",
Runtime="nodejs",
CodeUri="s3://bucket/handler.zip",
Tags=Tags({
'Tag1': 'TagValue1',
'Tag2': 'TagValue2'
})
)
t = Template()
t.add_resource(serverless_func)
t.to_json()
def test_DLQ(self):
serverless_func = Function(
"SomeHandler",
Handler="index.handler",
Runtime="nodejs",
CodeUri="s3://bucket/handler.zip",
DeadLetterQueue=DeadLetterQueue(
Type='SNS',
TargetArn='arn:aws:sns:us-east-1:000000000000:SampleTopic'
)
)
t = Template()
t.add_resource(serverless_func)
t.to_json()
def test_required_function(self):
serverless_func = Function(
"SomeHandler",
Handler="index.handler",
Runtime="nodejs",
CodeUri="s3://bucket/handler.zip"
)
t = Template()
t.add_resource(serverless_func)
t.to_json()
def test_optional_auto_publish_alias(self):
serverless_func = Function(
"SomeHandler",
Handler="index.handler",
Runtime="nodejs",
CodeUri="s3://bucket/handler.zip",
AutoPublishAlias="alias"
)
t = Template()
t.add_resource(serverless_func)
t.to_json()
def test_optional_deployment_preference(self):
serverless_func = Function(
"SomeHandler",
Handler="index.handler",
Runtime="nodejs",
CodeUri="s3://bucket/handler.zip",
AutoPublishAlias="alias",
DeploymentPreference=DeploymentPreference(
Type="AllAtOnce"
)
)
t = Template()
t.add_resource(serverless_func)
t.to_json()
def test_required_api_definitionuri(self):
serverless_api = Api(
"SomeApi",
StageName='test',
DefinitionUri='s3://bucket/swagger.yml',
)
t = Template()
t.add_resource(serverless_api)
t.to_json()
swagger = {
"swagger": "2.0",
"info": {
"title": "swagger test",
},
"paths": {
"/test": {
"get": {
},
},
},
}
def test_required_api_both(self):
serverless_api = Api(
"SomeApi",
StageName='test',
DefinitionUri='s3://bucket/swagger.yml',
DefinitionBody=self.swagger,
)
t = Template()
t.add_resource(serverless_api)
with self.assertRaises(ValueError):
t.to_json()
def test_required_api_definitionbody(self):
serverless_api = Api(
"SomeApi",
StageName='test',
DefinitionBody=self.swagger,
)
t = Template()
t.add_resource(serverless_api)
t.to_json()
def test_api_no_definition(self):
serverless_api = Api(
"SomeApi",
StageName='test',
)
t = Template()
t.add_resource(serverless_api)
t.to_json()
def test_simple_table(self):
serverless_table = SimpleTable(
"SomeTable"
)
t = Template()
t.add_resource(serverless_table)
t.to_json()
def test_layer_version(self):
layer_version = LayerVersion(
"SomeLayer",
ContentUri="someuri",
)
t = Template()
t.add_resource(layer_version)
t.to_json()
layer_version = LayerVersion(
"SomeLayer",
)
t = Template()
t.add_resource(layer_version)
with self.assertRaises(ValueError):
t.to_json()
def test_s3_filter(self):
t = Template()
t.add_resource(
Function(
"ProcessorFunction",
Handler='process_file.handler',
CodeUri='.',
Runtime='python3.6',
Policies='AmazonS3FullAccess',
Events={
'FileUpload': S3Event(
'FileUpload',
Bucket="bucket",
Events=['s3:ObjectCreated:*'],
Filter=Filter(S3Key=S3Key(
Rules=[
Rules(Name="prefix", Value="upload/"),
Rules(Name="suffix", Value=".txt"),
],
))
)
}
)
)
t.to_json()
def test_policy_document(self):
t = Template()
t.add_resource(
Function(
"ProcessorFunction",
Handler='process_file.handler',
CodeUri='.',
Runtime='python3.6',
Policies="AmazonS3ReadOnly"
)
)
t.to_json()
t = Template()
t.add_resource(
Function(
"ProcessorFunction",
Handler='process_file.handler',
CodeUri='.',
Runtime='python3.6',
Policies=["AmazonS3FullAccess", "AmazonDynamoDBFullAccess"]
)
)
t.to_json()
t = Template()
t.add_resource(
Function(
"ProcessorFunction",
Handler='process_file.handler',
CodeUri='.',
Runtime='python3.6',
Policies={
"Statement": [{
"Effect": "Allow",
"Action": ["s3:GetObject", "s3:PutObject"],
"Resource": ["arn:aws:s3:::bucket/*"],
}]
},
)
)
t.to_json()
def test_packaging(self):
# test for no CodeUri or InlineCode
t = Template()
t.add_resource(
FunctionForPackaging(
"ProcessorFunction",
Handler='process_file.handler',
Runtime='python3.6',
Policies={
"Statement": [{
"Effect": "Allow",
"Action": ["s3:GetObject", "s3:PutObject"],
"Resource": ["arn:aws:s3:::bucket/*"],
}]
},
)
)
t.to_json()
if __name__ == '__main__':
unittest.main()
| 28.046595 | 79 | 0.474121 |
f7071f677e4a2851bd5ef43d794f9ff207bdcbfa | 94 | py | Python | project_RL/linear_sarsa/__init__.py | Ronnypetson/gym-minigrid | 1f0a607160960f00b664130c756a4f3bab356752 | [
"Apache-2.0"
] | null | null | null | project_RL/linear_sarsa/__init__.py | Ronnypetson/gym-minigrid | 1f0a607160960f00b664130c756a4f3bab356752 | [
"Apache-2.0"
] | 2 | 2021-10-15T01:57:47.000Z | 2021-10-29T20:24:56.000Z | project_RL/linear_sarsa/__init__.py | Ronnypetson/gym-minigrid | 1f0a607160960f00b664130c756a4f3bab356752 | [
"Apache-2.0"
] | null | null | null | import project_RL.linear_sarsa.train
from project_RL.linear_sarsa.sarsa_lambda_agent import *
| 31.333333 | 56 | 0.882979 |
f7072282324b2c842adb0702a11c0bbc9e10dbfb | 3,821 | py | Python | Tutorials/MLADS-spring-2018/CNTK_distributed/CNTK_distributed.py | cvitolo/DataScienceVM | 97e1b780de572266dcdab89d443af55d5b930f42 | [
"MIT"
] | 165 | 2017-04-21T07:42:38.000Z | 2022-02-08T13:13:20.000Z | Tutorials/MLADS-spring-2018/CNTK_distributed/CNTK_distributed.py | cvitolo/DataScienceVM | 97e1b780de572266dcdab89d443af55d5b930f42 | [
"MIT"
] | 32 | 2017-08-14T16:50:23.000Z | 2021-06-29T09:27:01.000Z | Tutorials/MLADS-spring-2018/CNTK_distributed/CNTK_distributed.py | cvitolo/DataScienceVM | 97e1b780de572266dcdab89d443af55d5b930f42 | [
"MIT"
] | 106 | 2017-04-26T08:46:36.000Z | 2022-03-20T11:44:08.000Z | import numpy as np
import os
import sys
import cntk
from cntk.layers import Convolution2D, MaxPooling, Dense, Dropout
from utils import *
import argparse
from cntk.train.distributed import Communicator, mpi_communicator
# Hyperparams
EPOCHS = 1
BATCHSIZE = 64 * 4
LR = 0.01
MOMENTUM = 0.9
N_CLASSES = 10
def create_basic_model(input, out_dims):
with cntk.layers.default_options(init=cntk.glorot_uniform(), activation=cntk.relu):
net = cntk.layers.Convolution((5,5), 32, pad=True)(input)
net = cntk.layers.MaxPooling((3,3), strides=(2,2))(net)
net = cntk.layers.Convolution((5,5), 32, pad=True)(net)
net = cntk.layers.MaxPooling((3,3), strides=(2,2))(net)
net = cntk.layers.Convolution((5,5), 64, pad=True)(net)
net = cntk.layers.MaxPooling((3,3), strides=(2,2))(net)
net = cntk.layers.Dense(64)(net)
net = cntk.layers.Dense(out_dims, activation=None)(net)
return net
def init_model(m):
progress_writers = [cntk.logging.ProgressPrinter(
freq=int(BATCHSIZE / 2),
rank=cntk.train.distributed.Communicator.rank(),
num_epochs=EPOCHS)]
# Loss (dense labels); check if support for sparse labels
loss = cntk.cross_entropy_with_softmax(m, labels)
# Momentum SGD
# https://github.com/Microsoft/CNTK/blob/master/Manual/Manual_How_to_use_learners.ipynb
# unit_gain=False: momentum_direction = momentum*old_momentum_direction + gradient
# if unit_gain=True then ...(1-momentum)*gradient
local_learner = cntk.momentum_sgd(m.parameters,
lr=cntk.learning_rate_schedule(LR, cntk.UnitType.minibatch) ,
momentum=cntk.momentum_schedule(MOMENTUM),
unit_gain=False)
distributed_learner = cntk.train.distributed.data_parallel_distributed_learner(local_learner)
trainer = cntk.Trainer(m, (loss, cntk.classification_error(m, labels)), [distributed_learner], progress_writers)
return trainer, distributed_learner
parser = argparse.ArgumentParser()
parser.add_argument('--input_dir')
#parser.add_argument('--output_dir')
print(sys.argv)
args = parser.parse_args()
# Data into format for library
x_train, x_test, y_train, y_test = cifar_for_library(download_dir=args.input_dir, channel_first=True, one_hot=True)
# CNTK format
y_train = y_train.astype(np.float32)
y_test = y_test.astype(np.float32)
print(x_train.shape, x_test.shape, y_train.shape, y_test.shape)
print(x_train.dtype, x_test.dtype, y_train.dtype, y_test.dtype)
# Placeholders
features = cntk.input_variable((3, 32, 32), np.float32)
labels = cntk.input_variable(N_CLASSES, np.float32)
# Load symbol
sym = create_basic_model(features, N_CLASSES)
def save_model(model, learner, file_name):
if learner.communicator().is_main():
model.save(file_name)
trainer, learner = init_model(sym)
for j in range(EPOCHS):
for data, label in yield_mb(x_train, y_train, BATCHSIZE, shuffle=True):
trainer.train_minibatch({features: data, labels: label})
# Log (this is just last batch in epoch, not average of batches)
eval_error = trainer.previous_minibatch_evaluation_average
print("Epoch %d | Accuracy: %.6f" % (j+1, (1-eval_error)))
z = cntk.softmax(sym)
save_model(sym, learner, "{}/cifar_final.model".format(args.input_dir))
n_samples = (y_test.shape[0]//BATCHSIZE)*BATCHSIZE
y_guess = np.zeros(n_samples, dtype=np.int)
y_truth = np.argmax(y_test[:n_samples], axis=-1)
c = 0
for data, label in yield_mb(x_test, y_test, BATCHSIZE):
predicted_label_probs = z.eval({features : data})
y_guess[c*BATCHSIZE:(c+1)*BATCHSIZE] = np.argmax(predicted_label_probs, axis=-1)
c += 1
print("Accuracy: ", sum(y_guess == y_truth)/len(y_guess))
cntk.train.distributed.Communicator.finalize()
| 35.71028 | 116 | 0.708715 |
f7072395412ad1d9fe5f43c10e8343cbe181879e | 926 | py | Python | config/settings.py | hadadiashkan/yousician | 99b8e6a6d04f0304fccbaef8ee5f423ac92b7b45 | [
"MIT"
] | null | null | null | config/settings.py | hadadiashkan/yousician | 99b8e6a6d04f0304fccbaef8ee5f423ac92b7b45 | [
"MIT"
] | null | null | null | config/settings.py | hadadiashkan/yousician | 99b8e6a6d04f0304fccbaef8ee5f423ac92b7b45 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Application configuration.
Most configuration is set via environment variables.
For local development, use a .env file to set
environment variables.
"""
from os import path, urandom
class Config:
"""Application Configuration."""
from environs import Env
env = Env()
env.read_env()
BASE_DIR = path.dirname(path.dirname(__file__))
ENV = env.str("FLASK_ENV", default="production")
DEBUG = ENV == "development"
MONGODB_SETTINGS = [
{
"db": env.str("MONGODB_DB"),
"host": env.str("MONGODB_HOST"),
"port": env.int("MONGODB_PORT"),
}
]
UPLOAD_DIR = env.str("UPLOAD_DIR")
INSTALLED_RESOURCES = [
"song",
]
# To enable flask to catch package exceptions
PROPAGATE_EXCEPTIONS = True
SECRET_KEY = urandom(24)
SEND_FILE_MAX_AGE_DEFAULT = env.int("SEND_FILE_MAX_AGE_DEFAULT")
| 21.045455 | 68 | 0.631749 |
f7073889c5af752e055589ef6c34b11b55b8eee6 | 2,666 | py | Python | va_explorer/users/tests/test_validators.py | VA-Explorer/va_explorer | e43cfbff0ce5209c12134b7ac4ce439db6fc87a2 | [
"Apache-2.0"
] | null | null | null | va_explorer/users/tests/test_validators.py | VA-Explorer/va_explorer | e43cfbff0ce5209c12134b7ac4ce439db6fc87a2 | [
"Apache-2.0"
] | 125 | 2020-10-07T12:00:15.000Z | 2022-03-31T21:29:21.000Z | va_explorer/users/tests/test_validators.py | VA-Explorer/va_explorer | e43cfbff0ce5209c12134b7ac4ce439db6fc87a2 | [
"Apache-2.0"
] | 2 | 2020-10-29T16:08:42.000Z | 2020-12-08T19:03:41.000Z | from unittest import TestCase
import pytest
from django.core.exceptions import ValidationError
from va_explorer.tests.factories import UserFactory
from va_explorer.users.models import UserPasswordHistory
from va_explorer.users.validators import PasswordComplexityValidator, PasswordHistoryValidator
pytestmark = pytest.mark.django_db
class TestPasswordComplexityValidator(TestCase):
def setUp(self):
self.user = UserFactory.create()
self.validator = PasswordComplexityValidator()
def test_rejects_no_number(self):
with self.assertRaisesRegex(ValidationError, "number"):
self.validator.validate("Password!", self.user)
def test_rejects_no_lower(self):
with self.assertRaisesRegex(ValidationError, "lowercase"):
self.validator.validate("PASSWORD!", self.user)
def test_rejects_no_upper(self):
with self.assertRaisesRegex(ValidationError, "uppercase"):
self.validator.validate("password!", self.user)
def test_rejects_no_special(self):
with self.assertRaisesRegex(ValidationError, "nonalphanumeric"):
self.validator.validate("Password", self.user)
def test_rejects_multiple(self):
# Expect no_number, no_upper, and no_special in that order
with self.assertRaisesRegex(ValidationError, "(number).*(uppercase).*(nonalphanumeric)"):
self.validator.validate("pass", self.user)
def test_accepts_complex_password(self):
try:
self.validator.validate('Password1!', self.user)
except ValidationError:
self.fail("PasswordComplexityValidator raised ValidationError unexpectedly")
class TestPasswordHistoryValidator(TestCase):
def setUp(self):
self.user = UserFactory.create()
self.validator = PasswordHistoryValidator()
def test_accepts_new_password(self):
try:
self.validator.validate('test1', self.user)
except ValidationError:
self.fail("PasswordHistoryValidator raised ValidationError unexpectedly")
def test_rejects_repeated_password(self):
for i in range(0, 13):
self.user.set_password(f"test{i}")
self.user.save()
with self.assertRaises(ValidationError):
self.validator.validate("test7", self.user)
def test_keeps_limited_history(self):
for i in range(0, 13):
self.user.set_password(f"test{i}")
self.user.save()
self.validator.validate("new_password", self.user)
password_history = UserPasswordHistory.objects.filter(user_id=self.user)
self.assertEqual(password_history.count(), 12)
| 37.027778 | 97 | 0.704426 |
f7075e0814a6b52830ec7abfbaab90ad9441a256 | 732 | py | Python | load_pkl_model.py | Rayaction/ECO-paddle | 28c9adf0f6626dd8d262848fd6a2d7147e76048e | [
"MIT"
] | null | null | null | load_pkl_model.py | Rayaction/ECO-paddle | 28c9adf0f6626dd8d262848fd6a2d7147e76048e | [
"MIT"
] | null | null | null | load_pkl_model.py | Rayaction/ECO-paddle | 28c9adf0f6626dd8d262848fd6a2d7147e76048e | [
"MIT"
] | null | null | null | import pickle
import sys
sys.path.append("..")
from model import ECO
import paddle.fluid as fluid
# Load pickle, since pretrained model is too bigger than the threshold(150M), split them into 2 parts and then reload them
f0 = open('seg0.pkl', 'rb')
f1 = open('seg1.pkl', 'rb')
model_out = dict()
model_0 = pickle.load(f0)
model_1 = pickle.load(f1)
for i,key in enumerate(model_0):
model_out[key]=model_0[key]
for i,key in enumerate(model_1):
model_out[key]=model_1[key]
with fluid.dygraph.guard():
paddle_model = ECO.ECO(num_classes=101, num_segments=24)
paddle_model.load_dict(model_out)
fluid.dygraph.save_dygraph(paddle_model.state_dict(), 'ECO_FULL_RGB__seg16')
print('finished')
| 31.826087 | 123 | 0.714481 |
f7075e944abb59672f7df904d36157a6ba3e5bfa | 1,750 | py | Python | otp/level/LevelMgr.py | LittleNed/toontown-stride | 1252a8f9a8816c1810106006d09c8bdfe6ad1e57 | [
"Apache-2.0"
] | 3 | 2020-01-02T08:43:36.000Z | 2020-07-05T08:59:02.000Z | otp/level/LevelMgr.py | NoraTT/Historical-Commits-Project-Altis-Source | fe88e6d07edf418f7de6ad5b3d9ecb3d0d285179 | [
"Apache-2.0"
] | null | null | null | otp/level/LevelMgr.py | NoraTT/Historical-Commits-Project-Altis-Source | fe88e6d07edf418f7de6ad5b3d9ecb3d0d285179 | [
"Apache-2.0"
] | 4 | 2019-06-20T23:45:23.000Z | 2020-10-14T20:30:15.000Z | from toontown.toonbase.ToonPythonUtil import Functor
from otp.level import LevelMgrBase
class LevelMgr(LevelMgrBase.LevelMgrBase):
def __init__(self, level, entId):
LevelMgrBase.LevelMgrBase.__init__(self, level, entId)
self.geom = loader.loadModel(self.modelFilename)
if not self.geom:
import pdb
pdb.set_trace()
self.zoneNums = []
self.level.zoneNum2zoneId = {}
self.level.zoneId2zoneNum = {}
self.accept(self.level.getEntityOfTypeCreateEvent('zone'), self.handleZoneCreated)
def destroy(self):
del self.level.zoneIds
del self.level.zoneId2zoneNum
del self.level.zoneNum2zoneId
self.geom.removeNode()
del self.geom
LevelMgrBase.LevelMgrBase.destroy(self)
def handleZoneCreated(self, entId):
zoneEnt = self.level.getEntity(entId)
self.zoneNums.append(zoneEnt.entId)
self.privAssignZoneIds()
self.accept(self.level.getEntityDestroyEvent(entId), Functor(self.handleZoneDestroy, entId))
def handleZoneDestroy(self, entId):
zoneEnt = self.level.getEntity(entId)
del self.level.zoneId2zoneNum[self.level.zoneNum2zoneId[zoneEnt.entId]]
del self.level.zoneNum2zoneId[zoneEnt.entId]
self.zoneNums.remove(zoneEnt.entId)
self.privAssignZoneIds()
def privAssignZoneIds(self):
self.zoneNums.sort()
for i in xrange(len(self.zoneNums)):
zoneNum = self.zoneNums[i]
zoneEnt = self.level.getEntity(zoneNum)
zoneId = self.level.zoneIds[i]
zoneEnt.setZoneId(zoneId)
self.level.zoneNum2zoneId[zoneNum] = zoneId
self.level.zoneId2zoneNum[zoneId] = zoneNum
| 37.234043 | 100 | 0.668 |
f7077532ef595e987257cd5055e8eba48e9aa978 | 26,132 | py | Python | alphagradient/utils.py | nathanheidacker/AlphaGradient | cf031058f3e91381575e2df44cc029bcc7f4cc73 | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | alphagradient/utils.py | nathanheidacker/AlphaGradient | cf031058f3e91381575e2df44cc029bcc7f4cc73 | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | alphagradient/utils.py | nathanheidacker/AlphaGradient | cf031058f3e91381575e2df44cc029bcc7f4cc73 | [
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""Standard utility functions used throughout AlphaGradient"""
# Standard Imports
from __future__ import annotations
from abc import ABC, abstractmethod
import builtins
from datetime import (
date,
datetime,
time,
timedelta,
)
import math
from pathlib import Path
# Third Party Imports
import numpy as np
import pandas as pd
# Typing
from typing import (
TYPE_CHECKING,
Any,
Literal,
Generator,
Generic,
Iterable,
Optional,
TypeVar,
Union,
)
T = TypeVar("T")
class PropertyType(Generic[T]):
    """Static type describing a ``property`` object before it is bound.

    Parameterized by ``T``, the type produced by the property's getter.
    Used to annotate module-level property objects that have not yet been
    attached to a class instance.
    """

    def fget(self, *args: Any) -> T:
        """Getter signature stub; exists only so type-checkers know the return type."""
        ...
Property = builtins.property
"""A Type for builtin properties that have been bound to a class instance"""
PyNumber = Union[int, float]
"""Numeric type that does not include complex numbers (only native python types)"""
Number = Union[PyNumber, np.number, pd.core.arrays.numeric.NumericDtype]
"""Numeric type that does not include complex numbers"""
DatetimeLike = Union[pd.Timestamp, np.datetime64, date, datetime, str]
"""Objects convertable to python datetimes"""
TimeLike = Union[time, str]
"""Objects convertable to python time objects"""
DateOrTime = Union[DatetimeLike, time]
"""Objects that are either DatetimeLike or TimeLike in nature"""
if TYPE_CHECKING:
from typeshed import SupportsLessThanT as SLTT
_global_persistent_path: PropertyType[Path]
def auto_batch(iterable: Iterable) -> Generator:
    """
    Returns a generator which yields automatically sized batches

    Given a sized iterable, determines an optimal batch size to be used for
    multiprocessing purposes. Using this batch size, returns a generator which
    yields batches of the iterable with the optimal size

    Parameters:
        iterable: An iterable from which to create a batch generator

    Returns:
        The batch generator of the iterable input
    """
    # BUG FIX: materialize once up front. Previously the raw iterable was
    # passed to both auto_batch_size (which consumes it to measure its
    # length) and get_batches, so generator inputs arrived exhausted and
    # produced zero batches.
    items = list(iterable)
    return get_batches(items, auto_batch_size(items))
def auto_batch_size(iterable: Iterable) -> int:
    """
    Returns a multiprocessing-optimal batch size for an iterable

    The size follows a downward-opening parabola in the iterable's length:
    it peaks at 100 when the iterable holds 10,000 items and falls off by
    70 / 100,000,000 per squared unit of distance from that vertex, clamped
    to the interval [30, 100].

    Parameters:
        iterable (Iterable): Sized iterable to determine optimal batch size for

    Returns:
        The optimal batch size for multiprocessing
    """
    # Materialize so len() is guaranteed to work (eg. for generators).
    count = len(list(iterable))

    # Quadratic profile described above.
    score = 100 - (70 / 100_000_000) * (count - 10_000) ** 2

    # Clamp the truncated score to the supported range.
    return bounded(int(score), lower=30, upper=100)
def bounded(
    to_bound: SLTT, lower: Optional[SLTT] = None, upper: Optional[SLTT] = None
) -> SLTT:
    """
    Bounds an object between a lower and upper bound

    Given an object that defines behavior for comparison (__lt__, __gt__),
    returns the object bounded between the lower and upper bounds. Boundaries
    will be ommited if they are not provided (None). If lower and upper are not
    None, they must be of the same type as to_bound.

    Type Explanation:
        SLTT (SupportsLessThanT): A TypeVar which implements the __lt__ method.

    Parameters:
        to_bound (SLTT): the object to be bounded
        lower (Optional[SLTT]): the lower boundary of the operation
        upper (Optional[SLTT]): the upper boundary of the operation

    Returns:
        The bounded object

    Raises:
        ValueError: If neither 'lower' nor 'upper' is provided
    """
    if lower is None and upper is None:
        # BUG FIX: message previously read "must bespecified" (missing space).
        raise ValueError(
            "Of the parameters 'lower' and 'upper', at least one must be "
            "specified"
        )
    # BUG FIX: compare against None explicitly. Truthiness checks silently
    # ignored falsy-but-valid bounds such as 0 or 0.0.
    if lower is not None:
        to_bound = max(to_bound, lower)
    if upper is not None:
        to_bound = min(to_bound, upper)
    return to_bound
def deconstruct_dt(dt: DateOrTime) -> dict[str, float]:
    """
    Returns a dictionary of datetime attribute values on object 'dt'

    Given a DatetimeLike object, returns a dictionary where keys are the
    object's date and time related attribute names, and values are the object's
    associated attribute values.

    Parameters:
        dt (DateOrTime): the dt to deconstruct

    Returns:
        A dictionary of attributes and their associated values on dt

    Raises:
        TypeError: Raised if dt is not a datetime-like object, as it wont have
            the proper attributes.
    """
    # Accept string arguments by parsing them into a time first
    if isinstance(dt, str):
        dt = read_timestring(dt)

    date_attrs = ("year", "month", "day")
    time_attrs = ("hour", "minute", "second", "microsecond")

    # Select which attributes apply; note datetime must be checked before
    # date, since datetime subclasses date.
    if isinstance(dt, datetime):
        wanted = date_attrs + time_attrs
    elif isinstance(dt, time):
        wanted = time_attrs
    elif isinstance(dt, date):
        wanted = date_attrs
    else:
        raise TypeError(f"{dt=} is not a valid datetime object")

    return {name: getattr(dt, name) for name in wanted}
def get_batches(iterable: Iterable, size: int = 100) -> Generator:
    """
    Returns a generator of the iterable which yields batches of the given size

    Given an iterable, uses the size parameter to create a generator which
    yields batches of the iterable of the given size. The final batch may be
    smaller than 'size' when the iterable's length is not an exact multiple.

    Parameter:
        iterable: The iterable to yield batches of
        size: The batch size of the returned generator

    Returns:
        A generator which yields batches of size 'size' of the iterable
    """
    # Materialize so the iterable can be indexed (generators can't be sliced).
    items = list(iterable)
    total = len(items)
    # Slicing clamps automatically, so the last partial batch needs no
    # special casing.
    for begin in range(0, total, size):
        yield items[begin:begin + size]
def get_time(t: DateOrTime) -> time:
    """
    Given a timestring or datetime-like object, returns a datetime.time object

    Given an object t which represents a time or a datetime, returns a native
    python datetime.time object of the appropriate time. t can be an isoformat
    time string or datetime string, or a datetime-like object

    Parameters:
        dt (DateOrTime): The time object to convert

    Returns:
        The converted datetime.time object
    """
    # Anything that isn't already a time/timestring is datetime-like:
    # convert it and take the time component.
    if not isinstance(t, (time, str)):
        return to_datetime(t).time()
    return to_time(t)
def get_weekday(dt: DatetimeLike) -> str:
    """
    Returns the day of the week on which a DatetimeLike object falls

    Parameters:
        dt (DatetimeLike): The object whose weekday is determined

    Returns:
        String of the day of the week on which the DatetimeLike object falls
    """
    # Ordered to match datetime.weekday(): Monday == 0 ... Sunday == 6.
    names = (
        "Monday",
        "Tuesday",
        "Wednesday",
        "Thursday",
        "Friday",
        "Saturday",
        "Sunday",
    )
    return names[to_datetime(dt).weekday()]
def is_func(f: Any) -> bool:
    """
    Returns a boolean value indicating whether or not f is a kind of function

    Given an object f, returns a boolean value indicating whether or not the
    object is a function. Idenfities all python objects whose sole or primary
    purpose is to be called directly, rather than objects that simply support
    an implementation of __call__.

    Behavior is slightly different than the inspect module's isfunction(), as it
    includes methods (bound and unbound), as well as abstract, static, and class
    methods.

    A 'function' is an instance of any of the following:
        * function
        * method (bound or unbound)
        * staticmethod
        * classmethod
        * abstractmethod
        * lambda
        * built-in-function

    Parameters:
        f: The object who's status as a function is being determined

    Returns:
        True if f is a method, function, builtin-method-or-function, or lambda,
        else False
    """

    # Probe class providing both an unbound and a bound method.
    class _Plain:
        def probe(self):
            pass

    # Probe ABC providing an abstract method.
    class _Abstract(ABC):
        @abstractmethod
        def probe(self):
            pass

    # Probe function for plain/class/static method types.
    def _fn():
        pass

    _lamb = lambda: None

    # Every type the project considers a 'function'. Some of these collapse
    # to the same type in python 3, which is harmless for isinstance().
    function_types = (
        type(_fn),                    # function
        type(_Plain.probe),           # unbound method
        type(_Plain().probe),         # bound method
        type(_lamb),                  # lambda
        type(print),                  # builtin function/method
        type(_Abstract.probe),        # abstractmethod
        type(classmethod(_fn)),       # classmethod (raw descriptor)
        type(staticmethod(_fn)),      # staticmethod (raw descriptor)
    )

    return isinstance(f, function_types)
def nearest_expiry(
    expiry: DatetimeLike, method: Literal["after", "before", "both"] = "after"
) -> datetime:
    """
    Returns the nearest valid expiry to the input datetime object

    Determining expiries for options contracts can be difficult, because they
    must fall on a business day, and their expiry time must be the market close.
    Given an expiry whose validity is unknown, this function returns the
    nearest expiry that is guaranteed to be valid. If the given expiry is
    valid, it will be unchanged when it is returned.

    The method argument is used to determine how the 'nearest' is defined. It
    has three options: "after", "before", and "both"

    Method must be one of the following string literals:
        * "after": returns the nearest expiry that is AFTER the input expiry
        * "before": returns the nearest expiry that is BEFORE the input expiry.
        * | "both": compares the distances of the nearest before and after, and
          | return the smaller of the two. In the case that they are equal, the
          | date determined by "after" will be used.

    The default argument is "after" because using "before" or "both" can
    potentially lead to dangerous behavior for algorithms, as it can return an
    expiry which is before the current date of the algorithm. This can cause
    options contracts to initialize as expired. Only change the method
    argument if you are positive that the returned expiry will be greater
    than the algorithm's current date.

    Parameters:
        expiry (DatetimeLike):
            The expiry who's closest valid expiry will be determined

        method:
            One of "after", "before", or "both"

    Returns:
        The nearest valid expiry
    """
    # Ensuring expiry is a pydatetime
    expiry = to_datetime(expiry)

    # All expiries must expire at market close (4PM)
    expiry = set_time(expiry, "4:00 PM")

    # Change the expiry day if it is not a weekday.
    # weekday() > 4 means Saturday (5) or Sunday (6).
    if expiry.weekday() > 4:

        # Closest AFTER: jump forward to the following Monday.
        if method == "after":
            dist = 7 - expiry.weekday()
            expiry += timedelta(days=dist)

        # Closest BEFORE: fall back to the preceding Friday.
        elif method == "before":
            dist = expiry.weekday() - 4
            expiry -= timedelta(days=dist)

        # Comparing both; the strict '<' means ties resolve to "after",
        # matching the docstring above.
        elif method == "both":
            bdist = expiry.weekday() - 4
            adist = 7 - expiry.weekday()
            if bdist < adist:
                expiry -= timedelta(days=bdist)
            else:
                expiry += timedelta(days=adist)

    return expiry
def optimal_start(
    start: datetime,
    max_start: datetime,
    min_end: datetime,
    end: Optional[DatetimeLike] = None,
    t: Optional[TimeLike] = None,
) -> datetime:
    """
    Based an Environment's instantiated/tracked assets, returns an optimal datetime
    for starting a backtest

    Returns a backtest starting datetime that:
        * Is guaranteed to be within the date range of all intantiated assets
        * | Is guaranteed to have ample time for calculations of historical
          | volatility, beta, percent change etc. BEFORE the start date
        * Automatically adjusts to accomodate shorter ending periods

    Parameters:
        start:
            A datetime object indictating the actual starting datetime

        max_start:
            A datetime object indicating the maximum possible starting datetime

        min_end:
            A datetime object indicating the minimum possible ending datetime

        end (Optional[DatetimeLike]):
            The desired endpoint on which to base the optimal start point

        t (Optional[TimeLike]):
            The returned optimal start's time

    Returns:
        The optimal starting datetime
    """
    end = min_end if end is None else to_datetime(end)

    # If the maximum start date is before the minimum end date, there is
    # no valid 'optimal start', because there is no date range that allows
    # backtesting of all available data.
    if max_start >= end:
        return start

    # Determining the optimal start period. To avoid errors, we will not sync
    # to the beginning
    optimal_delta = (end - max_start) / 2
    optimal_date = max_start + optimal_delta

    # Setting the optimal date's time to midnight unless specified otherwise
    t = "00:00:00" if t is None else to_time(t)
    # BUG FIX: set_time returns a new datetime rather than mutating in place;
    # the previous code discarded its return value, so 't' was never applied.
    optimal_date = set_time(optimal_date, t)

    # Bounding the date to acceptable minimums and maximums
    lower_bound = set_time(max_start + timedelta(days=1), t)
    upper_bound = set_time(max_start + timedelta(days=365), t)
    return bounded(optimal_date, lower=lower_bound, upper=upper_bound)
def progress_print(to_print: Any, last: list[int] = [0]) -> None:
    """Prints, but returns the carriage to the front of the last print

    The mutable default 'last' is intentional: it persists between calls and
    remembers the length of the previous message so it can be blanked out
    before the new one is written. Callers should not pass 'last' themselves.
    """
    # Overwrite the previous message with spaces, then reset the carriage.
    print("\r" + (" " * last[0]), end="\r", flush=True)  # type: ignore[operator]
    print(to_print, end="", flush=True)
    # Remember this message's length for the next call's blanking pass.
    last[0] = len(str(to_print))
def read_timestring(timestring: str) -> time:
    """
    Given a timestring, returns a datetime.time object representative of the time

    This function reads in 'timestrings', which are one of two things:

        #. | Isoformat times as strings, using 24 hours
           | (eg 04:00:00, 18:30, 02:59:59.99, etc)

        #. | Strings based on 12 hour clocks
           | (see ag.utils.read_twelve_hour_timestring docs)

    Using this timestring, returns a python datetime.time object corresponding
    to the time in the timestring.

    Parameters:
        timestring:
            string representing the time

    Returns:
        The time object corresponding to the time in the timestring
    """
    try:
        # 12-hour-clock strings are tried first ('4:30 PM', '1', ...).
        return read_twelve_hour_timestring(timestring)
    except (TypeError, ValueError):
        # Fall back to 24-hour isoformat ('18:30', '02:59:59.99', ...).
        return time.fromisoformat(timestring)
def read_twelve_hour_timestring(timestring: str) -> time:
    """Reads a timestring based on a 12 hour clock and returns a time

    Given a timestring representing a time on a 12 hour clock, returns the
    appropriate time object

    Must be formatted as follows:
        * hour | This is the only required value, integer
        * minute | separated from hour by a colon, optional, integer
        * second | separated from minute by a colon, optional, float
        * AM/PM | string 'AM' or 'PM', separated from second by a space

    When AM or PM is not provided in the timestring, AM will be assumed.

    Valid Examples:
        * '4:30 PM'
        * '4:30 AM'
        * '1 PM'
        * '1'
        * '11:59:59.999 PM'
        * '12:00:00 AM'

    Invalid Examples:
        * '0:00'
        * '13:30'
        * '103 PM'
        * '0'
        * '22'
        * '4:30:99 PM'
        * '3:99 PM'

    Parameters:
        timestring: The string containing the time to convert to a time object

    Returns:
        The corresponding time object

    Raises:
        TypeError:
            When timestring is not a string. Only str objects can be parsed

        ValueError:
            When the timetring is invalid / improperly formatted.
    """
    # Timestrings must be strs
    if not isinstance(timestring, str):
        raise TypeError(f"timestring must be a string, got {type(timestring)}")

    # Splitting off an optional AM/PM suffix; AM is assumed when absent.
    parts = timestring.split(" ")
    ampm = parts[1] if len(parts) > 1 else "AM"

    # Getting individual time components
    info = parts[0].split(":")

    # isoformat is 00:00:00.00, max 3 colons
    if len(info) > 4:
        raise ValueError(f"Failed to parse timestring {timestring}")

    # Collect the attributes necessary to create a time object; components
    # not present in the string default to 0.
    tdict = {"hour": 0, "minute": 0, "second": 0, "microsecond": 0}
    attrs = ["hour", "minute", "second", "microsecond"]
    for attr, value in zip(attrs, info):
        if attr == "second" and "." in value:
            # BUG FIX: fractional seconds (eg '59.999', a documented valid
            # input) previously crashed on int('59.999'). Split them into
            # whole seconds plus microseconds.
            seconds = float(value)
            tdict["second"] = int(seconds)
            tdict["microsecond"] = round((seconds - int(seconds)) * 1_000_000)
        else:
            tdict[attr] = int(value)

    # hours less than 1 and more than 12 are off limits in 12 hour clocks
    if not 1 <= tdict["hour"] <= 12:
        raise ValueError(f"Failed to parse timestring {timestring}")

    # 12:30 AM is 00:30 isoformat
    # BUG FIX: this was previously '==' (a no-op comparison), so '12:xx AM'
    # incorrectly produced hour 12 instead of 0.
    if ampm == "AM" and tdict["hour"] == 12:
        tdict["hour"] = 0

    # 12:30 PM is 12:30 isoformat, 1:30 PM is 13:30 isoformat
    elif ampm == "PM" and tdict["hour"] < 12:
        tdict["hour"] += 12

    # Building and returning a time object; out-of-range minutes/seconds
    # (eg '3:99') raise ValueError here.
    return time(**tdict)
def set_time(dt: DatetimeLike, t: DateOrTime) -> datetime:
    """Sets the given datetime-like object to the given time

    Given a DatetimeLike object 'dt' and a time-like object 't', returns a
    datetime like object that shares the date of dt and the time of t.

    Very similar to datetime.combine, but accepts datetime objects for both
    inputs.

    Parameters:
        dt (DatetimeLike): Datetime to convert
        t (DateOrTime): Time to convert to

    Returns:
        python datetime.datetime object with converted time
    """
    # Normalize 't' to something deconstruct_dt can read time attrs from.
    if isinstance(t, str):
        t = read_timestring(t)
    elif not isinstance(t, time):
        t = to_datetime(t).time()

    # Replace only the time-related attributes of dt.
    replacements = deconstruct_dt(t)
    return to_datetime(dt).replace(**replacements)  # type: ignore [arg-type]
def timestring(t: DateOrTime) -> str:
    """Converts a time-like object to a 12-hour-clock timestring

    Given a time-like object t, returns a timestring represented by the
    12-hour-clock (eg. 4:30 PM).

    Parameters:
        t (DateOrTime):
            date or time object to read into a 12-hour-clock-based timestring

    Returns:
        A string representing the time on a 12-hour-clock
    """
    # Ensuring that t is a time object
    if not isinstance(t, time):
        t = to_datetime(t).time()

    # Deconstructing components to create a time string
    ampm = "AM"
    hour = t.hour
    minute = t.minute if t.minute > 9 else f"0{t.minute}"
    if hour >= 12:
        # BUG FIX: noon and later are PM; previously hour 12 was labeled AM.
        ampm = "PM"
        if hour > 12:
            hour -= 12
    elif hour == 0:
        # BUG FIX: midnight is '12:xx AM' on a 12-hour clock, not '0:xx AM'.
        hour = 12
    return f"{hour}:{minute} {ampm}"
def to_datetime(dtlike: DatetimeLike) -> datetime:
    """
    Given a datetime-like object, converts it to a python standard datetime

    Parameters:
        dtlike (DatetimeLike):
            The Datetime-convertable object

    Returns:
        The converted python datetime

    Raises:
        TypeError: Only accepts python-datetime-convertable objects
    """
    # BUG FIX: pd.Timestamp must be checked BEFORE datetime. Timestamp
    # subclasses datetime, so the datetime branch previously matched first
    # and returned the Timestamp unconverted.
    if isinstance(dtlike, pd.Timestamp):
        return dtlike.to_pydatetime()
    elif isinstance(dtlike, datetime):
        return dtlike
    elif isinstance(dtlike, np.datetime64):
        return pd.Timestamp(dtlike).to_pydatetime()
    elif isinstance(dtlike, date):
        # Plain dates become midnight of that day.
        return datetime.combine(dtlike, datetime.min.time())
    elif isinstance(dtlike, str):
        return datetime.fromisoformat(dtlike)

    raise TypeError(f"Can not convert passed object {dtlike} to python datetime")
def to_step(current: datetime, delta: Union[DateOrTime, timedelta, float]) -> timedelta:
    """
    Converts an ambiguous delta object to a python timedelta

    Given an amiguous object which can in some way be interpreted as a timedelta
    relative to some 'current' time, converts that object to an appropriate
    timedelta object, or 'step' in time.

    Interpretation rules (order matters):
        * str: first tried as a timestring applied to 'current's date, then
          as a full isoformat datetime
        * time: applied to 'current's date
        * int/float: taken as a number of days after 'current'
        * timedelta: added to 'current'
        * anything else DatetimeLike: converted directly

    Parameters:
        current:
            The 'current' time, which determines how to interpret the delta

        delta (Union[DateOrTime, timedelta, float]);
            The object being passed that may represent a 'step' in time

    Returns:
        the appropriate timedelta 'step'

    Raises:
        TypeError:
            When passed a type that can not be coerced/interpreted

        ValueError:
            When a type-appropriate object can not be coerced, or is in some way
            invalid (eg. the step in time is BEFORE the current time)
    """
    # Multiple parses must be made on strings to successfully coerce all of them
    if isinstance(delta, str):
        try:
            # Timestring first: same date as 'current', new time.
            delta = set_time(current, read_timestring(delta))
        except ValueError:
            # Otherwise the string must be a full isoformat datetime.
            delta = datetime.fromisoformat(delta)  # type: ignore[arg-type]

    elif isinstance(delta, time):
        delta = set_time(current, delta)

    elif isinstance(delta, (float, int)):
        # Numbers are interpreted as a count of days from 'current'.
        delta = current + timedelta(days=delta)

    elif isinstance(delta, timedelta):
        delta = current + delta

    # if isinstance(delta, DatetimeLike):
    else:
        delta = to_datetime(delta)

    # By this point 'delta' is an absolute datetime; it must lie after
    # 'current' to represent a forward step.
    if delta > current:
        return delta - current

    raise ValueError(
        f"Passed delta {delta} is prior to current time {current}. Please "
        "choose a time AFTER the current date."
    )
def to_time(tlike: TimeLike) -> time:
    """
    Given a TimeLike object, converts it to a python standard time object

    Parameters:
        tlike:
            The time-convertable object

    Returns:
        The converted python time object

    Raises:
        TypeError: Only accepts python-time-convertable objects
    """
    # Already a time: nothing to do.
    if isinstance(tlike, time):
        return tlike
    # Strings are parsed as 12-hour or isoformat timestrings.
    if isinstance(tlike, str):
        return read_timestring(tlike)

    raise TypeError(f"Can not convert passed object {tlike} to python time")
class NullClass:
    """
    No-op stand-in for a function, class, or module attribute.

    Every interaction short-circuits: calling an instance, accessing any
    attribute on it, or using it as a context manager simply yields the same
    instance again, so these operations can be chained indefinitely without
    error. Instances are also falsy, so they behave sensibly in truth tests.

    Assign an instance in place of an optional collaborator (a logger, a
    progress bar, a printer) to avoid sprinkling conditional checks through
    the code:

    Examples:

        .. highlight:: python
        .. code-block:: python

            class MyClass:
                def __init__(self, data, verbose=False):
                    # Safe to call whether or not verbose is set
                    self.print = print if verbose else NullClass()
                    self.print("Initialized as Verbose!")

                    # Safe to use as a context manager as well
                    self.tqdm = tqdm.progress_bar if verbose else NullClass()
                    with self.tqdm(total=1000) as pbar:
                        while condition:
                            self.do_something()
                            pbar.update(1)
    """

    def __bool__(self) -> bool:
        # Null objects are falsy so they can be tested like optional values.
        return False

    def __call__(self, *args: Any, **kwargs: Any) -> NullClass:
        # Calling a null object is a no-op that stays chainable.
        return self

    def __getattr__(self, attr: str) -> NullClass:
        # Any attribute access falls through to the same null object.
        return self

    def __enter__(self, *args, **kwargs) -> NullClass:
        # Supports `with` blocks; the "resource" is the null object itself.
        return self

    def __exit__(self, *args, **kwargs) -> None:
        # Nothing to clean up.
        pass
| 30.85242 | 94 | 0.641321 |
f7081d04f130b03fc952042146402d7f447fa934 | 604 | py | Python | cuor/harvester/general.py | tocororo/cuor | 0202970f19b927562f34eb4367ea4f91e08e6706 | [
"MIT"
] | null | null | null | cuor/harvester/general.py | tocororo/cuor | 0202970f19b927562f34eb4367ea4f91e08e6706 | [
"MIT"
] | null | null | null | cuor/harvester/general.py | tocororo/cuor | 0202970f19b927562f34eb4367ea4f91e08e6706 | [
"MIT"
] | null | null | null | from cuor.organizations.api import OrganizationRecord
import traceback
def remove_nulls(d):
    """Return a copy of ``d`` without the keys whose value is None.

    Only None is dropped; other falsy values (0, '', [], False) are kept.
    """
    return {key: value for key, value in d.items() if value is not None}
def _assing_if_exist(data, record, field):
if field in record:
data[field] = record[field]
def insert_in_cuor(data, inst):
    """Persist ``data`` as an organization record in CuOR.

    Creates or updates the record, committing to the DB and reindexing
    immediately. ``inst`` is unused by the live code path; it is only
    referenced by the commented-out debug output below.
    """
    # try:
    OrganizationRecord.create_or_update(None, data, dbcommit=True, reindex=True)
    # except Exception as e:
    #     print(e)
    #     print("------------")
    #print(data)
    #print("------------")
    #print(inst)
    #print("------------")
    #print(traceback.format_exc())
f7082209033fa16bf0b05b5db5d4f2960b3c91f8 | 4,412 | py | Python | hotsos/core/host_helpers/config.py | KellenRenshaw/hotsos | e3fc51ab7f8af606a5846a3486a7fda23d761583 | [
"Apache-2.0"
] | 6 | 2021-10-01T19:46:14.000Z | 2022-03-31T17:05:08.000Z | hotsos/core/host_helpers/config.py | KellenRenshaw/hotsos | e3fc51ab7f8af606a5846a3486a7fda23d761583 | [
"Apache-2.0"
] | 111 | 2021-10-01T18:18:17.000Z | 2022-03-29T12:23:20.000Z | hotsos/core/host_helpers/config.py | KellenRenshaw/hotsos | e3fc51ab7f8af606a5846a3486a7fda23d761583 | [
"Apache-2.0"
] | 10 | 2021-09-29T14:47:54.000Z | 2022-03-18T14:52:16.000Z | import os
import re
class ConfigBase(object):
    """Base class for config-file helpers.

    Provides integer range squash/expand utilities and an existence check for
    the file at ``self.path``. Subclasses implement :meth:`get`.
    """

    def __init__(self, path):
        """@param path: path to the configuration file."""
        self.path = path

    @classmethod
    def squash_int_range(cls, ilist):
        """Takes a list of integers and squashes consecutive values into a
        string range (e.g. [1, 2, 3, 5] -> "1-3,5"). Returns the ranges
        joined into a single comma-separated string.
        """
        irange = []
        rstart = None
        rprev = None

        # BUG FIX: sorted() returns a new list; the previous bare
        # 'sorted(ilist)' discarded the result, leaving ilist unsorted.
        ilist = sorted(ilist)
        for i, value in enumerate(ilist):
            if rstart is None:
                if i == (len(ilist) - 1):
                    # Single trailing value with no open range.
                    # BUG FIX: join() below requires strings, not ints.
                    irange.append(str(value))
                    break

                rstart = value

            if rprev is not None:
                if rprev != (value - 1):
                    # Gap found: close the open range.
                    if rstart == rprev:
                        irange.append(str(rstart))
                    else:
                        irange.append("{}-{}".format(rstart, rprev))

                    if i == (len(ilist) - 1):
                        irange.append(str(value))

                    rstart = value
                elif i == (len(ilist) - 1):
                    irange.append("{}-{}".format(rstart, value))
                    break

            rprev = value

        return ','.join(irange)

    @classmethod
    def expand_value_ranges(cls, ranges):
        """
        Takes a string containing ranges of values such as 1-3 and 4,5,6,7 and
        expands them into a single sorted list of ints. Empty/None input is
        returned unchanged.
        """
        if not ranges:
            return ranges

        expanded = []
        ranges = ranges.split(',')
        for subrange in ranges:
            # expand ranges
            subrange = subrange.partition('-')
            if subrange[1] == '-':
                expanded += range(int(subrange[0]), int(subrange[2]) + 1)
            else:
                for val in subrange[0].split():
                    expanded.append(int(val))

        return sorted(expanded)

    @property
    def exists(self):
        """True if the config file exists on disk."""
        return os.path.exists(self.path)

    def get(self, key, section=None, expand_to_list=False):
        """Look up a config value. Must be implemented by subclasses."""
        raise NotImplementedError
class SectionalConfigBase(ConfigBase):
    """Config parser for ini-style files with [section] headers.

    Values are stored per-section and also in a flattened, sectionless map
    for convenience. Quoted string values are unquoted and 'true'/'false'
    (case-insensitive) become booleans.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # section name -> {key: value}
        self._sections = {}
        # this provides an easy sectionless lookup but is prone to collisions.
        # always returns the last value for key found in config file.
        self._flattened_config = {}
        self._load()

    @staticmethod
    def bool_str(val):
        """Return True/False for 'true'/'false' (any case), else val unchanged."""
        if val.lower() == "true":
            return True
        elif val.lower() == "false":
            return False

        return val

    @property
    def all(self):
        """All parsed sections as {section: {key: value}}."""
        return self._sections

    def get(self, key, section=None, expand_to_list=False):
        """ If section is None use flattened """
        if section is None:
            value = self._flattened_config.get(key)
        else:
            value = self._sections.get(section, {}).get(key)

        if expand_to_list:
            # e.g. "1-3" -> [1, 2, 3]; see ConfigBase.expand_value_ranges.
            return self.expand_value_ranges(value)

        return value

    @property
    def dump(self):
        """Raw contents of the config file."""
        with open(self.path) as fd:
            return fd.read()

    def _load(self):
        """Parse the file at self.path into the section maps (no-op if absent)."""
        if not self.exists:
            return

        current_section = None
        with open(self.path) as fd:
            for line in fd:
                # Skip comment lines.
                if re.compile(r"^\s*#").search(line):
                    continue

                # section names are not expected to contain whitespace
                ret = re.compile(r"^\s*\[(\S+)].*").search(line)
                if ret:
                    current_section = ret.group(1)
                    self._sections[current_section] = {}
                    continue

                # Key/value lines before any section header are ignored.
                if current_section is None:
                    continue

                # key names may contain whitespace
                # values may contain whitespace
                expr = r"^\s*(\S+(?:\s+\S+)?)\s*=\s*(.+)\s*"
                ret = re.compile(expr).search(line)
                if ret:
                    key = ret.group(1)
                    val = self.bool_str(ret.group(2))
                    if type(val) == str:
                        # Trim whitespace and surrounding quotes.
                        val = val.strip()
                        for char in ["'", '"']:
                            val = val.strip(char)

                    self._sections[current_section][key] = val
                    self._flattened_config[key] = val
f708236a902da8255569087afbd1a500653aa211 | 7,208 | py | Python | modules/android.py | inzanez/pdfium-lib | f4e6fbb3b29c100ff3f291944944fd7e38fafbcd | [
"MIT"
] | 69 | 2021-01-27T18:53:22.000Z | 2022-02-25T00:41:41.000Z | modules/android.py | inzanez/pdfium-lib | f4e6fbb3b29c100ff3f291944944fd7e38fafbcd | [
"MIT"
] | 31 | 2021-01-23T17:14:46.000Z | 2022-03-04T18:06:23.000Z | modules/android.py | inzanez/pdfium-lib | f4e6fbb3b29c100ff3f291944944fd7e38fafbcd | [
"MIT"
] | 19 | 2021-01-27T18:57:07.000Z | 2022-01-04T02:56:03.000Z | import glob
import os
import tarfile
from subprocess import check_call
import modules.config as c
import modules.functions as f
def run_task_build_pdfium():
f.debug("Building PDFium...")
target = "android"
build_dir = os.path.join("build", target)
f.create_dir(build_dir)
target_dir = os.path.join(build_dir, "pdfium")
f.remove_dir(target_dir)
cwd = build_dir
command = " ".join(
[
"gclient",
"config",
"--unmanaged",
"https://pdfium.googlesource.com/pdfium.git",
]
)
check_call(command, cwd=cwd, shell=True)
gclient_file = os.path.join(build_dir, ".gclient")
f.append_to_file(gclient_file, "target_os = [ 'android' ]")
cwd = build_dir
command = " ".join(["gclient", "sync"])
check_call(command, cwd=cwd, shell=True)
cwd = target_dir
command = " ".join(["git", "checkout", c.pdfium_git_commit])
check_call(command, cwd=cwd, shell=True)
def run_task_patch():
f.debug("Patching...")
source_dir = os.path.join("build", "android", "pdfium")
# build gn
source_file = os.path.join(
source_dir,
"BUILD.gn",
)
if f.file_line_has_content(source_file, 25, " ]\n"):
f.replace_line_in_file(source_file, 25, ' "FPDFSDK_EXPORTS",\n ]\n')
f.debug("Applied: Build GN")
else:
f.debug("Skipped: Build GN")
# build gn flags
source_file = os.path.join(
source_dir,
"BUILD.gn",
)
if f.file_line_has_content(source_file, 19, " cflags = []\n"):
f.replace_line_in_file(
source_file, 19, ' cflags = [ "-fvisibility=default" ]\n'
)
f.debug("Applied: Build GN Flags")
else:
f.debug("Skipped: Build GN Flags")
pass
def run_task_build():
f.debug("Building libraries...")
current_dir = os.getcwd()
# configs
for config in c.configurations_android:
# targets
for target in c.targets_android:
main_dir = os.path.join(
"build",
target["target_os"],
"pdfium",
"out",
"{0}-{1}-{2}".format(target["target_os"], target["target_cpu"], config),
)
f.remove_dir(main_dir)
f.create_dir(main_dir)
os.chdir(
os.path.join(
"build",
target["target_os"],
"pdfium",
)
)
# generating files...
f.debug(
'Generating files to arch "{0}" and configuration "{1}"...'.format(
target["target_cpu"], config
)
)
arg_is_debug = "true" if config == "debug" else "false"
args = []
args.append('target_os="{0}"'.format(target["pdfium_os"]))
args.append('target_cpu="{0}"'.format(target["target_cpu"]))
args.append("use_goma=false")
args.append("is_debug={0}".format(arg_is_debug))
args.append("pdf_use_skia=false")
args.append("pdf_use_skia_paths=false")
args.append("pdf_enable_xfa=false")
args.append("pdf_enable_v8=false")
args.append("is_component_build=true")
args.append("pdf_is_standalone=true")
args.append("pdf_bundle_freetype=true")
if config == "release":
args.append("symbol_level=0")
args_str = " ".join(args)
command = " ".join(
[
"gn",
"gen",
"out/{0}-{1}-{2}".format(
target["target_os"], target["target_cpu"], config
),
"--args='{0}'".format(args_str),
]
)
check_call(command, shell=True)
# compiling...
f.debug(
'Compiling to arch "{0}" and configuration "{1}"...'.format(
target["target_cpu"], config
)
)
command = " ".join(
[
"ninja",
"-C",
"out/{0}-{1}-{2}".format(
target["target_os"], target["target_cpu"], config
),
"pdfium",
"-v",
]
)
check_call(command, shell=True)
os.chdir(current_dir)
def run_task_install():
f.debug("Installing libraries...")
# configs
for config in c.configurations_android:
f.remove_dir(os.path.join("build", "android", config))
f.create_dir(os.path.join("build", "android", config))
# targets
for target in c.targets_android:
out_dir = "{0}-{1}-{2}".format(
target["target_os"], target["target_cpu"], config
)
source_lib_dir = os.path.join("build", "android", "pdfium", "out", out_dir)
lib_dir = os.path.join("build", "android", config, "lib")
target_dir = os.path.join(lib_dir, target["android_cpu"])
f.remove_dir(target_dir)
f.create_dir(target_dir)
for basename in os.listdir(source_lib_dir):
if basename.endswith(".so"):
pathname = os.path.join(source_lib_dir, basename)
if os.path.isfile(pathname):
f.copy_file2(pathname, target_dir)
# include
include_dir = os.path.join("build", "android", "pdfium", "public")
target_include_dir = os.path.join("build", "android", config, "include")
f.remove_dir(target_include_dir)
f.create_dir(target_include_dir)
for basename in os.listdir(include_dir):
if basename.endswith(".h"):
pathname = os.path.join(include_dir, basename)
if os.path.isfile(pathname):
f.copy_file2(pathname, target_include_dir)
def run_task_test():
f.debug("Testing...")
for config in c.configurations_android:
for target in c.targets_android:
lib_dir = os.path.join(
"build", "android", config, "lib", target["android_cpu"]
)
command = " ".join(["file", os.path.join(lib_dir, "libpdfium.so")])
check_call(command, shell=True)
def run_task_archive():
f.debug("Archiving...")
current_dir = os.getcwd()
lib_dir = os.path.join(current_dir, "build", "android")
output_filename = os.path.join(current_dir, "android.tgz")
tar = tarfile.open(output_filename, "w:gz")
for configuration in c.configurations_android:
tar.add(
name=os.path.join(lib_dir, configuration),
arcname=os.path.basename(os.path.join(lib_dir, configuration)),
filter=lambda x: (
None
if "_" in x.name
and not x.name.endswith(".h")
and not x.name.endswith(".so")
and os.path.isfile(x.name)
else x
),
)
tar.close()
| 29.182186 | 88 | 0.517064 |
f70828b88a42c74567ef99b3f01c0cad6c366906 | 2,053 | py | Python | tensorflow_datasets/image/__init__.py | ubershmekel/datasets | 555220a3bf048a1bed6aed5db97696fb83088b83 | [
"Apache-2.0"
] | null | null | null | tensorflow_datasets/image/__init__.py | ubershmekel/datasets | 555220a3bf048a1bed6aed5db97696fb83088b83 | [
"Apache-2.0"
] | null | null | null | tensorflow_datasets/image/__init__.py | ubershmekel/datasets | 555220a3bf048a1bed6aed5db97696fb83088b83 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2019 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Image datasets."""
from tensorflow_datasets.image.cats_vs_dogs import CatsVsDogs
from tensorflow_datasets.image.celeba import CelebA
from tensorflow_datasets.image.celebahq import CelebAHq
from tensorflow_datasets.image.chexpert import Chexpert
from tensorflow_datasets.image.cifar import Cifar10
from tensorflow_datasets.image.cifar import Cifar100
from tensorflow_datasets.image.coco import Coco2014
from tensorflow_datasets.image.colorectal_histology import ColorectalHistology
from tensorflow_datasets.image.colorectal_histology import ColorectalHistologyLarge
from tensorflow_datasets.image.diabetic_retinopathy_detection import DiabeticRetinopathyDetection
from tensorflow_datasets.image.flowers import TFFlowers
from tensorflow_datasets.image.horses_or_humans import HorsesOrHumans
from tensorflow_datasets.image.image_folder import ImageLabelFolder
from tensorflow_datasets.image.imagenet import Imagenet2012
from tensorflow_datasets.image.lsun import Lsun
from tensorflow_datasets.image.mnist import FashionMNIST
from tensorflow_datasets.image.mnist import KMNIST
from tensorflow_datasets.image.mnist import MNIST
from tensorflow_datasets.image.omniglot import Omniglot
from tensorflow_datasets.image.open_images import OpenImagesV4
from tensorflow_datasets.image.quickdraw import QuickdrawBitmap
from tensorflow_datasets.image.rock_paper_scissors import RockPaperScissors
from tensorflow_datasets.image.svhn import SvhnCropped
| 50.073171 | 97 | 0.857282 |
f7083d021a8246eb35e98c07308c8cc6808d8bec | 9,979 | py | Python | ertk/stats.py | bagustris/emotion | 5bd83d3ca8a6eb930f449b7a990fefd75d0c7d36 | [
"MIT"
] | 3 | 2020-11-03T14:54:22.000Z | 2021-04-12T12:23:10.000Z | ertk/stats.py | bagustris/emotion | 5bd83d3ca8a6eb930f449b7a990fefd75d0c7d36 | [
"MIT"
] | null | null | null | ertk/stats.py | bagustris/emotion | 5bd83d3ca8a6eb930f449b7a990fefd75d0c7d36 | [
"MIT"
] | 2 | 2020-12-03T06:21:59.000Z | 2021-01-16T04:47:12.000Z | from functools import partial
from typing import Callable, List, Union
import numpy as np
import pandas as pd
from scipy.stats import friedmanchisquare, rankdata
from sklearn.metrics.pairwise import pairwise_distances
from statsmodels.stats.libqsturng import qsturng
# Type alias: a 2-D table of floats represented as a list of rows.
Matrix = List[List[float]]
def friedman_nemenyi(table: pd.DataFrame, alpha: float = 0.05):
    """Runs Friedman test on given table and optionally graphs a
    critical-difference diagram.

    Args:
    -----
    table: DataFrame
        The data table, with subjects as rows and independent variable
        (condition) as columns.
    alpha: float
        Significance level, must be in the range (0, 1), default is
        0.05.

    Returns:
    --------
    pval: float
        The p-value for the Friedman test.
    cd: float
        The critical difference from the Nemenyi post-hoc test.
    df: pd.DataFrame
        A table containing statistics relating to ranking and average
        values of the conditions. The dataframe has these columns:
        "mean_rank", "mean", "std", "median", "mad", "effect_size".
    """
    _, pval = friedmanchisquare(*table.transpose().to_numpy())
    names = list(table.columns)
    # Mean rank of each condition across subjects (rank 1 = largest value).
    avgrank = rankdata(-table.to_numpy(), axis=1).mean(0)
    # Mean absolute deviation per condition, computed manually because
    # DataFrame.mad() was deprecated in pandas 1.5 and removed in 2.0.
    # This is numerically identical to the old table.mad().
    mad = (table - table.mean()).abs().mean()
    df = pd.DataFrame(
        {
            "mean_rank": avgrank,
            "mean": table.mean(),
            "std": table.std(),
            "median": table.median(),
            "mad": mad,
        },
        index=names,
    ).sort_values("mean_rank")
    topclf = df.index[0]
    n, k = table.shape
    # Effect size is calculated in terms of differences in MAD, relative to
    # the top-ranked condition (pooled, analogous to Cohen's d).
    df["effect_size"] = (df.loc[topclf, "median"] - df["median"]) / np.sqrt(
        ((n - 1) * df.loc[topclf, "mad"] ** 2 + (n - 1) * df["mad"] ** 2) / (2 * n - 2)
    )
    # Nemenyi critical difference from the studentised range distribution.
    cd = qsturng(1 - alpha, k, np.inf) * np.sqrt((k * (k + 1)) / (12 * n))
    return pval, cd, df
def _get_dist_func(metric: Union[Callable, str], **kwargs):
if callable(metric):
return partial(metric, **kwargs)
else:
if metric != "minkowski" and "p" in kwargs:
del kwargs["p"]
if metric != "mahalanobis" and "VI" in kwargs:
del kwargs["VI"]
return partial(pairwise_distances, metric=metric, **kwargs)
def bhattacharyya_dist(x: np.ndarray, y: np.ndarray, pinv: bool = False):
    """Calculate Bhattacharyya distance between multivariate Gaussian
    distributions fitted to the rows of two data matrices.

    Args:
    -----
    x: array-like
        Data matrix of shape (n1_samples, n_features) corresponding to
        the first group.
    y: array-like
        Data matrix of shape (n2_samples, n_features) corresponding to
        the second group.
    pinv: bool
        Use pseudoinverse instead of inverse. This is useful if the
        covariance matrices don't have full rank or otherwise aren't
        invertible.
    """
    mean_x = np.mean(x, axis=0)[:, None]
    mean_y = np.mean(y, axis=0)[:, None]
    cov_x = np.cov(x, rowvar=False)
    cov_y = np.cov(y, rowvar=False)
    cov_avg = (cov_x + cov_y) / 2
    # slogdet avoids overflow/underflow of the raw determinant.
    _, logdet_x = np.linalg.slogdet(cov_x)
    _, logdet_y = np.linalg.slogdet(cov_y)
    _, logdet_avg = np.linalg.slogdet(cov_avg)
    if pinv:
        inv_avg = np.linalg.pinv(cov_avg, hermitian=True, rcond=1e-8)
    else:
        inv_avg = np.linalg.inv(cov_avg)
    diff = mean_x - mean_y
    dist = (
        diff.T.dot(inv_avg).dot(diff) / 8
        + logdet_avg / 2
        - logdet_x / 4
        - logdet_y / 4
    )
    return dist.item()
def corr_ratio(x: np.ndarray, groups: Union[List[int], np.ndarray]):
    """Calculates correlation ratio (eta) for each feature using the given
    groups.

    Args:
    -----
    x: numpy.ndarray
        Data matrix, with shape (n_instances, n_features).
    groups: list or numpy.ndarray
        1D array of group assignments of length n_instances. Groups
        should be labelled from 0 to G - 1 inclusive, where G is the
        number of groups.

    Returns:
    --------
    eta: numpy.ndarray
        1D array of correlation coefficients of length n_features. Each
        value is in [0, 1] except if a feature takes only one value, in
        which case eta will be nan.
    """
    labels = np.array(groups)
    n_groups = labels.max() + 1
    sizes = np.bincount(labels)
    grand_mean = x.mean(axis=0)
    # Per-group feature means, stacked into shape (n_groups, n_features).
    group_means = np.stack(
        [x[labels == g].mean(axis=0) for g in range(n_groups)]
    )
    # eta^2 = between-group variation / total variation.
    between = np.sum(sizes[:, None] * (group_means - grand_mean) ** 2, axis=0)
    total = np.sum((x - grand_mean) ** 2, axis=0)
    # Constant features give 0/0 -> nan, silenced deliberately.
    with np.errstate(divide="ignore", invalid="ignore"):
        eta_squared = between / total
    return np.sqrt(eta_squared)
def dunn(
    x: np.ndarray,
    clusters: Union[List[int], np.ndarray],
    intra_method: str = "mean",
    inter_method: str = "cent",
    metric: Union[Callable, str] = "l2",
    p: int = 2,
):
    """Calculates the Dunn index for cluster "goodness".

    The index is the smallest inter-cluster distance divided by the
    largest intra-cluster spread; larger values indicate more compact,
    better-separated clusters.

    Args:
    -----
    x: numpy.ndarray
        Data matrix, with shape (n_instances, n_features).
    clusters: list or numpy.ndarray
        1D array of cluster assignments of length n_instances. Clusters
        should be labelled from 0 to C - 1 inclusive, where C is the
        number of clusters.
    intra_method: str
        Method for calculating intra-cluster distance. One of "max",
        "mean", "cent".
    inter_method: str
        Method for calculating inter-cluster distance. One of "cent".
    metric: str or callable
        Distance metric. If str, must be one of the sklearn or scipy
        distance methods. If callable, must take one positional argument
        and return a pairwise distance matrix.
    p: int
        Value of p for p-norm when using "lp" distance metric.

    Returns:
    --------
    dunn: float
        The Dunn index for this data and cluster assignment.
    """
    clusters = np.array(clusters, dtype=int)
    n_clusters = clusters.max() + 1
    # d(A) gives a pairwise distance matrix; d(A, B) cross-distances.
    d = _get_dist_func(metric, p=p)
    # NOTE(review): an unrecognised intra_method/inter_method silently
    # leaves zeros in `intra`/`inter` — confirm callers only pass the
    # documented values.
    intra = np.zeros(n_clusters)
    for c in range(n_clusters):
        clust_data = x[clusters == c]
        if intra_method == "max":
            # Upper triangle only, to avoid counting each pair twice.
            idx = np.triu_indices(len(clust_data))
            intra[c] = d(clust_data)[idx].max()
        elif intra_method == "mean":
            idx = np.triu_indices(len(clust_data))
            intra[c] = d(clust_data)[idx].mean()
        elif intra_method == "cent":
            # Mean distance of members to the cluster centroid.
            mean = clust_data.mean(0)
            intra[c] = d(clust_data, mean[None, :]).mean()
    inter = np.zeros((n_clusters, n_clusters))
    for i in range(n_clusters):
        inter[i, i] = np.inf  # To avoid min = 0
        for j in range(i + 1, n_clusters):
            if inter_method == "cent":
                # Distance between cluster centroids.
                mean_i = x[clusters == i].mean(0)
                mean_j = x[clusters == j].mean(0)
                inter[i, j] = inter[j, i] = d(mean_i[None, :], mean_j[None, :])
    return inter.min() / intra.max()
def kappa(data: np.ndarray):
    """Calculates Fleiss' kappa for inter-rater agreement.

    Args:
    -----
    data: numpy.ndarray
        The data matrix, in the form (raters x units).
    """
    categories = np.unique(data)
    n_raters, n_units = data.shape
    # counts[u, c]: number of raters assigning category c to unit u.
    counts = np.stack([(data == cat).sum(axis=0) for cat in categories], axis=1)
    # Overall proportion of assignments falling in each category.
    p_j = counts.sum(axis=0) / (n_units * n_raters)
    assert np.isclose(p_j.sum(), 1)
    # Chance agreement and mean observed per-unit agreement.
    expected = np.sum(p_j ** 2)
    per_unit = ((counts ** 2).sum(axis=1) - n_raters) / (
        n_raters * (n_raters - 1)
    )
    observed = np.mean(per_unit)
    return (observed - expected) / (1 - expected)
class Deltas:
    """Standard disagreement (delta) metrics for Krippendorff's alpha."""

    @staticmethod
    def nominal(c: int, k: int):
        """0.0 for identical categories, 1.0 for any disagreement."""
        return 1.0 if c != k else 0.0

    @staticmethod
    def interval(c: float, k: float):
        """Squared difference between the two category values."""
        diff = c - k
        return diff * diff
def alpha(
    data: np.ndarray,
    delta: Union[Callable[[int, int], float], List[List[float]], str] = "nominal",
):
    """Calculates Krippendorff's alpha coefficient [1, sec. 11.3] for
    inter-rater agreement.

    [1] K. Krippendorff, Content analysis: An introduction to its
    methodology. Sage publications, 2004.

    Args:
    -----
    data: numpy.ndarray
        The data matrix, shape (n_raters, n_units). Each cell (i, j)
        represents the value assigned to unit j by rater i, or 0
        representing no response.
    delta: callable, 2-D array-like or str
        The delta (disagreement) metric: one of the strings "nominal",
        "ordinal" or "interval", a callable taking two category indices,
        or a 2-D matrix indexed as delta[c][k]. Default is the nominal
        metric, which takes the value 1 in case c != k and 0 otherwise.
    """
    # The following implementation was based off the Wikipedia article:
    # https://en.wikipedia.org/wiki/Krippendorff%27s_alpha
    # Response categories go from 1 to R, 0 represents no response
    R = np.max(data)
    # counts[u, c]: number of raters assigning category c to unit u.
    counts = np.apply_along_axis(lambda x: np.bincount(x, minlength=R + 1), 0, data).T
    count_sum = np.sum(counts, 0)
    assert len(count_sum) == R + 1

    def ordinal(c: int, k: int):
        if k < c:
            c, k = k, c
        s = (
            sum(count_sum[g] for g in range(c, k + 1))
            - (count_sum[c] + count_sum[k]) / 2
        )
        return s ** 2

    if isinstance(delta, str):
        delta = {
            "nominal": Deltas.nominal,
            "ordinal": ordinal,
            "interval": Deltas.interval,
        }[delta]
    if not callable(delta):
        try:
            delta[0][0]
        except IndexError:
            raise TypeError("delta must be either str, callable or 2D array.")

        # Bind the matrix to its own name *before* rebinding `delta`.
        # The previous code read `delta` lazily inside the closure, but by
        # call time `delta` referred to the wrapper function itself, so any
        # matrix-valued delta raised "'function' object is not subscriptable".
        matrix = delta

        def _delta(c, k):
            return matrix[c][k]

        delta = _delta

    # Drop units with fewer than 2 responses: they carry no (dis)agreement
    # information and would divide by zero below.
    m_u = np.sum(counts[:, 1:], 1)
    valid = m_u >= 2
    counts = counts[valid]
    m_u = m_u[valid]
    data = data[:, valid]
    n = np.sum(m_u)
    # n_cku[u, c, k]: number of (ordered) c-k response pairs within unit u.
    n_cku = np.matmul(counts[:, :, None], counts[:, None, :])
    for i in range(R + 1):
        n_cku[:, i, i] = counts[:, i] * (counts[:, i] - 1)
    # Observed disagreement.
    D_o = 0
    for c in range(1, R + 1):
        for k in range(1, R + 1):
            D_o += delta(c, k) * n_cku[:, c, k]
    D_o = np.sum(D_o / (n * (m_u - 1)))
    # Expected disagreement under chance pairing of all responses.
    D_e = 0
    P_ck = np.bincount(data.flat)
    for c in range(1, R + 1):
        for k in range(1, R + 1):
            D_e += delta(c, k) * P_ck[c] * P_ck[k]
    D_e /= n * (n - 1)
    return 1 - D_o / D_e
| 30.990683 | 88 | 0.585029 |
f7086a22a8b6cbc948ed244b0ec6f73cdc2f4cd2 | 2,987 | py | Python | newsroom/evaluate/run.py | peter-xbs/newsroom_chinese | 7fcae68b2ea5584d08d0c48faee34a0734237e6b | [
"Apache-2.0"
] | null | null | null | newsroom/evaluate/run.py | peter-xbs/newsroom_chinese | 7fcae68b2ea5584d08d0c48faee34a0734237e6b | [
"Apache-2.0"
] | null | null | null | newsroom/evaluate/run.py | peter-xbs/newsroom_chinese | 7fcae68b2ea5584d08d0c48faee34a0734237e6b | [
"Apache-2.0"
] | null | null | null | ################################################################################
from subprocess import Popen, PIPE, STDOUT
from threading import Thread
import bz2, json, click
from newsroom import jsonl
from . import readiter
from tqdm import tqdm
################################################################################
def _writer(process, dataset_file, keys):
for article in dataset_file:
subset = {k: article[k] for k in keys if k in article}
encoded = json.dumps(subset).encode("utf-8")
process.stdin.write(encoded + b"\n")
process.stdin.close()
################################################################################
# click parameter type for the input dataset: must be an existing, readable
# file (not a directory); resolved to an absolute path before use.
articles_file = click.Path(
    exists = True,
    dir_okay = False,
    readable = True,
    resolve_path = True,
)
# click parameter type for the output summaries file: need not exist yet,
# must be writable; resolved to an absolute path before use.
summaries_file = click.Path(
    exists = False,
    dir_okay = False,
    writable = True,
    resolve_path = True,
)
################################################################################
@click.command()
@click.option(
    "--system",
    type = str,
    required = True,
    help = "Name of docker image."
)
@click.option(
    "--dataset",
    type = articles_file,
    required = True,
    help = "Input path to full dataset."
)
@click.option(
    "--summaries",
    type = summaries_file,
    required = True,
    help = "Output path for system generated summaries."
)
@click.option(
    "--keys",
    type = str,
    default = "text",
    help = "List of dataset keys to pass to system. [default = text]"
)

################################################################################

def main(system, dataset, summaries, keys):
    """Run a summarization system packaged as a Docker image over a dataset.

    Streams each article (restricted to the comma-separated ``keys``) to the
    container's stdin as JSON lines, and records each line the container
    prints on stdout as one generated summary in a gzipped JSON-lines file
    at ``summaries``.
    """
    print("Starting", system, "Docker image.")
    # Attach only stdin/stdout so the container reads articles and writes
    # summaries through pipes; --rm removes the container when it exits.
    process = Popen(
        [
            "docker", "run", "--rm",
            "-a", "stdin", "-a", "stdout",
            "-i", system
        ],
        stdin = PIPE,
        stdout = PIPE,
    )
    dataset_file = jsonl.open(dataset, gzip = True)
    # Check the size of the dataset.
    # As a sanity check and for the progress bar.
    print("Loading articles... ", end = "", flush = True)
    dataset_length = len(dataset_file)
    print("found", dataset_length, "articles.\n")
    # Feed articles from a separate thread so that reading the container's
    # output below cannot deadlock on full pipe buffers.
    Thread(
        target = _writer,
        args = (process, dataset_file, keys.split(","))
    ).start()
    # Progress bar wraps the container's stdout line iterator.
    progress = tqdm(
        readiter(process.stdout),
        total = dataset_length,
        desc = "Running " + system,
    )
    with jsonl.open(summaries, gzip = True) as summaries_file:
        # Clear any previous output file before appending new summaries.
        summaries_file.delete()
        with progress as output:
            for line in output:
                summaries_file.appendline({"system": line})
    print("\nRun complete. Next, evaluate with newsroom-score.")
################################################################################
| 22.628788 | 80 | 0.494811 |
f7089f47db2e2b77a6377a7ce32ab1734102e04b | 12,276 | py | Python | src/fuzzysearch/substitutions_only.py | klauer/fuzzysearch | 55fc21e469495bc84fe6f81b0c148e105765182d | [
"MIT"
] | null | null | null | src/fuzzysearch/substitutions_only.py | klauer/fuzzysearch | 55fc21e469495bc84fe6f81b0c148e105765182d | [
"MIT"
] | null | null | null | src/fuzzysearch/substitutions_only.py | klauer/fuzzysearch | 55fc21e469495bc84fe6f81b0c148e105765182d | [
"MIT"
] | null | null | null | from collections import deque, defaultdict
from itertools import islice
from functools import wraps
from fuzzysearch.common import FuzzySearchBase, Match, \
count_differences_with_maximum, get_best_match_in_group, group_matches
from fuzzysearch.compat import text_type
from fuzzysearch.search_exact import search_exact
def _check_arguments(subsequence, sequence, max_substitutions):
if not subsequence:
raise ValueError('Given subsequence is empty!')
if max_substitutions is None or max_substitutions < 0:
raise ValueError('Maximum number of substitutions must be >= 0!')
def has_near_match_substitutions(subsequence, sequence, max_substitutions):
    """Return True if the subsequence appears in the sequence with at most
    max_substitutions character substitutions (no insertions/deletions).

    Dispatches to the implementation best suited to the given parameters.
    """
    _check_arguments(subsequence, sequence, max_substitutions)
    if max_substitutions == 0:
        # Exact search: any hit at all means a match exists.
        return any(True for _ in search_exact(subsequence, sequence))
    # The n-gram method needs n-grams of length >= 3 to pay off.
    if len(subsequence) // (max_substitutions + 1) >= 3:
        checker = has_near_match_substitutions_ngrams
    else:
        checker = has_near_match_substitutions_lp
    return checker(subsequence, sequence, max_substitutions)
def find_near_matches_substitutions(subsequence, sequence, max_substitutions):
    """Find near-matches of the subsequence in the sequence.

    This chooses a suitable fuzzy search implementation according to the
    given parameters.

    Returns a list of fuzzysearch.Match objects describing the matching
    parts of the sequence.
    """
    _check_arguments(subsequence, sequence, max_substitutions)
    if max_substitutions == 0:
        matches = []
        for start in search_exact(subsequence, sequence):
            end = start + len(subsequence)
            matches.append(Match(start, end, 0, sequence[start:end]))
        return matches
    # The n-gram method needs n-grams of length >= 3 to pay off.
    if len(subsequence) // (max_substitutions + 1) >= 3:
        return find_near_matches_substitutions_ngrams(
            subsequence, sequence, max_substitutions,
        )
    return find_near_matches_substitutions_lp(
        subsequence, sequence, max_substitutions,
    )
def find_near_matches_substitutions_lp(subsequence, sequence,
                                       max_substitutions):
    """search for near-matches of subsequence in sequence

    This searches for near-matches, where the nearly-matching parts of the
    sequence must meet the following limitations (relative to the
    subsequence):

    * the number of character substitutions must be less than
      max_substitutions
    * no deletions or insertions are allowed
    """
    _check_arguments(subsequence, sequence, max_substitutions)
    match_generator = _find_near_matches_substitutions_lp(
        subsequence, sequence, max_substitutions)
    return list(match_generator)
def _find_near_matches_substitutions_lp(subsequence, sequence,
                                        max_substitutions):
    """Yield Match objects for every alignment of `subsequence` in
    `sequence` with at most `max_substitutions` mismatching characters
    (no insertions/deletions).

    Works in a single pass over `sequence` using a rotating ring of
    per-alignment match counters of size len(subsequence).
    """
    # simple optimization: prepare some often used things in advance
    _SUBSEQ_LEN = len(subsequence)
    _SUBSEQ_LEN_MINUS_ONE = _SUBSEQ_LEN - 1
    def make_match(start, end, dist):
        return Match(start, end, dist, matched=sequence[start:end])
    # prepare quick lookup of where a character appears in the subsequence
    char_indexes_in_subsequence = defaultdict(list)
    for (index, char) in enumerate(subsequence):
        char_indexes_in_subsequence[char].append(index)
    # we'll iterate over the sequence once, but the iteration is split into two
    # for loops; therefore we prepare an iterator in advance which will be used
    # in both of the loops
    sequence_enum_iter = enumerate(sequence)
    # We'll count the number of matching characters assuming various attempted
    # alignments of the subsequence to the sequence. At any point in the
    # sequence there will be N such alignments to update. We'll keep
    # these in a "circular array" (a.k.a. a ring) which we'll rotate after each
    # iteration to re-align the indexing.
    # Initialize the candidate counts by iterating over the first N-1 items in
    # the sequence. No possible matches in this step!
    candidates = deque([0], maxlen=_SUBSEQ_LEN)
    for (index, char) in islice(sequence_enum_iter, _SUBSEQ_LEN_MINUS_ONE):
        for subseq_index in [idx for idx in char_indexes_in_subsequence[char] if idx <= index]:
            candidates[subseq_index] += 1
        candidates.appendleft(0)
    # From the N-th item onwards, we'll update the candidate counts exactly as
    # above, and additionally check if the part of the sequence whic began N-1
    # items before the current index was a near enough match to the given
    # sub-sequence.
    for (index, char) in sequence_enum_iter:
        for subseq_index in char_indexes_in_subsequence[char]:
            candidates[subseq_index] += 1
        # rotate the ring of candidate counts
        candidates.rotate(1)
        # fetch the count for the candidate which started N-1 items ago
        n_substitutions = _SUBSEQ_LEN - candidates[0]
        # set the count for the next index to zero
        candidates[0] = 0
        # if the candidate had few enough mismatches, yield a match
        if n_substitutions <= max_substitutions:
            yield make_match(
                start=index - _SUBSEQ_LEN_MINUS_ONE,
                end=index + 1,
                dist=n_substitutions,
            )
def has_near_match_substitutions_lp(subsequence, sequence, max_substitutions):
    """Return True if any substitutions-only near-match exists, using the
    single-pass (ring-counter) implementation.
    """
    _check_arguments(subsequence, sequence, max_substitutions)
    matches = _find_near_matches_substitutions_lp(subsequence, sequence,
                                                  max_substitutions)
    sentinel = object()
    return next(matches, sentinel) is not sentinel
def find_near_matches_substitutions_ngrams(subsequence, sequence,
                                           max_substitutions):
    """search for near-matches of subsequence in sequence

    This searches for near-matches, where the nearly-matching parts of the
    sequence must meet the following limitations (relative to the
    subsequence):

    * the number of character substitutions must be less than
      max_substitutions
    * no deletions or insertions are allowed
    """
    _check_arguments(subsequence, sequence, max_substitutions)
    # The n-gram search can report the same starting position more than
    # once; keep only the first match seen for each start index.
    by_start = {}
    for match in _find_near_matches_substitutions_ngrams(subsequence, sequence,
                                                         max_substitutions):
        by_start.setdefault(match.start, match)
    return sorted(by_start.values(), key=lambda m: m.start)
def _find_near_matches_substitutions_ngrams(subsequence, sequence,
                                            max_substitutions):
    """Yield Match objects for substitutions-only near-matches using the
    n-gram (pigeonhole) method.

    The subsequence is split into max_substitutions + 1 n-grams; any match
    with at most max_substitutions substitutions must contain at least one
    n-gram verbatim, so each n-gram is searched exactly and the
    surroundings are then verified. The same match may be yielded more
    than once (once per n-gram that hits); callers deduplicate.
    """
    subseq_len = len(subsequence)
    seq_len = len(sequence)
    def make_match(start, end, dist):
        return Match(start, end, dist, matched=sequence[start:end])
    ngram_len = subseq_len // (max_substitutions + 1)
    if ngram_len == 0:
        raise ValueError(
            "The subsequence's length must be greater than max_substitutions!"
        )
    for ngram_start in range(0, len(subsequence) - ngram_len + 1, ngram_len):
        ngram_end = ngram_start + ngram_len
        subseq_before = subsequence[:ngram_start]
        subseq_after = subsequence[ngram_end:]
        # Search limits keep the full alignment within the sequence bounds.
        for index in search_exact(
            subsequence[ngram_start:ngram_end], sequence,
            ngram_start, seq_len - (subseq_len - ngram_end),
        ):
            n_substitutions = 0
            # Verify the part of the alignment before the matched n-gram.
            seq_before = sequence[index - ngram_start:index]
            if subseq_before != seq_before:
                n_substitutions += count_differences_with_maximum(
                    seq_before, subseq_before,
                    max_substitutions - n_substitutions + 1)
                if n_substitutions > max_substitutions:
                    continue
            # Verify the part of the alignment after the matched n-gram.
            seq_after = sequence[index + ngram_len:index - ngram_start + subseq_len]
            if subseq_after != seq_after:
                if n_substitutions == max_substitutions:
                    continue
                n_substitutions += count_differences_with_maximum(
                    seq_after, subseq_after,
                    max_substitutions - n_substitutions + 1)
                if n_substitutions > max_substitutions:
                    continue
            yield make_match(
                start=index - ngram_start,
                end=index - ngram_start + subseq_len,
                dist=n_substitutions,
            )
def has_near_match_substitutions_ngrams(subsequence, sequence,
                                        max_substitutions):
    """search for near-matches of subsequence in sequence

    This searches for near-matches, where the nearly-matching parts of the
    sequence must meet the following limitations (relative to the
    subsequence):

    * the number of character substitutions must be less than
      max_substitutions
    * no deletions or insertions are allowed
    """
    _check_arguments(subsequence, sequence, max_substitutions)
    matches = _find_near_matches_substitutions_ngrams(subsequence, sequence,
                                                      max_substitutions)
    sentinel = object()
    return next(matches, sentinel) is not sentinel
# Optionally replace the pure-Python n-gram search functions with wrappers
# around the C extension, when it is available. The byteslike C functions
# only accept non-text inputs, so the wrappers fall back to the Python
# implementations for str arguments or on TypeError.
try:
    from fuzzysearch._substitutions_only import \
        substitutions_only_has_near_matches_ngrams_byteslike, \
        substitutions_only_find_near_matches_ngrams_byteslike as \
        _subs_only_fnm_ngram_byteslike
except ImportError:
    # C extension not built/installed: keep the pure-Python versions.
    pass
else:
    # Keep a reference to the pure-Python version for fallback.
    py_has_near_match_substitutions_ngrams = has_near_match_substitutions_ngrams
    @wraps(py_has_near_match_substitutions_ngrams)
    def has_near_match_substitutions_ngrams(subsequence, sequence,
                                            max_substitutions):
        if not (
            isinstance(subsequence, text_type) or
            isinstance(sequence, text_type)
        ):
            try:
                return substitutions_only_has_near_matches_ngrams_byteslike(
                    subsequence, sequence, max_substitutions)
            except TypeError:
                # Input not byteslike-compatible: use the Python fallback.
                pass
        return py_has_near_match_substitutions_ngrams(
            subsequence, sequence, max_substitutions)
    # Keep a reference to the pure-Python version for fallback.
    py_find_near_matches_substitutions_ngrams = \
        find_near_matches_substitutions_ngrams
    @wraps(py_find_near_matches_substitutions_ngrams)
    def find_near_matches_substitutions_ngrams(subsequence, sequence,
                                               max_substitutions):
        if not (
            isinstance(subsequence, text_type) or
            isinstance(sequence, text_type)
        ):
            try:
                results = _subs_only_fnm_ngram_byteslike(
                    subsequence, sequence, max_substitutions)
            except TypeError:
                # Input not byteslike-compatible: use the Python fallback.
                pass
            else:
                # The C function returns start indices only; rebuild full
                # Match objects and keep the best match per group.
                matches = [
                    Match(
                        index,
                        index + len(subsequence),
                        count_differences_with_maximum(
                            sequence[index:index+len(subsequence)],
                            subsequence,
                            max_substitutions + 1,
                        ),
                        matched=sequence[index:index + len(subsequence)],
                    )
                    for index in results
                ]
                return [
                    get_best_match_in_group(group)
                    for group in group_matches(matches)
                ]
        return py_find_near_matches_substitutions_ngrams(
            subsequence, sequence, max_substitutions)
class SubstitutionsOnlySearch(FuzzySearchBase):
    """Search strategy allowing character substitutions only."""

    @classmethod
    def search(cls, subsequence, sequence, search_params):
        # The effective limit is the tightest of the configured bounds.
        limits = (search_params.max_l_dist, search_params.max_substitutions)
        max_subs = min(limit for limit in limits if limit is not None)
        return find_near_matches_substitutions(subsequence, sequence,
                                               max_subs)

    @classmethod
    def extra_items_for_chunked_search(cls, subsequence, search_params):
        # Substitutions never change the match length, so chunk overlap
        # needs no extra items.
        return 0
| 39.220447 | 95 | 0.646546 |
f708e062dee09bdd223cf03577105bdf406b13fd | 1,386 | py | Python | var/spack/repos/builtin/packages/r-multcomp/package.py | kehw/spack | 4f49b1a9301447a8cf880c99820cad65e5c2d7e3 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2020-09-10T22:50:08.000Z | 2021-01-12T22:18:54.000Z | var/spack/repos/builtin/packages/r-multcomp/package.py | kehw/spack | 4f49b1a9301447a8cf880c99820cad65e5c2d7e3 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 11 | 2021-01-08T22:23:53.000Z | 2022-03-30T11:08:17.000Z | var/spack/repos/builtin/packages/r-multcomp/package.py | kehw/spack | 4f49b1a9301447a8cf880c99820cad65e5c2d7e3 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RMultcomp(RPackage):
    """Simultaneous tests and confidence intervals for general linear
    hypotheses in parametric models, including linear, generalized linear,
    linear mixed effects, and survival models. The package includes demos
    reproducing analyzes presented in the book "Multiple Comparisons Using R"
    (Bretz, Hothorn, Westfall, 2010, CRC Press)."""

    homepage = "http://multcomp.r-forge.r-project.org/"
    url      = "https://cloud.r-project.org/src/contrib/multcomp_1.4-6.tar.gz"
    # Older releases are archived under a separate CRAN path.
    list_url = "https://cloud.r-project.org/src/contrib/Archive/multcomp"

    version('1.4-10', sha256='29bcc635c0262e304551b139cd9ee655ab25a908d9693e1cacabfc2a936df5cf')
    version('1.4-8', sha256='a20876619312310e9523d67e9090af501383ce49dc6113c6b4ca30f9c943a73a')
    version('1.4-6', sha256='fe9efbe671416a49819cbdb9137cc218faebcd76e0f170fd1c8d3c84c42eeda2')

    # R package dependencies; version bounds presumably mirror the upstream
    # DESCRIPTION file for these releases.
    depends_on('r-mvtnorm@1.0-10:', type=('build', 'run'))
    depends_on('r-survival@2.39-4:', type=('build', 'run'))
    depends_on('r-th-data@1.0-2:', type=('build', 'run'))
    depends_on('r-sandwich@2.3-0:', type=('build', 'run'))
    depends_on('r-codetools', type=('build', 'run'))
| 47.793103 | 96 | 0.727273 |
f708e9a4590e61d74102e1c7483f7e1fb43ae436 | 1,710 | py | Python | button.py | qodzero/ukivy | d7179a83c2e6e357cf50113f53d24c780bf29789 | [
"MIT"
] | null | null | null | button.py | qodzero/ukivy | d7179a83c2e6e357cf50113f53d24c780bf29789 | [
"MIT"
] | null | null | null | button.py | qodzero/ukivy | d7179a83c2e6e357cf50113f53d24c780bf29789 | [
"MIT"
] | null | null | null | from kivy.uix.button import Button
from kivy.properties import StringProperty, BooleanProperty, NumericProperty, ObjectProperty
from kivy.graphics import Color, Rectangle, RoundedRectangle, Ellipse
from kivy.lang import Builder
Builder.load_string('''
<FlatButton>:
background_normal: ''
background_color: [0,0,0,0]
text_size: self.size
valign: 'middle'
halign: 'center'
markup: True
''')
class FlatButton(Button):
    """A normal ::class `kivy.uix.button.Button` with all
    the visual representations removed, this button
    basically just looks like a label, but ofcourse, unlike
    a label, its clickable.

    Since this inherits from a normal Button, it
    supports all of its properties.

    Usage
    ---------
    from ukivy.button import FlatButton
    ...
    btn = FlatButton(text='myButton')
    some_widget.add_widget(btn)
    ...
    """
    pass


# FlatButton must be defined *before* RoundedButton: the original ordering
# raised NameError at import time because RoundedButton subclasses it.
class RoundedButton(FlatButton):
    """A FlatButton that draws a rounded-rectangle background whose corner
    radius is controlled by the ``radius`` property.
    """
    radius = NumericProperty(10)

    def update_back(self):
        """Draw the rounded background on canvas.before using the current
        position, size, background colour and radius.
        """
        with self.canvas.before:
            self.color = Color(rgba=self.background_color)
            self.rect = RoundedRectangle(
                pos=self.pos,
                size=self.size,
                radius=self.radius)

    def on_radius(self, _, value):
        """When the radius is set/changed, this function
        is called to update the radius of the button on the
        canvas

        NOTE(review): assumes update_back() has already created self.rect;
        changing radius before that raises AttributeError — confirm callers.

        Parameters
        ----------
        _ : widget
            This is usually the instance calling the function,
            we dont care about this
        value : number
            The value of the radius property

        Returns
        -------
        None
        """
        self.rect.radius = value
f708ecc2659e7dad9f641b77c908e306e5f808bc | 3,029 | py | Python | watcher_dashboard/utils/utils.py | openstack/watcher-dashboard | 146e547da934c2464ec5f49326eabed0eecfda96 | [
"Apache-2.0"
] | 15 | 2016-02-12T07:33:42.000Z | 2019-01-28T22:13:27.000Z | watcher_dashboard/utils/utils.py | openstack/watcher-dashboard | 146e547da934c2464ec5f49326eabed0eecfda96 | [
"Apache-2.0"
] | null | null | null | watcher_dashboard/utils/utils.py | openstack/watcher-dashboard | 146e547da934c2464ec5f49326eabed0eecfda96 | [
"Apache-2.0"
] | 2 | 2017-08-11T02:25:37.000Z | 2017-10-10T09:59:40.000Z | # -*- coding: utf8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
# Captures either a capitalised word (capital + lowercase run) or an
# all-caps run followed by another capital, whitespace or end of string.
CAMEL_RE = re.compile(r'([A-Z][a-z]+|[A-Z]+(?=[A-Z\s]|$))')


def de_camel_case(text):
    """Convert CamelCase names to human-readable format."""
    parts = (part.strip() for part in CAMEL_RE.split(text))
    return ' '.join(part for part in parts if part)
def list_to_dict(object_list, key_attribute='id'):
    """Converts an object list to a dict

    :param object_list: list of objects to be put into a dict
    :type object_list: list
    :param key_attribute: object attribute used as index by dict
    :type key_attribute: str
    :return: dict containing the objects in the list
    :rtype: dict
    """
    return {getattr(obj, key_attribute): obj for obj in object_list}
def length(iterator):
    """A length function for iterators

    Returns the number of items in the specified iterator. Note that this
    function consumes the iterator in the process.
    """
    count = 0
    for _item in iterator:
        count += 1
    return count
def check_image_type(image, image_type):
    """Check if image 'type' property matches passed-in image_type.

    If image has no 'type' property, return True, as we cannot
    be sure what type of image it is.
    """
    actual_type = image.properties.get('type', image_type)
    return actual_type == image_type
def filter_items(items, **kwargs):
    """Filters the list of items and returns the filtered list.

    Keyword names ending in ``__in`` / ``__not_in`` test membership of the
    corresponding attribute; plain names test equality.

    Example usage:

    >>> class Item(object):
    ...     def __init__(self, index):
    ...         self.index = index
    ...     def __repr__(self):
    ...         return '<Item index=%d>' % self.index
    >>> items = [Item(i) for i in range(7)]
    >>> list(filter_items(items, index=1))
    [<Item index=1>]
    >>> list(filter_items(items, index__in=(1, 2, 3)))
    [<Item index=1>, <Item index=2>, <Item index=3>]
    >>> list(filter_items(items, index__not_in=(1, 2, 3)))
    [<Item index=0>, <Item index=4>, <Item index=5>, <Item index=6>]
    """
    def _matches(item):
        for name, value in kwargs.items():
            if name.endswith('__in'):
                if getattr(item, name[:-len('__in')]) not in value:
                    return False
            elif name.endswith('__not_in'):
                if getattr(item, name[:-len('__not_in')]) in value:
                    return False
            elif getattr(item, name) != value:
                return False
        return True

    return (item for item in items if _matches(item))
def safe_int_cast(value):
    """Cast *value* to int, returning 0 when conversion is impossible."""
    try:
        result = int(value)
    except (TypeError, ValueError):
        result = 0
    return result
| 31.884211 | 78 | 0.621657 |
f7090240b3aff921b5983ce0da0f77c0d2b72c2b | 311 | py | Python | python/p153.py | forewing/lc | 314468a1a3bb7d38eccf1f34b0d1b7da04a34784 | [
"CC0-1.0"
] | null | null | null | python/p153.py | forewing/lc | 314468a1a3bb7d38eccf1f34b0d1b7da04a34784 | [
"CC0-1.0"
] | null | null | null | python/p153.py | forewing/lc | 314468a1a3bb7d38eccf1f34b0d1b7da04a34784 | [
"CC0-1.0"
] | null | null | null | class Solution:
def findMin(self, nums: List[int]) -> int:
l = 0
r = len(nums) - 1
while r - l > 3:
m = (l + r) // 2
if nums[m] > nums[l] and nums[m] > nums[r]:
l = m + 1
else:
r = m
return min(nums[l:r+1])
| 25.916667 | 55 | 0.366559 |
f7091944434db8d19ca5662d7bf98cc15182bb16 | 13,500 | py | Python | python/infra/DisplayCoverage.py | AvciRecep/chaste_2019 | 1d46cdac647820d5c5030f8a9ea3a1019f6651c1 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2020-04-05T12:11:54.000Z | 2020-04-05T12:11:54.000Z | python/infra/DisplayCoverage.py | AvciRecep/chaste_2019 | 1d46cdac647820d5c5030f8a9ea3a1019f6651c1 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | python/infra/DisplayCoverage.py | AvciRecep/chaste_2019 | 1d46cdac647820d5c5030f8a9ea3a1019f6651c1 | [
"Apache-2.0",
"BSD-3-Clause"
] | 2 | 2020-04-05T14:26:13.000Z | 2021-03-09T08:18:17.000Z | #!/usr/bin/env python
"""Copyright (c) 2005-2019, University of Oxford.
All rights reserved.
University of Oxford means the Chancellor, Masters and Scholars of the
University of Oxford, having an administrative office at Wellington
Square, Oxford OX1 2JD, UK.
This file is part of Chaste.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the University of Oxford nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# Script to run gcov on source files after a Coverage build has been done,
# and summarise the results.
# The script takes arguments:
# <output_dir> The directory in which to generate summary files and
# an index page.
# <build_type> The build type used; defaults to Coverage.
# NOTE: this is a Python 2 script (it uses print statements).
import itertools
import glob
import os
import sys
# Make the directory above this script importable so the project-local
# BuildTypes module can be found.
parent_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
sys.path[0:0] = [parent_path]
import BuildTypes
# Arguments to gcov
# -l Create long file names for included source files.
# -p Preserve complete path information in the names of generated .gcov files.
gcov_flags = ' -lp '
# Get output dir and build type object
if len(sys.argv) < 2:
    print "Syntax error."
    print "Usage:", sys.argv[0], "<test output dir> [<build type> [<project> ...]]"
    sys.exit(1)
output_dir = sys.argv[1]
if len(sys.argv) > 2:
    build_type = sys.argv[2]
    projects = sys.argv[3:]
else:
    build_type = 'Coverage'
    projects = []
build = BuildTypes.GetBuildType(build_type)
# Remove any old output files/test results from output_dir
# (assumes output_dir contains only plain files; os.remove fails on folders)
for filename in os.listdir(output_dir):
    os.remove(os.path.join(output_dir, filename))
# Find .gcda files to determine which source files to run gcov on
# First, find appropriate build directories
build_dirs = glob.glob('*/build/' + build.build_dir)
build_dirs.extend(map(lambda p: os.path.join(p, 'build', build.build_dir), projects))
# Now find .gcda files within there
gcda_files = []
for build_dir in build_dirs:
    for dirpath, dirnames, filenames in os.walk(build_dir):
        for filename in filenames:
            if filename[-5:] == '.gcda':
                gcda_files.append({'dir': dirpath, 'file': filename})
# Run gcov on all the .cpp files which have .gcda files.
for gcda_file in gcda_files:
    # For added interest, the source file to process is in different locations
    # depending on whether it is a test or not.
    if gcda_file['file'][:4] == 'Test' or \
       gcda_file['dir'][-5:] == '/test':
        #gcda_file['dir'].find('/test/') != -1:
        # .cpp file is in the same folder
        os.system('gcov -o ' + gcda_file['dir'] + gcov_flags +
                  os.path.join(gcda_file['dir'], gcda_file['file'][:-4] + 'cpp'))
    else:
        # .cpp file is contained within the Chaste source tree
        # gcda_file['dir'] should look something like mesh/build/coverage/src/reader
        # We then want to look in mesh/src/reader
        try:
            toplevel, rest = gcda_file['dir'].split('build')
        except:
            # NOTE(review): bare except, but it prints then re-raises, so
            # errors are not swallowed.
            print gcda_file
            raise
        # Get rid of slashes (or system equivalent)
        toplevel = os.path.dirname(toplevel)
        # Drop the '/coverage/'
        rest = rest.split(os.path.sep, 2)[-1]
        # Run gcov
        os.system('gcov -o ' + gcda_file['dir'] + gcov_flags +
                  os.path.join(toplevel, rest, gcda_file['file'][:-4] + 'cpp'))
# Now find all our source files
src_dirs = glob.glob('*/src')
src_dirs.remove('apps/src')
src_dirs.extend(map(lambda p: os.path.join(p, 'src'), projects))
src_files = []
for src_dir in src_dirs:
    for dirpath, dirnames, filenames in os.walk(src_dir):
        for filename in filenames:
            if filename[-4:] in ['.cpp', '.hpp']:
                src_files.append({'dir': dirpath, 'file': filename})
def coverage_ignore(src_file):
    """Whether to ignore the fact that a source file is not used.

    Some files legitimately produce no coverage data: test-support code,
    third-party sources that we do not try to cover, a couple of special
    cases, and header files that contain no real code.  A .hpp file is
    assumed to contain no real code unless a line outside any
    COVERAGE_IGNORE block starts with 'template' or 'class'.  This works
    as long as header files do not contain non-template function
    definitions, which should be the case if we're being good programmers.
    (The boost serialization tweaking file "SerializationExportWrapper.hpp"
    has templated definitions which are not code; its relevant sections are
    wrapped in COVERAGE_IGNORE, which is why the markers are honoured here.)
    """
    if src_file['dir'].endswith('fortests'):
        # 'Source' code that is only used for tests; coverage doesn't matter.
        return True
    if src_file['file'] in ['triangle.cpp', 'tetgen.cpp', 'predicates.cpp']:
        # We don't try to cover other people's code.
        return True
    if src_file['file'] in ['HeartRegionCodes.cpp', 'Version.hpp']:
        # Special cases.
        return True
    if src_file['file'][-4:] != '.hpp':
        return False
    # Scan the header for template/class definitions outside ignore blocks.
    fp = open(os.path.join(src_file['dir'], src_file['file']))
    try:
        code = True
        for line in fp:
            if line.find('// LCOV_EXCL_START') != -1:
                code = False
            elif line.find('// LCOV_EXCL_STOP') != -1:
                code = True
            if code and (line.startswith('template') or line.startswith('class ')):
                return False
    finally:
        fp.close()
    return True
# For each source file, merge the execution counts from every .gcov file that
# refers to it (one per test that exercised it) and write an annotated summary.
for src_file in src_files:
    # Mangle the name like gcov does
    mangled_dir = src_file['dir'].replace(os.path.sep, '#')
    # Find .gcov files relating to this source file
    gcov_files = glob.glob('*' + mangled_dir + '#' + src_file['file'] + '.gcov')
    # Open all the files, and an output file
    gcov_fps = [open(gcov_file) for gcov_file in gcov_files]
    out_file_name = os.path.join(output_dir, mangled_dir + '#' + src_file['file'])
    out_file_name = out_file_name.replace('#', '-')
    out_file = open(out_file_name, 'w')
    # Now go through them line by line in lock-step,
    # aggregating line execution counts
    covered_line_count, missed_line_count, warn, ignore = 0, 0, True, False
    for lines in itertools.izip(*gcov_fps):
        aggregated_count = 0
        maybe_not_code, really_uncovered = False, False
        # Inner loop over the same source line as seen by each .gcov file;
        # the for/else below runs only when no 'break' fired (i.e. this was
        # a normal code line in every file).
        for line in lines:
            count, line_no, src_line = line.split(':', 2)
            count, line_no = count.strip(), line_no.strip()
            if src_line.find('// LCOV_EXCL_START') != -1:
                ignore = True
                out_file.write("%9s:%5s:%s" % ('ignored', line_no, src_line))
                break
            elif src_line.find('// LCOV_EXCL_STOP') != -1:
                ignore = False
                out_file.write("%9s:%5s:%s" % ('ignored', line_no, src_line))
                break
            if line_no == 0:
                # This is a gcov header line; what it is doesn't matter
                # NOTE(review): line_no is a string here, so this comparison
                # with int 0 never succeeds; gcov header lines instead fall
                # through to the count == '-' branch below.
                out_file.write(line)
                break
            if count == '-':
                # This line "isn't code". This may be because it's blank, a comment, or
                # similar. Or it may be because it's within a templated method that hasn't
                # been instantiated in this particular execution, but it might be in another.
                maybe_not_code = True
            elif count == '#####' or count == '=====':
                # The line was really uncovered here, so it must be code.
                # From gcov documentation, # indicates reachable by non-exceptional paths;
                # = only by an exceptional path (e.g. catch block).
                really_uncovered = True
            else:
                aggregated_count += int(count)
        else:
            if aggregated_count == 0:
                if maybe_not_code and not really_uncovered:
                    # This really wasn't a code line (or the template is *never* instantiated).
                    # Would be nice to differentiate these cases, but doing so is decidedly
                    # non-trivial.
                    aggregated_count = '-'
                else:
                    src_line_stripped = src_line.strip()
                    # gcov is buggy: it claims some non-code lines are uncovered.
                    # There are some other cases it gets wrong for better reasons too.
                    if not (ignore or src_line_stripped in ['{', '}', 'NEVER_REACHED;'] or
                            (src_line_stripped.startswith('return') and
                             src_line_stripped[6] in [';', ' ']) or
                            src_line_stripped.startswith('TERMINATE(') or
                            src_line_stripped.startswith('assert(DIM') or
                            src_line_stripped.startswith('assert(ELEM_DIM') or
                            src_line_stripped.startswith('assert(SPACE_DIM') or
                            src_line_stripped.startswith('assert(ELEMENT_DIM') or
                            src_line_stripped.startswith('EXCEPT_IF_NOT(ELEMENT_DIM') or
                            src_line_stripped.startswith('#') or
                            src_line_stripped.startswith('EXPORT_TEMPLATE') or
                            src_line_stripped.startswith('template class ') or
                            (src_line_stripped.startswith('virtual ') and src_line_stripped.endswith('(')) or
                            (src_line_stripped.startswith('catch ') and #Line is catch (...)
                             src_line_stripped[-1] == ')') or
                            src_line_stripped.startswith('class ') or
                            #Method definition (possibly). Currently overlaps with previous 'catch' ignore
                            (len(src_line_stripped) > 0 and
                             (src_line_stripped[-1] == ')' or src_line_stripped.endswith(') const')))
                            ):
                        warn = False
                        aggregated_count = '#####'
                        #print 'Full details of coverage: ', src_line_stripped,'\t',src_file,'\t',aggregated_count,'\t', line_no,'\t', src_line
                    else:
                        aggregated_count = 'ignored'
                    missed_line_count += 1
            else:
                covered_line_count += 1
            out_file.write("%9s:%5s:%s" % (aggregated_count, line_no, src_line))
    # Output a summary
    if not gcov_files:
        # No gcov files found for this source file.
        # This may not be an error, if the source file in question is an .hpp file with
        # an associated .cpp file containing all the code for the class.
        ##print src_file
        if src_file['file'][-4:] == '.hpp' and \
           os.path.exists(os.path.join(src_file['dir'], src_file['file'][:-3]+'cpp')):
            status = '' # So output file will be deleted
        else:
            out_file.write("This source file wasn't used at all!\n\nFailed 1 of 1 test\n")
            status = "1_1"
    elif not ignore and missed_line_count == 0:
        out_file.write('\nOK!\n\n')
        status = 'OK'
    else:
        counts = (missed_line_count, missed_line_count+covered_line_count)
        out_file.write('\nFailed %d of %d tests\n\n' % counts)
        status = "%d_%d" % counts
    if warn:
        status = 'warn_' + status
    if ignore:
        status = 'ignore_' + status
    if coverage_ignore(src_file):
        # All special case ignorable files (not just ones with partial coverage)
        status = ''
    # Close all files
    [fp.close() for fp in gcov_fps]
    out_file.close()
    # Alter file name to indicate summary
    if status:
        os.rename(out_file_name, out_file_name + '.' + status + '.0')
    else:
        os.remove(out_file_name)
# Clean up the .gcov files that gcov dropped in the Chaste root directory.
for gcov_output in os.listdir('.'):
    if gcov_output.endswith('.gcov'):
        os.remove(gcov_output)
# Finally, generate a summary index page from the per-file results.
os.system('python python/DisplayTests.py '+output_dir+' '+build_type)
| 45.918367 | 143 | 0.616593 |
f7091b68ebbfbd69780202759a09375d54581042 | 2,147 | py | Python | tests/test_encoding.py | samv/unique | d5d8deb109d0b14ce072118432baf0bebc11826b | [
"MIT"
] | 1 | 2015-04-02T20:27:25.000Z | 2015-04-02T20:27:25.000Z | tests/test_encoding.py | samv/unique | d5d8deb109d0b14ce072118432baf0bebc11826b | [
"MIT"
] | null | null | null | tests/test_encoding.py | samv/unique | d5d8deb109d0b14ce072118432baf0bebc11826b | [
"MIT"
] | null | null | null |
import json
import unittest2
from normalize import from_json
from normalize import JsonProperty
from normalize import JsonRecord
from normalize import Property
from normalize import Record
from normalize import to_json
from unique.encoding import JSONRecordIO
from testclasses import MultiLevelKeyValue
from testclasses import SimpleKeyValue
def jdump(obj):
    """Serialise *obj* to pretty-printed JSON with deterministic key order."""
    return json.dumps(
        obj, sort_keys=True, indent=4, separators=(',', ': '),
    )
class CustomMarshalled(JsonRecord):
    """A JsonRecord with custom marshalling: it emits a constant 'oid'
    field when serialising and strips it again when deserialising."""
    key = Property(json_name="id")
    value = Property()

    def json_data(self, **args):
        # Augment the default JSON form with a fixed object id.
        data = super(CustomMarshalled, self).json_data(**args)
        data['oid'] = "1234567"
        return data

    @classmethod
    def json_to_initkwargs(cls, json_data, kwargs):
        # Drop the synthetic 'oid' key before handing off to the base class.
        filtered = dict(
            (k, v) for k, v in json_data.items() if k != 'oid'
        )
        return super(CustomMarshalled, cls).json_to_initkwargs(filtered, kwargs)
class SanityTest(unittest2.TestCase):
    """Round-trip sanity checks for JSONRecordIO encoding and decoding."""

    def test_simple_key(self):
        record = SimpleKeyValue(key="Bob", value="bill")
        serialised = JSONRecordIO.encode_str(record)
        self.assertEqual(
            serialised, '{\n    "key": "Bob",\n    "value": "bill"\n}',
        )
        round_tripped = JSONRecordIO.decode_str(SimpleKeyValue, serialised)[0]
        self.assertEqual(record, round_tripped)

    def test_multi_level_key(self):
        record = MultiLevelKeyValue(
            key="Casper",
            items=[{"key": "toast", "value": "Charlie_Brown"},
                   {"key": "ham", "value": "Lucy"},
                   {"key": "spam", "value": "Franklin"}],
            custom_val="Minotaur",
        )
        # IO using regular normalize
        default_json = jdump(to_json(record))
        default_decoded = from_json(MultiLevelKeyValue, json.loads(default_json))
        self.assertEqual(record, default_decoded)
        serialised = JSONRecordIO.encode_str(record)
        decoded = JSONRecordIO.decode_str(MultiLevelKeyValue, serialised)[0]
        # FIXME: visitor should either respect all JsonRecord hints or none.
        decoded.custom_val = 'Minotaur'
        self.assertEqual(record, decoded)
| 28.25 | 81 | 0.63251 |
f7092a6a9bede6870067db1af0ef8ca88e82b286 | 2,116 | py | Python | setup.py | ZLLentz/pcdscalc | 6279d3eb8bd62da0e5ac9d9f3b451519e5f13aea | [
"BSD-3-Clause-LBNL"
] | null | null | null | setup.py | ZLLentz/pcdscalc | 6279d3eb8bd62da0e5ac9d9f3b451519e5f13aea | [
"BSD-3-Clause-LBNL"
] | null | null | null | setup.py | ZLLentz/pcdscalc | 6279d3eb8bd62da0e5ac9d9f3b451519e5f13aea | [
"BSD-3-Clause-LBNL"
] | null | null | null | import sys
from os import path
from setuptools import find_packages, setup
import versioneer
min_version = (3, 6)
if sys.version_info < min_version:
error = """
pcdscalc does not support Python {0}.{1}.
Python {2}.{3} and above is required. Check your Python version like so:
python3 --version
This may be due to an out-of-date pip. Make sure you have pip >= 9.0.1.
Upgrade pip like so:
pip install --upgrade pip
""".format(*sys.version_info[:2], *min_version)
sys.exit(error)
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as readme_file:
readme = readme_file.read()
with open(path.join(here, 'requirements.txt')) as requirements_file:
# Parse requirements.txt, ignoring any commented-out lines.
requirements = [line for line in requirements_file.read().splitlines()
if not line.startswith('#')]
git_requirements = [r for r in requirements if r.startswith('git+')]
if git_requirements:
print('User must install the following packages manually:')
print()
print("\n".join(f'* {r}' for r in git_requirements))
print()
setup(
name='pcdscalc',
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
license='BSD',
author='SLAC National Accelerator Laboratory',
packages=find_packages(exclude=['docs', 'tests']),
description='PCDS Calculation Routines',
long_description=readme,
url='https://github.com/pcdshub/pcdscalc', # noqa
entry_points={
'console_scripts': [
# 'pcdscalc=pcdscalc.__main__:main', # noqa
],
},
include_package_data=True,
package_data={
'pcdscalc': [
# When adding files here, remember to update MANIFEST.in as well,
# or else they will not be included in the distribution on PyPI!
# 'path/to/data_file',
]
},
install_requires=requirements,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Natural Language :: English',
'Programming Language :: Python :: 3',
],
)
| 28.594595 | 77 | 0.654537 |
f7092c10da42e20fd36d0a193c9d2a7e83185c7d | 22,395 | py | Python | lib/utils/SegDataGenerator.py | Grusinator/BirdClassification | c78ca3dbf70c2509c79ca4641102a2d725084d2a | [
"MIT"
] | 1 | 2018-04-16T19:01:48.000Z | 2018-04-16T19:01:48.000Z | lib/utils/SegDataGenerator.py | Grusinator/BirdClassification | c78ca3dbf70c2509c79ca4641102a2d725084d2a | [
"MIT"
] | null | null | null | lib/utils/SegDataGenerator.py | Grusinator/BirdClassification | c78ca3dbf70c2509c79ca4641102a2d725084d2a | [
"MIT"
] | null | null | null | from keras.preprocessing.image import *
from keras.applications.imagenet_utils import preprocess_input
from keras import backend as K
from PIL import Image
import numpy as np
import os
#import cv2
def center_crop(x, center_crop_size, data_format, **kwargs):
    """Crop the central (height, width) window of size *center_crop_size*
    out of the single image *x*, for either data format."""
    if data_format == 'channels_first':
        mid_h, mid_w = x.shape[1] // 2, x.shape[2] // 2
    elif data_format == 'channels_last':
        mid_h, mid_w = x.shape[0] // 2, x.shape[1] // 2
    half_h, half_w = center_crop_size[0] // 2, center_crop_size[1] // 2
    rest_h, rest_w = center_crop_size[0] - half_h, center_crop_size[1] - half_w
    rows = slice(mid_h - half_h, mid_h + rest_h)
    cols = slice(mid_w - half_w, mid_w + rest_w)
    if data_format == 'channels_first':
        return x[:, rows, cols]
    elif data_format == 'channels_last':
        return x[rows, cols, :]
def pair_center_crop(x, y, center_crop_size, data_format, **kwargs):
    """Crop the same central window of size *center_crop_size* out of an
    image *x* and its label map *y* (assumed spatially aligned)."""
    if data_format == 'channels_first':
        mid_h, mid_w = x.shape[1] // 2, x.shape[2] // 2
    elif data_format == 'channels_last':
        mid_h, mid_w = x.shape[0] // 2, x.shape[1] // 2
    half_h, half_w = center_crop_size[0] // 2, center_crop_size[1] // 2
    rest_h, rest_w = center_crop_size[0] - half_h, center_crop_size[1] - half_w
    rows = slice(mid_h - half_h, mid_h + rest_h)
    cols = slice(mid_w - half_w, mid_w + rest_w)
    if data_format == 'channels_first':
        return x[:, rows, cols], y[:, rows, cols]
    elif data_format == 'channels_last':
        return x[rows, cols, :], y[rows, cols, :]
def random_crop(x, random_crop_size, data_format, sync_seed=None, **kwargs):
    """Crop a window of ``random_crop_size`` from ``x`` at a random offset.

    Seeding numpy's RNG with ``sync_seed`` makes the offset reproducible,
    e.g. so that related calls can pick the same window.
    """
    np.random.seed(sync_seed)
    if data_format == 'channels_first':
        height, width = x.shape[1], x.shape[2]
    elif data_format == 'channels_last':
        height, width = x.shape[0], x.shape[1]
    slack_h = (height - random_crop_size[0]) // 2
    slack_w = (width - random_crop_size[1]) // 2
    # Draw the height offset first, then the width offset (same RNG order
    # as before), skipping the draw entirely when there is no slack.
    off_h = np.random.randint(slack_h) if slack_h != 0 else 0
    off_w = np.random.randint(slack_w) if slack_w != 0 else 0
    rows = slice(off_h, off_h + random_crop_size[0])
    cols = slice(off_w, off_w + random_crop_size[1])
    if data_format == 'channels_first':
        return x[:, rows, cols]
    elif data_format == 'channels_last':
        return x[rows, cols, :]
def pair_random_crop(x, y, random_crop_size, data_format, sync_seed=None, **kwargs):
    """Crop the same randomly-placed window out of an image and its label.

    Parameters
    ----------
    x, y : ndarray
        Image and label map; their spatial dimensions must match.
    random_crop_size : tuple
        (height, width) of the window to cut out.
    data_format : str
        'channels_first' or 'channels_last'.
    sync_seed : int, optional
        Seed for numpy's RNG so that the offset is reproducible.

    Returns
    -------
    (ndarray, ndarray)
        The cropped image and the identically-cropped label.
    """
    np.random.seed(sync_seed)
    if data_format == 'channels_first':
        h, w = x.shape[1], x.shape[2]
    elif data_format == 'channels_last':
        h, w = x.shape[0], x.shape[1]
    rangeh = (h - random_crop_size[0]) // 2
    rangew = (w - random_crop_size[1]) // 2
    offseth = 0 if rangeh == 0 else np.random.randint(rangeh)
    offsetw = 0 if rangew == 0 else np.random.randint(rangew)
    h_start, h_end = offseth, offseth + random_crop_size[0]
    w_start, w_end = offsetw, offsetw + random_crop_size[1]
    if data_format == 'channels_first':
        # BUG FIX: the label used to be sliced as y[:, h_start:h_end,
        # h_start:h_end], applying the *height* bounds to the width axis and
        # mis-cropping y whenever the two axes had different bounds.
        return x[:, h_start:h_end, w_start:w_end], y[:, h_start:h_end, w_start:w_end]
    elif data_format == 'channels_last':
        return x[h_start:h_end, w_start:w_end, :], y[h_start:h_end, w_start:w_end, :]
class SegDirectoryIterator(Iterator):
    '''Iterator yielding (image, label) batches for semantic segmentation.

    Users need to ensure that all files exist.
    Label images should be png images where pixel values represents class number.
    find images -name *.jpg > images.txt
    find labels -name *.png > labels.txt
    for a file name 2011_002920.jpg, each row should contain 2011_002920
    file_path: location of train.txt, or val.txt in PASCAL VOC2012 format,
        listing image file path components without extension
    data_dir: location of image files referred to by file in file_path
    label_dir: location of label files
    data_suffix: image file extension, such as `.jpg` or `.png`
    label_suffix: label file suffix, such as `.png`, or `.npy`
    loss_shape: shape to use when applying loss function to the label data
    '''

    def __init__(self, file_path, seg_data_generator,
                 data_dir, data_suffix,
                 label_dir, label_suffix, classes, ignore_label=255,
                 crop_mode='none', label_cval=255, pad_size=None,
                 target_size=None, color_mode='rgb',
                 data_format='default', class_mode='sparse',
                 batch_size=1, shuffle=True, seed=None,
                 save_to_dir=None, save_prefix='', save_format='jpeg',
                 loss_shape=None):
        if data_format == 'default':
            data_format = K.image_data_format()
        self.file_path = file_path
        self.data_dir = data_dir
        self.data_suffix = data_suffix
        self.label_suffix = label_suffix
        self.label_dir = label_dir
        self.classes = classes
        self.seg_data_generator = seg_data_generator
        # BUG FIX: target_size may legitimately be None (variable-size images
        # with batch_size 1, handled below); tuple(None) raised TypeError.
        self.target_size = tuple(target_size) if target_size is not None else None
        self.ignore_label = ignore_label
        self.crop_mode = crop_mode
        self.label_cval = label_cval
        self.pad_size = pad_size
        if color_mode not in {'rgb', 'grayscale'}:
            raise ValueError('Invalid color mode:', color_mode,
                             '; expected "rgb" or "grayscale".')
        self.color_mode = color_mode
        self.data_format = data_format
        self.nb_label_ch = 1
        self.loss_shape = loss_shape
        # Labels are loaded with numpy when the suffix says so, with PIL
        # otherwise.  (Was previously compared with 'is', which relies on
        # CPython string interning; use '==' for value equality.)
        if (self.label_suffix == '.npy') or (self.label_suffix == 'npy'):
            self.label_file_format = 'npy'
        else:
            self.label_file_format = 'img'
        if target_size:
            if self.color_mode == 'rgb':
                if self.data_format == 'channels_last':
                    self.image_shape = self.target_size + (3,)
                else:
                    self.image_shape = (3,) + self.target_size
            else:
                if self.data_format == 'channels_last':
                    self.image_shape = self.target_size + (1,)
                else:
                    self.image_shape = (1,) + self.target_size
            if self.data_format == 'channels_last':
                self.label_shape = self.target_size + (self.nb_label_ch,)
            else:
                self.label_shape = (self.nb_label_ch,) + self.target_size
        elif batch_size != 1:
            raise ValueError(
                'Batch size must be 1 when target image size is undetermined')
        else:
            self.image_shape = None
            self.label_shape = None
        if class_mode not in {'sparse', None}:
            raise ValueError('Invalid class_mode:', class_mode,
                             '; expected one of '
                             '"sparse", or None.')
        self.class_mode = class_mode
        if save_to_dir:
            # Palette is captured lazily from the first label image read.
            self.palette = None
        self.save_to_dir = save_to_dir
        self.save_prefix = save_prefix
        self.save_format = save_format
        # build lists for data files and label files
        self.data_files = []
        self.label_files = []
        fp = open(file_path)
        lines = fp.readlines()
        fp.close()
        self.nb_sample = len(lines)
        for line in lines:
            line = line.strip('\n')
            self.data_files.append(line + data_suffix)
            self.label_files.append(line + label_suffix)
        super(SegDirectoryIterator, self).__init__(
            self.nb_sample, batch_size, shuffle, seed)

    def next(self):
        """Return the next (batch_x, batch_y) pair (or just batch_x when
        class_mode is None), loading, padding/cropping and augmenting each
        image/label pair via the owning SegDataGenerator."""
        with self.lock:
            index_array, current_index, current_batch_size = next(
                self.index_generator)
        # The transformation of images is not under thread lock so it can be
        # done in parallel
        if self.target_size:
            # TODO(ahundt) make dtype properly configurable
            batch_x = np.zeros((current_batch_size,) + self.image_shape)
            # ('==' instead of 'is' for string comparison.)
            if self.loss_shape is None and self.label_file_format == 'img':
                batch_y = np.zeros((current_batch_size,) + self.label_shape,
                                   dtype=int)
            elif self.loss_shape is None:
                batch_y = np.zeros((current_batch_size,) + self.label_shape)
            else:
                batch_y = np.zeros((current_batch_size,) + self.loss_shape,
                                   dtype=np.uint8)
        grayscale = self.color_mode == 'grayscale'
        # build batch of image data and labels
        for i, j in enumerate(index_array):
            data_file = self.data_files[j]
            label_file = self.label_files[j]
            img = load_img(os.path.join(self.data_dir, data_file),
                           grayscale=grayscale, target_size=None)
            label_filepath = os.path.join(self.label_dir, label_file)
            if self.label_file_format == 'npy':
                y = np.load(label_filepath)
            else:
                label = Image.open(label_filepath)
                if self.save_to_dir and self.palette is None:
                    self.palette = label.palette
            # do padding
            if self.target_size:
                if self.crop_mode != 'none':
                    x = img_to_array(img, data_format=self.data_format)
                    if self.label_file_format != 'npy':
                        y = img_to_array(
                            label, data_format=self.data_format).astype(int)
                    img_w, img_h = img.size
                    if self.pad_size:
                        pad_w = max(self.pad_size[1] - img_w, 0)
                        pad_h = max(self.pad_size[0] - img_h, 0)
                    else:
                        pad_w = max(self.target_size[1] - img_w, 0)
                        pad_h = max(self.target_size[0] - img_h, 0)
                    # BUG FIX: '//' keeps the pad widths integral; plain '/'
                    # yields floats under Python 3 and breaks np.lib.pad.
                    if self.data_format == 'channels_first':
                        x = np.lib.pad(x, ((0, 0), (pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2)), 'constant', constant_values=0.)
                        y = np.lib.pad(y, ((0, 0), (pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2)),
                                       'constant', constant_values=self.label_cval)
                    elif self.data_format == 'channels_last':
                        x = np.lib.pad(x, ((pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2), (0, 0)), 'constant', constant_values=0.)
                        y = np.lib.pad(y, ((pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2), (0, 0)), 'constant', constant_values=self.label_cval)
                else:
                    x = img_to_array(img.resize((self.target_size[1], self.target_size[0]),
                                                Image.BILINEAR),
                                     data_format=self.data_format)
                    if self.label_file_format != 'npy':
                        y = img_to_array(label.resize((self.target_size[1], self.target_size[
                            0]), Image.NEAREST), data_format=self.data_format).astype(int)
                    else:
                        print('ERROR: resize not implemented for label npy file')
            if self.target_size is None:
                batch_x = np.zeros((current_batch_size,) + x.shape)
                if self.loss_shape is not None:
                    batch_y = np.zeros((current_batch_size,) + self.loss_shape)
                else:
                    batch_y = np.zeros((current_batch_size,) + y.shape)
            x, y = self.seg_data_generator.random_transform(x, y)
            x = self.seg_data_generator.standardize(x)
            if self.ignore_label:
                # Remap the ignore label onto the extra class index.
                y[np.where(y == self.ignore_label)] = self.classes
            if self.loss_shape is not None:
                y = np.reshape(y, self.loss_shape)
            batch_x[i] = x
            batch_y[i] = y
        # optionally save augmented images to disk for debugging purposes
        if self.save_to_dir:
            for i in range(current_batch_size):
                img = array_to_img(batch_x[i], self.data_format, scale=True)
                label = batch_y[i][:, :, 0].astype('uint8')
                label[np.where(label == self.classes)] = self.ignore_label
                label = Image.fromarray(label, mode='P')
                label.palette = self.palette
                fname = '{prefix}_{index}_{hash}'.format(prefix=self.save_prefix,
                                                         index=current_index + i,
                                                         hash=np.random.randint(1e4))
                img.save(os.path.join(self.save_to_dir, 'img_' +
                                      fname + '.{format}'.format(format=self.save_format)))
                label.save(os.path.join(self.save_to_dir,
                                        'label_' + fname + '.png'))
        # return
        batch_x = preprocess_input(batch_x)
        if self.class_mode == 'sparse':
            return batch_x, batch_y
        else:
            return batch_x
class SegDataGenerator(object):
    """Data generator for semantic segmentation: applies the same random
    geometric transforms to an image and its label map, and normalises the
    image.  Configuration mirrors keras' ImageDataGenerator."""

    def __init__(self,
                 featurewise_center=False,
                 samplewise_center=False,
                 featurewise_std_normalization=False,
                 samplewise_std_normalization=False,
                 channelwise_center=False,
                 rotation_range=0.,
                 width_shift_range=0.,
                 height_shift_range=0.,
                 shear_range=0.,
                 zoom_range=0.,
                 zoom_maintain_shape=True,
                 channel_shift_range=0.,
                 fill_mode='constant',
                 cval=0.,
                 label_cval=255,
                 crop_mode='none',
                 crop_size=(0, 0),
                 pad_size=None,
                 horizontal_flip=False,
                 vertical_flip=False,
                 rescale=None,
                 data_format='default'):
        if data_format == 'default':
            data_format = K.image_data_format()
        # Stores every constructor argument as an attribute in one go
        # (including 'self', harmlessly overwritten).
        self.__dict__.update(locals())
        # Statistics filled in by fit() / set_ch_mean().
        self.mean = None
        self.ch_mean = None
        self.std = None
        self.principal_components = None
        self.rescale = rescale
        if data_format not in {'channels_last', 'channels_first'}:
            raise Exception('data_format should be channels_last (channel after row and '
                            'column) or channels_first (channel before row and column). '
                            'Received arg: ', data_format)
        if crop_mode not in {'none', 'random', 'center'}:
            raise Exception('crop_mode should be "none" or "random" or "center" '
                            'Received arg: ', crop_mode)
        self.data_format = data_format
        # 1-based axis indices for batched data; single images use index - 1.
        if data_format == 'channels_first':
            self.channel_index = 1
            self.row_index = 2
            self.col_index = 3
        if data_format == 'channels_last':
            self.channel_index = 3
            self.row_index = 1
            self.col_index = 2
        if np.isscalar(zoom_range):
            self.zoom_range = [1 - zoom_range, 1 + zoom_range]
        elif len(zoom_range) == 2:
            self.zoom_range = [zoom_range[0], zoom_range[1]]
        else:
            raise Exception('zoom_range should be a float or '
                            'a tuple or list of two floats. '
                            'Received arg: ', zoom_range)

    def flow_from_directory(self, file_path, data_dir, data_suffix,
                            label_dir, label_suffix, classes,
                            ignore_label=255,
                            target_size=None, color_mode='rgb',
                            class_mode='sparse',
                            batch_size=32, shuffle=True, seed=None,
                            save_to_dir=None, save_prefix='', save_format='jpeg',
                            loss_shape=None):
        """Build a SegDirectoryIterator over the listed image/label files.
        When cropping is enabled, the crop size overrides target_size."""
        if self.crop_mode == 'random' or self.crop_mode == 'center':
            target_size = self.crop_size
        return SegDirectoryIterator(
            file_path, self,
            data_dir=data_dir, data_suffix=data_suffix,
            label_dir=label_dir, label_suffix=label_suffix,
            classes=classes, ignore_label=ignore_label,
            crop_mode=self.crop_mode, label_cval=self.label_cval,
            pad_size=self.pad_size,
            target_size=target_size, color_mode=color_mode,
            data_format=self.data_format, class_mode=class_mode,
            batch_size=batch_size, shuffle=shuffle, seed=seed,
            save_to_dir=save_to_dir, save_prefix=save_prefix,
            save_format=save_format,
            loss_shape=loss_shape)

    def standardize(self, x):
        """Apply the configured normalisations (in place) to one image."""
        if self.rescale:
            x *= self.rescale
        # x is a single image, so it doesn't have image number at index 0
        img_channel_index = self.channel_index - 1
        if self.samplewise_center:
            x -= np.mean(x, axis=img_channel_index, keepdims=True)
        if self.samplewise_std_normalization:
            x /= (np.std(x, axis=img_channel_index, keepdims=True) + 1e-7)
        if self.featurewise_center:
            # Requires fit() to have populated self.mean first.
            x -= self.mean
        if self.featurewise_std_normalization:
            x /= (self.std + 1e-7)
        if self.channelwise_center:
            x -= self.ch_mean
        return x

    def random_transform(self, x, y):
        """Apply one random geometric transform to an image/label pair.

        A single homography composed of rotation, translation, shear and
        zoom is applied to both x and y (labels use constant fill with
        label_cval), followed by optional channel shift, flips and crop.
        """
        # x is a single image, so it doesn't have image number at index 0
        img_row_index = self.row_index - 1
        img_col_index = self.col_index - 1
        img_channel_index = self.channel_index - 1
        if self.crop_mode == 'none':
            crop_size = (x.shape[img_row_index], x.shape[img_col_index])
        else:
            crop_size = self.crop_size
        assert x.shape[img_row_index] == y.shape[img_row_index] and x.shape[img_col_index] == y.shape[
            img_col_index], 'DATA ERROR: Different shape of data and label!\ndata shape: %s, label shape: %s' % (str(x.shape), str(y.shape))
        # use composition of homographies to generate final transform that
        # needs to be applied
        if self.rotation_range:
            theta = np.pi / 180 * \
                np.random.uniform(-self.rotation_range, self.rotation_range)
        else:
            theta = 0
        rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],
                                    [np.sin(theta), np.cos(theta), 0],
                                    [0, 0, 1]])
        if self.height_shift_range:
            # * x.shape[img_row_index]
            tx = np.random.uniform(-self.height_shift_range,
                                   self.height_shift_range) * crop_size[0]
        else:
            tx = 0
        if self.width_shift_range:
            # * x.shape[img_col_index]
            ty = np.random.uniform(-self.width_shift_range,
                                   self.width_shift_range) * crop_size[1]
        else:
            ty = 0
        translation_matrix = np.array([[1, 0, tx],
                                       [0, 1, ty],
                                       [0, 0, 1]])
        if self.shear_range:
            shear = np.random.uniform(-self.shear_range, self.shear_range)
        else:
            shear = 0
        shear_matrix = np.array([[1, -np.sin(shear), 0],
                                 [0, np.cos(shear), 0],
                                 [0, 0, 1]])
        if self.zoom_range[0] == 1 and self.zoom_range[1] == 1:
            zx, zy = 1, 1
        else:
            zx, zy = np.random.uniform(
                self.zoom_range[0], self.zoom_range[1], 2)
        if self.zoom_maintain_shape:
            # Force isotropic zoom so aspect ratio is preserved.
            zy = zx
        zoom_matrix = np.array([[zx, 0, 0],
                                [0, zy, 0],
                                [0, 0, 1]])
        transform_matrix = np.dot(
            np.dot(np.dot(rotation_matrix, translation_matrix), shear_matrix), zoom_matrix)
        h, w = x.shape[img_row_index], x.shape[img_col_index]
        transform_matrix = transform_matrix_offset_center(
            transform_matrix, h, w)
        x = apply_transform(x, transform_matrix, img_channel_index,
                            fill_mode=self.fill_mode, cval=self.cval)
        y = apply_transform(y, transform_matrix, img_channel_index,
                            fill_mode='constant', cval=self.label_cval)
        if self.channel_shift_range != 0:
            # Channel shift applies to the image only, never to labels.
            x = random_channel_shift(
                x, self.channel_shift_range, img_channel_index)
        if self.horizontal_flip:
            if np.random.random() < 0.5:
                x = flip_axis(x, img_col_index)
                y = flip_axis(y, img_col_index)
        if self.vertical_flip:
            if np.random.random() < 0.5:
                x = flip_axis(x, img_row_index)
                y = flip_axis(y, img_row_index)
        if self.crop_mode == 'center':
            x, y = pair_center_crop(x, y, self.crop_size, self.data_format)
        elif self.crop_mode == 'random':
            x, y = pair_random_crop(x, y, self.crop_size, self.data_format)
        # TODO:
        # channel-wise normalization
        # barrel/fisheye
        return x, y

    def fit(self, X,
            augment=False,
            rounds=1,
            seed=None):
        '''Required for featurewise_center and featurewise_std_normalization

        # Arguments
            X: Numpy array, the data to fit on.
            augment: whether to fit on randomly augmented samples
            rounds: if `augment`,
                how many augmentation passes to do over the data
            seed: random seed.

        NOTE(review): the augment=True path calls self.random_transform(X[i])
        with a single argument, but random_transform requires both x and y
        (and returns a tuple), so this branch raises TypeError as written.
        '''
        X = np.copy(X)
        if augment:
            aX = np.zeros(tuple([rounds * X.shape[0]] + list(X.shape)[1:]))
            for r in range(rounds):
                for i in range(X.shape[0]):
                    aX[i + r * X.shape[0]] = self.random_transform(X[i])
            X = aX
        if self.featurewise_center:
            self.mean = np.mean(X, axis=0)
            X -= self.mean
        if self.featurewise_std_normalization:
            self.std = np.std(X, axis=0)
            X /= (self.std + 1e-7)

    def set_ch_mean(self, ch_mean):
        # Per-channel mean used by channelwise_center in standardize().
        self.ch_mean = ch_mean
| 43.067308 | 160 | 0.552936 |
f7093f4912a687d4e7ec0a6c86c92b455f7b055d | 274,474 | py | Python | rsl_comm_py/shearwater_registers.py | RedshiftLabsPtyLtd/rsl_comm_py | e53b4e85079898c894dac25842a08bcc303edfbb | [
"MIT"
] | null | null | null | rsl_comm_py/shearwater_registers.py | RedshiftLabsPtyLtd/rsl_comm_py | e53b4e85079898c894dac25842a08bcc303edfbb | [
"MIT"
] | null | null | null | rsl_comm_py/shearwater_registers.py | RedshiftLabsPtyLtd/rsl_comm_py | e53b4e85079898c894dac25842a08bcc303edfbb | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Author: Dr. Konstantin Selyunin
# License: MIT
# Created: 2020.08.19
import logging
import os.path
import struct
from abc import abstractmethod, ABC
from typing import Union, Tuple
from .rsl_xml_svd.rsl_svd_parser import RslSvdParser
class ShearWaterRegisters(ABC):
def __init__(self, **kwargs):
self.svd_parser = RslSvdParser(svd_file=ShearWaterRegisters.find_svd('shearwater.svd'))
@staticmethod
def find_svd(svd_file_name: str):
parent_dir = os.path.join(os.path.dirname(__file__), os.pardir)
for root, dirs, files in os.walk(parent_dir):
if svd_file_name in files:
return os.path.join(root, svd_file_name)
    @abstractmethod
    def connect(self, *args, **kwargs):
        """Open the communication channel to the device; implemented by concrete subclasses."""
        pass
    @abstractmethod
    def read_register(self, reg_addr: int, **kw) -> Tuple[bool, bytes]:
        """Read one register at `reg_addr`; implemented by concrete subclasses.

        Returns a (success, payload) pair; the accessors in this class expect
        at least 4 payload bytes when success is True.
        """
        pass
    @abstractmethod
    def write_register(self, reg_addr: int, reg_value: Union[int, bytes, float, str], **kw):
        """Write `reg_value` to the register at `reg_addr`; implemented by concrete subclasses."""
        pass
    @property
    def creg_com_settings(self):
        """
        The CREG_COM_SETTINGS register is used to set the boards serial port baud rate and to enable (disable) the
        automatic transmission of sensor data and estimated states (telemetry).
        Payload structure:
        [31:28] : BAUD_RATE -- Sets the baud rate of the boards main serial port:
        :return: BAUD_RATE as bitField;
        """
        addr = 0x00
        ok, payload = self.read_register(addr)
        # NOTE: falls through and returns None implicitly when the read fails.
        if ok:
            reg = self.svd_parser.find_register_by(name='CREG_COM_SETTINGS')
            # interpret the 4 payload bytes as one big-endian 32-bit word
            reg.raw_value, = struct.unpack('>I', payload[0:4])
            # find value for BAUD_RATE bit field
            baud_rate_val = (reg.raw_value >> 28) & 0x000F
            baud_rate_enum = reg.find_field_by(name='BAUD_RATE').find_enum_entry_by(value=baud_rate_val)
            return reg, baud_rate_enum

    @creg_com_settings.setter
    def creg_com_settings(self, new_value):
        """Write *new_value* to the CREG_COM_SETTINGS register (address 0x00)."""
        addr = 0x00
        self.write_register(addr, new_value)
@property
def creg_com_rates1(self):
"""
The CREG_COM_RATES1 register sets desired telemetry transmission rates in Hz for raw accelerometer 1, gyro 1,
gyro 2 and magnetometer 1 data. If the specified rate is 0, then no data is transmitted.
Payload structure:
[31:24] : RAW_ACCEL_1_RATE -- Specifies the desired raw accelerometer 1 data broadcast rate in Hz. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[23:16] : RAW_GYRO_1_RATE -- Specifies the desired raw gyro 1 data broadcast rate in Hz. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz
[15:8] : RAW_GYRO_2_RATE -- Specifies the desired raw gyro 2 data broadcast rate in Hz. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[7:0] : RAW_MAG_1_RATE -- Specifies the desired raw magnetometer 1 data broadcast rate in Hz. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
:return: RAW_ACCEL_1_RATE as uint8_t; RAW_GYRO_1_RATE as uint8_t; RAW_GYRO_2_RATE as uint8_t; RAW_MAG_1_RATE as uint8_t;
"""
addr = 0x01
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_COM_RATES1')
reg.raw_value, = struct.unpack('>I', payload[0:4])
raw_accel_1_rate, raw_gyro_1_rate, raw_gyro_2_rate, raw_mag_1_rate = struct.unpack('>BBBB', payload[0:4])
return reg, raw_accel_1_rate, raw_gyro_1_rate, raw_gyro_2_rate, raw_mag_1_rate
@creg_com_rates1.setter
def creg_com_rates1(self, new_value):
addr = 0x01
self.write_register(addr, new_value)
@property
def creg_com_rates2(self):
"""
The CREG_COM_RATES2 register sets desired telemetry transmission rates for the magnetometer 2, all raw data,
and temperature data rate. The ALL_RAW_RATE setting has higher priority over the individual raw sensor data
settings, i.e. whenever this bitfield is set, then the individual raw sensor settings are ignored and not
used. If the specified rate is 0, then no data is transmitted.
Payload structure:
[31:24] : TEMP_RATE -- Specifies the desired broadcast rate for temperature data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[23:16] : RAW_MAG_2_RATE -- Specifies the desired raw magnetometer 2 data broadcast rate in Hz. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[7:0] : ALL_RAW_RATE -- Specifies the desired broadcast rate for all raw sensor data. If set, this overrides the broadcast rate setting for individual raw data broadcast rates. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
:return: TEMP_RATE as uint8_t; RAW_MAG_2_RATE as uint8_t; ALL_RAW_RATE as uint8_t;
"""
addr = 0x02
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_COM_RATES2')
reg.raw_value, = struct.unpack('>I', payload[0:4])
temp_rate, raw_mag_2_rate, all_raw_rate = struct.unpack('>BBxB', payload[0:4])
return reg, temp_rate, raw_mag_2_rate, all_raw_rate
@creg_com_rates2.setter
def creg_com_rates2(self, new_value):
addr = 0x02
self.write_register(addr, new_value)
@property
def creg_com_rates3(self):
"""
The CREG_COM_RATES3 register sets desired telemetry transmission rates for processed sensor data for the
sensors: the accelerometer 1, gyro 1, gyro 2, and magnetometer 1. If the specified rate is 0, then no data is
transmitted.
Payload structure:
[31:24] : PROC_ACCEL_1_RATE -- Specifies the desired broadcast rate for processed accelerometer 1 data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[23:16] : PROC_GYRO_1_RATE -- Specifies the desired broadcast rate for processed rate gyro 1 data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[15:8] : PROC_GYRO_2_RATE -- Specifies the desired broadcast rate for processed processed rate gyro 2 data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[7:0] : PROC_MAG_1_RATE -- Specifies the desired broadcast rate for processed magnetometer 1 data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
:return: PROC_ACCEL_1_RATE as uint8_t; PROC_GYRO_1_RATE as uint8_t; PROC_GYRO_2_RATE as uint8_t; PROC_MAG_1_RATE as uint8_t;
"""
addr = 0x03
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_COM_RATES3')
reg.raw_value, = struct.unpack('>I', payload[0:4])
proc_accel_1_rate, proc_gyro_1_rate, proc_gyro_2_rate, proc_mag_1_rate = struct.unpack('>BBBB', payload[0:4])
return reg, proc_accel_1_rate, proc_gyro_1_rate, proc_gyro_2_rate, proc_mag_1_rate
@creg_com_rates3.setter
def creg_com_rates3(self, new_value):
addr = 0x03
self.write_register(addr, new_value)
@property
def creg_com_rates4(self):
"""
The CREG_COM_RATES4 register defines the desired telemetry transmission rates for the processed data for the
magnetometer 2, and for all processed data. The ALL_PROC_RATE setting has higher priority over the individual
processed sensor data settings, i.e. whenever this bitfield is set, then the individual processed sensor
transmission rate settings are ignored and not used. If the specified rate is 0, then no data is transmitted.
Payload structure:
[31:24] : PROC_MAG_2_RATE -- Specifies the desired broadcast rate for processed magnetometer 2 data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[7:0] : ALL_PROC_RATE -- Specifies the desired broadcast rate for raw all processed sensor data. If set, this overrides the broadcast rate setting for individual processed data broadcast rates. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
:return: PROC_MAG_2_RATE as uint8_t; ALL_PROC_RATE as uint8_t;
"""
addr = 0x04
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_COM_RATES4')
reg.raw_value, = struct.unpack('>I', payload[0:4])
proc_mag_2_rate, all_proc_rate = struct.unpack('>BxxB', payload[0:4])
return reg, proc_mag_2_rate, all_proc_rate
@creg_com_rates4.setter
def creg_com_rates4(self, new_value):
addr = 0x04
self.write_register(addr, new_value)
@property
def creg_com_rates5(self):
"""
The CREG_COM_RATES5 register sets desired telemetry transmission rates for quaternions, Euler Angles,
position, and velocity estimates. If the specified rate is 0, then no data is transmitted.
Payload structure:
[31:24] : QUAT_RATE -- Specifies the desired broadcast rate for quaternion data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[23:16] : EULER_RATE -- Specifies the desired broadcast rate for Euler Angle data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[15:8] : POSITION_RATE -- Specifies the desired broadcast rate position. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[7:0] : VELOCITY_RATE -- Specifies the desired broadcast rate for velocity. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
:return: QUAT_RATE as uint8_t; EULER_RATE as uint8_t; POSITION_RATE as uint8_t; VELOCITY_RATE as uint8_t;
"""
addr = 0x05
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_COM_RATES5')
reg.raw_value, = struct.unpack('>I', payload[0:4])
quat_rate, euler_rate, position_rate, velocity_rate = struct.unpack('>BBBB', payload[0:4])
return reg, quat_rate, euler_rate, position_rate, velocity_rate
@creg_com_rates5.setter
def creg_com_rates5(self, new_value):
addr = 0x05
self.write_register(addr, new_value)
@property
def creg_com_rates6(self):
"""
The CREG_COM_RATES6 register sets desired telemetry transmission rates for pose (Euler/position packet),
health, and gyro bias estimates for the gyro 1 and gyro 2. If the specified rate is 0, then no data is
transmitted.
Payload structure:
[31:24] : POSE_RATE -- Specifies the desired broadcast rate for pose (Euler Angle and position) data. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[19:16] : HEALTH_RATE -- Specifies the desired broadcast rate for the sensor health packet.
[15:8] : GYRO_BIAS_1_RATE -- Specifies the desired broadcast rate for gyro 1 bias estimates. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
[7:0] : GYRO_BIAS_2_RATE -- Specifies the desired broadcast rate for gyro 2 bias estimates. The data is stored as an unsigned 8-bit integer, yielding a maximum rate of 255 Hz.
:return: POSE_RATE as uint8_t; HEALTH_RATE as bitField; GYRO_BIAS_1_RATE as uint8_t; GYRO_BIAS_2_RATE as uint8_t;
"""
addr = 0x06
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_COM_RATES6')
reg.raw_value, = struct.unpack('>I', payload[0:4])
pose_rate, gyro_bias_1_rate, gyro_bias_2_rate = struct.unpack('>BxBB', payload[0:4])
reg.raw_value, = struct.unpack('>I', payload[0:4])
# find value for HEALTH_RATE bit field
health_rate_val = (reg.raw_value >> 16) & 0x000F
health_rate_enum = reg.find_field_by(name='HEALTH_RATE').find_enum_entry_by(value=health_rate_val)
return reg, pose_rate, gyro_bias_1_rate, gyro_bias_2_rate, reg, health_rate_enum
@creg_com_rates6.setter
def creg_com_rates6(self, new_value):
addr = 0x06
self.write_register(addr, new_value)
@property
def creg_com_rates7(self):
"""
The CREG_COM_RATES7 register sets desired telemetry transmission rates in Hz for NMEA packets.
Payload structure:
[31:28] : NMEA_HEALTH_RATE -- Specifies the desired broadcast rate for Redshift Labs NMEA-style health packet.
[27:24] : NMEA_POSE_RATE -- Specifies the desired broadcast rate for Redshift Labs NMEA-style pose (Euler Angle/position) packet.
[23:20] : NMEA_ATTITUDE_RATE -- Specifies the desired broadcast rate for Redshift Labs NMEA-style attitude packet.
[19:16] : NMEA_SENSOR_RATE -- Specifies the desired broadcast rate for Redshift Labs NMEA-style sensor data packet.
[15:12] : NMEA_RATES_RATE -- Specifies the desired broadcast rate for Redshift Labs NMEA-style rate data packet.
[11:8] : NMEA_GPS_POSE_RATE -- Specifies the desired broadcast rate for Redshift Labs NMEA-style GPS pose packet.
[7:4] : NMEA_QUAT_RATE -- Specifies the desired broadcast rate for Redshift Labs NMEA-style quaternion packet.
:return: NMEA_HEALTH_RATE as bitField; NMEA_POSE_RATE as bitField; NMEA_ATTITUDE_RATE as bitField; NMEA_SENSOR_RATE as bitField; NMEA_RATES_RATE as bitField; NMEA_GPS_POSE_RATE as bitField; NMEA_QUAT_RATE as bitField;
"""
addr = 0x07
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_COM_RATES7')
reg.raw_value, = struct.unpack('>I', payload[0:4])
# find value for NMEA_HEALTH_RATE bit field
nmea_health_rate_val = (reg.raw_value >> 28) & 0x000F
nmea_health_rate_enum = reg.find_field_by(name='NMEA_HEALTH_RATE').find_enum_entry_by(value=nmea_health_rate_val)
# find value for NMEA_POSE_RATE bit field
nmea_pose_rate_val = (reg.raw_value >> 24) & 0x000F
nmea_pose_rate_enum = reg.find_field_by(name='NMEA_POSE_RATE').find_enum_entry_by(value=nmea_pose_rate_val)
# find value for NMEA_ATTITUDE_RATE bit field
nmea_attitude_rate_val = (reg.raw_value >> 20) & 0x000F
nmea_attitude_rate_enum = reg.find_field_by(name='NMEA_ATTITUDE_RATE').find_enum_entry_by(value=nmea_attitude_rate_val)
# find value for NMEA_SENSOR_RATE bit field
nmea_sensor_rate_val = (reg.raw_value >> 16) & 0x000F
nmea_sensor_rate_enum = reg.find_field_by(name='NMEA_SENSOR_RATE').find_enum_entry_by(value=nmea_sensor_rate_val)
# find value for NMEA_RATES_RATE bit field
nmea_rates_rate_val = (reg.raw_value >> 12) & 0x000F
nmea_rates_rate_enum = reg.find_field_by(name='NMEA_RATES_RATE').find_enum_entry_by(value=nmea_rates_rate_val)
# find value for NMEA_GPS_POSE_RATE bit field
nmea_gps_pose_rate_val = (reg.raw_value >> 8) & 0x000F
nmea_gps_pose_rate_enum = reg.find_field_by(name='NMEA_GPS_POSE_RATE').find_enum_entry_by(value=nmea_gps_pose_rate_val)
# find value for NMEA_QUAT_RATE bit field
nmea_quat_rate_val = (reg.raw_value >> 4) & 0x000F
nmea_quat_rate_enum = reg.find_field_by(name='NMEA_QUAT_RATE').find_enum_entry_by(value=nmea_quat_rate_val)
return reg, nmea_health_rate_enum, nmea_pose_rate_enum, nmea_attitude_rate_enum, nmea_sensor_rate_enum, nmea_rates_rate_enum, nmea_gps_pose_rate_enum, nmea_quat_rate_enum
@creg_com_rates7.setter
def creg_com_rates7(self, new_value):
addr = 0x07
self.write_register(addr, new_value)
@property
def creg_misc_settings(self):
"""
This register contains miscellaneous filter and sensor control options.
Payload structure:
[8] : PPS -- If set, this bit causes the TX2 pin on the IO Expansion header to be used as the PPS input from an external GPS module. PPS pulses will then be used to synchronize the system clock to UTC time of day.
[3] : ZG -- If set, this bit causes the devicee to attempt to measure the rate gyro bias on startup. The sensor must be stationary on startup for this feature to work properly.
[2] : Q -- If this bit is set, the sensor will run in quaternion mode instead of Euler Angle mode.
[1] : MAG1 -- If set, the magnetometer 1 will be used in state updates.
[0] : MAG2 -- If set, the magnetometer 2 will be used in state updates.
:return: PPS as bitField; ZG as bitField; Q as bitField; MAG1 as bitField; MAG2 as bitField;
"""
addr = 0x08
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MISC_SETTINGS')
reg.raw_value, = struct.unpack('>I', payload[0:4])
# find value for PPS bit field
pps_val = (reg.raw_value >> 8) & 0x0001
pps_enum = reg.find_field_by(name='PPS').find_enum_entry_by(value=pps_val)
# find value for ZG bit field
zg_val = (reg.raw_value >> 3) & 0x0001
zg_enum = reg.find_field_by(name='ZG').find_enum_entry_by(value=zg_val)
# find value for Q bit field
q_val = (reg.raw_value >> 2) & 0x0001
q_enum = reg.find_field_by(name='Q').find_enum_entry_by(value=q_val)
# find value for MAG1 bit field
mag1_val = (reg.raw_value >> 1) & 0x0001
mag1_enum = reg.find_field_by(name='MAG1').find_enum_entry_by(value=mag1_val)
# find value for MAG2 bit field
mag2_val = (reg.raw_value >> 0) & 0x0001
mag2_enum = reg.find_field_by(name='MAG2').find_enum_entry_by(value=mag2_val)
return reg, pps_enum, zg_enum, q_enum, mag1_enum, mag2_enum
@creg_misc_settings.setter
def creg_misc_settings(self, new_value):
addr = 0x08
self.write_register(addr, new_value)
@property
def creg_gyro_1_meas_range(self):
"""
The CREG_GYRO_1_MEAS_RANGE register sets the desired measurement range for the gyro 1 sensor. If the rate is
not set, then the default value of 2000 deg/s will be used as a measurement range.
Payload structure:
[1:0] : MEAS_GYRO1 -- Specifies the desired measurement range for the gyro 1 measurements.
:return: MEAS_GYRO1 as bitField;
"""
addr = 0x09
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_1_MEAS_RANGE')
reg.raw_value, = struct.unpack('>I', payload[0:4])
# find value for MEAS_GYRO1 bit field
meas_gyro1_val = (reg.raw_value >> 0) & 0x0003
meas_gyro1_enum = reg.find_field_by(name='MEAS_GYRO1').find_enum_entry_by(value=meas_gyro1_val)
return reg, meas_gyro1_enum
@creg_gyro_1_meas_range.setter
def creg_gyro_1_meas_range(self, new_value):
addr = 0x09
self.write_register(addr, new_value)
@property
def creg_gyro_1_trim_x(self):
"""
This register sets the x-axis rate gyro 1 trim, which is used to add additional bias compensation for the rate
gyros during calls to the ZERO_GYRO_BIAS command.
Payload structure:
[31:0] : GYRO_1_TRIM_X -- 32-bit IEEE Floating Point Value
:return: GYRO_1_TRIM_X as float;
"""
addr = 0x0A
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_1_TRIM_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_trim_x, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_trim_x,
@creg_gyro_1_trim_x.setter
def creg_gyro_1_trim_x(self, new_value):
addr = 0x0A
self.write_register(addr, new_value)
@property
def creg_gyro_1_trim_y(self):
"""
This register sets the y-axis rate gyro 1 trim, which is used to add additional bias compensation for the rate
gyros during calls to the ZERO_GYRO_BIAS command.
Payload structure:
[31:0] : GYRO_1_TRIM_Y -- 32-bit IEEE Floating Point Value
:return: GYRO_1_TRIM_Y as float;
"""
addr = 0x0B
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_1_TRIM_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_trim_y, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_trim_y,
@creg_gyro_1_trim_y.setter
def creg_gyro_1_trim_y(self, new_value):
addr = 0x0B
self.write_register(addr, new_value)
@property
def creg_gyro_1_trim_z(self):
"""
This register sets the z-axis rate gyro 1 trim, which is used to add additional bias compensation for the rate
gyros during calls to the ZERO_GYRO_BIAS command.
Payload structure:
[31:0] : GYRO_1_TRIM_Z -- 32-bit IEEE Floating Point Value
:return: GYRO_1_TRIM_Z as float;
"""
addr = 0x0C
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_1_TRIM_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_trim_z, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_trim_z,
@creg_gyro_1_trim_z.setter
def creg_gyro_1_trim_z(self, new_value):
addr = 0x0C
self.write_register(addr, new_value)
@property
def creg_gyro_2_meas_range(self):
"""
The CREG_GYRO_2_MEAS_RANGE register sets the desired measurement range for the gyro 2 sensor. If the rate is
not set, then the default value of 2000 deg/s will be used as a measurement range.
Payload structure:
[1:0] : MEAS_GYRO2 -- Specifies the desired measurement range for the gyro 2 measurements.
:return: MEAS_GYRO2 as bitField;
"""
addr = 0x0D
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_2_MEAS_RANGE')
reg.raw_value, = struct.unpack('>I', payload[0:4])
# find value for MEAS_GYRO2 bit field
meas_gyro2_val = (reg.raw_value >> 0) & 0x0003
meas_gyro2_enum = reg.find_field_by(name='MEAS_GYRO2').find_enum_entry_by(value=meas_gyro2_val)
return reg, meas_gyro2_enum
@creg_gyro_2_meas_range.setter
def creg_gyro_2_meas_range(self, new_value):
addr = 0x0D
self.write_register(addr, new_value)
@property
def creg_gyro_2_trim_x(self):
"""
This register sets the x-axis rate gyro 2 trim, which is used to add additional bias compensation for the rate
gyros during calls to the ZERO_GYRO_BIAS command.
Payload structure:
[31:0] : GYRO_2_TRIM_X -- 32-bit IEEE Floating Point Value
:return: GYRO_2_TRIM_X as float;
"""
addr = 0x0E
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_2_TRIM_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_trim_x, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_trim_x,
@creg_gyro_2_trim_x.setter
def creg_gyro_2_trim_x(self, new_value):
addr = 0x0E
self.write_register(addr, new_value)
@property
def creg_gyro_2_trim_y(self):
"""
This register sets the y-axis rate gyro 2 trim, which is used to add additional bias compensation for the rate
gyros during calls to the ZERO_GYRO_BIAS command.
Payload structure:
[31:0] : GYRO_2_TRIM_Y -- 32-bit IEEE Floating Point Value
:return: GYRO_2_TRIM_Y as float;
"""
addr = 0x0F
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_2_TRIM_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_trim_y, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_trim_y,
@creg_gyro_2_trim_y.setter
def creg_gyro_2_trim_y(self, new_value):
addr = 0x0F
self.write_register(addr, new_value)
@property
def creg_gyro_2_trim_z(self):
"""
This register sets the z-axis rate gyro 2 trim, which is used to add additional bias compensation for the rate
gyros during calls to the ZERO_GYRO_BIAS command.
Payload structure:
[31:0] : GYRO_2_TRIM_Z -- 32-bit IEEE Floating Point Value
:return: GYRO_2_TRIM_Z as float;
"""
addr = 0x10
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_GYRO_2_TRIM_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_trim_z, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_trim_z,
@creg_gyro_2_trim_z.setter
def creg_gyro_2_trim_z(self, new_value):
addr = 0x10
self.write_register(addr, new_value)
@property
def creg_mag_1_cal1_1(self):
"""
Row 1, Column 1 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL1_1 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL1_1 as float;
"""
addr = 0x11
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL1_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal1_1, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal1_1,
@creg_mag_1_cal1_1.setter
def creg_mag_1_cal1_1(self, new_value):
addr = 0x11
self.write_register(addr, new_value)
@property
def creg_mag_1_cal1_2(self):
"""
Row 1, Column 2 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL1_2 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL1_2 as float;
"""
addr = 0x12
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL1_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal1_2, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal1_2,
@creg_mag_1_cal1_2.setter
def creg_mag_1_cal1_2(self, new_value):
addr = 0x12
self.write_register(addr, new_value)
@property
def creg_mag_1_cal1_3(self):
"""
Row 1, Column 3 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL1_3 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL1_3 as float;
"""
addr = 0x13
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL1_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal1_3, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal1_3,
@creg_mag_1_cal1_3.setter
def creg_mag_1_cal1_3(self, new_value):
addr = 0x13
self.write_register(addr, new_value)
@property
def creg_mag_1_cal2_1(self):
"""
Row 2, Column 1 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL2_1 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL2_1 as float;
"""
addr = 0x14
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL2_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal2_1, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal2_1,
@creg_mag_1_cal2_1.setter
def creg_mag_1_cal2_1(self, new_value):
addr = 0x14
self.write_register(addr, new_value)
@property
def creg_mag_1_cal2_2(self):
"""
Row 2, Column 2 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL2_2 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL2_2 as float;
"""
addr = 0x15
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL2_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal2_2, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal2_2,
@creg_mag_1_cal2_2.setter
def creg_mag_1_cal2_2(self, new_value):
addr = 0x15
self.write_register(addr, new_value)
@property
def creg_mag_1_cal2_3(self):
"""
Row 2, Column 3 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL2_3 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL2_3 as float;
"""
addr = 0x16
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL2_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal2_3, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal2_3,
@creg_mag_1_cal2_3.setter
def creg_mag_1_cal2_3(self, new_value):
addr = 0x16
self.write_register(addr, new_value)
@property
def creg_mag_1_cal3_1(self):
"""
Row 3, Column 1 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL3_1 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL3_1 as float;
"""
addr = 0x17
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL3_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal3_1, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal3_1,
@creg_mag_1_cal3_1.setter
def creg_mag_1_cal3_1(self, new_value):
addr = 0x17
self.write_register(addr, new_value)
@property
def creg_mag_1_cal3_2(self):
"""
Row 3, Column 2 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL3_2 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL3_2 as float;
"""
addr = 0x18
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL3_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal3_2, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal3_2,
@creg_mag_1_cal3_2.setter
def creg_mag_1_cal3_2(self, new_value):
addr = 0x18
self.write_register(addr, new_value)
@property
def creg_mag_1_cal3_3(self):
"""
Row 3, Column 3 of magnetometer 1 calibration matrix.
Payload structure:
[31:0] : MAG_1_CAL3_3 -- 32-bit IEEE Floating Point Value
:return: MAG_1_CAL3_3 as float;
"""
addr = 0x19
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_CAL3_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_cal3_3, = struct.unpack('>f', payload[0:4])
return reg, mag_1_cal3_3,
@creg_mag_1_cal3_3.setter
def creg_mag_1_cal3_3(self, new_value):
addr = 0x19
self.write_register(addr, new_value)
@property
def creg_mag_1_bias_x(self):
"""
This register stores a bias term for the magnetometer 1 x-axis for hard-iron calibration. This term can be
computed by performing magnetometer calibration with the Redshift labs Serial Interface.
Payload structure:
[31:0] : MAG_1_BIAS_X -- 32-bit IEEE Floating Point Value
:return: MAG_1_BIAS_X as float;
"""
addr = 0x1A
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_BIAS_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_bias_x, = struct.unpack('>f', payload[0:4])
return reg, mag_1_bias_x,
@creg_mag_1_bias_x.setter
def creg_mag_1_bias_x(self, new_value):
addr = 0x1A
self.write_register(addr, new_value)
@property
def creg_mag_1_bias_y(self):
"""
This register stores a bias term for the magnetometer 1 y-axis for hard-iron calibration. This term can be
computed by performing magnetometer calibration with the Redshift labs Serial Interface.
Payload structure:
[31:0] : MAG_1_BIAS_Y -- 32-bit IEEE Floating Point Value
:return: MAG_1_BIAS_Y as float;
"""
addr = 0x1B
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_BIAS_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_bias_y, = struct.unpack('>f', payload[0:4])
return reg, mag_1_bias_y,
@creg_mag_1_bias_y.setter
def creg_mag_1_bias_y(self, new_value):
addr = 0x1B
self.write_register(addr, new_value)
@property
def creg_mag_1_bias_z(self):
"""
This register stores a bias term for the magnetometer 1 z-axis for hard-iron calibration. This term can be
computed by performing magnetometer calibration with the Redshift labs Serial Interface.
Payload structure:
[31:0] : MAG_1_BIAS_Z -- 32-bit IEEE Floating Point Value
:return: MAG_1_BIAS_Z as float;
"""
addr = 0x1C
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_1_BIAS_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_bias_z, = struct.unpack('>f', payload[0:4])
return reg, mag_1_bias_z,
@creg_mag_1_bias_z.setter
def creg_mag_1_bias_z(self, new_value):
addr = 0x1C
self.write_register(addr, new_value)
@property
def creg_mag_2_cal1_1(self):
"""
Row 1, Column 1 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL1_1 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL1_1 as float;
"""
addr = 0x1D
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL1_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal1_1, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal1_1,
@creg_mag_2_cal1_1.setter
def creg_mag_2_cal1_1(self, new_value):
addr = 0x1D
self.write_register(addr, new_value)
@property
def creg_mag_2_cal1_2(self):
"""
Row 1, Column 2 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL1_2 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL1_2 as float;
"""
addr = 0x1E
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL1_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal1_2, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal1_2,
@creg_mag_2_cal1_2.setter
def creg_mag_2_cal1_2(self, new_value):
addr = 0x1E
self.write_register(addr, new_value)
@property
def creg_mag_2_cal1_3(self):
"""
Row 1, Column 3 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL1_3 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL1_3 as float;
"""
addr = 0x1F
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL1_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal1_3, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal1_3,
@creg_mag_2_cal1_3.setter
def creg_mag_2_cal1_3(self, new_value):
addr = 0x1F
self.write_register(addr, new_value)
@property
def creg_mag_2_cal2_1(self):
"""
Row 2, Column 1 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL2_1 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL2_1 as float;
"""
addr = 0x20
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL2_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal2_1, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal2_1,
@creg_mag_2_cal2_1.setter
def creg_mag_2_cal2_1(self, new_value):
addr = 0x20
self.write_register(addr, new_value)
@property
def creg_mag_2_cal2_2(self):
"""
Row 2, Column 2 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL2_2 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL2_2 as float;
"""
addr = 0x21
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL2_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal2_2, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal2_2,
@creg_mag_2_cal2_2.setter
def creg_mag_2_cal2_2(self, new_value):
addr = 0x21
self.write_register(addr, new_value)
@property
def creg_mag_2_cal2_3(self):
"""
Row 2, Column 3 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL2_3 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL2_3 as float;
"""
addr = 0x22
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL2_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal2_3, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal2_3,
@creg_mag_2_cal2_3.setter
def creg_mag_2_cal2_3(self, new_value):
addr = 0x22
self.write_register(addr, new_value)
@property
def creg_mag_2_cal3_1(self):
"""
Row 3, Column 1 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL3_1 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL3_1 as float;
"""
addr = 0x23
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL3_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal3_1, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal3_1,
@creg_mag_2_cal3_1.setter
def creg_mag_2_cal3_1(self, new_value):
addr = 0x23
self.write_register(addr, new_value)
@property
def creg_mag_2_cal3_2(self):
"""
Row 3, Column 2 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL3_2 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL3_2 as float;
"""
addr = 0x24
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL3_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal3_2, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal3_2,
@creg_mag_2_cal3_2.setter
def creg_mag_2_cal3_2(self, new_value):
addr = 0x24
self.write_register(addr, new_value)
@property
def creg_mag_2_cal3_3(self):
"""
Row 3, Column 3 of magnetometer 2 calibration matrix.
Payload structure:
[31:0] : MAG_2_CAL3_3 -- 32-bit IEEE Floating Point Value
:return: MAG_2_CAL3_3 as float;
"""
addr = 0x25
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_CAL3_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_cal3_3, = struct.unpack('>f', payload[0:4])
return reg, mag_2_cal3_3,
@creg_mag_2_cal3_3.setter
def creg_mag_2_cal3_3(self, new_value):
addr = 0x25
self.write_register(addr, new_value)
@property
def creg_mag_2_bias_x(self):
"""
This register stores a bias term for the magnetometer 2 x-axis for hard-iron calibration. This term can be
computed by performing magnetometer calibration with the Redshift labs Serial Interface.
Payload structure:
[31:0] : MAG_2_BIAS_X -- 32-bit IEEE Floating Point Value
:return: MAG_2_BIAS_X as float;
"""
addr = 0x26
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_BIAS_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_bias_x, = struct.unpack('>f', payload[0:4])
return reg, mag_2_bias_x,
@creg_mag_2_bias_x.setter
def creg_mag_2_bias_x(self, new_value):
addr = 0x26
self.write_register(addr, new_value)
@property
def creg_mag_2_bias_y(self):
"""
This register stores a bias term for the magnetometer 2 y-axis for hard-iron calibration. This term can be
computed by performing magnetometer calibration with the Redshift labs Serial Interface.
Payload structure:
[31:0] : MAG_2_BIAS_Y -- 32-bit IEEE Floating Point Value
:return: MAG_2_BIAS_Y as float;
"""
addr = 0x27
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_BIAS_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_bias_y, = struct.unpack('>f', payload[0:4])
return reg, mag_2_bias_y,
@creg_mag_2_bias_y.setter
def creg_mag_2_bias_y(self, new_value):
addr = 0x27
self.write_register(addr, new_value)
@property
def creg_mag_2_bias_z(self):
"""
This register stores a bias term for the magnetometer 2 z-axis for hard-iron calibration. This term can be
computed by performing magnetometer calibration with the Redshift labs Serial Interface.
Payload structure:
[31:0] : MAG_2_BIAS_Z -- 32-bit IEEE Floating Point Value
:return: MAG_2_BIAS_Z as float;
"""
addr = 0x28
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_MAG_2_BIAS_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_bias_z, = struct.unpack('>f', payload[0:4])
return reg, mag_2_bias_z,
@creg_mag_2_bias_z.setter
def creg_mag_2_bias_z(self, new_value):
addr = 0x28
self.write_register(addr, new_value)
@property
def creg_accel_1_meas_range(self):
"""
The CREG_ACCEL_1_MEAS_RANGE register sets the desired measurement range for the accelerometer 1. If the rate
is not set, then the default value of the +-2 g will be used as a measurement range.
Payload structure:
[1:0] : MEAS_ACC1 -- Specifies the desired measurement range for the accelerometer 1 measurements.
:return: MEAS_ACC1 as bitField;
"""
addr = 0x29
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_MEAS_RANGE')
reg.raw_value, = struct.unpack('>I', payload[0:4])
# find value for MEAS_ACC1 bit field
meas_acc1_val = (reg.raw_value >> 0) & 0x0003
meas_acc1_enum = reg.find_field_by(name='MEAS_ACC1').find_enum_entry_by(value=meas_acc1_val)
return reg, meas_acc1_enum
@creg_accel_1_meas_range.setter
def creg_accel_1_meas_range(self, new_value):
addr = 0x29
self.write_register(addr, new_value)
@property
def creg_accel_1_cal1_1(self):
"""
Row 1, Column 1 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL1_1 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL1_1 as float;
"""
addr = 0x2A
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL1_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal1_1, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal1_1,
@creg_accel_1_cal1_1.setter
def creg_accel_1_cal1_1(self, new_value):
addr = 0x2A
self.write_register(addr, new_value)
@property
def creg_accel_1_cal1_2(self):
"""
Row 1, Column 2 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL1_2 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL1_2 as float;
"""
addr = 0x2B
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL1_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal1_2, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal1_2,
@creg_accel_1_cal1_2.setter
def creg_accel_1_cal1_2(self, new_value):
addr = 0x2B
self.write_register(addr, new_value)
@property
def creg_accel_1_cal1_3(self):
"""
Row 1, Column 3 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL1_3 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL1_3 as float;
"""
addr = 0x2C
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL1_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal1_3, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal1_3,
@creg_accel_1_cal1_3.setter
def creg_accel_1_cal1_3(self, new_value):
addr = 0x2C
self.write_register(addr, new_value)
@property
def creg_accel_1_cal2_1(self):
"""
Row 2, Column 1 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL2_1 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL2_1 as float;
"""
addr = 0x2D
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL2_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal2_1, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal2_1,
@creg_accel_1_cal2_1.setter
def creg_accel_1_cal2_1(self, new_value):
addr = 0x2D
self.write_register(addr, new_value)
@property
def creg_accel_1_cal2_2(self):
"""
Row 2, Column 2 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL2_2 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL2_2 as float;
"""
addr = 0x2E
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL2_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal2_2, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal2_2,
@creg_accel_1_cal2_2.setter
def creg_accel_1_cal2_2(self, new_value):
addr = 0x2E
self.write_register(addr, new_value)
@property
def creg_accel_1_cal2_3(self):
"""
Row 2, Column 3 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL2_3 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL2_3 as float;
"""
addr = 0x2F
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL2_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal2_3, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal2_3,
@creg_accel_1_cal2_3.setter
def creg_accel_1_cal2_3(self, new_value):
addr = 0x2F
self.write_register(addr, new_value)
@property
def creg_accel_1_cal3_1(self):
"""
Row 3, Column 1 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL3_1 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL3_1 as float;
"""
addr = 0x30
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL3_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal3_1, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal3_1,
@creg_accel_1_cal3_1.setter
def creg_accel_1_cal3_1(self, new_value):
addr = 0x30
self.write_register(addr, new_value)
@property
def creg_accel_1_cal3_2(self):
"""
Row 3, Column 2 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL3_2 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL3_2 as float;
"""
addr = 0x31
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL3_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal3_2, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal3_2,
@creg_accel_1_cal3_2.setter
def creg_accel_1_cal3_2(self, new_value):
addr = 0x31
self.write_register(addr, new_value)
@property
def creg_accel_1_cal3_3(self):
"""
Row 3, Column 3 of accelerometer 1 calibration matrix.
Payload structure:
[31:0] : ACCEL_1_CAL3_3 -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_CAL3_3 as float;
"""
addr = 0x32
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_CAL3_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_cal3_3, = struct.unpack('>f', payload[0:4])
return reg, accel_1_cal3_3,
@creg_accel_1_cal3_3.setter
def creg_accel_1_cal3_3(self, new_value):
addr = 0x32
self.write_register(addr, new_value)
@property
def creg_accel_1_bias_x(self):
"""
This register stores a bias term for the accelerometer 1 x-axis for bias calibration. This term can be
computed by performing calibrate accelerometers command within the Redshift labs Serial Interface.
Payload structure:
[31:0] : ACCEL_1_BIAS_X -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_BIAS_X as float;
"""
addr = 0x33
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_BIAS_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_bias_x, = struct.unpack('>f', payload[0:4])
return reg, accel_1_bias_x,
@creg_accel_1_bias_x.setter
def creg_accel_1_bias_x(self, new_value):
addr = 0x33
self.write_register(addr, new_value)
@property
def creg_accel_1_bias_y(self):
"""
This register stores a bias term for the accelerometer 1 y-axis for bias calibration. This term can be
computed by performing calibrate accelerometers command within the Redshift labs Serial Interface.
Payload structure:
[31:0] : ACCEL_1_BIAS_Y -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_BIAS_Y as float;
"""
addr = 0x34
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_BIAS_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_bias_y, = struct.unpack('>f', payload[0:4])
return reg, accel_1_bias_y,
@creg_accel_1_bias_y.setter
def creg_accel_1_bias_y(self, new_value):
addr = 0x34
self.write_register(addr, new_value)
@property
def creg_accel_1_bias_z(self):
"""
This register stores a bias term for the accelerometer 1 z-axis for bias calibration. This term can be
computed by performing calibrate accelerometers command within the Redshift labs Serial Interface.
Payload structure:
[31:0] : ACCEL_1_BIAS_Z -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_BIAS_Z as float;
"""
addr = 0x35
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='CREG_ACCEL_1_BIAS_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_bias_z, = struct.unpack('>f', payload[0:4])
return reg, accel_1_bias_z,
@creg_accel_1_bias_z.setter
def creg_accel_1_bias_z(self, new_value):
addr = 0x35
self.write_register(addr, new_value)
@property
def dreg_health(self):
"""
The health register reports the current status of the sensors on the board. Monitoring the health register is
the easiest way to watch for other problems that could affect the behavior of the board, status of the
sensors. The analogous to the health register, the status of the GPS signal can be monitored in the
DREG_GPS_HEALTH
Payload structure:
[8] : OVF -- Overflow bit. This bit is set if the board is attempting to transmit data over the serial port faster than is allowed given the baud-rate. If this bit is set, reduce broadcast rates in the COM_RATES registers.
[7] : ACC1_N -- This bit is set if the sensor detects that the norm of the accelerometer measurement is too far away from 1G to be used (i.e. during aggressive acceleration or high vibration).
[6] : MAG1_N -- This bit is set if the sensor detects that the norm of the magnetometer measurement for the magnetometer 1 is too far away from 1.0 to be trusted. Usually indicates bad calibration, local field distortions, or both.
[5] : MAG2_N -- This bit is set if the sensor detects that the norm of the magnetometer measurement for the magnetometer 2 is too far away from 1.0 to be trusted. Usually indicates bad calibration, local field distortions, or both.
[4] : ACCEL1 -- This bit will be set if the accelerometer 1 fails to initialize on startup.
[3] : GYRO1 -- This bit will be set if the rate gyro 1 fails to initialize on startup.
[2] : GYRO2 -- This bit will be set if the rate gyro 2 fails to initialize on startup.
[1] : MAG1 -- This bit will be set if the magnetometer 1 fails to initialize on startup.
[0] : MAG2 -- This bit will be set if the magnetometer 2 fails to initialize on startup.
:return: OVF as bitField; ACC1_N as bitField; MAG1_N as bitField; MAG2_N as bitField; ACCEL1 as bitField; GYRO1 as bitField; GYRO2 as bitField; MAG1 as bitField; MAG2 as bitField;
"""
addr = 0x55
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_HEALTH')
reg.raw_value, = struct.unpack('>I', payload[0:4])
# find value for OVF bit field
ovf_val = (reg.raw_value >> 8) & 0x0001
ovf_enum = reg.find_field_by(name='OVF').find_enum_entry_by(value=ovf_val)
# find value for ACC1_N bit field
acc1_n_val = (reg.raw_value >> 7) & 0x0001
acc1_n_enum = reg.find_field_by(name='ACC1_N').find_enum_entry_by(value=acc1_n_val)
# find value for MAG1_N bit field
mag1_n_val = (reg.raw_value >> 6) & 0x0001
mag1_n_enum = reg.find_field_by(name='MAG1_N').find_enum_entry_by(value=mag1_n_val)
# find value for MAG2_N bit field
mag2_n_val = (reg.raw_value >> 5) & 0x0001
mag2_n_enum = reg.find_field_by(name='MAG2_N').find_enum_entry_by(value=mag2_n_val)
# find value for ACCEL1 bit field
accel1_val = (reg.raw_value >> 4) & 0x0001
accel1_enum = reg.find_field_by(name='ACCEL1').find_enum_entry_by(value=accel1_val)
# find value for GYRO1 bit field
gyro1_val = (reg.raw_value >> 3) & 0x0001
gyro1_enum = reg.find_field_by(name='GYRO1').find_enum_entry_by(value=gyro1_val)
# find value for GYRO2 bit field
gyro2_val = (reg.raw_value >> 2) & 0x0001
gyro2_enum = reg.find_field_by(name='GYRO2').find_enum_entry_by(value=gyro2_val)
# find value for MAG1 bit field
mag1_val = (reg.raw_value >> 1) & 0x0001
mag1_enum = reg.find_field_by(name='MAG1').find_enum_entry_by(value=mag1_val)
# find value for MAG2 bit field
mag2_val = (reg.raw_value >> 0) & 0x0001
mag2_enum = reg.find_field_by(name='MAG2').find_enum_entry_by(value=mag2_val)
return reg, ovf_enum, acc1_n_enum, mag1_n_enum, mag2_n_enum, accel1_enum, gyro1_enum, gyro2_enum, mag1_enum, mag2_enum
@property
def dreg_gyro_1_raw_xy(self):
"""
Contains raw X and Y axis rate gyro 1 data.
Payload structure:
[31:16] : GYRO_1_RAW_X -- Gyro X (2s complement 16-bit integer)
[15:0] : GYRO_1_RAW_Y -- Gyro Y (2s complement 16-bit integer)
:return: GYRO_1_RAW_X as int16_t; GYRO_1_RAW_Y as int16_t;
"""
addr = 0x56
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_RAW_XY')
reg.raw_value, = struct.unpack('>I', payload[0:4])
gyro_1_raw_x, gyro_1_raw_y = struct.unpack('>hh', payload[0:4])
return reg, gyro_1_raw_x, gyro_1_raw_y
@property
def dreg_gyro_1_raw_z(self):
"""
Contains raw Z axis rate gyro 1 data.
Payload structure:
[31:16] : GYRO_1_RAW_Z -- Gyro Z (2s complement 16-bit integer)
:return: GYRO_1_RAW_Z as int16_t;
"""
addr = 0x57
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_RAW_Z')
reg.raw_value, = struct.unpack('>hxx', payload[0:4])
gyro_1_raw_z, = struct.unpack('>hxx', payload[0:4])
return reg, gyro_1_raw_z,
@property
def dreg_gyro_1_raw_time(self):
"""
Contains time at which the last rate gyro 1 data was acquired.
Payload structure:
[31:0] : GYRO_1_RAW_TIME -- 32-bit IEEE Floating Point Value
:return: GYRO_1_RAW_TIME as float;
"""
addr = 0x58
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_RAW_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_raw_time, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_raw_time,
@property
def dreg_gyro_2_raw_xy(self):
"""
Contains raw X and Y axis rate gyro 2 data.
Payload structure:
[31:16] : GYRO_2_RAW_X -- Gyro X (2s complement 16-bit integer)
[15:0] : GYRO_2_RAW_Y -- Gyro Y (2s complement 16-bit integer)
:return: GYRO_2_RAW_X as int16_t; GYRO_2_RAW_Y as int16_t;
"""
addr = 0x59
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_RAW_XY')
reg.raw_value, = struct.unpack('>I', payload[0:4])
gyro_2_raw_x, gyro_2_raw_y = struct.unpack('>hh', payload[0:4])
return reg, gyro_2_raw_x, gyro_2_raw_y
@property
def dreg_gyro_2_raw_z(self):
"""
Contains raw Z axis rate gyro 2 data.
Payload structure:
[31:16] : GYRO_2_RAW_Z -- Gyro Z (2s complement 16-bit integer)
:return: GYRO_2_RAW_Z as int16_t;
"""
addr = 0x5A
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_RAW_Z')
reg.raw_value, = struct.unpack('>hxx', payload[0:4])
gyro_2_raw_z, = struct.unpack('>hxx', payload[0:4])
return reg, gyro_2_raw_z,
@property
def dreg_gyro_2_raw_time(self):
"""
Contains time at which the last rate gyro 2 data was acquired.
Payload structure:
[31:0] : GYRO_2_RAW_TIME -- 32-bit IEEE Floating Point Value
:return: GYRO_2_RAW_TIME as float;
"""
addr = 0x5B
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_RAW_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_raw_time, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_raw_time,
@property
def dreg_accel_1_raw_xy(self):
"""
Contains raw X and Y axis accelerometer 1 data.
Payload structure:
[31:16] : ACCEL_1_RAW_X -- Accel X (2s complement 16-bit integer)
[15:0] : ACCEL_1_RAW_Y -- Accel Y (2s complement 16-bit integer)
:return: ACCEL_1_RAW_X as int16_t; ACCEL_1_RAW_Y as int16_t;
"""
addr = 0x5C
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_ACCEL_1_RAW_XY')
reg.raw_value, = struct.unpack('>I', payload[0:4])
accel_1_raw_x, accel_1_raw_y = struct.unpack('>hh', payload[0:4])
return reg, accel_1_raw_x, accel_1_raw_y
@property
def dreg_accel_1_raw_z(self):
"""
Contains raw Z axis accelerometer 1 data.
Payload structure:
[31:16] : ACCEL_1_RAW_Z -- Accel Z (2s complement 16-bit integer)
:return: ACCEL_1_RAW_Z as int16_t;
"""
addr = 0x5D
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_ACCEL_1_RAW_Z')
reg.raw_value, = struct.unpack('>hxx', payload[0:4])
accel_1_raw_z, = struct.unpack('>hxx', payload[0:4])
return reg, accel_1_raw_z,
@property
def dreg_accel_1_raw_time(self):
"""
Contains time at which the last raw data sample for the accelerometer 1 was acquired.
Payload structure:
[31:0] : ACCEL_1_RAW_TIME -- 32-bit IEEE Floating Point Value
:return: ACCEL_1_RAW_TIME as float;
"""
addr = 0x5E
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_ACCEL_1_RAW_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_raw_time, = struct.unpack('>f', payload[0:4])
return reg, accel_1_raw_time,
@property
def dreg_mag_1_raw_x(self):
"""
Contains raw x axis magnetometer 1 data.
Payload structure:
[31:0] : MAG_1_RAW_X -- 32-bit signed integer value
:return: MAG_1_RAW_X as int32_t;
"""
addr = 0x5F
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_RAW_X')
reg.raw_value, = struct.unpack('>i', payload[0:4])
mag_1_raw_x, = struct.unpack('>i', payload[0:4])
return reg, mag_1_raw_x,
@property
def dreg_mag_1_raw_y(self):
"""
Contains raw y axis magnetometer 1 data.
Payload structure:
[31:0] : MAG_1_RAW_Y -- 32-bit signed integer value
:return: MAG_1_RAW_Y as int32_t;
"""
addr = 0x60
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_RAW_Y')
reg.raw_value, = struct.unpack('>i', payload[0:4])
mag_1_raw_y, = struct.unpack('>i', payload[0:4])
return reg, mag_1_raw_y,
@property
def dreg_mag_1_raw_z(self):
"""
Contains raw z axis magnetometer 1 data.
Payload structure:
[31:0] : MAG_1_RAW_Z -- 32-bit signed integer value
:return: MAG_1_RAW_Z as int32_t;
"""
addr = 0x61
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_RAW_Z')
reg.raw_value, = struct.unpack('>i', payload[0:4])
mag_1_raw_z, = struct.unpack('>i', payload[0:4])
return reg, mag_1_raw_z,
@property
def dreg_mag_1_raw_time(self):
"""
Contains time at which the last magnetometer data from the magnetometer 1 was acquired.
Payload structure:
[31:0] : MAG_1_RAW_TIME -- 32-bit IEEE Floating Point Value
:return: MAG_1_RAW_TIME as float;
"""
addr = 0x62
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_RAW_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_raw_time, = struct.unpack('>f', payload[0:4])
return reg, mag_1_raw_time,
@property
def dreg_mag_2_raw_xy(self):
"""
Contains raw X and Y axis magnetometer 2 data.
Payload structure:
[31:16] : MAG_2_RAW_X -- Magnetometer X (2s complement 16-bit integer)
[15:0] : MAG_2_RAW_Y -- Magnetometer Y (2s complement 16-bit integer)
:return: MAG_2_RAW_X as int16_t; MAG_2_RAW_Y as int16_t;
"""
addr = 0x63
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_RAW_XY')
reg.raw_value, = struct.unpack('>I', payload[0:4])
mag_2_raw_x, mag_2_raw_y = struct.unpack('>hh', payload[0:4])
return reg, mag_2_raw_x, mag_2_raw_y
@property
def dreg_mag_2_raw_z(self):
"""
Contains raw Z axis magnetometer 2 data.
Payload structure:
[31:16] : MAG_2_RAW_Z -- Magnetometer Z (2s complement 16-bit integer)
:return: MAG_2_RAW_Z as int16_t;
"""
addr = 0x64
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_RAW_Z')
reg.raw_value, = struct.unpack('>hxx', payload[0:4])
mag_2_raw_z, = struct.unpack('>hxx', payload[0:4])
return reg, mag_2_raw_z,
@property
def dreg_mag_2_raw_time(self):
"""
Contains time at which the last magnetometer data from the magnetometer 2 was acquired.
Payload structure:
[31:0] : MAG_2_RAW_TIME -- 32-bit IEEE Floating Point Value
:return: MAG_2_RAW_TIME as float;
"""
addr = 0x65
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_RAW_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_raw_time, = struct.unpack('>f', payload[0:4])
return reg, mag_2_raw_time,
@property
def dreg_temperature(self):
"""
Contains the temperature output of the onboard temperature sensor.
Payload structure:
[31:0] : TEMPERATURE -- Temperature in degrees Celcius (32-bit IEEE Floating Point)
:return: TEMPERATURE as float;
"""
addr = 0x66
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_TEMPERATURE')
reg.raw_value, = struct.unpack('>f', payload[0:4])
temperature, = struct.unpack('>f', payload[0:4])
return reg, temperature,
@property
def dreg_temperature_time(self):
"""
Contains time at which the last temperature was acquired.
Payload structure:
[31:0] : TEMPERATURE_TIME -- 32-bit IEEE Floating Point Value
:return: TEMPERATURE_TIME as float;
"""
addr = 0x67
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_TEMPERATURE_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
temperature_time, = struct.unpack('>f', payload[0:4])
return reg, temperature_time,
@property
def dreg_gyro_1_proc_x(self):
"""
Contains the actual measured angular rate from the gyro 1 for the x axis in degrees/sec after calibration has
been applied.
Payload structure:
[31:0] : GYRO_1_PROC_X -- Gyro X in degrees / sec (32-bit IEEE Floating Point Value)
:return: GYRO_1_PROC_X as float;
"""
addr = 0x68
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_PROC_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_proc_x, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_proc_x,
@property
def dreg_gyro_1_proc_y(self):
"""
Contains the actual measured angular rate from the gyro 1 for the y axis in degrees/sec after calibration has
been applied.
Payload structure:
[31:0] : GYRO_1_PROC_Y -- Gyro Y in degrees / sec (32-bit IEEE Floating Point Value)
:return: GYRO_1_PROC_Y as float;
"""
addr = 0x69
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_PROC_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_proc_y, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_proc_y,
@property
def dreg_gyro_1_proc_z(self):
"""
Contains the actual measured angular rate from the gyro 1 for the z axis in degrees/sec after calibration has
been applied.
Payload structure:
[31:0] : GYRO_1_PROC_Z -- Gyro Z in degrees / sec (32-bit IEEE Floating Point Value)
:return: GYRO_1_PROC_Z as float;
"""
addr = 0x6A
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_PROC_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_proc_z, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_proc_z,
@property
def dreg_gyro_1_proc_time(self):
"""
Contains the time at which the last rate gyro data from the gyro 1 was measured.
Payload structure:
[31:0] : GYRO_1_PROC_TIME -- Gyro 1 time stamp (32-bit IEEE Floating Point Value)
:return: GYRO_1_PROC_TIME as float;
"""
addr = 0x6B
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_PROC_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_proc_time, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_proc_time,
@property
def dreg_gyro_2_proc_x(self):
"""
Contains the actual measured angular rate from the gyro 2 for the x axis in degrees/sec after calibration has
been applied.
Payload structure:
[31:0] : GYRO_2_PROC_X -- Gyro X in degrees / sec (32-bit IEEE Floating Point Value)
:return: GYRO_2_PROC_X as float;
"""
addr = 0x6C
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_PROC_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_proc_x, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_proc_x,
@property
def dreg_gyro_2_proc_y(self):
"""
Contains the actual measured angular rate from the gyro 2 for the y axis in degrees/sec after calibration has
been applied.
Payload structure:
[31:0] : GYRO_2_PROC_Y -- Gyro Y in degrees / sec (32-bit IEEE Floating Point Value)
:return: GYRO_2_PROC_Y as float;
"""
addr = 0x6D
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_PROC_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_proc_y, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_proc_y,
@property
def dreg_gyro_2_proc_z(self):
"""
Contains the actual measured angular rate from the gyro 2 for the z axis in degrees/sec after calibration has
been applied.
Payload structure:
[31:0] : GYRO_2_PROC_Z -- Gyro Z in degrees / sec (32-bit IEEE Floating Point Value)
:return: GYRO_2_PROC_Z as float;
"""
addr = 0x6E
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_PROC_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_proc_z, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_proc_z,
@property
def dreg_gyro_2_proc_time(self):
"""
Contains the time at which the last rate gyro data from the gyro 2 was measured.
Payload structure:
[31:0] : GYRO_2_PROC_TIME -- Gyro 2 time stamp (32-bit IEEE Floating Point Value)
:return: GYRO_2_PROC_TIME as float;
"""
addr = 0x6F
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_PROC_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_proc_time, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_proc_time,
@property
def dreg_accel_1_proc_x(self):
"""
Contains the actual measured acceleration from the accelerometer 1 for the x axis in m/s2 after calibration
has been applied.
Payload structure:
[31:0] : ACCEL_1_PROC_X -- Acceleration X in m/s2 (32-bit IEEE Floating Point Value)
:return: ACCEL_1_PROC_X as float;
"""
addr = 0x70
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_ACCEL_1_PROC_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_proc_x, = struct.unpack('>f', payload[0:4])
return reg, accel_1_proc_x,
@property
def dreg_accel_1_proc_y(self):
"""
Contains the actual measured acceleration from the accelerometer 1 for the y axis in m/s2 after calibration
has been applied.
Payload structure:
[31:0] : ACCEL_1_PROC_Y -- Acceleration Y in m/s2 (32-bit IEEE Floating Point Value)
:return: ACCEL_1_PROC_Y as float;
"""
addr = 0x71
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_ACCEL_1_PROC_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_proc_y, = struct.unpack('>f', payload[0:4])
return reg, accel_1_proc_y,
@property
def dreg_accel_1_proc_z(self):
"""
Contains the actual measured acceleration from the accelerometer 1 for the z axis in m/s2 after calibration
has been applied.
Payload structure:
[31:0] : ACCEL_1_PROC_Z -- Acceleration Z in m/s2 (32-bit IEEE Floating Point Value)
:return: ACCEL_1_PROC_Z as float;
"""
addr = 0x72
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_ACCEL_1_PROC_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_proc_z, = struct.unpack('>f', payload[0:4])
return reg, accel_1_proc_z,
@property
def dreg_accel_1_proc_time(self):
"""
Contains the time at which the last acceleration data from the accelerometer 1 was measured.
Payload structure:
[31:0] : ACCEL_1_PROC_TIME -- Accelerometer 1 time stamp (32-bit IEEE Floating Point Value)
:return: ACCEL_1_PROC_TIME as float;
"""
addr = 0x73
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_ACCEL_1_PROC_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
accel_1_proc_time, = struct.unpack('>f', payload[0:4])
return reg, accel_1_proc_time,
@property
def dreg_mag_1_proc_x(self):
"""
Contains the actual measured magnetic field from the magnetometer 1 for the x axis in mT after calibration has
been applied.
Payload structure:
[31:0] : MAG_1_PROC_X -- Magnetometer X in mT (32-bit IEEE Floating Point Value)
:return: MAG_1_PROC_X as float;
"""
addr = 0x74
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_PROC_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_proc_x, = struct.unpack('>f', payload[0:4])
return reg, mag_1_proc_x,
@property
def dreg_mag_1_proc_y(self):
"""
Contains the actual measured magnetic field from the magnetometer 1 for the y axis in mT after calibration has
been applied.
Payload structure:
[31:0] : MAG_1_PROC_Y -- Magnetometer Y in mT (32-bit IEEE Floating Point Value)
:return: MAG_1_PROC_Y as float;
"""
addr = 0x75
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_PROC_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_proc_y, = struct.unpack('>f', payload[0:4])
return reg, mag_1_proc_y,
@property
def dreg_mag_1_proc_z(self):
"""
Contains the actual measured magnetic field from the magnetometer 1 for the z axis in mT after calibration has
been applied.
Payload structure:
[31:0] : MAG_1_PROC_Z -- Magnetometer Z in mT (32-bit IEEE Floating Point Value)
:return: MAG_1_PROC_Z as float;
"""
addr = 0x76
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_PROC_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_proc_z, = struct.unpack('>f', payload[0:4])
return reg, mag_1_proc_z,
@property
def dreg_mag_1_norm(self):
"""
Contains the L2-norm (magnetic norm) for the measured magnetic field from the magnetometer 1 computed over the
calibrated values.
Payload structure:
[31:0] : MAG_1_NORM -- Magnetic norm (32-bit IEEE Floating Point Value)
:return: MAG_1_NORM as float;
"""
addr = 0x77
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_NORM')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_norm, = struct.unpack('>f', payload[0:4])
return reg, mag_1_norm,
@property
def dreg_mag_1_proc_time(self):
"""
Contains the time stamp at which the calibrated magnetometer 1 data was acquired.
Payload structure:
[31:0] : MAG_1_PROC_TIME -- Magnetometer 1 time stamp (32-bit IEEE Floating Point Value)
:return: MAG_1_PROC_TIME as float;
"""
addr = 0x78
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_1_PROC_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_1_proc_time, = struct.unpack('>f', payload[0:4])
return reg, mag_1_proc_time,
@property
def dreg_mag_2_proc_x(self):
"""
Contains the actual measured magnetic field from the magnetometer 2 for the x axis in mT after calibration has
been applied.
Payload structure:
[31:0] : MAG_2_PROC_X -- Magnetometer X in mT (32-bit IEEE Floating Point Value)
:return: MAG_2_PROC_X as float;
"""
addr = 0x79
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_PROC_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_proc_x, = struct.unpack('>f', payload[0:4])
return reg, mag_2_proc_x,
@property
def dreg_mag_2_proc_y(self):
"""
Contains the actual measured magnetic field from the magnetometer 2 for the y axis in mT after calibration has
been applied.
Payload structure:
[31:0] : MAG_2_PROC_Y -- Magnetometer Y in mT (32-bit IEEE Floating Point Value)
:return: MAG_2_PROC_Y as float;
"""
addr = 0x7A
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_PROC_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_proc_y, = struct.unpack('>f', payload[0:4])
return reg, mag_2_proc_y,
@property
def dreg_mag_2_proc_z(self):
"""
Contains the actual measured magnetic field from the magnetometer 2 for the z axis in mT after calibration has
been applied.
Payload structure:
[31:0] : MAG_2_PROC_Z -- Magnetometer Z in mT (32-bit IEEE Floating Point Value)
:return: MAG_2_PROC_Z as float;
"""
addr = 0x7B
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_PROC_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_proc_z, = struct.unpack('>f', payload[0:4])
return reg, mag_2_proc_z,
@property
def dreg_mag_2_norm(self):
"""
Contains the L2-norm (magnetic norm) for the measured magnetic field from the magnetometer 2 computed over the
calibrated values.
Payload structure:
[31:0] : MAG_2_NORM -- Magnetic norm (32-bit IEEE Floating Point Value)
:return: MAG_2_NORM as float;
"""
addr = 0x7C
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_NORM')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_norm, = struct.unpack('>f', payload[0:4])
return reg, mag_2_norm,
@property
def dreg_mag_2_proc_time(self):
"""
Contains the time stamp at which the calibrated magnetometer 2 data was acquired.
Payload structure:
[31:0] : MAG_2_PROC_TIME -- Magnetometer 2 time stamp (32-bit IEEE Floating Point Value)
:return: MAG_2_PROC_TIME as float;
"""
addr = 0x7D
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_MAG_2_PROC_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
mag_2_proc_time, = struct.unpack('>f', payload[0:4])
return reg, mag_2_proc_time,
@property
def dreg_quat_ab(self):
"""
Contains the first two components (a and b) of the estimated quaternion attitude.
Payload structure:
[31:16] : QUAT_A -- First quaternion component. Stored as a 16-bit signed integer. To get the actual value, divide by 29789.09091.
[15:0] : QUAT_B -- Second quaternion component. Stored as a 16-bit signed integer. To get the actual value, divide by 29789.09091.
:return: QUAT_A as int16_t; QUAT_B as int16_t;
"""
addr = 0x7E
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_QUAT_AB')
reg.raw_value, = struct.unpack('>I', payload[0:4])
quat_a, quat_b = struct.unpack('>hh', payload[0:4])
return reg, quat_a, quat_b
@property
def dreg_quat_cd(self):
"""
Contains the second two components (c and d) of the estimated quaternion attitude.
Payload structure:
[31:16] : QUAT_C -- Third quaternion component. Stored as a 16-bit signed integer. To get the actual value, divide by 29789.09091.
[15:0] : QUAT_D -- Fourth quaternion component. Stored as a 16-bit signed integer. To get the actual value, divide by 29789.09091.
:return: QUAT_C as int16_t; QUAT_D as int16_t;
"""
addr = 0x7F
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_QUAT_CD')
reg.raw_value, = struct.unpack('>I', payload[0:4])
quat_c, quat_d = struct.unpack('>hh', payload[0:4])
return reg, quat_c, quat_d
@property
def dreg_quat_time(self):
"""
Contains the time that the quaternion attitude was estimated.
Payload structure:
[31:0] : QUAT_TIME -- Quaternion time (32-bit IEEE Floating Point Value)
:return: QUAT_TIME as float;
"""
addr = 0x80
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_QUAT_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
quat_time, = struct.unpack('>f', payload[0:4])
return reg, quat_time,
@property
def dreg_euler_phi_theta(self):
"""
Contains the pitch and roll angle estimates.
Payload structure:
[31:16] : PHI -- Roll angle. Stored as a 16-bit signed integer. To get the actual value, divide by 91.02222.
[15:0] : THETA -- Pitch angle. Stored as a 16-bit signed integer. To get the actual value, divide by 91.02222.
:return: PHI as int16_t; THETA as int16_t;
"""
addr = 0x81
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_EULER_PHI_THETA')
reg.raw_value, = struct.unpack('>I', payload[0:4])
phi, theta = struct.unpack('>hh', payload[0:4])
return reg, phi, theta
@property
def dreg_euler_psi(self):
"""
Contains the yaw angle estimate.
Payload structure:
[31:16] : PSI -- Yaw angle. Stored as a 16-bit signed integer. To get the actual value, divide by 91.02222.
:return: PSI as int16_t;
"""
addr = 0x82
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_EULER_PSI')
reg.raw_value, = struct.unpack('>hxx', payload[0:4])
psi, = struct.unpack('>hxx', payload[0:4])
return reg, psi,
@property
def dreg_euler_phi_theta_dot(self):
"""
Contains the pitch and roll rate estimates.
Payload structure:
[31:16] : PHI_DOT -- Roll rate. Stored as a 16-bit signed integer. To get the actual value, divide by 16.0.
[15:0] : THETA_DOT -- Pitch rate. Stored as a 16-bit signed integer. To get the actual value, divide by 16.0.
:return: PHI_DOT as int16_t; THETA_DOT as int16_t;
"""
addr = 0x83
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_EULER_PHI_THETA_DOT')
reg.raw_value, = struct.unpack('>I', payload[0:4])
phi_dot, theta_dot = struct.unpack('>hh', payload[0:4])
return reg, phi_dot, theta_dot
@property
def dreg_euler_psi_dot(self):
"""
Contains the yaw rate estimate.
Payload structure:
[31:16] : PSI_DOT -- Yaw rate. Stored as a 16-bit signed integer. To get the actual value, divide by 16.0.
:return: PSI_DOT as int16_t;
"""
addr = 0x84
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_EULER_PSI_DOT')
reg.raw_value, = struct.unpack('>hxx', payload[0:4])
psi_dot, = struct.unpack('>hxx', payload[0:4])
return reg, psi_dot,
@property
def dreg_euler_time(self):
"""
Contains the time that the Euler Angles were estimated.
Payload structure:
[31:0] : EULER_TIME -- Euler time (32-bit IEEE Floating Point Value)
:return: EULER_TIME as float;
"""
addr = 0x85
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_EULER_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
euler_time, = struct.unpack('>f', payload[0:4])
return reg, euler_time,
@property
def dreg_position_north(self):
"""
Contains the measured north position in meters from the latitude specified in CREG_HOME_NORTH.
Payload structure:
[31:0] : POSITION_NORTH -- North Position (32-bit IEEE Floating Point Value)
:return: POSITION_NORTH as float;
"""
addr = 0x86
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_POSITION_NORTH')
reg.raw_value, = struct.unpack('>f', payload[0:4])
position_north, = struct.unpack('>f', payload[0:4])
return reg, position_north,
@property
def dreg_position_east(self):
"""
Contains the measured east position in meters from the longitude specified in CREG_HOME_EAST.
Payload structure:
[31:0] : POSITION_EAST -- East Position (32-bit IEEE Floating Point Value)
:return: POSITION_EAST as float;
"""
addr = 0x87
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_POSITION_EAST')
reg.raw_value, = struct.unpack('>f', payload[0:4])
position_east, = struct.unpack('>f', payload[0:4])
return reg, position_east,
@property
def dreg_position_up(self):
"""
Contains the measured altitude in meters from the altitude specified in CREG_HOME_UP.
Payload structure:
[31:0] : POSITION_UP -- Altitude (32-bit IEEE Floating Point Value)
:return: POSITION_UP as float;
"""
addr = 0x88
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_POSITION_UP')
reg.raw_value, = struct.unpack('>f', payload[0:4])
position_up, = struct.unpack('>f', payload[0:4])
return reg, position_up,
@property
def dreg_position_time(self):
"""
Contains the time at which the position was acquired.
Payload structure:
[31:0] : POSITION_TIME -- Position Time (32-bit IEEE Floating Point Value)
:return: POSITION_TIME as float;
"""
addr = 0x89
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_POSITION_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
position_time, = struct.unpack('>f', payload[0:4])
return reg, position_time,
@property
def dreg_velocity_north(self):
"""
Contains the measured north velocity in m/s.
Payload structure:
[31:0] : VELOCITY_NORTH -- North Velocity (32-bit IEEE Floating Point Value)
:return: VELOCITY_NORTH as float;
"""
addr = 0x8A
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_VELOCITY_NORTH')
reg.raw_value, = struct.unpack('>f', payload[0:4])
velocity_north, = struct.unpack('>f', payload[0:4])
return reg, velocity_north,
@property
def dreg_velocity_east(self):
"""
Contains the measured east velocity in m/s.
Payload structure:
[31:0] : VELOCITY_EAST -- East Velocity (32-bit IEEE Floating Point Value)
:return: VELOCITY_EAST as float;
"""
addr = 0x8B
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_VELOCITY_EAST')
reg.raw_value, = struct.unpack('>f', payload[0:4])
velocity_east, = struct.unpack('>f', payload[0:4])
return reg, velocity_east,
@property
def dreg_velocity_up(self):
"""
Contains the measured altitude velocity in m/s.
Payload structure:
[31:0] : VELOCITY_UP -- Altitude Velocity (32-bit IEEE Floating Point Value)
:return: VELOCITY_UP as float;
"""
addr = 0x8C
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_VELOCITY_UP')
reg.raw_value, = struct.unpack('>f', payload[0:4])
velocity_up, = struct.unpack('>f', payload[0:4])
return reg, velocity_up,
@property
def dreg_velocity_time(self):
"""
Contains the time at which the velocity was measured.
Payload structure:
[31:0] : VELOCITY_TIME -- Velocity time (32-bit IEEE Floating Point Value)
:return: VELOCITY_TIME as float;
"""
addr = 0x8D
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_VELOCITY_TIME')
reg.raw_value, = struct.unpack('>f', payload[0:4])
velocity_time, = struct.unpack('>f', payload[0:4])
return reg, velocity_time,
@property
def dreg_gyro_1_bias_x(self):
"""
Contains the estimated x-axis bias for the gyro 1 in degrees/s.
Payload structure:
[31:0] : GYRO_1_BIAS_X -- Gyro 1 bias X (32-bit IEEE Floating Point Value)
:return: GYRO_1_BIAS_X as float;
"""
addr = 0x8E
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_BIAS_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_bias_x, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_bias_x,
@property
def dreg_gyro_1_bias_y(self):
"""
Contains the estimated y-axis bias for the gyro 1 in degrees/s.
Payload structure:
[31:0] : GYRO_1_BIAS_Y -- Gyro 1 bias Y (32-bit IEEE Floating Point Value)
:return: GYRO_1_BIAS_Y as float;
"""
addr = 0x8F
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_BIAS_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_bias_y, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_bias_y,
@property
def dreg_gyro_1_bias_z(self):
"""
Contains the estimated z-axis bias for the gyro 1 in degrees/s.
Payload structure:
[31:0] : GYRO_1_BIAS_Z -- Gyro 1 bias Z (32-bit IEEE Floating Point Value)
:return: GYRO_1_BIAS_Z as float;
"""
addr = 0x90
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_1_BIAS_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_1_bias_z, = struct.unpack('>f', payload[0:4])
return reg, gyro_1_bias_z,
@property
def dreg_gyro_2_bias_x(self):
"""
Contains the estimated x-axis bias for the gyro 2 in degrees/s.
Payload structure:
[31:0] : GYRO_2_BIAS_X -- Gyro 2 bias X (32-bit IEEE Floating Point Value)
:return: GYRO_2_BIAS_X as float;
"""
addr = 0x91
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_BIAS_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_bias_x, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_bias_x,
@property
def dreg_gyro_2_bias_y(self):
"""
Contains the estimated y-axis bias for the gyro 2 in degrees/s.
Payload structure:
[31:0] : GYRO_2_BIAS_Y -- Gyro 2 bias Y (32-bit IEEE Floating Point Value)
:return: GYRO_2_BIAS_Y as float;
"""
addr = 0x92
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_BIAS_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_bias_y, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_bias_y,
@property
def dreg_gyro_2_bias_z(self):
"""
Contains the estimated z-axis bias for the gyro 2 in degrees/s.
Payload structure:
[31:0] : GYRO_2_BIAS_Z -- Gyro 2 bias Z (32-bit IEEE Floating Point Value)
:return: GYRO_2_BIAS_Z as float;
"""
addr = 0x93
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='DREG_GYRO_2_BIAS_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
gyro_2_bias_z, = struct.unpack('>f', payload[0:4])
return reg, gyro_2_bias_z,
@property
def get_fw_build_id(self):
"""
Firmware build identification string: a four byte ASCII character sequence which corresponds to a firmware
series.
Payload structure:
[31:0] : FW_BUILD_ID -- Firmware Build ID string
:return: FW_BUILD_ID as string;
"""
addr = 0xAA
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='GET_FW_BUILD_ID')
reg.raw_value, = struct.unpack('>I', payload[0:4])
fw_build_id = struct.unpack('>4s', payload[0:4])[0].decode('utf-8')
return fw_build_id
@property
def get_fw_build_version(self):
"""
Firmware build version provides the unique identifier of the firmware programmed in the board. A response is
four bytes long and identifies major and minor build version, and the build number.
Payload structure:
[31:24] : VERSION_MAJOR -- 8-bit unsigned integer major version number
[23:16] : VERSION_MINOR -- 8-bit unsigned integer minor version number
[15:0] : BUILD_ID -- 16-bit unsigned integer build ID number
:return: VERSION_MAJOR as uint8_t; VERSION_MINOR as uint8_t; BUILD_ID as uint16_t;
"""
addr = 0xAB
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='GET_FW_BUILD_VERSION')
reg.raw_value, = struct.unpack('>I', payload[0:4])
version_major, version_minor, build_id = struct.unpack('>BBH', payload[0:4])
return reg, version_major, version_minor, build_id
@property
def flash_commit(self):
raise RuntimeError('flash_commit has no getter! The register flash_commit is write-only!')
@flash_commit.setter
def flash_commit(self, new_value):
addr = 0xAC
self.write_register(addr, new_value)
@property
def reset_to_factory(self):
raise RuntimeError('reset_to_factory has no getter! The register reset_to_factory is write-only!')
@reset_to_factory.setter
def reset_to_factory(self, new_value):
addr = 0xAD
self.write_register(addr, new_value)
@property
def zero_gyros(self):
raise RuntimeError('zero_gyros has no getter! The register zero_gyros is write-only!')
@zero_gyros.setter
def zero_gyros(self, new_value):
addr = 0xAE
self.write_register(addr, new_value)
@property
def set_home_position(self):
raise RuntimeError('set_home_position has no getter! The register set_home_position is write-only!')
@set_home_position.setter
def set_home_position(self, new_value):
addr = 0xB0
self.write_register(addr, new_value)
@property
def set_mag_reference(self):
raise RuntimeError('set_mag_reference has no getter! The register set_mag_reference is write-only!')
@set_mag_reference.setter
def set_mag_reference(self, new_value):
addr = 0xB1
self.write_register(addr, new_value)
@property
def calibrate_accelerometers(self):
raise RuntimeError('calibrate_accelerometers has no getter! The register calibrate_accelerometers is write-only!')
@calibrate_accelerometers.setter
def calibrate_accelerometers(self, new_value):
addr = 0xB2
self.write_register(addr, new_value)
@property
def reset_fusion(self):
raise RuntimeError('reset_fusion has no getter! The register reset_fusion is write-only!')
@reset_fusion.setter
def reset_fusion(self, new_value):
addr = 0xB3
self.write_register(addr, new_value)
@property
def enable_zupt(self):
raise RuntimeError('enable_zupt has no getter! The register enable_zupt is write-only!')
@enable_zupt.setter
def enable_zupt(self, new_value):
addr = 0xB4
self.write_register(addr, new_value)
@property
def euler_mode(self):
raise RuntimeError('euler_mode has no getter! The register euler_mode is write-only!')
@euler_mode.setter
def euler_mode(self, new_value):
addr = 0xB5
self.write_register(addr, new_value)
@property
def quaternion_mode(self):
raise RuntimeError('quaternion_mode has no getter! The register quaternion_mode is write-only!')
@quaternion_mode.setter
def quaternion_mode(self, new_value):
addr = 0xB6
self.write_register(addr, new_value)
@property
def enable_rt_calibration(self):
raise RuntimeError('enable_rt_calibration has no getter! The register enable_rt_calibration is write-only!')
@enable_rt_calibration.setter
def enable_rt_calibration(self, new_value):
addr = 0xB7
self.write_register(addr, new_value)
@property
def en_mag_anomaly_detection(self):
raise RuntimeError('en_mag_anomaly_detection has no getter! The register en_mag_anomaly_detection is write-only!')
@en_mag_anomaly_detection.setter
def en_mag_anomaly_detection(self, new_value):
addr = 0xB8
self.write_register(addr, new_value)
@property
def run_self_tests(self):
raise RuntimeError('run_self_tests has no getter! The register run_self_tests is write-only!')
@run_self_tests.setter
def run_self_tests(self, new_value):
addr = 0xB9
self.write_register(addr, new_value)
@property
def enable_external_event(self):
raise RuntimeError('enable_external_event has no getter! The register enable_external_event is write-only!')
@enable_external_event.setter
def enable_external_event(self, new_value):
addr = 0xBA
self.write_register(addr, new_value)
@property
def enable_gnns_fusion(self):
raise RuntimeError('enable_gnns_fusion has no getter! The register enable_gnns_fusion is write-only!')
@enable_gnns_fusion.setter
def enable_gnns_fusion(self, new_value):
addr = 0xBB
self.write_register(addr, new_value)
@property
def enable_usr_euler_output(self):
raise RuntimeError('enable_usr_euler_output has no getter! The register enable_usr_euler_output is write-only!')
@enable_usr_euler_output.setter
def enable_usr_euler_output(self, new_value):
addr = 0xBC
self.write_register(addr, new_value)
@property
def enable_dead_reckoning(self):
raise RuntimeError('enable_dead_reckoning has no getter! The register enable_dead_reckoning is write-only!')
@enable_dead_reckoning.setter
def enable_dead_reckoning(self, new_value):
addr = 0xBD
self.write_register(addr, new_value)
@property
def enable_heave_sway_surge(self):
raise RuntimeError('enable_heave_sway_surge has no getter! The register enable_heave_sway_surge is write-only!')
@enable_heave_sway_surge.setter
def enable_heave_sway_surge(self, new_value):
addr = 0xBE
self.write_register(addr, new_value)
@property
def enable_ukf(self):
raise RuntimeError('enable_ukf has no getter! The register enable_ukf is write-only!')
@enable_ukf.setter
def enable_ukf(self, new_value):
addr = 0xBF
self.write_register(addr, new_value)
@property
def board_unique_id_1(self):
"""
First 32-bits of the 64-bits of the board unique identifier. Bits of the unique identifier cannot be modified
by the user.
Payload structure:
[31:0] : BOARD_UNIQUE_ID_1_BITS -- Board unique ID bits
:return: BOARD_UNIQUE_ID_1_BITS as uint32_t;
"""
addr = 0xFD
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='BOARD_UNIQUE_ID_1')
reg.raw_value, = struct.unpack('>I', payload[0:4])
board_unique_id_1_bits, = struct.unpack('>I', payload[0:4])
return reg, board_unique_id_1_bits,
@property
def board_unique_id_2(self):
"""
Last 32-bits of the 64-bits of the board unique identifier. Bits of the unique identifier cannot be modified
by the user.
Payload structure:
[31:0] : BOARD_UNIQUE_ID_2_BITS -- Board unique ID bits
:return: BOARD_UNIQUE_ID_2_BITS as uint32_t;
"""
addr = 0xFE
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='BOARD_UNIQUE_ID_2')
reg.raw_value, = struct.unpack('>I', payload[0:4])
board_unique_id_2_bits, = struct.unpack('>I', payload[0:4])
return reg, board_unique_id_2_bits,
@property
def protocol_version(self):
"""
String version of the protocol.
Payload structure:
[31:0] : PROTOCOL_VERSION_STR -- Protocol version string
:return: PROTOCOL_VERSION_STR as string;
"""
addr = 0xFF
ok, payload = self.read_register(addr)
if ok:
reg = self.svd_parser.find_register_by(name='PROTOCOL_VERSION')
reg.raw_value, = struct.unpack('>I', payload[0:4])
protocol_version_str = struct.unpack('>4s', payload[0:4])[0].decode('utf-8')
return protocol_version_str
@property
def hidden_gyro_1_variance(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_1_VARIANCE -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_1_VARIANCE as float;
"""
addr = 0x00
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_VARIANCE')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_1_variance, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_1_variance,
@hidden_gyro_1_variance.setter
def hidden_gyro_1_variance(self, new_value):
addr = 0x00
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_variance(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_VARIANCE -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_VARIANCE as float;
"""
addr = 0x01
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_VARIANCE')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_variance, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_variance,
@hidden_gyro_2_variance.setter
def hidden_gyro_2_variance(self, new_value):
addr = 0x01
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_variance(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_VARIANCE -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_VARIANCE as float;
"""
addr = 0x02
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_VARIANCE')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_variance, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_variance,
@hidden_accel_1_variance.setter
def hidden_accel_1_variance(self, new_value):
addr = 0x02
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_variance(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_VARIANCE -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_VARIANCE as float;
"""
addr = 0x03
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_VARIANCE')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_variance, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_variance,
@hidden_mag_1_variance.setter
def hidden_mag_1_variance(self, new_value):
addr = 0x03
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_variance(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_VARIANCE -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_VARIANCE as float;
"""
addr = 0x04
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_VARIANCE')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_variance, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_variance,
@hidden_mag_2_variance.setter
def hidden_mag_2_variance(self, new_value):
addr = 0x04
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gps_course_variance(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GPS_COURSE_VARIANCE -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GPS_COURSE_VARIANCE as float;
"""
addr = 0x05
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GPS_COURSE_VARIANCE')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gps_course_variance, = struct.unpack('>f', payload[0:4])
return reg, hidden_gps_course_variance,
@hidden_gps_course_variance.setter
def hidden_gps_course_variance(self, new_value):
addr = 0x05
self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gps_position_variance(self):
        """
        GPS position measurement variance (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GPS_POSITION_VARIANCE -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GPS_POSITION_VARIANCE as float) on success; None if the read fails;
        """
        addr = 0x06
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            # NOTE: the payload is unpacked twice (reg.raw_value and the local);
            # redundant but harmless -- the same generated pattern repeats below.
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GPS_POSITION_VARIANCE')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gps_position_variance, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gps_position_variance,
    @hidden_gps_position_variance.setter
    def hidden_gps_position_variance(self, new_value):
        addr = 0x06
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gps_velocity_variance(self):
        """
        GPS velocity measurement variance (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GPS_VELOCITY_VARIANCE -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GPS_VELOCITY_VARIANCE as float) on success; None if the read fails;
        """
        addr = 0x07
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GPS_VELOCITY_VARIANCE')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gps_velocity_variance, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gps_velocity_variance,
    @hidden_gps_velocity_variance.setter
    def hidden_gps_velocity_variance(self, new_value):
        addr = 0x07
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_static_press_variance(self):
        """
        Static pressure measurement variance (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_STATIC_PRESS_VARIANCE -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_STATIC_PRESS_VARIANCE as float) on success; None if the read fails;
        """
        addr = 0x08
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_STATIC_PRESS_VARIANCE')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_static_press_variance, = struct.unpack('>f', payload[0:4])
            return reg, hidden_static_press_variance,
    @hidden_static_press_variance.setter
    def hidden_static_press_variance(self, new_value):
        addr = 0x08
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_diff_press_variance(self):
        """
        Differential pressure measurement variance (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_DIFF_PRESS_VARIANCE -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_DIFF_PRESS_VARIANCE as float) on success; None if the read fails;
        """
        addr = 0x09
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_DIFF_PRESS_VARIANCE')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_diff_press_variance, = struct.unpack('>f', payload[0:4])
            return reg, hidden_diff_press_variance,
    @hidden_diff_press_variance.setter
    def hidden_diff_press_variance(self, new_value):
        addr = 0x09
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_q_uvw(self):
        """
        Process-noise parameter for the UVW (body-frame velocity) states
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_Q_UVW -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_Q_UVW as float) on success; None if the read fails;
        """
        addr = 0x0A
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            # NOTE: the payload is unpacked twice (reg.raw_value and the local);
            # redundant but harmless -- the same generated pattern repeats below.
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_Q_UVW')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_q_uvw, = struct.unpack('>f', payload[0:4])
            return reg, hidden_q_uvw,
    @hidden_q_uvw.setter
    def hidden_q_uvw(self, new_value):
        addr = 0x0A
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_q_quaternion(self):
        """
        Process-noise parameter for the attitude quaternion states
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_Q_QUATERNION -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_Q_QUATERNION as float) on success; None if the read fails;
        """
        addr = 0x0B
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_Q_QUATERNION')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_q_quaternion, = struct.unpack('>f', payload[0:4])
            return reg, hidden_q_quaternion,
    @hidden_q_quaternion.setter
    def hidden_q_quaternion(self, new_value):
        addr = 0x0B
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_q_gps_position(self):
        """
        Process-noise parameter for the GPS position states
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_Q_GPS_POSITION -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_Q_GPS_POSITION as float) on success; None if the read fails;
        """
        addr = 0x0C
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_Q_GPS_POSITION')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_q_gps_position, = struct.unpack('>f', payload[0:4])
            return reg, hidden_q_gps_position,
    @hidden_q_gps_position.setter
    def hidden_q_gps_position(self, new_value):
        addr = 0x0C
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_q_bias(self):
        """
        Process-noise parameter for the sensor-bias states
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_Q_BIAS -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_Q_BIAS as float) on success; None if the read fails;
        """
        addr = 0x0D
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_Q_BIAS')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_q_bias, = struct.unpack('>f', payload[0:4])
            return reg, hidden_q_bias,
    @hidden_q_bias.setter
    def hidden_q_bias(self, new_value):
        addr = 0x0D
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_q_euler_angles(self):
        """
        Process-noise parameter for the Euler-angle states
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_Q_EULER_ANGLES -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_Q_EULER_ANGLES as float) on success; None if the read fails;
        """
        addr = 0x0E
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_Q_EULER_ANGLES')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_q_euler_angles, = struct.unpack('>f', payload[0:4])
            return reg, hidden_q_euler_angles,
    @hidden_q_euler_angles.setter
    def hidden_q_euler_angles(self, new_value):
        addr = 0x0E
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_low_vg_accel_noise_factor(self):
        """
        Accelerometer noise scaling factor applied in low-vibration/low-g conditions
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_LOW_VG_ACCEL_NOISE_FACTOR -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_LOW_VG_ACCEL_NOISE_FACTOR as float) on success; None if the read fails;
        """
        addr = 0x0F
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_LOW_VG_ACCEL_NOISE_FACTOR')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_low_vg_accel_noise_factor, = struct.unpack('>f', payload[0:4])
            return reg, hidden_low_vg_accel_noise_factor,
    @hidden_low_vg_accel_noise_factor.setter
    def hidden_low_vg_accel_noise_factor(self, new_value):
        addr = 0x0F
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_lpf_tau_groundspeed(self):
        """
        Low-pass filter time constant for groundspeed
        (inferred from register name; TODO: confirm semantics and units).
        Payload structure:
            [31:0] : HIDDEN_LPF_TAU_GROUNDSPEED -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_LPF_TAU_GROUNDSPEED as float) on success; None if the read fails;
        """
        addr = 0x10
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            # NOTE: the payload is unpacked twice (reg.raw_value and the local);
            # redundant but harmless -- the same generated pattern repeats below.
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_LPF_TAU_GROUNDSPEED')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_lpf_tau_groundspeed, = struct.unpack('>f', payload[0:4])
            return reg, hidden_lpf_tau_groundspeed,
    @hidden_lpf_tau_groundspeed.setter
    def hidden_lpf_tau_groundspeed(self, new_value):
        addr = 0x10
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_lpf_tau_gyro_1(self):
        """
        Low-pass filter time constant for gyro 1
        (inferred from register name; TODO: confirm semantics and units).
        Payload structure:
            [31:0] : HIDDEN_LPF_TAU_GYRO_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_LPF_TAU_GYRO_1 as float) on success; None if the read fails;
        """
        addr = 0x11
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_LPF_TAU_GYRO_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_lpf_tau_gyro_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_lpf_tau_gyro_1,
    @hidden_lpf_tau_gyro_1.setter
    def hidden_lpf_tau_gyro_1(self, new_value):
        addr = 0x11
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_lpf_tau_gyro_2(self):
        """
        Low-pass filter time constant for gyro 2
        (inferred from register name; TODO: confirm semantics and units).
        Payload structure:
            [31:0] : HIDDEN_LPF_TAU_GYRO_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_LPF_TAU_GYRO_2 as float) on success; None if the read fails;
        """
        addr = 0x12
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_LPF_TAU_GYRO_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_lpf_tau_gyro_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_lpf_tau_gyro_2,
    @hidden_lpf_tau_gyro_2.setter
    def hidden_lpf_tau_gyro_2(self, new_value):
        addr = 0x12
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_lpf_tau_accel_1(self):
        """
        Low-pass filter time constant for accelerometer 1
        (inferred from register name; TODO: confirm semantics and units).
        Payload structure:
            [31:0] : HIDDEN_LPF_TAU_ACCEL_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_LPF_TAU_ACCEL_1 as float) on success; None if the read fails;
        """
        addr = 0x13
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_LPF_TAU_ACCEL_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_lpf_tau_accel_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_lpf_tau_accel_1,
    @hidden_lpf_tau_accel_1.setter
    def hidden_lpf_tau_accel_1(self, new_value):
        addr = 0x13
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_lpf_tau_mag_1(self):
        """
        Low-pass filter time constant for magnetometer 1
        (inferred from register name; TODO: confirm semantics and units).
        Payload structure:
            [31:0] : HIDDEN_LPF_TAU_MAG_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_LPF_TAU_MAG_1 as float) on success; None if the read fails;
        """
        addr = 0x14
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_LPF_TAU_MAG_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_lpf_tau_mag_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_lpf_tau_mag_1,
    @hidden_lpf_tau_mag_1.setter
    def hidden_lpf_tau_mag_1(self, new_value):
        addr = 0x14
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_lpf_tau_mag_2(self):
        """
        Low-pass filter time constant for magnetometer 2
        (inferred from register name; TODO: confirm semantics and units).
        Payload structure:
            [31:0] : HIDDEN_LPF_TAU_MAG_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_LPF_TAU_MAG_2 as float) on success; None if the read fails;
        """
        addr = 0x15
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_LPF_TAU_MAG_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_lpf_tau_mag_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_lpf_tau_mag_2,
    @hidden_lpf_tau_mag_2.setter
    def hidden_lpf_tau_mag_2(self, new_value):
        addr = 0x15
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_x_pow_0(self):
        """
        Gyro 1 X-axis bias compensation polynomial coefficient, power 0
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_X_POW_0 as float) on success; None if the read fails;
        """
        addr = 0x16
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            # NOTE: the payload is unpacked twice (reg.raw_value and the local);
            # redundant but harmless -- the same generated pattern repeats below.
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_X_POW_0')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_x_pow_0, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_x_pow_0,
    @hidden_c_gyro_1_bias_x_pow_0.setter
    def hidden_c_gyro_1_bias_x_pow_0(self, new_value):
        addr = 0x16
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_x_pow_1(self):
        """
        Gyro 1 X-axis bias compensation polynomial coefficient, power 1
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_X_POW_1 as float) on success; None if the read fails;
        """
        addr = 0x17
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_X_POW_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_x_pow_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_x_pow_1,
    @hidden_c_gyro_1_bias_x_pow_1.setter
    def hidden_c_gyro_1_bias_x_pow_1(self, new_value):
        addr = 0x17
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_x_pow_2(self):
        """
        Gyro 1 X-axis bias compensation polynomial coefficient, power 2
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_X_POW_2 as float) on success; None if the read fails;
        """
        addr = 0x18
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_X_POW_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_x_pow_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_x_pow_2,
    @hidden_c_gyro_1_bias_x_pow_2.setter
    def hidden_c_gyro_1_bias_x_pow_2(self, new_value):
        addr = 0x18
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_x_pow_3(self):
        """
        Gyro 1 X-axis bias compensation polynomial coefficient, power 3
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_X_POW_3 as float) on success; None if the read fails;
        """
        addr = 0x19
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_X_POW_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_x_pow_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_x_pow_3,
    @hidden_c_gyro_1_bias_x_pow_3.setter
    def hidden_c_gyro_1_bias_x_pow_3(self, new_value):
        addr = 0x19
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_y_pow_0(self):
        """
        Gyro 1 Y-axis bias compensation polynomial coefficient, power 0
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Y_POW_0 as float) on success; None if the read fails;
        """
        addr = 0x1A
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Y_POW_0')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_y_pow_0, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_y_pow_0,
    @hidden_c_gyro_1_bias_y_pow_0.setter
    def hidden_c_gyro_1_bias_y_pow_0(self, new_value):
        addr = 0x1A
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_y_pow_1(self):
        """
        Gyro 1 Y-axis bias compensation polynomial coefficient, power 1
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Y_POW_1 as float) on success; None if the read fails;
        """
        addr = 0x1B
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Y_POW_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_y_pow_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_y_pow_1,
    @hidden_c_gyro_1_bias_y_pow_1.setter
    def hidden_c_gyro_1_bias_y_pow_1(self, new_value):
        addr = 0x1B
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_y_pow_2(self):
        """
        Gyro 1 Y-axis bias compensation polynomial coefficient, power 2
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Y_POW_2 as float) on success; None if the read fails;
        """
        addr = 0x1C
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Y_POW_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_y_pow_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_y_pow_2,
    @hidden_c_gyro_1_bias_y_pow_2.setter
    def hidden_c_gyro_1_bias_y_pow_2(self, new_value):
        addr = 0x1C
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_y_pow_3(self):
        """
        Gyro 1 Y-axis bias compensation polynomial coefficient, power 3
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Y_POW_3 as float) on success; None if the read fails;
        """
        addr = 0x1D
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Y_POW_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_y_pow_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_y_pow_3,
    @hidden_c_gyro_1_bias_y_pow_3.setter
    def hidden_c_gyro_1_bias_y_pow_3(self, new_value):
        addr = 0x1D
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_z_pow_0(self):
        """
        Gyro 1 Z-axis bias compensation polynomial coefficient, power 0
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Z_POW_0 as float) on success; None if the read fails;
        """
        addr = 0x1E
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Z_POW_0')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_z_pow_0, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_z_pow_0,
    @hidden_c_gyro_1_bias_z_pow_0.setter
    def hidden_c_gyro_1_bias_z_pow_0(self, new_value):
        addr = 0x1E
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_z_pow_1(self):
        """
        Gyro 1 Z-axis bias compensation polynomial coefficient, power 1
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Z_POW_1 as float) on success; None if the read fails;
        """
        addr = 0x1F
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Z_POW_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_z_pow_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_z_pow_1,
    @hidden_c_gyro_1_bias_z_pow_1.setter
    def hidden_c_gyro_1_bias_z_pow_1(self, new_value):
        addr = 0x1F
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_z_pow_2(self):
        """
        Gyro 1 Z-axis bias compensation polynomial coefficient, power 2
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Z_POW_2 as float) on success; None if the read fails;
        """
        addr = 0x20
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Z_POW_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_z_pow_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_z_pow_2,
    @hidden_c_gyro_1_bias_z_pow_2.setter
    def hidden_c_gyro_1_bias_z_pow_2(self, new_value):
        addr = 0x20
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_bias_z_pow_3(self):
        """
        Gyro 1 Z-axis bias compensation polynomial coefficient, power 3
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_BIAS_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_BIAS_Z_POW_3 as float) on success; None if the read fails;
        """
        addr = 0x21
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_BIAS_Z_POW_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_bias_z_pow_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_bias_z_pow_3,
    @hidden_c_gyro_1_bias_z_pow_3.setter
    def hidden_c_gyro_1_bias_z_pow_3(self, new_value):
        addr = 0x21
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_x_pow_0(self):
        """
        Gyro 1 X-axis scale-factor compensation polynomial coefficient, power 0
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_X_POW_0 as float) on success; None if the read fails;
        """
        addr = 0x22
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            # NOTE: the payload is unpacked twice (reg.raw_value and the local);
            # redundant but harmless -- the same generated pattern repeats below.
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_X_POW_0')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_x_pow_0, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_x_pow_0,
    @hidden_c_gyro_1_scale_x_pow_0.setter
    def hidden_c_gyro_1_scale_x_pow_0(self, new_value):
        addr = 0x22
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_x_pow_1(self):
        """
        Gyro 1 X-axis scale-factor compensation polynomial coefficient, power 1
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_X_POW_1 as float) on success; None if the read fails;
        """
        addr = 0x23
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_X_POW_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_x_pow_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_x_pow_1,
    @hidden_c_gyro_1_scale_x_pow_1.setter
    def hidden_c_gyro_1_scale_x_pow_1(self, new_value):
        addr = 0x23
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_x_pow_2(self):
        """
        Gyro 1 X-axis scale-factor compensation polynomial coefficient, power 2
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_X_POW_2 as float) on success; None if the read fails;
        """
        addr = 0x24
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_X_POW_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_x_pow_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_x_pow_2,
    @hidden_c_gyro_1_scale_x_pow_2.setter
    def hidden_c_gyro_1_scale_x_pow_2(self, new_value):
        addr = 0x24
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_x_pow_3(self):
        """
        Gyro 1 X-axis scale-factor compensation polynomial coefficient, power 3
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_X_POW_3 as float) on success; None if the read fails;
        """
        addr = 0x25
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_X_POW_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_x_pow_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_x_pow_3,
    @hidden_c_gyro_1_scale_x_pow_3.setter
    def hidden_c_gyro_1_scale_x_pow_3(self, new_value):
        addr = 0x25
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_y_pow_0(self):
        """
        Gyro 1 Y-axis scale-factor compensation polynomial coefficient, power 0
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Y_POW_0 as float) on success; None if the read fails;
        """
        addr = 0x26
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Y_POW_0')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_y_pow_0, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_y_pow_0,
    @hidden_c_gyro_1_scale_y_pow_0.setter
    def hidden_c_gyro_1_scale_y_pow_0(self, new_value):
        addr = 0x26
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_y_pow_1(self):
        """
        Gyro 1 Y-axis scale-factor compensation polynomial coefficient, power 1
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Y_POW_1 as float) on success; None if the read fails;
        """
        addr = 0x27
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Y_POW_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_y_pow_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_y_pow_1,
    @hidden_c_gyro_1_scale_y_pow_1.setter
    def hidden_c_gyro_1_scale_y_pow_1(self, new_value):
        addr = 0x27
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_y_pow_2(self):
        """
        Gyro 1 Y-axis scale-factor compensation polynomial coefficient, power 2
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Y_POW_2 as float) on success; None if the read fails;
        """
        addr = 0x28
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Y_POW_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_y_pow_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_y_pow_2,
    @hidden_c_gyro_1_scale_y_pow_2.setter
    def hidden_c_gyro_1_scale_y_pow_2(self, new_value):
        addr = 0x28
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_y_pow_3(self):
        """
        Gyro 1 Y-axis scale-factor compensation polynomial coefficient, power 3
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Y_POW_3 as float) on success; None if the read fails;
        """
        addr = 0x29
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Y_POW_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_y_pow_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_y_pow_3,
    @hidden_c_gyro_1_scale_y_pow_3.setter
    def hidden_c_gyro_1_scale_y_pow_3(self, new_value):
        addr = 0x29
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_z_pow_0(self):
        """
        Gyro 1 Z-axis scale-factor compensation polynomial coefficient, power 0
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Z_POW_0 as float) on success; None if the read fails;
        """
        addr = 0x2A
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Z_POW_0')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_z_pow_0, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_z_pow_0,
    @hidden_c_gyro_1_scale_z_pow_0.setter
    def hidden_c_gyro_1_scale_z_pow_0(self, new_value):
        addr = 0x2A
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_z_pow_1(self):
        """
        Gyro 1 Z-axis scale-factor compensation polynomial coefficient, power 1
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Z_POW_1 as float) on success; None if the read fails;
        """
        addr = 0x2B
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Z_POW_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_z_pow_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_z_pow_1,
    @hidden_c_gyro_1_scale_z_pow_1.setter
    def hidden_c_gyro_1_scale_z_pow_1(self, new_value):
        addr = 0x2B
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_z_pow_2(self):
        """
        Gyro 1 Z-axis scale-factor compensation polynomial coefficient, power 2
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Z_POW_2 as float) on success; None if the read fails;
        """
        addr = 0x2C
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Z_POW_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_z_pow_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_z_pow_2,
    @hidden_c_gyro_1_scale_z_pow_2.setter
    def hidden_c_gyro_1_scale_z_pow_2(self, new_value):
        addr = 0x2C
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_1_scale_z_pow_3(self):
        """
        Gyro 1 Z-axis scale-factor compensation polynomial coefficient, power 3
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_1_SCALE_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_1_SCALE_Z_POW_3 as float) on success; None if the read fails;
        """
        addr = 0x2D
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_1_SCALE_Z_POW_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_1_scale_z_pow_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_1_scale_z_pow_3,
    @hidden_c_gyro_1_scale_z_pow_3.setter
    def hidden_c_gyro_1_scale_z_pow_3(self, new_value):
        addr = 0x2D
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment1_1(self):
        """
        Gyro 1 alignment matrix element (row 1, column 1)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT1_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT1_1 as float) on success; None if the read fails;
        """
        addr = 0x2E
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            # NOTE: the payload is unpacked twice (reg.raw_value and the local);
            # redundant but harmless -- the same generated pattern repeats below.
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT1_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment1_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment1_1,
    @hidden_gyro_1_alignment1_1.setter
    def hidden_gyro_1_alignment1_1(self, new_value):
        addr = 0x2E
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment1_2(self):
        """
        Gyro 1 alignment matrix element (row 1, column 2)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT1_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT1_2 as float) on success; None if the read fails;
        """
        addr = 0x2F
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT1_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment1_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment1_2,
    @hidden_gyro_1_alignment1_2.setter
    def hidden_gyro_1_alignment1_2(self, new_value):
        addr = 0x2F
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment1_3(self):
        """
        Gyro 1 alignment matrix element (row 1, column 3)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT1_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT1_3 as float) on success; None if the read fails;
        """
        addr = 0x30
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT1_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment1_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment1_3,
    @hidden_gyro_1_alignment1_3.setter
    def hidden_gyro_1_alignment1_3(self, new_value):
        addr = 0x30
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment2_1(self):
        """
        Gyro 1 alignment matrix element (row 2, column 1)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT2_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT2_1 as float) on success; None if the read fails;
        """
        addr = 0x31
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT2_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment2_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment2_1,
    @hidden_gyro_1_alignment2_1.setter
    def hidden_gyro_1_alignment2_1(self, new_value):
        addr = 0x31
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment2_2(self):
        """
        Gyro 1 alignment matrix element (row 2, column 2)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT2_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT2_2 as float) on success; None if the read fails;
        """
        addr = 0x32
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT2_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment2_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment2_2,
    @hidden_gyro_1_alignment2_2.setter
    def hidden_gyro_1_alignment2_2(self, new_value):
        addr = 0x32
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment2_3(self):
        """
        Gyro 1 alignment matrix element (row 2, column 3)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT2_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT2_3 as float) on success; None if the read fails;
        """
        addr = 0x33
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT2_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment2_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment2_3,
    @hidden_gyro_1_alignment2_3.setter
    def hidden_gyro_1_alignment2_3(self, new_value):
        addr = 0x33
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment3_1(self):
        """
        Gyro 1 alignment matrix element (row 3, column 1)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT3_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT3_1 as float) on success; None if the read fails;
        """
        addr = 0x34
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT3_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment3_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment3_1,
    @hidden_gyro_1_alignment3_1.setter
    def hidden_gyro_1_alignment3_1(self, new_value):
        addr = 0x34
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment3_2(self):
        """
        Gyro 1 alignment matrix element (row 3, column 2)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT3_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT3_2 as float) on success; None if the read fails;
        """
        addr = 0x35
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT3_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment3_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment3_2,
    @hidden_gyro_1_alignment3_2.setter
    def hidden_gyro_1_alignment3_2(self, new_value):
        addr = 0x35
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_gyro_1_alignment3_3(self):
        """
        Gyro 1 alignment matrix element (row 3, column 3)
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_GYRO_1_ALIGNMENT3_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_GYRO_1_ALIGNMENT3_3 as float) on success; None if the read fails;
        """
        addr = 0x36
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_ALIGNMENT3_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_gyro_1_alignment3_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_gyro_1_alignment3_3,
    @hidden_gyro_1_alignment3_3.setter
    def hidden_gyro_1_alignment3_3(self, new_value):
        addr = 0x36
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_2_bias_x_pow_0(self):
        """
        Gyro 2 X-axis bias compensation polynomial coefficient, power 0
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_2_BIAS_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_2_BIAS_X_POW_0 as float) on success; None if the read fails;
        """
        addr = 0x37
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            # NOTE: the payload is unpacked twice (reg.raw_value and the local);
            # redundant but harmless -- the same generated pattern repeats below.
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_X_POW_0')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_2_bias_x_pow_0, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_2_bias_x_pow_0,
    @hidden_c_gyro_2_bias_x_pow_0.setter
    def hidden_c_gyro_2_bias_x_pow_0(self, new_value):
        addr = 0x37
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_2_bias_x_pow_1(self):
        """
        Gyro 2 X-axis bias compensation polynomial coefficient, power 1
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_2_BIAS_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_2_BIAS_X_POW_1 as float) on success; None if the read fails;
        """
        addr = 0x38
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_X_POW_1')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_2_bias_x_pow_1, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_2_bias_x_pow_1,
    @hidden_c_gyro_2_bias_x_pow_1.setter
    def hidden_c_gyro_2_bias_x_pow_1(self, new_value):
        addr = 0x38
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_2_bias_x_pow_2(self):
        """
        Gyro 2 X-axis bias compensation polynomial coefficient, power 2
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_2_BIAS_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_2_BIAS_X_POW_2 as float) on success; None if the read fails;
        """
        addr = 0x39
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_X_POW_2')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_2_bias_x_pow_2, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_2_bias_x_pow_2,
    @hidden_c_gyro_2_bias_x_pow_2.setter
    def hidden_c_gyro_2_bias_x_pow_2(self, new_value):
        addr = 0x39
        self.write_register(addr, new_value, hidden=True)
    @property
    def hidden_c_gyro_2_bias_x_pow_3(self):
        """
        Gyro 2 X-axis bias compensation polynomial coefficient, power 3
        (inferred from register name; TODO: confirm semantics).
        Payload structure:
            [31:0] : HIDDEN_C_GYRO_2_BIAS_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
        :return: (reg, HIDDEN_C_GYRO_2_BIAS_X_POW_3 as float) on success; None if the read fails;
        """
        addr = 0x3A
        ok, payload = self.read_register(addr, hidden=True)
        if ok:
            reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_X_POW_3')
            reg.raw_value, = struct.unpack('>f', payload[0:4])
            hidden_c_gyro_2_bias_x_pow_3, = struct.unpack('>f', payload[0:4])
            return reg, hidden_c_gyro_2_bias_x_pow_3,
    @hidden_c_gyro_2_bias_x_pow_3.setter
    def hidden_c_gyro_2_bias_x_pow_3(self, new_value):
        addr = 0x3A
        self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Y_POW_0 as float;
"""
addr = 0x3B
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_y_pow_0,
@hidden_c_gyro_2_bias_y_pow_0.setter
def hidden_c_gyro_2_bias_y_pow_0(self, new_value):
addr = 0x3B
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Y_POW_1 as float;
"""
addr = 0x3C
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_y_pow_1,
@hidden_c_gyro_2_bias_y_pow_1.setter
def hidden_c_gyro_2_bias_y_pow_1(self, new_value):
addr = 0x3C
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Y_POW_2 as float;
"""
addr = 0x3D
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_y_pow_2,
@hidden_c_gyro_2_bias_y_pow_2.setter
def hidden_c_gyro_2_bias_y_pow_2(self, new_value):
addr = 0x3D
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Y_POW_3 as float;
"""
addr = 0x3E
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_y_pow_3,
@hidden_c_gyro_2_bias_y_pow_3.setter
def hidden_c_gyro_2_bias_y_pow_3(self, new_value):
addr = 0x3E
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Z_POW_0 as float;
"""
addr = 0x3F
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_z_pow_0,
@hidden_c_gyro_2_bias_z_pow_0.setter
def hidden_c_gyro_2_bias_z_pow_0(self, new_value):
addr = 0x3F
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Z_POW_1 as float;
"""
addr = 0x40
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_z_pow_1,
@hidden_c_gyro_2_bias_z_pow_1.setter
def hidden_c_gyro_2_bias_z_pow_1(self, new_value):
addr = 0x40
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Z_POW_2 as float;
"""
addr = 0x41
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_z_pow_2,
@hidden_c_gyro_2_bias_z_pow_2.setter
def hidden_c_gyro_2_bias_z_pow_2(self, new_value):
addr = 0x41
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_bias_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_BIAS_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_BIAS_Z_POW_3 as float;
"""
addr = 0x42
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_BIAS_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_bias_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_bias_z_pow_3,
@hidden_c_gyro_2_bias_z_pow_3.setter
def hidden_c_gyro_2_bias_z_pow_3(self, new_value):
addr = 0x42
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_x_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_X_POW_0 as float;
"""
addr = 0x43
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_X_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_x_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_x_pow_0,
@hidden_c_gyro_2_scale_x_pow_0.setter
def hidden_c_gyro_2_scale_x_pow_0(self, new_value):
addr = 0x43
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_x_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_X_POW_1 as float;
"""
addr = 0x44
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_X_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_x_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_x_pow_1,
@hidden_c_gyro_2_scale_x_pow_1.setter
def hidden_c_gyro_2_scale_x_pow_1(self, new_value):
addr = 0x44
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_x_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_X_POW_2 as float;
"""
addr = 0x45
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_X_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_x_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_x_pow_2,
@hidden_c_gyro_2_scale_x_pow_2.setter
def hidden_c_gyro_2_scale_x_pow_2(self, new_value):
addr = 0x45
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_x_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_X_POW_3 as float;
"""
addr = 0x46
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_X_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_x_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_x_pow_3,
@hidden_c_gyro_2_scale_x_pow_3.setter
def hidden_c_gyro_2_scale_x_pow_3(self, new_value):
addr = 0x46
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Y_POW_0 as float;
"""
addr = 0x47
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_y_pow_0,
@hidden_c_gyro_2_scale_y_pow_0.setter
def hidden_c_gyro_2_scale_y_pow_0(self, new_value):
addr = 0x47
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Y_POW_1 as float;
"""
addr = 0x48
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_y_pow_1,
@hidden_c_gyro_2_scale_y_pow_1.setter
def hidden_c_gyro_2_scale_y_pow_1(self, new_value):
addr = 0x48
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Y_POW_2 as float;
"""
addr = 0x49
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_y_pow_2,
@hidden_c_gyro_2_scale_y_pow_2.setter
def hidden_c_gyro_2_scale_y_pow_2(self, new_value):
addr = 0x49
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Y_POW_3 as float;
"""
addr = 0x4A
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_y_pow_3,
@hidden_c_gyro_2_scale_y_pow_3.setter
def hidden_c_gyro_2_scale_y_pow_3(self, new_value):
addr = 0x4A
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Z_POW_0 as float;
"""
addr = 0x4B
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_z_pow_0,
@hidden_c_gyro_2_scale_z_pow_0.setter
def hidden_c_gyro_2_scale_z_pow_0(self, new_value):
addr = 0x4B
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Z_POW_1 as float;
"""
addr = 0x4C
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_z_pow_1,
@hidden_c_gyro_2_scale_z_pow_1.setter
def hidden_c_gyro_2_scale_z_pow_1(self, new_value):
addr = 0x4C
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Z_POW_2 as float;
"""
addr = 0x4D
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_z_pow_2,
@hidden_c_gyro_2_scale_z_pow_2.setter
def hidden_c_gyro_2_scale_z_pow_2(self, new_value):
addr = 0x4D
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_gyro_2_scale_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_GYRO_2_SCALE_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_GYRO_2_SCALE_Z_POW_3 as float;
"""
addr = 0x4E
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_GYRO_2_SCALE_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_gyro_2_scale_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_gyro_2_scale_z_pow_3,
@hidden_c_gyro_2_scale_z_pow_3.setter
def hidden_c_gyro_2_scale_z_pow_3(self, new_value):
addr = 0x4E
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment1_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT1_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT1_1 as float;
"""
addr = 0x4F
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT1_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment1_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment1_1,
@hidden_gyro_2_alignment1_1.setter
def hidden_gyro_2_alignment1_1(self, new_value):
addr = 0x4F
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment1_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT1_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT1_2 as float;
"""
addr = 0x50
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT1_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment1_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment1_2,
@hidden_gyro_2_alignment1_2.setter
def hidden_gyro_2_alignment1_2(self, new_value):
addr = 0x50
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment1_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT1_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT1_3 as float;
"""
addr = 0x51
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT1_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment1_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment1_3,
@hidden_gyro_2_alignment1_3.setter
def hidden_gyro_2_alignment1_3(self, new_value):
addr = 0x51
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment2_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT2_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT2_1 as float;
"""
addr = 0x52
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT2_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment2_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment2_1,
@hidden_gyro_2_alignment2_1.setter
def hidden_gyro_2_alignment2_1(self, new_value):
addr = 0x52
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment2_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT2_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT2_2 as float;
"""
addr = 0x53
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT2_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment2_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment2_2,
@hidden_gyro_2_alignment2_2.setter
def hidden_gyro_2_alignment2_2(self, new_value):
addr = 0x53
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment2_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT2_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT2_3 as float;
"""
addr = 0x54
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT2_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment2_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment2_3,
@hidden_gyro_2_alignment2_3.setter
def hidden_gyro_2_alignment2_3(self, new_value):
addr = 0x54
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment3_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT3_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT3_1 as float;
"""
addr = 0x55
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT3_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment3_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment3_1,
@hidden_gyro_2_alignment3_1.setter
def hidden_gyro_2_alignment3_1(self, new_value):
addr = 0x55
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment3_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT3_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT3_2 as float;
"""
addr = 0x56
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT3_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment3_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment3_2,
@hidden_gyro_2_alignment3_2.setter
def hidden_gyro_2_alignment3_2(self, new_value):
addr = 0x56
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_2_alignment3_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_GYRO_2_ALIGNMENT3_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_GYRO_2_ALIGNMENT3_3 as float;
"""
addr = 0x57
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_ALIGNMENT3_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_gyro_2_alignment3_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_gyro_2_alignment3_3,
@hidden_gyro_2_alignment3_3.setter
def hidden_gyro_2_alignment3_3(self, new_value):
addr = 0x57
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_x_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_X_POW_0 as float;
"""
addr = 0x58
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_X_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_x_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_x_pow_0,
@hidden_c_accel_1_bias_x_pow_0.setter
def hidden_c_accel_1_bias_x_pow_0(self, new_value):
addr = 0x58
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_x_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_X_POW_1 as float;
"""
addr = 0x59
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_X_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_x_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_x_pow_1,
@hidden_c_accel_1_bias_x_pow_1.setter
def hidden_c_accel_1_bias_x_pow_1(self, new_value):
addr = 0x59
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_x_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_X_POW_2 as float;
"""
addr = 0x5A
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_X_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_x_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_x_pow_2,
@hidden_c_accel_1_bias_x_pow_2.setter
def hidden_c_accel_1_bias_x_pow_2(self, new_value):
addr = 0x5A
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_x_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_X_POW_3 as float;
"""
addr = 0x5B
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_X_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_x_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_x_pow_3,
@hidden_c_accel_1_bias_x_pow_3.setter
def hidden_c_accel_1_bias_x_pow_3(self, new_value):
addr = 0x5B
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Y_POW_0 as float;
"""
addr = 0x5C
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_y_pow_0,
@hidden_c_accel_1_bias_y_pow_0.setter
def hidden_c_accel_1_bias_y_pow_0(self, new_value):
addr = 0x5C
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Y_POW_1 as float;
"""
addr = 0x5D
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_y_pow_1,
@hidden_c_accel_1_bias_y_pow_1.setter
def hidden_c_accel_1_bias_y_pow_1(self, new_value):
addr = 0x5D
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Y_POW_2 as float;
"""
addr = 0x5E
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_y_pow_2,
@hidden_c_accel_1_bias_y_pow_2.setter
def hidden_c_accel_1_bias_y_pow_2(self, new_value):
addr = 0x5E
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Y_POW_3 as float;
"""
addr = 0x5F
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_y_pow_3,
@hidden_c_accel_1_bias_y_pow_3.setter
def hidden_c_accel_1_bias_y_pow_3(self, new_value):
addr = 0x5F
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Z_POW_0 as float;
"""
addr = 0x60
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_z_pow_0,
@hidden_c_accel_1_bias_z_pow_0.setter
def hidden_c_accel_1_bias_z_pow_0(self, new_value):
addr = 0x60
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Z_POW_1 as float;
"""
addr = 0x61
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_z_pow_1,
@hidden_c_accel_1_bias_z_pow_1.setter
def hidden_c_accel_1_bias_z_pow_1(self, new_value):
addr = 0x61
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Z_POW_2 as float;
"""
addr = 0x62
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_z_pow_2,
@hidden_c_accel_1_bias_z_pow_2.setter
def hidden_c_accel_1_bias_z_pow_2(self, new_value):
addr = 0x62
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_bias_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_BIAS_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_BIAS_Z_POW_3 as float;
"""
addr = 0x63
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_BIAS_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_bias_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_bias_z_pow_3,
@hidden_c_accel_1_bias_z_pow_3.setter
def hidden_c_accel_1_bias_z_pow_3(self, new_value):
addr = 0x63
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_x_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_X_POW_0 as float;
"""
addr = 0x64
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_X_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_x_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_x_pow_0,
@hidden_c_accel_1_scale_x_pow_0.setter
def hidden_c_accel_1_scale_x_pow_0(self, new_value):
addr = 0x64
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_x_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_X_POW_1 as float;
"""
addr = 0x65
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_X_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_x_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_x_pow_1,
@hidden_c_accel_1_scale_x_pow_1.setter
def hidden_c_accel_1_scale_x_pow_1(self, new_value):
addr = 0x65
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_x_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_X_POW_2 as float;
"""
addr = 0x66
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_X_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_x_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_x_pow_2,
@hidden_c_accel_1_scale_x_pow_2.setter
def hidden_c_accel_1_scale_x_pow_2(self, new_value):
addr = 0x66
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_x_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_X_POW_3 as float;
"""
addr = 0x67
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_X_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_x_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_x_pow_3,
@hidden_c_accel_1_scale_x_pow_3.setter
def hidden_c_accel_1_scale_x_pow_3(self, new_value):
addr = 0x67
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Y_POW_0 as float;
"""
addr = 0x68
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_y_pow_0,
@hidden_c_accel_1_scale_y_pow_0.setter
def hidden_c_accel_1_scale_y_pow_0(self, new_value):
addr = 0x68
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Y_POW_1 as float;
"""
addr = 0x69
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_y_pow_1,
@hidden_c_accel_1_scale_y_pow_1.setter
def hidden_c_accel_1_scale_y_pow_1(self, new_value):
addr = 0x69
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Y_POW_2 as float;
"""
addr = 0x6A
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_y_pow_2,
@hidden_c_accel_1_scale_y_pow_2.setter
def hidden_c_accel_1_scale_y_pow_2(self, new_value):
addr = 0x6A
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Y_POW_3 as float;
"""
addr = 0x6B
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_y_pow_3,
@hidden_c_accel_1_scale_y_pow_3.setter
def hidden_c_accel_1_scale_y_pow_3(self, new_value):
addr = 0x6B
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Z_POW_0 as float;
"""
addr = 0x6C
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_z_pow_0,
@hidden_c_accel_1_scale_z_pow_0.setter
def hidden_c_accel_1_scale_z_pow_0(self, new_value):
addr = 0x6C
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Z_POW_1 as float;
"""
addr = 0x6D
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_z_pow_1,
@hidden_c_accel_1_scale_z_pow_1.setter
def hidden_c_accel_1_scale_z_pow_1(self, new_value):
addr = 0x6D
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Z_POW_2 as float;
"""
addr = 0x6E
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_z_pow_2,
@hidden_c_accel_1_scale_z_pow_2.setter
def hidden_c_accel_1_scale_z_pow_2(self, new_value):
addr = 0x6E
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_accel_1_scale_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_ACCEL_1_SCALE_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_ACCEL_1_SCALE_Z_POW_3 as float;
"""
addr = 0x6F
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_ACCEL_1_SCALE_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_accel_1_scale_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_accel_1_scale_z_pow_3,
@hidden_c_accel_1_scale_z_pow_3.setter
def hidden_c_accel_1_scale_z_pow_3(self, new_value):
addr = 0x6F
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment1_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT1_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT1_1 as float;
"""
addr = 0x70
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT1_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment1_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment1_1,
@hidden_accel_1_alignment1_1.setter
def hidden_accel_1_alignment1_1(self, new_value):
addr = 0x70
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment1_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT1_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT1_2 as float;
"""
addr = 0x71
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT1_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment1_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment1_2,
@hidden_accel_1_alignment1_2.setter
def hidden_accel_1_alignment1_2(self, new_value):
addr = 0x71
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment1_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT1_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT1_3 as float;
"""
addr = 0x72
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT1_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment1_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment1_3,
@hidden_accel_1_alignment1_3.setter
def hidden_accel_1_alignment1_3(self, new_value):
addr = 0x72
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment2_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT2_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT2_1 as float;
"""
addr = 0x73
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT2_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment2_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment2_1,
@hidden_accel_1_alignment2_1.setter
def hidden_accel_1_alignment2_1(self, new_value):
addr = 0x73
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment2_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT2_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT2_2 as float;
"""
addr = 0x74
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT2_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment2_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment2_2,
@hidden_accel_1_alignment2_2.setter
def hidden_accel_1_alignment2_2(self, new_value):
addr = 0x74
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment2_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT2_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT2_3 as float;
"""
addr = 0x75
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT2_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment2_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment2_3,
@hidden_accel_1_alignment2_3.setter
def hidden_accel_1_alignment2_3(self, new_value):
addr = 0x75
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment3_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT3_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT3_1 as float;
"""
addr = 0x76
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT3_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment3_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment3_1,
@hidden_accel_1_alignment3_1.setter
def hidden_accel_1_alignment3_1(self, new_value):
addr = 0x76
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment3_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT3_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT3_2 as float;
"""
addr = 0x77
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT3_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment3_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment3_2,
@hidden_accel_1_alignment3_2.setter
def hidden_accel_1_alignment3_2(self, new_value):
addr = 0x77
self.write_register(addr, new_value, hidden=True)
@property
def hidden_accel_1_alignment3_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_ACCEL_1_ALIGNMENT3_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_ACCEL_1_ALIGNMENT3_3 as float;
"""
addr = 0x78
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_ALIGNMENT3_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_accel_1_alignment3_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_accel_1_alignment3_3,
@hidden_accel_1_alignment3_3.setter
def hidden_accel_1_alignment3_3(self, new_value):
addr = 0x78
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_x_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_X_POW_0 as float;
"""
addr = 0x79
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_X_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_x_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_x_pow_0,
@hidden_c_mag_1_bias_x_pow_0.setter
def hidden_c_mag_1_bias_x_pow_0(self, new_value):
addr = 0x79
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_x_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_X_POW_1 as float;
"""
addr = 0x7A
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_X_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_x_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_x_pow_1,
@hidden_c_mag_1_bias_x_pow_1.setter
def hidden_c_mag_1_bias_x_pow_1(self, new_value):
addr = 0x7A
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_x_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_X_POW_2 as float;
"""
addr = 0x7B
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_X_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_x_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_x_pow_2,
@hidden_c_mag_1_bias_x_pow_2.setter
def hidden_c_mag_1_bias_x_pow_2(self, new_value):
addr = 0x7B
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_x_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_X_POW_3 as float;
"""
addr = 0x7C
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_X_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_x_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_x_pow_3,
@hidden_c_mag_1_bias_x_pow_3.setter
def hidden_c_mag_1_bias_x_pow_3(self, new_value):
addr = 0x7C
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Y_POW_0 as float;
"""
addr = 0x7D
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_y_pow_0,
@hidden_c_mag_1_bias_y_pow_0.setter
def hidden_c_mag_1_bias_y_pow_0(self, new_value):
addr = 0x7D
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Y_POW_1 as float;
"""
addr = 0x7E
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_y_pow_1,
@hidden_c_mag_1_bias_y_pow_1.setter
def hidden_c_mag_1_bias_y_pow_1(self, new_value):
addr = 0x7E
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Y_POW_2 as float;
"""
addr = 0x7F
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_y_pow_2,
@hidden_c_mag_1_bias_y_pow_2.setter
def hidden_c_mag_1_bias_y_pow_2(self, new_value):
addr = 0x7F
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Y_POW_3 as float;
"""
addr = 0x80
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_y_pow_3,
@hidden_c_mag_1_bias_y_pow_3.setter
def hidden_c_mag_1_bias_y_pow_3(self, new_value):
addr = 0x80
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Z_POW_0 as float;
"""
addr = 0x81
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_z_pow_0,
@hidden_c_mag_1_bias_z_pow_0.setter
def hidden_c_mag_1_bias_z_pow_0(self, new_value):
addr = 0x81
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Z_POW_1 as float;
"""
addr = 0x82
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_z_pow_1,
@hidden_c_mag_1_bias_z_pow_1.setter
def hidden_c_mag_1_bias_z_pow_1(self, new_value):
addr = 0x82
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Z_POW_2 as float;
"""
addr = 0x83
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_z_pow_2,
@hidden_c_mag_1_bias_z_pow_2.setter
def hidden_c_mag_1_bias_z_pow_2(self, new_value):
addr = 0x83
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_bias_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_BIAS_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_BIAS_Z_POW_3 as float;
"""
addr = 0x84
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_BIAS_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_bias_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_bias_z_pow_3,
@hidden_c_mag_1_bias_z_pow_3.setter
def hidden_c_mag_1_bias_z_pow_3(self, new_value):
addr = 0x84
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_x_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_X_POW_0 as float;
"""
addr = 0x85
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_X_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_x_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_x_pow_0,
@hidden_c_mag_1_scale_x_pow_0.setter
def hidden_c_mag_1_scale_x_pow_0(self, new_value):
addr = 0x85
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_x_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_X_POW_1 as float;
"""
addr = 0x86
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_X_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_x_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_x_pow_1,
@hidden_c_mag_1_scale_x_pow_1.setter
def hidden_c_mag_1_scale_x_pow_1(self, new_value):
addr = 0x86
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_x_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_X_POW_2 as float;
"""
addr = 0x87
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_X_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_x_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_x_pow_2,
@hidden_c_mag_1_scale_x_pow_2.setter
def hidden_c_mag_1_scale_x_pow_2(self, new_value):
addr = 0x87
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_x_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_X_POW_3 as float;
"""
addr = 0x88
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_X_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_x_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_x_pow_3,
@hidden_c_mag_1_scale_x_pow_3.setter
def hidden_c_mag_1_scale_x_pow_3(self, new_value):
addr = 0x88
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Y_POW_0 as float;
"""
addr = 0x89
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_y_pow_0,
@hidden_c_mag_1_scale_y_pow_0.setter
def hidden_c_mag_1_scale_y_pow_0(self, new_value):
addr = 0x89
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Y_POW_1 as float;
"""
addr = 0x8A
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_y_pow_1,
@hidden_c_mag_1_scale_y_pow_1.setter
def hidden_c_mag_1_scale_y_pow_1(self, new_value):
addr = 0x8A
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Y_POW_2 as float;
"""
addr = 0x8B
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_y_pow_2,
@hidden_c_mag_1_scale_y_pow_2.setter
def hidden_c_mag_1_scale_y_pow_2(self, new_value):
addr = 0x8B
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Y_POW_3 as float;
"""
addr = 0x8C
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_y_pow_3,
@hidden_c_mag_1_scale_y_pow_3.setter
def hidden_c_mag_1_scale_y_pow_3(self, new_value):
addr = 0x8C
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Z_POW_0 as float;
"""
addr = 0x8D
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_z_pow_0,
@hidden_c_mag_1_scale_z_pow_0.setter
def hidden_c_mag_1_scale_z_pow_0(self, new_value):
addr = 0x8D
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Z_POW_1 as float;
"""
addr = 0x8E
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_z_pow_1,
@hidden_c_mag_1_scale_z_pow_1.setter
def hidden_c_mag_1_scale_z_pow_1(self, new_value):
addr = 0x8E
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Z_POW_2 as float;
"""
addr = 0x8F
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_z_pow_2,
@hidden_c_mag_1_scale_z_pow_2.setter
def hidden_c_mag_1_scale_z_pow_2(self, new_value):
addr = 0x8F
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_1_scale_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_1_SCALE_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_1_SCALE_Z_POW_3 as float;
"""
addr = 0x90
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_1_SCALE_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_1_scale_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_1_scale_z_pow_3,
@hidden_c_mag_1_scale_z_pow_3.setter
def hidden_c_mag_1_scale_z_pow_3(self, new_value):
addr = 0x90
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment1_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT1_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT1_1 as float;
"""
addr = 0x91
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT1_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment1_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment1_1,
@hidden_mag_1_alignment1_1.setter
def hidden_mag_1_alignment1_1(self, new_value):
addr = 0x91
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment1_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT1_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT1_2 as float;
"""
addr = 0x92
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT1_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment1_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment1_2,
@hidden_mag_1_alignment1_2.setter
def hidden_mag_1_alignment1_2(self, new_value):
addr = 0x92
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment1_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT1_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT1_3 as float;
"""
addr = 0x93
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT1_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment1_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment1_3,
@hidden_mag_1_alignment1_3.setter
def hidden_mag_1_alignment1_3(self, new_value):
addr = 0x93
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment2_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT2_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT2_1 as float;
"""
addr = 0x94
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT2_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment2_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment2_1,
@hidden_mag_1_alignment2_1.setter
def hidden_mag_1_alignment2_1(self, new_value):
addr = 0x94
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment2_2(self):
    """Read hidden register HIDDEN_MAG_1_ALIGNMENT2_2 (address 0x95).

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0x95, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT2_2')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
@hidden_mag_1_alignment2_2.setter
def hidden_mag_1_alignment2_2(self, new_value):
    """Write *new_value* to hidden register HIDDEN_MAG_1_ALIGNMENT2_2 (address 0x95)."""
    self.write_register(0x95, new_value, hidden=True)
@property
def hidden_mag_1_alignment2_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT2_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT2_3 as float;
"""
addr = 0x96
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT2_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment2_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment2_3,
@hidden_mag_1_alignment2_3.setter
def hidden_mag_1_alignment2_3(self, new_value):
addr = 0x96
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment3_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT3_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT3_1 as float;
"""
addr = 0x97
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT3_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment3_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment3_1,
@hidden_mag_1_alignment3_1.setter
def hidden_mag_1_alignment3_1(self, new_value):
addr = 0x97
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment3_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT3_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT3_2 as float;
"""
addr = 0x98
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT3_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment3_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment3_2,
@hidden_mag_1_alignment3_2.setter
def hidden_mag_1_alignment3_2(self, new_value):
addr = 0x98
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_alignment3_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_ALIGNMENT3_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_ALIGNMENT3_3 as float;
"""
addr = 0x99
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_ALIGNMENT3_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_alignment3_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_alignment3_3,
@hidden_mag_1_alignment3_3.setter
def hidden_mag_1_alignment3_3(self, new_value):
addr = 0x99
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_reference_x(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_REFERENCE_X -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_REFERENCE_X as float;
"""
addr = 0x9A
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_REFERENCE_X')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_reference_x, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_reference_x,
@hidden_mag_1_reference_x.setter
def hidden_mag_1_reference_x(self, new_value):
addr = 0x9A
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_reference_y(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_REFERENCE_Y -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_REFERENCE_Y as float;
"""
addr = 0x9B
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_REFERENCE_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_reference_y, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_reference_y,
@hidden_mag_1_reference_y.setter
def hidden_mag_1_reference_y(self, new_value):
addr = 0x9B
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_1_reference_z(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_1_REFERENCE_Z -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_1_REFERENCE_Z as float;
"""
addr = 0x9C
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_REFERENCE_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_1_reference_z, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_1_reference_z,
@hidden_mag_1_reference_z.setter
def hidden_mag_1_reference_z(self, new_value):
addr = 0x9C
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_x_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_X_POW_0 as float;
"""
addr = 0x9D
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_X_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_x_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_x_pow_0,
@hidden_c_mag_2_bias_x_pow_0.setter
def hidden_c_mag_2_bias_x_pow_0(self, new_value):
addr = 0x9D
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_x_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_X_POW_1 as float;
"""
addr = 0x9E
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_X_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_x_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_x_pow_1,
@hidden_c_mag_2_bias_x_pow_1.setter
def hidden_c_mag_2_bias_x_pow_1(self, new_value):
addr = 0x9E
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_x_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_X_POW_2 as float;
"""
addr = 0x9F
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_X_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_x_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_x_pow_2,
@hidden_c_mag_2_bias_x_pow_2.setter
def hidden_c_mag_2_bias_x_pow_2(self, new_value):
addr = 0x9F
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_x_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_X_POW_3 as float;
"""
addr = 0xA0
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_X_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_x_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_x_pow_3,
@hidden_c_mag_2_bias_x_pow_3.setter
def hidden_c_mag_2_bias_x_pow_3(self, new_value):
addr = 0xA0
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Y_POW_0 as float;
"""
addr = 0xA1
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_y_pow_0,
@hidden_c_mag_2_bias_y_pow_0.setter
def hidden_c_mag_2_bias_y_pow_0(self, new_value):
addr = 0xA1
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Y_POW_1 as float;
"""
addr = 0xA2
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_y_pow_1,
@hidden_c_mag_2_bias_y_pow_1.setter
def hidden_c_mag_2_bias_y_pow_1(self, new_value):
addr = 0xA2
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Y_POW_2 as float;
"""
addr = 0xA3
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_y_pow_2,
@hidden_c_mag_2_bias_y_pow_2.setter
def hidden_c_mag_2_bias_y_pow_2(self, new_value):
addr = 0xA3
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Y_POW_3 as float;
"""
addr = 0xA4
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_y_pow_3,
@hidden_c_mag_2_bias_y_pow_3.setter
def hidden_c_mag_2_bias_y_pow_3(self, new_value):
addr = 0xA4
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Z_POW_0 as float;
"""
addr = 0xA5
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_z_pow_0,
@hidden_c_mag_2_bias_z_pow_0.setter
def hidden_c_mag_2_bias_z_pow_0(self, new_value):
addr = 0xA5
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Z_POW_1 as float;
"""
addr = 0xA6
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_z_pow_1,
@hidden_c_mag_2_bias_z_pow_1.setter
def hidden_c_mag_2_bias_z_pow_1(self, new_value):
addr = 0xA6
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Z_POW_2 as float;
"""
addr = 0xA7
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_z_pow_2,
@hidden_c_mag_2_bias_z_pow_2.setter
def hidden_c_mag_2_bias_z_pow_2(self, new_value):
addr = 0xA7
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_bias_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_BIAS_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_BIAS_Z_POW_3 as float;
"""
addr = 0xA8
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_BIAS_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_bias_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_bias_z_pow_3,
@hidden_c_mag_2_bias_z_pow_3.setter
def hidden_c_mag_2_bias_z_pow_3(self, new_value):
addr = 0xA8
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_x_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_X_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_X_POW_0 as float;
"""
addr = 0xA9
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_X_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_x_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_x_pow_0,
@hidden_c_mag_2_scale_x_pow_0.setter
def hidden_c_mag_2_scale_x_pow_0(self, new_value):
addr = 0xA9
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_x_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_X_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_X_POW_1 as float;
"""
addr = 0xAA
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_X_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_x_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_x_pow_1,
@hidden_c_mag_2_scale_x_pow_1.setter
def hidden_c_mag_2_scale_x_pow_1(self, new_value):
addr = 0xAA
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_x_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_X_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_X_POW_2 as float;
"""
addr = 0xAB
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_X_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_x_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_x_pow_2,
@hidden_c_mag_2_scale_x_pow_2.setter
def hidden_c_mag_2_scale_x_pow_2(self, new_value):
addr = 0xAB
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_x_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_X_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_X_POW_3 as float;
"""
addr = 0xAC
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_X_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_x_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_x_pow_3,
@hidden_c_mag_2_scale_x_pow_3.setter
def hidden_c_mag_2_scale_x_pow_3(self, new_value):
addr = 0xAC
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_y_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Y_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Y_POW_0 as float;
"""
addr = 0xAD
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Y_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_y_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_y_pow_0,
@hidden_c_mag_2_scale_y_pow_0.setter
def hidden_c_mag_2_scale_y_pow_0(self, new_value):
addr = 0xAD
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_y_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Y_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Y_POW_1 as float;
"""
addr = 0xAE
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Y_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_y_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_y_pow_1,
@hidden_c_mag_2_scale_y_pow_1.setter
def hidden_c_mag_2_scale_y_pow_1(self, new_value):
addr = 0xAE
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_y_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Y_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Y_POW_2 as float;
"""
addr = 0xAF
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Y_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_y_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_y_pow_2,
@hidden_c_mag_2_scale_y_pow_2.setter
def hidden_c_mag_2_scale_y_pow_2(self, new_value):
addr = 0xAF
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_y_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Y_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Y_POW_3 as float;
"""
addr = 0xB0
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Y_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_y_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_y_pow_3,
@hidden_c_mag_2_scale_y_pow_3.setter
def hidden_c_mag_2_scale_y_pow_3(self, new_value):
addr = 0xB0
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_z_pow_0(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Z_POW_0 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Z_POW_0 as float;
"""
addr = 0xB1
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Z_POW_0')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_z_pow_0, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_z_pow_0,
@hidden_c_mag_2_scale_z_pow_0.setter
def hidden_c_mag_2_scale_z_pow_0(self, new_value):
addr = 0xB1
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_z_pow_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Z_POW_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Z_POW_1 as float;
"""
addr = 0xB2
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Z_POW_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_z_pow_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_z_pow_1,
@hidden_c_mag_2_scale_z_pow_1.setter
def hidden_c_mag_2_scale_z_pow_1(self, new_value):
addr = 0xB2
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_z_pow_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Z_POW_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Z_POW_2 as float;
"""
addr = 0xB3
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Z_POW_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_z_pow_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_z_pow_2,
@hidden_c_mag_2_scale_z_pow_2.setter
def hidden_c_mag_2_scale_z_pow_2(self, new_value):
addr = 0xB3
self.write_register(addr, new_value, hidden=True)
@property
def hidden_c_mag_2_scale_z_pow_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_C_MAG_2_SCALE_Z_POW_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_C_MAG_2_SCALE_Z_POW_3 as float;
"""
addr = 0xB4
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_C_MAG_2_SCALE_Z_POW_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_c_mag_2_scale_z_pow_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_c_mag_2_scale_z_pow_3,
@hidden_c_mag_2_scale_z_pow_3.setter
def hidden_c_mag_2_scale_z_pow_3(self, new_value):
addr = 0xB4
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment1_1(self):
    """Read hidden register HIDDEN_MAG_2_ALIGNMENT1_1 (address 0xB5).

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0xB5, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT1_1')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
@hidden_mag_2_alignment1_1.setter
def hidden_mag_2_alignment1_1(self, new_value):
    """Write *new_value* to hidden register HIDDEN_MAG_2_ALIGNMENT1_1 (address 0xB5)."""
    self.write_register(0xB5, new_value, hidden=True)
@property
def hidden_mag_2_alignment1_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT1_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT1_2 as float;
"""
addr = 0xB6
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT1_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment1_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment1_2,
@hidden_mag_2_alignment1_2.setter
def hidden_mag_2_alignment1_2(self, new_value):
addr = 0xB6
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment1_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT1_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT1_3 as float;
"""
addr = 0xB7
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT1_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment1_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment1_3,
@hidden_mag_2_alignment1_3.setter
def hidden_mag_2_alignment1_3(self, new_value):
addr = 0xB7
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment2_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT2_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT2_1 as float;
"""
addr = 0xB8
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT2_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment2_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment2_1,
@hidden_mag_2_alignment2_1.setter
def hidden_mag_2_alignment2_1(self, new_value):
addr = 0xB8
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment2_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT2_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT2_2 as float;
"""
addr = 0xB9
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT2_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment2_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment2_2,
@hidden_mag_2_alignment2_2.setter
def hidden_mag_2_alignment2_2(self, new_value):
addr = 0xB9
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment2_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT2_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT2_3 as float;
"""
addr = 0xBA
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT2_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment2_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment2_3,
@hidden_mag_2_alignment2_3.setter
def hidden_mag_2_alignment2_3(self, new_value):
addr = 0xBA
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment3_1(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT3_1 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT3_1 as float;
"""
addr = 0xBB
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT3_1')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment3_1, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment3_1,
@hidden_mag_2_alignment3_1.setter
def hidden_mag_2_alignment3_1(self, new_value):
addr = 0xBB
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment3_2(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT3_2 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT3_2 as float;
"""
addr = 0xBC
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT3_2')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment3_2, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment3_2,
@hidden_mag_2_alignment3_2.setter
def hidden_mag_2_alignment3_2(self, new_value):
addr = 0xBC
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_alignment3_3(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_ALIGNMENT3_3 -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_ALIGNMENT3_3 as float;
"""
addr = 0xBD
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_ALIGNMENT3_3')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_alignment3_3, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_alignment3_3,
@hidden_mag_2_alignment3_3.setter
def hidden_mag_2_alignment3_3(self, new_value):
addr = 0xBD
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_reference_x(self):
    """Read hidden register HIDDEN_MAG_2_REFERENCE_X (address 0xBE).

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0xBE, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_REFERENCE_X')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
@hidden_mag_2_reference_x.setter
def hidden_mag_2_reference_x(self, new_value):
    """Write *new_value* to hidden register HIDDEN_MAG_2_REFERENCE_X (address 0xBE)."""
    self.write_register(0xBE, new_value, hidden=True)
@property
def hidden_mag_2_reference_y(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_REFERENCE_Y -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_REFERENCE_Y as float;
"""
addr = 0xBF
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_REFERENCE_Y')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_reference_y, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_reference_y,
@hidden_mag_2_reference_y.setter
def hidden_mag_2_reference_y(self, new_value):
addr = 0xBF
self.write_register(addr, new_value, hidden=True)
@property
def hidden_mag_2_reference_z(self):
"""
TODO: add description
Payload structure:
[31:0] : HIDDEN_MAG_2_REFERENCE_Z -- 32-bit IEEE 754 Floating Point Value
:return: HIDDEN_MAG_2_REFERENCE_Z as float;
"""
addr = 0xC0
ok, payload = self.read_register(addr, hidden=True)
if ok:
reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_REFERENCE_Z')
reg.raw_value, = struct.unpack('>f', payload[0:4])
hidden_mag_2_reference_z, = struct.unpack('>f', payload[0:4])
return reg, hidden_mag_2_reference_z,
@hidden_mag_2_reference_z.setter
def hidden_mag_2_reference_z(self, new_value):
addr = 0xC0
self.write_register(addr, new_value, hidden=True)
@property
def hidden_gyro_1_conversion(self):
    """Read hidden register HIDDEN_GYRO_1_CONVERSION (address 0xC1); read-only.

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0xC1, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_1_CONVERSION')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
@property
def hidden_gyro_2_conversion(self):
    """Read hidden register HIDDEN_GYRO_2_CONVERSION (address 0xC2); read-only.

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0xC2, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_GYRO_2_CONVERSION')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
@property
def hidden_accel_1_conversion(self):
    """Read hidden register HIDDEN_ACCEL_1_CONVERSION (address 0xC3); read-only.

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0xC3, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_ACCEL_1_CONVERSION')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
@property
def hidden_mag_1_conversion(self):
    """Read hidden register HIDDEN_MAG_1_CONVERSION (address 0xC4); read-only.

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0xC4, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_1_CONVERSION')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
@property
def hidden_mag_2_conversion(self):
    """Read hidden register HIDDEN_MAG_2_CONVERSION (address 0xC5); read-only.

    Payload layout: bits [31:0] hold one big-endian 32-bit IEEE 754 float.

    :return: ``(register_object, value)`` on a successful read; ``None``
        when the hidden-register read fails.
    """
    ok, payload = self.read_register(0xC5, hidden=True)
    if not ok:
        return None
    reg = self.svd_parser.find_hidden_register_by(name='HIDDEN_MAG_2_CONVERSION')
    value, = struct.unpack('>f', payload[0:4])
    reg.raw_value = value
    return reg, value
if __name__ == '__main__':
    # Generated register-accessor module: nothing to run standalone.
    pass
| 40.66884 | 287 | 0.63888 |
f70953a63bbc0820445c238e63d7232ef154338a | 3,503 | py | Python | python/phonenumbers/data/region_HR.py | Eyepea/python-phonenumbers | 0336e191fda80a21ed5c19d5e029ad8c70f620ee | [
"Apache-2.0"
] | 2 | 2019-03-30T02:12:54.000Z | 2021-03-08T18:59:40.000Z | python/phonenumbers/data/region_HR.py | Eyepea/python-phonenumbers | 0336e191fda80a21ed5c19d5e029ad8c70f620ee | [
"Apache-2.0"
] | null | null | null | python/phonenumbers/data/region_HR.py | Eyepea/python-phonenumbers | 0336e191fda80a21ed5c19d5e029ad8c70f620ee | [
"Apache-2.0"
] | 1 | 2018-11-10T03:47:34.000Z | 2018-11-10T03:47:34.000Z | """Auto-generated file, do not edit by hand. HR metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_HR = PhoneMetadata(id='HR', country_code=385, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[1-7]\\d{5,8}|[89]\\d{6,11}', possible_number_pattern='\\d{6,12}'),
fixed_line=PhoneNumberDesc(national_number_pattern='1\\d{7}|(?:2[0-3]|3[1-5]|4[02-47-9]|5[1-3])\\d{6}', possible_number_pattern='\\d{6,8}', example_number='12345678'),
mobile=PhoneNumberDesc(national_number_pattern='9[1257-9]\\d{6,10}', possible_number_pattern='\\d{8,12}', example_number='912345678'),
toll_free=PhoneNumberDesc(national_number_pattern='80[01]\\d{4,7}', possible_number_pattern='\\d{7,10}', example_number='8001234567'),
premium_rate=PhoneNumberDesc(national_number_pattern='6(?:[09]\\d{7}|[145]\\d{4,7})', possible_number_pattern='\\d{6,9}', example_number='611234'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='7[45]\\d{4,7}', possible_number_pattern='\\d{6,9}', example_number='741234567'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='62\\d{6,7}', possible_number_pattern='\\d{8,9}', example_number='62123456'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(1)(\\d{4})(\\d{3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['1'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(6[09])(\\d{4})(\\d{3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['6[09]'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(62)(\\d{3})(\\d{3,4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['62'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='([2-5]\\d)(\\d{3})(\\d{3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['[2-5]'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(9\\d)(\\d{3})(\\d{3,4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['9'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(9\\d)(\\d{4})(\\d{4})', format=u'\\1 \\2 \\3', leading_digits_pattern=['9'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(9\\d)(\\d{3,4})(\\d{3})(\\d{3})', format=u'\\1 \\2 \\3 \\4', leading_digits_pattern=['9'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{2,3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['6[145]|7'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(\\d{2})(\\d{3,4})(\\d{3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['6[145]|7'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(80[01])(\\d{2})(\\d{2,3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['8'], national_prefix_formatting_rule=u'0\\1'),
NumberFormat(pattern='(80[01])(\\d{3,4})(\\d{3})', format=u'\\1 \\2 \\3', leading_digits_pattern=['8'], national_prefix_formatting_rule=u'0\\1')],
mobile_number_portable_region=True)
| 113 | 171 | 0.68684 |
f7095d1db7c3f81dc0dfa863c114fdc12fc2c216 | 2,964 | py | Python | test_scripts/main_cvxpy_simple.py | forgi86/pyMPC | 291db149554767a035fcb01df3fed7a6b3fe60e4 | [
"MIT"
] | 84 | 2019-05-28T09:27:37.000Z | 2022-03-31T08:38:23.000Z | test_scripts/main_cvxpy_simple.py | passion4energy/pyMPC | 4b004ba707dab49cd36d96a3575b8593c870a904 | [
"MIT"
] | 2 | 2020-04-17T00:03:27.000Z | 2021-01-30T11:35:58.000Z | test_scripts/main_cvxpy_simple.py | passion4energy/pyMPC | 4b004ba707dab49cd36d96a3575b8593c870a904 | [
"MIT"
] | 20 | 2019-10-13T13:50:16.000Z | 2022-03-31T08:38:25.000Z | from cvxpy import Variable, Parameter, Minimize, Problem, OSQP, quad_form
import numpy as np
import scipy as sp
import scipy.sparse as sparse
import time
if __name__ == "__main__":
# Discrete time model of a quadcopter
Ts = 0.2
M = 2.0
Ad = sparse.csc_matrix([
[1.0, Ts],
[0, 1.0]
])
Bd = sparse.csc_matrix([
[0.0],
[Ts/M]])
[nx, nu] = Bd.shape # number of states and number or inputs
# Constraints
uref = 0
uinit = 0 # not used here
umin = np.array([-1000.0]) - uref
umax = np.array([1000.0]) - uref
xmin = np.array([-100.0, -100.0])
xmax = np.array([100.0, 100.0])
# Objective function
Q = sparse.diags([0.2, 0.3])
QN = sparse.diags([0.4, 0.5]) # final cost
R = 0.1*sparse.eye(1)
# Initial and reference states
x0 = np.array([0.1, 0.2]) # initial state
# Reference input and states
pref = 7.0
vref = 0
xref = np.array([pref, vref]) # reference state
# Prediction horizon
Np = 20
# Define problem
u = Variable((nu, Np))
x = Variable((nx, Np + 1))
x_init = Parameter(nx)
objective = 0
constraints = [x[:,0] == x_init]
for k in range(Np):
objective += quad_form(x[:, k] - xref, Q) + quad_form(u[:, k], R)
constraints += [x[:, k+1] == Ad*x[:, k] + Bd*u[:, k]]
constraints += [xmin <= x[:, k], x[:, k] <= xmax]
constraints += [umin <= u[:, k], u[:, k] <= umax]
objective += quad_form(x[:, Np] - xref, QN)
prob = Problem(Minimize(objective), constraints)
# Simulate in closed loop
# Simulate in closed loop
len_sim = 15 # simulation length (s)
nsim = int(len_sim/Ts) # simulation length(timesteps)
xsim = np.zeros((nsim,nx))
usim = np.zeros((nsim,nu))
tsim = np.arange(0,nsim)*Ts
uminus1_val = uinit # initial previous measured input is the input at time instant -1.
time_start = time.time()
for i in range(nsim):
x_init.value = x0
#uminus1.value = uminus1_val
prob.solve(solver=OSQP, warm_start=True)
uMPC = u[:,0].value
usim[i,:] = uMPC
x0 = Ad.dot(x0) + Bd.dot(uMPC)
xsim[i,:] = x0
uminus1_val = uMPC # or a measurement if the input is affected by noise
time_sim = time.time() - time_start
# In [1]
import matplotlib.pyplot as plt
fig,axes = plt.subplots(3,1, figsize=(10,10))
axes[0].plot(tsim, xsim[:,0], "k", label='p')
axes[0].plot(tsim, xref[0]*np.ones(np.shape(tsim)), "r--", label="pref")
axes[0].set_title("Position (m)")
axes[1].plot(tsim, xsim[:,1], label="v")
axes[1].plot(tsim, xref[1]*np.ones(np.shape(tsim)), "r--", label="vref")
axes[1].set_title("Velocity (m/s)")
axes[2].plot(tsim, usim[:,0], label="u")
axes[2].plot(tsim, uref*np.ones(np.shape(tsim)), "r--", label="uref")
axes[2].set_title("Force (N)")
for ax in axes:
ax.grid(True)
ax.legend()
| 28.5 | 90 | 0.567476 |
f70964db97d98144a8ce1f0e75a35480642ce6b9 | 1,273 | py | Python | discretisedfield/ovf2vtk.py | minrk/discretisedfield | 251584f8d976a7fafdff5402d16327489407c4dd | [
"BSD-3-Clause"
] | null | null | null | discretisedfield/ovf2vtk.py | minrk/discretisedfield | 251584f8d976a7fafdff5402d16327489407c4dd | [
"BSD-3-Clause"
] | null | null | null | discretisedfield/ovf2vtk.py | minrk/discretisedfield | 251584f8d976a7fafdff5402d16327489407c4dd | [
"BSD-3-Clause"
] | null | null | null | import argparse
import discretisedfield as df
def convert_files(input_files, output_files):
    """Convert each OVF input file to VTK format.

    Inputs and outputs are paired element-wise; with mismatched lengths the
    extra entries of the longer sequence are ignored (``zip`` semantics).
    """
    for src, dst in zip(input_files, output_files):
        df.Field.fromfile(src).write(dst)
def main():
    """Entry point of the ``ovf2vtk`` command-line tool.

    Parses ``--infile``/``--outfile`` and converts each input OVF file to
    VTK.  When ``--outfile`` is omitted, output names are derived from the
    input names by replacing the file extension with ``.vtk``.
    """
    parser = argparse.ArgumentParser(
        prog='ovf2vtk',
        description='ovf2vtk - ovf to VTK format conversion'
    )
    parser.add_argument('--infile', type=argparse.FileType('r'),
                        help='One or more input files', nargs='+',
                        required=True)
    parser.add_argument('--outfile', type=argparse.FileType('w'), nargs='+',
                        help='One or more output files, optional')
    args = parser.parse_args()

    input_files = [f.name for f in args.infile]
    if args.outfile:
        if len(args.infile) != len(args.outfile):
            print('\nError: The number of input and output '
                  'files does not match.')
            return 0
        output_files = [f.name for f in args.outfile]
    else:
        # BUGFIX: str.split('.')[0] truncated at the *first* dot anywhere in
        # the path (e.g. './run.v2/data.omf' -> '.vtk'); os.path.splitext
        # only strips the final extension.
        output_files = [f'{os.path.splitext(f)[0]}.vtk' for f in input_files]

    convert_files(input_files, output_files)


if __name__ == "__main__":
    main()
f70975b2a600c129a02d2b7302e2595d0a4aa9c1 | 138 | py | Python | src/apps/cecyrd/config.py | SGC-Tlaxcala/cerebro | 6c842f66d849065a70002fccdb1eaca1e3d61d99 | [
"MIT"
] | null | null | null | src/apps/cecyrd/config.py | SGC-Tlaxcala/cerebro | 6c842f66d849065a70002fccdb1eaca1e3d61d99 | [
"MIT"
] | 48 | 2017-04-21T17:35:23.000Z | 2020-08-29T04:19:35.000Z | src/apps/cecyrd/config.py | SGC-Tlaxcala/cerebro | 6c842f66d849065a70002fccdb1eaca1e3d61d99 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class CecyrdConfig(AppConfig):
    """Django application configuration for the `apps.cecyrd` app."""
    # verbose_name is the human-readable (Spanish) label shown in the admin.
    name = 'apps.cecyrd'
    verbose_name = 'Evaluación del proveedor'
| 19.714286 | 45 | 0.746377 |
f7098178cdc1fe45aac531ccf83efb684e2e1369 | 12,691 | py | Python | src/python/pants/backend/docker/target_types.py | xyzst/pants | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/docker/target_types.py | xyzst/pants | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/docker/target_types.py | xyzst/pants | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import os
import re
from abc import ABC, abstractmethod
from textwrap import dedent
from typing import Callable, ClassVar, Iterator, Optional, cast
from typing_extensions import final
from pants.backend.docker.registries import ALL_DEFAULT_REGISTRIES
from pants.base.build_environment import get_buildroot
from pants.core.goals.run import RestartableField
from pants.engine.addresses import Address
from pants.engine.fs import GlobMatchErrorBehavior
from pants.engine.target import (
COMMON_TARGET_FIELDS,
AsyncFieldMixin,
BoolField,
Dependencies,
DictStringToStringField,
InvalidFieldException,
OptionalSingleSourceField,
StringField,
StringSequenceField,
Target,
)
from pants.util.docutil import doc_url
# Common help text to be applied to each field that supports value interpolation.
# The single `{kind}` placeholder is filled per field via str.format, e.g.
# `_interpolation_help.format(kind="tag")`.
_interpolation_help = (
    "{kind} may use placeholders in curly braces to be interpolated. The placeholders are derived "
    "from various sources, such as the Dockerfile instructions and build args.\n\n"
)
class DockerImageBuildArgsField(StringSequenceField):
    """Extra `--build-arg` entries passed to `docker build` for this image."""

    alias = "extra_build_args"
    default = ()
    help = (
        "Build arguments (`--build-arg`) to use when building this image. "
        "Entries are either strings in the form `ARG_NAME=value` to set an explicit value; "
        "or just `ARG_NAME` to copy the value from Pants's own environment.\n\n"
        "Use `[docker].build_args` to set default build args for all images."
    )
class DockerImageContextRootField(StringField):
    """BUILD-file field selecting the Docker build context root directory.

    Values must be relative paths; `compute_value` rejects absolute paths
    with an error message suggesting the two valid relative spellings.
    """

    alias = "context_root"
    help = (
        "Specify which directory to use as the Docker build context root. This affects the file "
        "paths to use for the `COPY` and `ADD` instructions. For example, whether "
        "`COPY files/f.txt` should look for the file relative to the build root: "
        "`<build root>/files/f.txt` vs relative to the BUILD file: "
        "`<build root>/path_to_build_file/files/f.txt`.\n\n"
        "Specify the `context_root` path as `files` for relative to build root, or as `./files` "
        "for relative to the BUILD file.\n\n"
        "If `context_root` is not specified, it defaults to `[docker].default_context_root`."
    )

    @classmethod
    def compute_value(cls, raw_value: Optional[str], address: Address) -> Optional[str]:
        computed = super().compute_value(raw_value, address=address)
        # Guard clause: anything that is not an absolute-path string is fine.
        if not isinstance(computed, str) or not computed.startswith("/"):
            return computed
        val = computed.strip("/")
        raise InvalidFieldException(
            f"The `{cls.alias}` field in target {address} must be a relative path, but was "
            f"{computed!r}. Use {val!r} for a path relative to the build root, or "
            f"{'./' + val!r} for a path relative to the BUILD file (i.e. {os.path.join(address.spec_path, val)!r})."
        )
class DockerImageSourceField(OptionalSingleSourceField):
    """The Dockerfile source; defaults to `Dockerfile` next to the BUILD file."""

    default = "Dockerfile"

    # When the default glob value is in effect, we don't want the normal glob match error behavior
    # to kick in for a missing Dockerfile, in case there are `instructions` provided, in which case
    # we generate the Dockerfile instead. If there are no `instructions`, or there are both
    # `instructions` and a Dockerfile hydrated from the `source` glob, we error out with a message
    # to the user.
    default_glob_match_error_behavior = GlobMatchErrorBehavior.ignore

    help = (
        "The Dockerfile to use when building the Docker image.\n\n"
        "Use the `instructions` field instead if you prefer not having the Dockerfile in your "
        "source tree."
    )
class DockerImageInstructionsField(StringSequenceField):
    """Inline Dockerfile content, as a list of instruction strings."""

    alias = "instructions"
    required = False
    help = (
        "The `Dockerfile` content, typically one instruction per list item.\n\n"
        "Use the `source` field instead if you prefer having the Dockerfile in your source tree."
        "\n\n"
        + dedent(
            """\
            Example:

                # example/BUILD
                docker_image(
                    instructions=[
                        "FROM base/image:1.0",
                        "RUN echo example",
                    ],
                )
            """
        )
    )
class DockerImageTagsField(StringSequenceField):
    """Tags applied to the built image name; defaults to `latest`."""

    alias = "image_tags"
    default = ("latest",)
    help = (
        "Any tags to apply to the Docker image name (the version is usually applied as a tag).\n\n"
        + _interpolation_help.format(kind="tag")
        + f"See {doc_url('tagging-docker-images')}."
    )
class DockerImageTargetStageField(StringField):
    """Optional named stage to stop at in a multi-stage Dockerfile build."""

    alias = "target_stage"
    help = (
        "Specify target build stage, rather than building the entire `Dockerfile`.\n\n"
        "When using multi-stage build, you may name your stages, and can target them when building "
        "to only selectively build a certain stage. See also the `--docker-build-target-stage` "
        "option.\n\n"
        "Read more about [multi-stage Docker builds]"
        "(https://docs.docker.com/develop/develop-images/multistage-build/#stop-at-a-specific-build-stage)"
    )
class DockerImageDependenciesField(Dependencies):
    """Standard `dependencies` field with transitive excludes (`!!`) enabled."""

    supports_transitive_excludes = True
class DockerImageRegistriesField(StringSequenceField):
    """Registries (addresses or `@`-prefixed configured aliases) to publish to."""

    alias = "registries"
    # Sentinel meaning "every registry configured with default = true".
    default = (ALL_DEFAULT_REGISTRIES,)
    help = (
        "List of addresses or configured aliases to any Docker registries to use for the "
        "built image.\n\n"
        "The address is a domain name with optional port for your registry, and any registry "
        "aliases are prefixed with `@` for addresses in the [docker].registries configuration "
        "section.\n\n"
        "By default, all configured registries with `default = true` are used.\n\n"
        + dedent(
            """\
            Example:

                # pants.toml
                [docker.registries.my-registry-alias]
                address = "myregistrydomain:port"
                default = false # optional

                # example/BUILD
                docker_image(
                    registries = [
                        "@my-registry-alias",
                        "myregistrydomain:port",
                    ],
                )

            """
        )
        + (
            "The above example shows two valid `registry` options: using an alias to a configured "
            "registry and the address to a registry verbatim in the BUILD file."
        )
    )
class DockerImageRepositoryField(StringField):
    """Repository part of the image ref, e.g. `<repository>/<name>`."""

    alias = "repository"
    help = (
        'The repository name for the Docker image. e.g. "<repository>/<name>".\n\n'
        "It uses the `[docker].default_repository` by default.\n\n"
        + _interpolation_help.format(kind="repository")
        + "Additional placeholders for the repository field are: `name`, `directory` and "
        "`parent_directory`.\n\nSee the documentation for `[docker].default_repository` for more "
        "information."
    )
class DockerImageSkipPushField(BoolField):
    """When true, `./pants publish` does not push this image to any registry."""

    alias = "skip_push"
    default = False
    help = "If set to true, do not push this image to registries when running `./pants publish`."
# Callable that interpolates {placeholders} in a raw option value string.
OptionValueFormatter = Callable[[str], str]


class DockerBuildOptionFieldMixin(ABC):
    """Inherit this mixin class to provide options to `docker build`."""

    # The command-line flag this field renders to, e.g. "--label".
    docker_build_option: ClassVar[str]

    @abstractmethod
    def option_values(self, *, value_formatter: OptionValueFormatter) -> Iterator[str]:
        """Subclasses must implement this, to turn their `self.value` into none, one or more option
        values."""

    @final
    def options(self, value_formatter: OptionValueFormatter) -> Iterator[str]:
        # Emit the flag and each value as separate argv tokens.
        for value in self.option_values(value_formatter=value_formatter):
            yield from (self.docker_build_option, value)
class DockerImageBuildImageLabelsOptionField(DockerBuildOptionFieldMixin, DictStringToStringField):
    """Map of label name -> value, rendered as `--label name=value` options."""

    alias = "image_labels"
    help = (
        "Provide image metadata.\n\n"
        + _interpolation_help.format(kind="label value")
        + "See [Docker labels](https://docs.docker.com/config/labels-custom-metadata/"
        "#manage-labels-on-objects) for more information."
    )
    docker_build_option = "--label"

    def option_values(self, value_formatter: OptionValueFormatter) -> Iterator[str]:
        labels = self.value or {}
        # Interpolate placeholders in each value before rendering `name=value`.
        return (f"{name}={value_formatter(raw)}" for name, raw in labels.items())
class DockerImageBuildSecretsOptionField(
    AsyncFieldMixin, DockerBuildOptionFieldMixin, DictStringToStringField
):
    """Map of secret id -> host path, rendered as BuildKit `--secret` options."""

    alias = "secrets"
    help = (
        "Secret files to expose to the build (only if BuildKit enabled).\n\n"
        "Secrets may use absolute paths, or paths relative to your build root, or the BUILD file "
        "if prefixed with `./`. The id should be valid as used by the Docker build `--secret` "
        "option. See [Docker secrets](https://docs.docker.com/engine/swarm/secrets/) for more "
        "information.\n\n"
        + dedent(
            """\
            Example:

                docker_image(
                    secrets={
                        "mysecret": "/var/secrets/some-secret",
                        "repo-secret": "src/proj/secrets/some-secret",
                        "target-secret": "./secrets/some-secret",
                    }
                )
            """
        )
    )
    docker_build_option = "--secret"

    def option_values(self, **kwargs) -> Iterator[str]:
        # os.path.join() discards preceding parts if encountering an abs path, e.g. if the secret
        # `path` is an absolute path, the `buildroot` and `spec_path` will not be considered. Also,
        # an empty path part is ignored.
        for secret, path in (self.value or {}).items():
            # Only `./`- or `../`-prefixed paths resolve relative to the BUILD
            # file's directory (spec_path); other relative paths resolve
            # against the build root, and absolute paths stand alone.
            full_path = os.path.join(
                get_buildroot(),
                self.address.spec_path if re.match(r"\.{1,2}/", path) else "",
                path,
            )
            yield f"id={secret},src={os.path.normpath(full_path)}"
class DockerImageBuildSSHOptionField(DockerBuildOptionFieldMixin, StringSequenceField):
    """Values rendered as BuildKit `--ssh` options (agent sockets or keys)."""

    alias = "ssh"
    default = ()
    help = (
        "SSH agent socket or keys to expose to the build (only if BuildKit enabled) "
        "(format: default|<id>[=<socket>|<key>[,<key>]])\n\n"
        "The exposed agent and/or keys can then be used in your `Dockerfile` by mounting them in "
        "your `RUN` instructions:\n\n"
        "    RUN --mount=type=ssh ...\n\n"
        "See [Docker documentation](https://docs.docker.com/develop/develop-images"
        "/build_enhancements/#using-ssh-to-access-private-data-in-builds) for more information."
    )
    docker_build_option = "--ssh"

    def option_values(self, **kwargs) -> Iterator[str]:
        # Values are passed through verbatim; the cast only informs the type
        # checker of the field's tuple-of-str value type.
        yield from cast("tuple[str]", self.value)
class DockerImageTarget(Target):
    """Target type backing the `docker_image` BUILD-file symbol."""

    alias = "docker_image"
    core_fields = (
        *COMMON_TARGET_FIELDS,
        DockerImageBuildArgsField,
        DockerImageDependenciesField,
        DockerImageSourceField,
        DockerImageInstructionsField,
        DockerImageContextRootField,
        DockerImageTagsField,
        DockerImageRegistriesField,
        DockerImageRepositoryField,
        DockerImageBuildImageLabelsOptionField,
        DockerImageBuildSecretsOptionField,
        DockerImageBuildSSHOptionField,
        DockerImageSkipPushField,
        DockerImageTargetStageField,
        RestartableField,
    )
    help = (
        "The `docker_image` target describes how to build and tag a Docker image.\n\n"
        "Any dependencies, as inferred or explicitly specified, will be included in the Docker "
        "build context, after being packaged if applicable.\n\n"
        "By default, will use a Dockerfile from the same directory as the BUILD file this target "
        "is defined in. Point at another file with the `source` field, or use the `instructions` "
        "field to have the Dockerfile contents verbatim directly in the BUILD file.\n\n"
        "Dependencies on upstream/base images defined by another `docker_image` are inferred if "
        "referenced by a build argument with a default value of the target address.\n\n"
        + dedent(
            """\
            Example:

                # src/docker/downstream/Dockerfile
                ARG BASE=src/docker/upstream:image
                FROM $BASE
                ...
            """
        )
    )
f709a9ab548efdde5cce699085047d8dc56830d2 | 7,366 | py | Python | pyrseas/dbobject/column.py | andreypopp/Pyrseas | 5fadc91bfd1e3e430e8f53d434df18b9abea3cb0 | [
"BSD-3-Clause"
] | 1 | 2015-03-16T09:10:47.000Z | 2015-03-16T09:10:47.000Z | pyrseas/dbobject/column.py | andreypopp/Pyrseas | 5fadc91bfd1e3e430e8f53d434df18b9abea3cb0 | [
"BSD-3-Clause"
] | null | null | null | pyrseas/dbobject/column.py | andreypopp/Pyrseas | 5fadc91bfd1e3e430e8f53d434df18b9abea3cb0 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
pyrseas.column
~~~~~~~~~~~~~~
This module defines two classes: Column derived from
DbSchemaObject and ColumnDict derived from DbObjectDict.
"""
from pyrseas.dbobject import DbObjectDict, DbSchemaObject, quote_id
class Column(DbSchemaObject):
    """A table column definition.

    Generates SQL fragments to create, drop, rename, comment on and alter a
    column, and can diff itself against an input (YAML-derived) column.
    """

    keylist = ['schema', 'table']

    def to_map(self):
        """Convert a column to a YAML-suitable format

        :return: dictionary keyed by column name, or None if the column
            has been dropped
        """
        if hasattr(self, 'dropped'):
            return None
        dct = self._base_map()
        del dct['number'], dct['name'], dct['_table']
        if hasattr(self, 'inherited'):
            dct['inherited'] = (self.inherited != 0)
        return {self.name: dct}

    def add(self):
        """Return a string to specify the column in a CREATE or ALTER TABLE

        :return: tuple of (partial SQL statement, COMMENT statement or '')
        """
        stmt = "%s %s" % (quote_id(self.name), self.type)
        if hasattr(self, 'not_null'):
            stmt += ' NOT NULL'
        if hasattr(self, 'default'):
            # nextval() defaults belong to a sequence and are emitted
            # separately (see set_sequence_default).
            if not self.default.startswith('nextval'):
                stmt += ' DEFAULT ' + self.default
        return (stmt, '' if not hasattr(self, 'description')
                else self.comment())

    def comment(self):
        """Return a SQL COMMENT statement for the column

        :return: SQL statement
        """
        return "COMMENT ON COLUMN %s.%s IS %s" % (
            self._table.qualname(), self.name, self._comment_text())

    def drop(self):
        """Return string to drop the column via ALTER TABLE

        :return: SQL statement ('' if the column is already dropped)
        """
        if hasattr(self, 'dropped'):
            return ""
        if hasattr(self, '_table'):
            (comptype, objtype) = (self._table.objtype, 'COLUMN')
            compname = self._table.qualname()
        else:
            # Attribute of a composite type rather than a table column.
            # TODO: this is only a PG 9.1 feature, so more is required
            (comptype, objtype) = ('TYPE', 'ATTRIBUTE')
            compname = self.table
        return "ALTER %s %s DROP %s %s" % (comptype, compname, objtype,
                                           self.name)

    def rename(self, newname):
        """Return SQL statement to RENAME the column

        :param newname: the new name of the object
        :return: SQL statement
        """
        stmt = "ALTER TABLE %s RENAME COLUMN %s TO %s" % (
            self._table.qualname(), self.name, newname)
        self.name = newname
        return stmt

    def set_sequence_default(self):
        """Return SQL statements to set a nextval() DEFAULT

        :return: list of SQL statements
        """
        stmts = []
        pth = self.set_search_path()
        if pth:
            stmts.append(pth)
        stmts.append("ALTER TABLE %s ALTER COLUMN %s SET DEFAULT %s" % (
            quote_id(self.table), quote_id(self.name), self.default))
        return stmts

    def diff_map(self, incol):
        """Generate SQL to transform an existing column

        :param incol: a Column object derived from the input YAML map
        :return: tuple of (joined partial SQL statements, comment SQL)

        Compares the column to an input column and generates partial
        SQL statements to transform it into the one represented by the
        input.
        """
        stmts = []
        base = "ALTER COLUMN %s " % self.name
        # check NOT NULL
        if not hasattr(self, 'not_null') and hasattr(incol, 'not_null'):
            stmts.append(base + "SET NOT NULL")
        if hasattr(self, 'not_null') and not hasattr(incol, 'not_null'):
            stmts.append(base + "DROP NOT NULL")
        # check data types
        if not hasattr(self, 'type'):
            raise ValueError("Column '%s' missing datatype" % self.name)
        if not hasattr(incol, 'type'):
            raise ValueError("Input column '%s' missing datatype" % incol.name)
        if self.type != incol.type:
            # validate type conversion?
            stmts.append(base + "TYPE %s" % incol.type)
        # check DEFAULTs
        if not hasattr(self, 'default') and hasattr(incol, 'default'):
            stmts.append(base + "SET DEFAULT %s" % incol.default)
        if hasattr(self, 'default'):
            if not hasattr(incol, 'default'):
                stmts.append(base + "DROP DEFAULT")
            elif self.default != incol.default:
                # BUGFIX: previously a default present on both sides but with
                # a *changed* value was silently ignored; emit the new one.
                stmts.append(base + "SET DEFAULT %s" % incol.default)
        return (", ".join(stmts), self.diff_description(incol))
class ColumnDict(DbObjectDict):
    "The collection of columns in tables in a database"

    cls = Column
    # Catalog query: one row per attribute of every composite type ('c'),
    # table ('r') and foreign table ('f') outside the system schemas, in
    # column-number order.  Dropped columns are included (attisdropped).
    query = \
        """SELECT nspname AS schema, relname AS table, attname AS name,
                  attnum AS number, format_type(atttypid, atttypmod) AS type,
                  attnotnull AS not_null, attinhcount AS inherited,
                  pg_get_expr(adbin, adrelid) AS default,
                  attisdropped AS dropped,
                  col_description(c.oid, attnum) AS description
           FROM pg_attribute JOIN pg_class c ON (attrelid = c.oid)
                JOIN pg_namespace ON (relnamespace = pg_namespace.oid)
                LEFT JOIN pg_attrdef ON (attrelid = pg_attrdef.adrelid
                     AND attnum = pg_attrdef.adnum)
           WHERE relkind in ('c', 'r', 'f')
             AND (nspname != 'pg_catalog'
                  AND nspname != 'information_schema')
             AND attnum > 0
           ORDER BY nspname, relname, attnum"""

    def _from_catalog(self):
        """Initialize the dictionary of columns by querying the catalogs"""
        # Group fetched columns into per-(schema, table) lists, preserving
        # catalog (attnum) order.
        for col in self.fetch():
            sch, tbl = col.key()
            if (sch, tbl) not in self:
                self[(sch, tbl)] = []
            self[(sch, tbl)].append(col)

    def from_map(self, table, incols):
        """Initialize the dictionary of columns by converting the input list

        :param table: table or type owning the columns/attributes
        :param incols: YAML list defining the columns
        """
        if not incols:
            raise ValueError("Table '%s' has no columns" % table.name)
        cols = self[(table.schema, table.name)] = []
        for col in incols:
            # Each entry is a single-key mapping {name: attributes}; a bare
            # string value is shorthand for {'type': value}.
            for key in list(col.keys()):
                if isinstance(col[key], dict):
                    arg = col[key]
                else:
                    arg = {'type': col[key]}
                cols.append(Column(schema=table.schema, table=table.name,
                                   name=key, **arg))

    def diff_map(self, incols):
        """Generate SQL to transform existing columns

        :param incols: a YAML map defining the new columns
        :return: list of SQL statements

        Compares the existing column definitions, as fetched from the
        catalogs, to the input map and generates SQL statements to
        transform the columns accordingly.

        This takes care of dropping columns that are not present in
        the input map.  It's separate so that it can be done last,
        after other table, constraint and index changes.
        """
        stmts = []
        if not incols or not self:
            return stmts
        for (sch, tbl) in list(incols.keys()):
            if (sch, tbl) in list(self.keys()):
                for col in self[(sch, tbl)]:
                    # Drop any existing (not-already-dropped) column missing
                    # from the input definition of the same table.
                    if col.name not in [c.name for c in incols[(sch, tbl)]] \
                            and not hasattr(col, 'dropped'):
                        stmts.append(col.drop())
        return stmts
| 36.83 | 79 | 0.562449 |
f709b83b7dd0d4dc8f2ed9be76428c1683165b7d | 1,895 | py | Python | luafun/game/config.py | Delaunay/LuaFun | bd0efd8fc2b064d6bf58993e59a6ad4ac6713b39 | [
"BSD-3-Clause"
] | 1 | 2021-02-06T06:42:29.000Z | 2021-02-06T06:42:29.000Z | luafun/game/config.py | Delaunay/LuaFun | bd0efd8fc2b064d6bf58993e59a6ad4ac6713b39 | [
"BSD-3-Clause"
] | 6 | 2021-04-08T21:46:06.000Z | 2021-05-09T01:40:04.000Z | luafun/game/config.py | Delaunay/LuaFun | bd0efd8fc2b064d6bf58993e59a6ad4ac6713b39 | [
"BSD-3-Clause"
] | null | null | null | import os
EXECUTABLE_PATH_WINDOWS = '/game/bin/win64/dota2.exe'
EXECUTABLE_PATH_LINUX = '/game/dota.sh'
# NOTE(review): the assignment below deliberately overrides the shell wrapper
# above, which is kept only for reference.
EXECUTABLE_PATH_LINUX = '/game/bin/linuxsteamrt64/dota2'

BOT_PATH = '/game/dota/scripts/vscripts/bots/'

CONSOLE_LOG = '/game/dota/scripts/vscripts/bots/console.log'
SEND_MSG = '/game/dota/scripts/vscripts/bots/IPC_recv.lua'
CONFIG_MSG = '/game/dota/scripts/vscripts/bots/IPC_config.lua'

LINUX_APP_PATH = "~/Steam/steamapps/common/dota 2 beta"
OSX_APP_PATH = "~/Library/Application Support/Steam/SteamApps/common/dota 2 beta"
WINDOWS_APP_PATH = "C:/Program Files (x86)/Steam/steamapps/common/dota 2 beta"

# <steam path>/ubuntu12_32/steam-runtime/run.sh


class DotaPaths:
    """Resolves filesystem locations inside a Dota 2 installation."""

    def __init__(self, path=None):
        """``path`` is the Dota 2 install root; when omitted it is guessed
        from the current operating system."""
        self.path = path if path is not None else self.guess()

    def guess(self):
        """Return the default Dota 2 install root for the running platform."""
        from sys import platform

        if platform in ("linux", "linux2"):
            return os.path.expanduser(LINUX_APP_PATH)
        if platform == "darwin":
            return os.path.expanduser(OSX_APP_PATH)
        return WINDOWS_APP_PATH

    @property
    def executable_path(self):
        """Full path to the game executable for the running platform."""
        from sys import platform

        suffix = (
            EXECUTABLE_PATH_LINUX
            if platform in ("linux", "linux2")
            else EXECUTABLE_PATH_WINDOWS
        )
        return self.path + '/' + suffix

    @property
    def ipc_recv_handle(self):
        # Messages from the game arrive via the console log file.
        return self.path + '/' + CONSOLE_LOG

    @property
    def console_log(self):
        # Alias of ipc_recv_handle, kept for readability at call sites.
        return self.ipc_recv_handle

    @property
    def ipc_send_handle(self):
        return self.path + '/' + SEND_MSG

    @property
    def ipc_config_handle(self):
        return self.path + '/' + CONFIG_MSG

    def bot_file(self, filename):
        """Return a file path that is located in the bot folder"""
        return self.path + '/' + BOT_PATH + filename
f709b9bf5ba6566862a18830554448f31ea2f564 | 20,398 | py | Python | hs_labels/models.py | hydroshare/hydroshare | bf9888bbe61507aff070b1dfcec2fdec1921468d | [
"BSD-3-Clause"
] | 178 | 2015-01-08T23:03:36.000Z | 2022-03-03T13:56:45.000Z | hs_labels/models.py | hydroshare/hydroshare | bf9888bbe61507aff070b1dfcec2fdec1921468d | [
"BSD-3-Clause"
] | 4,125 | 2015-01-01T14:26:15.000Z | 2022-03-31T16:38:55.000Z | hs_labels/models.py | hydroshare/hydroshare | bf9888bbe61507aff070b1dfcec2fdec1921468d | [
"BSD-3-Clause"
] | 53 | 2015-03-15T17:56:51.000Z | 2022-03-17T00:32:16.000Z | """
This model supports user labeling of resources in various ways.
For a User u, this instantiates a subobject u.ulabels (like u.uaccess)
that contains all the labeling functions.
Functions include:
* u.ulabels.label_resource(r, label)
instantiates a label for a resource. Resources can have multiple labels.
* u.ulabels.unlabel_resource(r, label)
removes a label; there can be many labels.
* u.ulabels.clear_resource_labels(r)
removes all labels for a resource
* u.ulabels.favorite_resource(r)
favorites a resource
* u.ulabels.unfavorite_resource(r)
removes a favorite
and the reporting functions
* u.ulabels.labeled_resources
A queryset of resources that are labeled.
* u.ulabels.favorited_resources
A queryset of resources that have been favorited
* u.ulabels.get_resources_with_label(label)
Get a queryset of resources possessing a specific label.
For a BaseResource r, this also adds a subobject rlabels that reports on labels for resources
* r.rlabels.get_labels(u)
* r.rlabels.is_favorite(u)
* r.rlabels.is_mine(u)
"""
# TODO: combine label filtering with access control
import re
from django.contrib.auth.models import User
from django.db import models
from django.db import transaction
from django.db.models import Q
from hs_core.models import BaseResource
class FlagCodes(object):
    """
    Flag codes describe the meanings of per-user flags for a resource.

    * 1 or FlagCodes.FAVORITE:
      marked as a favorite on "My Resources" page

    * 2 or FlagCodes.MINE:
      marked as being part of "My Resources" on "Discover" page.

    * 3 or FlagCodes.OPEN_WITH_APP:
      marked as "Open With App" (see the FLAG_CHOICES label below).
    """
    FAVORITE = 1
    MINE = 2
    OPEN_WITH_APP = 3
    FLAG_CHOICES = (
        (FAVORITE, 'Favorite'), # marked as favorite in my resources page.
        (MINE, 'Mine'), # marked as mine in discovery page.
        (OPEN_WITH_APP, 'Open With App'), # marked as a open_with app
    )
class UserResourceLabels(models.Model):
    """
    Labels of a user for a resource

    This model stores labels of an individual user, like an access control
    list; there is at most one row per (user, resource, label) triple, as
    enforced by Meta.unique_together.
    """
    # auto_now: refreshed every save, i.e. when the label was (last) assigned.
    start = models.DateTimeField(editable=False, auto_now=True)

    user = models.ForeignKey(User, null=False, editable=False,
                             related_name='u2url', # unused but must be defined and unique
                             help_text='user assigning a label',
                             on_delete=models.CASCADE)
    resource = models.ForeignKey(BaseResource, null=False, editable=False,
                                 related_name='r2url', # unused but must be defined and unique
                                 help_text='resource to which a label applies',
                                 on_delete=models.CASCADE)
    label = models.TextField(null=False)

    class Meta:
        unique_together = ('user', 'resource', 'label')
class UserResourceFlags(models.Model):
    """
    Per-user flagging of resources.

    This model stores labels of an individual user, like an access
    control list; There are several kinds of labels documented in FlagCodes.
    These are similar in implementation but differ in semantics.

    At most one row exists per (user, resource, kind), per Meta.unique_together.
    """
    kind = models.IntegerField(choices=FlagCodes.FLAG_CHOICES,
                               editable=False,
                               default=FlagCodes.FAVORITE)
    # auto_now: refreshed every save, i.e. when the flag was (last) set.
    start = models.DateTimeField(editable=False, auto_now=True)
    user = models.ForeignKey(User, null=False, editable=False,
                             related_name='u2urf', # unused but must be defined and unique
                             help_text='user assigning a flag',
                             on_delete=models.CASCADE)
    resource = models.ForeignKey(BaseResource, null=False, editable=False,
                                 related_name="r2urf", # unused but must be defined and unique
                                 help_text='resource to which a flag applies',
                                 on_delete=models.CASCADE)

    class Meta:
        unique_together = ('user', 'resource', 'kind')
class UserStoredLabels(models.Model):
    """
    Storage class for persistent labels that are reusable across different
    kinds of objects. A saved label is not attached to any resource; it is
    just remembered per user so it can be applied later.
    """
    # The owner of the saved label; rows are removed if the user is deleted.
    user = models.ForeignKey(User, null=False,
                             help_text='user who stored the label',
                             related_name='ul2usl',
                             on_delete=models.CASCADE)
    # The saved label text; unique per user (see Meta).
    label = models.TextField(help_text='label to be stored by user')
    class Meta:
        unique_together = ('user', 'label')
class UserLabels(models.Model):
    """
    Projection class that puts labeling methods and content inside the basic
    User object so that one can access things easily from that context.

    This model is attached to User under the related name "ulabels":
    for a User u, u.ulabels is this model.

    Reverse query names used below: "r2url" reaches UserResourceLabels rows
    and "r2urf" reaches UserResourceFlags rows for a resource.
    """
    user = models.OneToOneField(User,
                                editable=False,
                                null=True,
                                related_name='ulabels', # induced field in User class.
                                related_query_name='ulabels',
                                on_delete=models.CASCADE)
    ##########################################
    # PUBLIC FUNCTIONS: resources
    ##########################################
    @property
    def labeled_resources(self):
        """
        Get a QuerySet of resources labeled by a user.
        This eliminates duplicates.
        """
        return BaseResource.objects.filter(r2url__user=self.user).distinct()
    def get_flagged_resources(self, this_flagcode):
        """
        Get resources with a specific flag.

        :param this_flagcode: one of FlagCodes.FAVORITE, MINE, OPEN_WITH_APP.
        :return: QuerySet of BaseResource flagged with that code by this user.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert this_flagcode == FlagCodes.FAVORITE or this_flagcode == FlagCodes.MINE or \
                this_flagcode == FlagCodes.OPEN_WITH_APP
        return BaseResource.objects.filter(r2urf__user=self.user,
                                           r2urf__kind=this_flagcode)
    @property
    def favorited_resources(self):
        """
        Get a QuerySet of resources favorited by a user.
        This eliminates duplicates.
        """
        return self.get_flagged_resources(FlagCodes.FAVORITE)
    @property
    def my_resources(self):
        """
        Get a QuerySet of resources marked as mine (add to my resources) by a user.
        This eliminates duplicates.
        """
        return self.get_flagged_resources(FlagCodes.MINE)
    @property
    def resources_of_interest(self):
        """
        Get a QuerySet of resources the user has tagged in any way
        (labeled or flagged).
        """
        # distinct() is required: a resource matching both joins (labels and
        # flags) would otherwise appear once per matching row in the OR.
        return BaseResource.objects.filter(Q(r2url__user=self.user) | Q(r2urf__user=self.user)).distinct()
    def get_resources_with_label(self, this_label):
        """
        Get a QuerySet of resources with a specific label.
        The label is normalized with clean_label() before matching.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_label, str)
        label_string = UserLabels.clean_label(this_label) # remove leading and trailing spaces
        return BaseResource.objects.filter(r2url__user=self.user,
                                           r2url__label__exact=label_string)\
                                   .distinct()\
                                   .order_by('r2url__label')
    @property
    def user_labels(self):
        """
        Get a QuerySet of this user's label strings currently in use,
        deduplicated and sorted alphabetically.
        """
        return UserResourceLabels.objects.values_list('label', flat=True)\
                                         .filter(user=self.user)\
                                         .distinct().order_by('label')
    ######################################
    # Label a resource
    ######################################
    @staticmethod
    def clean_label(name):
        """
        Normalize a label: drop all '/' characters, strip leading/trailing
        whitespace, and collapse internal whitespace runs to single spaces.
        """
        label_string = re.sub('/', r'', name) # no /'s
        label_string = label_string.strip() # no leading or trailing whitespace
        label_string = re.sub(r'\s+', r' ', label_string) # collapse multiple whitespace, including tabs
        return label_string
    def label_resource(self, this_resource, this_label):
        """
        Assign a label to a resource.

        Users are allowed to label any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_resource, BaseResource)
            assert isinstance(this_label, str)
        # remove leading and trailing spaces
        label_string = UserLabels.clean_label(this_label)
        with transaction.atomic(): # empirically, get_or_create is not atomic.
            UserResourceLabels.objects.get_or_create(resource=this_resource,
                                                     label=label_string,
                                                     user=self.user)
    def unlabel_resource(self, this_resource, this_label):
        """
        Remove one label from a resource.

        Users are allowed to label any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_resource, BaseResource)
            assert isinstance(this_label, str)
        # remove leading and trailing spaces
        label_string = UserLabels.clean_label(this_label)
        UserResourceLabels.objects.filter(resource=this_resource,
                                          label__exact=label_string,
                                          user=self.user).delete()
    def clear_resource_labels(self, this_resource):
        """
        Clear all of this user's labels for a resource.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_resource, BaseResource)
        UserResourceLabels.objects.filter(resource=this_resource,
                                          user=self.user).delete()
    def remove_resource_label(self, this_label):
        """
        Clear a label from the labeling system (all uses by this user).
        """
        # NOTE(review): unlike the other label methods, this_label is NOT
        # normalized via clean_label() here; callers (e.g. unsave_label)
        # are expected to pass an already-cleaned label — confirm intent.
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_label, str)
        UserResourceLabels.objects.filter(label=this_label, user=self.user)\
                                  .delete()
    ##########################################
    # general flagging of resources
    ##########################################
    def flag_resource(self, this_resource, this_flagcode):
        """
        Flag a resource with a specific flag code from FlagCodes.

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because flagging information is private.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_resource, BaseResource)
            assert this_flagcode == FlagCodes.FAVORITE or this_flagcode == FlagCodes.MINE or \
                this_flagcode == FlagCodes.OPEN_WITH_APP
        with transaction.atomic(): # empirically, get_or_create is not atomic.
            UserResourceFlags.objects.get_or_create(resource=this_resource,
                                                    kind=this_flagcode,
                                                    user=self.user)
    def unflag_resource(self, this_resource, this_flagcode):
        """
        Unflag a resource with a specific flag.

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because flagging information is private.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_resource, BaseResource)
            assert this_flagcode == FlagCodes.FAVORITE or this_flagcode == FlagCodes.MINE or \
                this_flagcode == FlagCodes.OPEN_WITH_APP
        UserResourceFlags.objects.filter(user=self.user,
                                         resource=this_resource,
                                         kind=this_flagcode).delete()
    def clear_all_flags(self, this_flagcode):
        """
        Remove all flags of a specific kind for a user.
        """
        UserResourceFlags.objects.filter(user=self.user,
                                         kind=this_flagcode)\
                                 .delete()
    ##########################################
    # favorite resources
    ##########################################
    def favorite_resource(self, this_resource):
        """
        Mark a resource as favorite.

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        """
        self.flag_resource(this_resource, FlagCodes.FAVORITE)
    def unfavorite_resource(self, this_resource):
        """
        Clear favorite label for a resource.

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        """
        self.unflag_resource(this_resource, FlagCodes.FAVORITE)
    ##########################################
    # my resources
    ##########################################
    def claim_resource(self, this_resource):
        """
        Label a resource as 'MINE' (adds to my resources).

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        """
        self.flag_resource(this_resource, FlagCodes.MINE)
    def unclaim_resource(self, this_resource):
        """
        Clear 'MINE' label for a resource (removes from my resources).

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        """
        self.unflag_resource(this_resource, FlagCodes.MINE)
    ##########################################
    # open with app
    ##########################################
    def add_open_with_app(self, this_resource):
        """
        Mark a webapp resource as open-with-app.

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        The calling function should make sure resource is a webapp resource.
        """
        self.flag_resource(this_resource, FlagCodes.OPEN_WITH_APP)
    def remove_open_with_app(self, this_resource):
        """
        Unmark a webapp resource as open-with-app.

        Users are allowed to flag any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        The calling function should make sure resource is a webapp resource.
        """
        self.unflag_resource(this_resource, FlagCodes.OPEN_WITH_APP)
    ##########################################
    # routines that apply to all kinds of annotations
    ##########################################
    def clear_resource_all(self, this_resource):
        """
        Clear all annotations (labels and flags) for a resource.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_resource, BaseResource)
        UserResourceLabels.objects\
                          .filter(resource=this_resource,
                                  user=self.user)\
                          .delete()
        UserResourceFlags.objects\
                         .filter(resource=this_resource,
                                 user=self.user)\
                         .delete()
    ##########################################
    # save unused labels
    ##########################################
    def save_label(self, this_label):
        """
        Save a label for use later, without attaching it to any resource.

        Users are allowed to label any resource, including resources to which they do not have access.
        This is not an access control problem because labeling information is private.
        """
        label_string = UserLabels.clean_label(this_label) # remove leading and trailing spaces
        with transaction.atomic(): # empirically, get_or_create is not atomic.
            UserStoredLabels.objects.get_or_create(label=label_string, user=self.user)
    def unsave_label(self, this_label):
        """
        Remove the specified saved label, and every use of it on resources.
        """
        # remove leading and trailing spaces
        label_string = UserLabels.clean_label(this_label)
        UserStoredLabels.objects.filter(label__exact=label_string, user=self.user).delete()
        # remove all uses of that label from resources.
        self.remove_resource_label(label_string)
    def clear_saved_labels(self):
        """
        Clear all saved labels for a user.
        """
        UserStoredLabels.objects.filter(user=self.user).delete()
    @property
    def saved_labels(self):
        """
        Return a QuerySet of this user's saved label strings, deduplicated.
        """
        return UserStoredLabels.objects.filter(user=self.user).values_list('label', flat=True).distinct()
class ResourceLabels(models.Model):
    """
    For a BaseResource r, r.rlabels is this model. It contains functions
    relevant to resources: which users annotated the resource, and how.
    """
    resource = models.OneToOneField(BaseResource,
                                    editable=False,
                                    null=True,
                                    related_name='rlabels',
                                    related_query_name='rlabels',
                                    on_delete=models.CASCADE)
    def get_users(self):
        """
        Return a QuerySet of all users who have labeled or flagged this resource.

        distinct() is required here: the OR of two reverse-FK joins (u2url =
        labels, u2urf = flags) yields one row per matching annotation, so a
        user with several labels/flags would otherwise appear multiple times.
        This mirrors UserLabels.resources_of_interest.
        """
        return User.objects.filter(Q(u2url__resource=self.resource) |
                                   Q(u2urf__resource=self.resource)).distinct()
    def get_labels(self, this_user):
        """
        Return a QuerySet of all labels this_user assigned to this resource,
        as flat strings in alphabetical order.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_user, User)
        labels = UserResourceLabels.objects\
                                   .values_list('label', flat=True)\
                                   .filter(user=this_user,
                                           resource=self.resource)\
                                   .order_by("label").all()
        return labels
    def is_flagged(self, this_user, this_flagcode):
        """
        Return True if this resource has been flagged with this_flagcode
        by the given user.
        """
        if __debug__: # during testing only, check argument types and preconditions
            assert isinstance(this_user, User)
            # membership test is equivalent to the chained == comparisons
            assert this_flagcode in (FlagCodes.FAVORITE, FlagCodes.MINE,
                                     FlagCodes.OPEN_WITH_APP)
        return UserResourceFlags.objects.filter(user=this_user,
                                                resource=self.resource,
                                                kind=this_flagcode).exists()
    def is_favorite(self, this_user):
        """
        Return True if this resource has been favorited by a given user.
        """
        return self.is_flagged(this_user, FlagCodes.FAVORITE)
    def is_mine(self, this_user):
        """
        Return True if this resource has been labeled as mine by a given user.
        """
        return self.is_flagged(this_user, FlagCodes.MINE)
    def is_open_with_app(self, this_user):
        """
        Return True if this resource has been set as open-with-app by a given user.
        """
        return self.is_flagged(this_user, FlagCodes.OPEN_WITH_APP)
f709d7d9a6c3f3c9af884ebe9c62a536e3dfa929 | 597 | py | Python | verification_rules/common/__init__.py | adrianmkng/watchmen | 4be15ad64a5d54d4f546ca8c139fa41fd42dd6aa | [
"Apache-2.0"
] | 190 | 2017-12-13T05:01:42.000Z | 2021-11-15T23:35:54.000Z | verification_rules/common/__init__.py | adrianmkng/watchmen | 4be15ad64a5d54d4f546ca8c139fa41fd42dd6aa | [
"Apache-2.0"
] | 2 | 2018-08-31T04:53:03.000Z | 2018-11-14T00:13:49.000Z | verification_rules/common/__init__.py | adrianmkng/watchmen | 4be15ad64a5d54d4f546ca8c139fa41fd42dd6aa | [
"Apache-2.0"
] | 22 | 2017-12-13T04:36:46.000Z | 2021-07-29T07:37:41.000Z | # Copyright 2017 Insurance Australia Group Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| 39.8 | 74 | 0.768844 |
f709dac114e4abf7ce428aa013816678149fbf10 | 19 | py | Python | btd6_memory_info/generated/SteamNative/Platform/Linux32/linux32.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/SteamNative/Platform/Linux32/linux32.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/SteamNative/Platform/Linux32/linux32.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | class Linux32: pass | 19 | 19 | 0.842105 |
f709db49836f0a337d880632a4ab2b1e17e82610 | 1,896 | py | Python | hooks/hook-pygame.py | tappi287/rf2_video_settings | 6ae73c63f48e6d515a9efb653f236dea0494d9f1 | [
"MIT"
] | 8 | 2020-12-09T17:34:40.000Z | 2022-02-21T10:15:09.000Z | hooks/hook-pygame.py | tappi287/rf2_video_settings | 6ae73c63f48e6d515a9efb653f236dea0494d9f1 | [
"MIT"
] | 11 | 2021-02-27T00:21:47.000Z | 2022-02-25T14:41:56.000Z | hooks/hook-pygame.py | tappi287/rf2_video_settings | 6ae73c63f48e6d515a9efb653f236dea0494d9f1 | [
"MIT"
] | 2 | 2021-06-28T21:11:53.000Z | 2022-02-06T17:20:18.000Z | """
binaries hook for pygame seems to be required for pygame 2.0 Windows.
Otherwise some essential DLLs will not be transfered to the exe.
And also put hooks for datas, resources that pygame uses, to work
correctly with pyinstaller
"""
import os
import platform
from pygame import __file__ as pygame_main_file
# Get pygame's folder
pygame_folder = os.path.dirname(os.path.abspath(pygame_main_file))
# datas is the variable that pyinstaller looks for while processing hooks
datas = []
# exclude some unneeded binaries
exclude_bin = ('libFLAC-8', 'libfreetype-6', 'libjpeg-9', 'libmodplug-1', 'libmpg123-0', 'libogg-0', 'libopus-0',
'libopusfile-0', 'libpng16-16', 'libtiff-5', 'libvorbis-0', 'libvorbisfile-3', 'libwebp-7', 'portmidi',
'SDL2_image', 'SDL2_mixer', 'SDL2_ttf')
# A helper to append the relative path of a resource to hook variable - datas
def _append_to_datas(file_path):
    """Add (absolute path, "pygame") to the module-level ``datas`` list.

    The entry is appended only when *file_path* actually exists inside
    pygame's installation folder; missing resources are silently skipped.
    """
    global datas
    candidate = os.path.join(pygame_folder, file_path)
    if not os.path.exists(candidate):
        return
    datas.append((candidate, "pygame"))
# First append the font file, then based on the OS, append pygame icon file
_append_to_datas("freesansbold.ttf")
if platform.system() == "Darwin":
    _append_to_datas("pygame_icon.tiff")
else:
    _append_to_datas("pygame_icon.bmp")
if platform.system() == "Windows":
    from PyInstaller.utils.hooks import collect_dynamic_libs
    # Collect every dynamic library that ships inside the pygame package.
    pre_binaries = collect_dynamic_libs('pygame')
    # binaries is the variable PyInstaller reads from hooks for binary files.
    binaries = []
    for b in pre_binaries:
        binary, location = b
        # base file name of the DLL, without its directory
        filename = os.path.split(binary)[-1]
        # Compare against the exclusion list with the ".dll" suffix removed.
        # str.removesuffix requires Python 3.9+.
        if filename.removesuffix('.dll') in exclude_bin:
            print('Custom pygame hook excluding binary:', filename)
            continue
        # settles all the DLLs into the top level folder, which prevents duplication
        # with the DLLs already being put there.
        binaries.append((binary, "."))