Dataset schema (one row per source file):

| column | dtype | lengths / values |
|---|---|---|
| hexsha | string | lengths 40-40 |
| size | int64 | 3-1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | lengths 3-972 |
| max_stars_repo_name | string | lengths 6-130 |
| max_stars_repo_head_hexsha | string | lengths 40-78 |
| max_stars_repo_licenses | sequence | lengths 1-10 |
| max_stars_count | int64 | 1-191k ⌀ |
| max_stars_repo_stars_event_min_datetime | string | lengths 24-24 ⌀ |
| max_stars_repo_stars_event_max_datetime | string | lengths 24-24 ⌀ |
| max_issues_repo_path | string | lengths 3-972 |
| max_issues_repo_name | string | lengths 6-130 |
| max_issues_repo_head_hexsha | string | lengths 40-78 |
| max_issues_repo_licenses | sequence | lengths 1-10 |
| max_issues_count | int64 | 1-116k ⌀ |
| max_issues_repo_issues_event_min_datetime | string | lengths 24-24 ⌀ |
| max_issues_repo_issues_event_max_datetime | string | lengths 24-24 ⌀ |
| max_forks_repo_path | string | lengths 3-972 |
| max_forks_repo_name | string | lengths 6-130 |
| max_forks_repo_head_hexsha | string | lengths 40-78 |
| max_forks_repo_licenses | sequence | lengths 1-10 |
| max_forks_count | int64 | 1-105k ⌀ |
| max_forks_repo_forks_event_min_datetime | string | lengths 24-24 ⌀ |
| max_forks_repo_forks_event_max_datetime | string | lengths 24-24 ⌀ |
| content | string | lengths 3-1.03M |
| avg_line_length | float64 | 1.13-941k |
| max_line_length | int64 | 2-941k |
| alphanum_fraction | float64 | 0-1 |
9748178ba8a2985c5abc1fc95c30f647601960ac | 6,052 | py | Python | docs/conf.py | mrshannon/flask-signin | 7886879622f3c30056874b6a0ab61d356c92f406 | ["MIT"] | null | null | null | docs/conf.py | mrshannon/flask-signin | 7886879622f3c30056874b6a0ab61d356c92f406 | ["MIT"] | null | null | null | docs/conf.py | mrshannon/flask-signin | 7886879622f3c30056874b6a0ab61d356c92f406 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
from flask_signin import __version__
# -- Project information -----------------------------------------------------
project = 'Flask SignIn'
copyright = '2018, Michael R. Shannon'
author = 'Michael R. Shannon'
# The short X.Y version
version = __version__
# The full version, including alpha/beta/rc tags
release = version
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
# pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'flask'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'index_logo': 'flask-signin.png',
'github_fork': 'mrshannon/flask-signin',
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'FlaskSignIndoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'FlaskSignIn.tex', 'Flask SignIn Documentation',
'Michael R. Shannon', 'manual'),
]
latex_logo = '_static/flask-signin.pdf'
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'flasksignin', 'Flask SignIn Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'FlaskSignIn', 'Flask SignIn Documentation',
author, 'FlaskSignIn', 'Simple authentication for Flask.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'flask': ('http://flask.pocoo.org/docs/', None),
'flask_dance': ('https://flask-dance.readthedocs.io/en/latest/', None),
'flask_login': ('https://flask-login.readthedocs.io/en/latest/', None),
'werkzeug': ('http://werkzeug.pocoo.org/docs/', None),
'requests': ('http://docs.python-requests.org/en/latest/', None),
'requests_oauthlib': ('https://requests-oauthlib.readthedocs.io/en/latest/', None),
}
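# With the mappings above, the docs can cross-reference objects from those external
# projects via standard roles, e.g. :class:`flask.Flask` or :func:`requests.get` in RST
# (illustrative examples, not taken from this project's docs).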
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
| 32.363636 | 87 | 0.648876 |
486fdc95bfe525fd2dad9d3234f90d18c38c3f73 | 3,064 | py | Python | src/waldur_mastermind/marketplace/managers.py | opennode/nodeconductor-assembly-waldur | cad9966389dc9b52b13d2301940c99cf4b243900 | ["MIT"] | 2 | 2017-01-20T15:26:25.000Z | 2017-08-03T04:38:08.000Z | src/waldur_mastermind/marketplace/managers.py | opennode/nodeconductor-assembly-waldur | cad9966389dc9b52b13d2301940c99cf4b243900 | ["MIT"] | null | null | null | src/waldur_mastermind/marketplace/managers.py | opennode/nodeconductor-assembly-waldur | cad9966389dc9b52b13d2301940c99cf4b243900 | ["MIT"] | null | null | null |
from django.db import models as django_models
from django.db.models import Q
from waldur_core.core import managers as core_managers
from waldur_core.structure import models as structure_models
from waldur_core.structure import utils as structure_utils
class MixinManager(core_managers.GenericKeyMixin, django_models.Manager):
pass
class OfferingQuerySet(django_models.QuerySet):
def filter_for_user(self, user):
if user.is_anonymous or user.is_staff or user.is_support:
return self
connected_customers = structure_models.Customer.objects.all().filter(
permissions__user=user, permissions__is_active=True
)
connected_projects = structure_models.Project.available_objects.all().filter(
permissions__user=user, permissions__is_active=True
)
return self.filter(
Q(shared=True)
| Q(shared=False, customer__in=connected_customers)
| Q(shared=False, project__in=connected_projects)
| Q(shared=True, permissions__user=user, permissions__is_active=True),
).distinct()
def filter_for_customer(self, value):
customer = structure_models.Customer.objects.get(uuid=value)
return self.filter(
Q(shared=True, divisions__isnull=True)
| Q(shared=True, divisions__isnull=False, divisions=customer.division)
| Q(customer__uuid=value)
)
def filter_for_service_manager(self, value):
return self.filter(
shared=True, permissions__user__uuid=value, permissions__is_active=True
)
def filter_for_project(self, value):
return self.filter(Q(shared=True) | Q(project__uuid=value))
def filter_importable(self, user):
# Import is limited to staff for shared offerings and to staff/owners for private offerings
if user.is_staff:
return self
return self.filter(
shared=False, customer__in=structure_utils.get_customers_owned_by_user(user)
)
class OfferingManager(MixinManager):
def get_queryset(self):
return OfferingQuerySet(self.model, using=self._db)
class ResourceQuerySet(django_models.QuerySet):
def filter_for_user(self, user):
"""
Resources are available to both service provider and service consumer.
"""
if user.is_anonymous or user.is_staff or user.is_support:
return self
return self.filter(
Q(
project__permissions__user=user,
project__permissions__is_active=True,
)
| Q(
project__customer__permissions__user=user,
project__customer__permissions__is_active=True,
)
| Q(
offering__customer__permissions__user=user,
offering__customer__permissions__is_active=True,
)
).distinct()
class ResourceManager(MixinManager):
def get_queryset(self):
return ResourceQuerySet(self.model, using=self._db)
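# --- Illustrative usage (sketch, not part of the original module) ---
# In the usual Django pattern the marketplace models attach these managers and
# views chain the custom filters. Model/field names below are assumptions made
# for illustration only:
#
#   class Offering(django_models.Model):
#       shared = django_models.BooleanField(default=False)
#       objects = OfferingManager()
#
#   visible = Offering.objects.get_queryset().filter_for_user(request.user)
#   importable = Offering.objects.get_queryset().filter_importable(request.user)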
| 33.304348 | 99 | 0.672977 |
c51460a4b28adc5002682bd650807ec52cd60c7d | 817 | py | Python | examples/rpc_client2.py | fordoo/fizznet | 55d83d11c3b37c3bde7fae8c3579e5cb712d1940 | ["MIT"] | 60 | 2015-10-16T03:18:38.000Z | 2021-06-04T07:39:53.000Z | examples/rpc_client2.py | fordoo/fizznet | 55d83d11c3b37c3bde7fae8c3579e5cb712d1940 | ["MIT"] | 4 | 2015-11-20T09:38:29.000Z | 2016-09-06T15:27:35.000Z | examples/rpc_client2.py | fordoo/fizznet | 55d83d11c3b37c3bde7fae8c3579e5cb712d1940 | ["MIT"] | 15 | 2015-11-07T19:14:28.000Z | 2019-10-12T07:35:50.000Z |
# -*- coding: utf-8 -*-
import logging
import sys
sys.path.append('../')
from tornado import ioloop, gen
from example_utils import log_initialize
from torpc import RPCClient
logger = logging.getLogger(__name__)
@gen.coroutine
def test_rpc():
ret = yield rpc_client.call('sum', 11, 22)
logger.info('call sum from the rpc server, result: {0}'.format(ret))
ret = yield rpc_client.call('call_node', "client1", "ping")
logger.info('call ping from client1 through the rpc server, result: {0}'.format(ret))
ret = yield rpc_client.call('ping_client1')
logger.info('call ping_client1 from the rpc server, result: {0}'.format(ret))
if __name__ == '__main__':
log_initialize()
rpc_client = RPCClient(('127.0.0.1', 5000), 'client2')
test_rpc()
ioloop.IOLoop.instance().start()
| 24.029412 | 89 | 0.687882 |
12a7630100c7c0519bb2e746fa65c249b5977e19 | 5,098 | py | Python | src/mlb_statsapi/model/base.py | power-edge/mlb_statsapi_etl | 9cca2ae059e8aab98ed460e7b71ad6eeeed09ffe | ["Apache-2.0"] | null | null | null | src/mlb_statsapi/model/base.py | power-edge/mlb_statsapi_etl | 9cca2ae059e8aab98ed460e7b71ad6eeeed09ffe | ["Apache-2.0"] | null | null | null | src/mlb_statsapi/model/base.py | power-edge/mlb_statsapi_etl | 9cca2ae059e8aab98ed460e7b71ad6eeeed09ffe | ["Apache-2.0"] | null | null | null |
"""
created by nikos at 4/21/21
"""
import json
import os
from serde import Model, fields, tags
from mlb_statsapi.utils import CONFIGS_PATH
from mlb_statsapi.utils.log import LogMixin
from mlb_statsapi.utils.stats_api_object import StatsAPIObject
__beta_stats_api_default_version__ = '1.0'
beta_stats_api_version = os.environ.get('BETA_STATS_API_VERSION', __beta_stats_api_default_version__)
class MLBStatsAPIModel(Model):
_instance = None
apiVersion: fields.Str()
src_url: fields.Str()
swaggerVersion: fields.Str()
@classmethod
def get_name(cls):
return cls.__module__.split('.')[-1]
@classmethod
def get_open_path(cls):
sub_path = cls._fmt_rel_path.format(name=cls.get_name(), api_version=beta_stats_api_version)
return f"{CONFIGS_PATH}/statsapi/{sub_path}"
@classmethod
def read_doc_str(cls):
with open(cls.get_open_path(), 'r') as f:
api_doc = f.read()
return api_doc
@classmethod
def read_doc(cls, doc_str=None):
if doc_str is None:
doc_str = cls.read_doc_str()
return json.loads(doc_str)
@classmethod
def from_doc(cls):
if cls._instance is None:
cls._instance = cls.from_json(cls.read_doc_str())
return cls._instance
class ResponseMessage(Model):
code: fields.Int()
message: fields.Optional(fields.Str)
responseModel: fields.Optional(fields.Str)
class ItemType(Model):
type: fields.Str()
class Parameter(Model):
allowMultiple: fields.Bool()
defaultValue: fields.Str()
description: fields.Str()
name: fields.Str()
paramType: fields.Choice(['path', 'query'])
required: fields.Bool()
type: fields.Str()
items: fields.Optional(fields.Nested(ItemType))
# optional
items: fields.Optional(fields.Nested(ItemType))
uniqueItems: fields.Optional(fields.Bool)
# even more optional
enum: fields.Optional(fields.List(fields.Str))
class OperationModel(Model):
consumes: fields.List(fields.Str)
deprecated: fields.Str()
method: fields.Choice(['GET', 'POST'])
nickname: fields.Str()
notes: fields.Str()
parameters: fields.List(Parameter)
produces: fields.List(fields.Str)
responseMessages: fields.List(ResponseMessage)
summary: fields.Str()
type: fields.Str()
# optional
items: fields.Optional(fields.Nested(ItemType))
uniqueItems: fields.Optional(fields.Bool)
@property
def path_params(self):
return [param for param in self.parameters if param.paramType == "path"]
@property
def query_params(self):
return [param for param in self.parameters if param.paramType == "query"]
class APIModelBase(Model):
description: fields.Str()
path: fields.Str()
class Meta:
tag = tags.Internal(tag='apis')
class EndpointAPIModel(APIModelBase):
operations: fields.List(OperationModel)
@property
def get_operations_map(self):
return {o.nickname: o for o in self.operations if o.method == "GET"}
@property
def operation(self):
return self.get_operations_map[self.description]
class MLBStatsAPIEndpointModel(MLBStatsAPIModel, LogMixin):
"""
Each api in the api_docs gets an inheriting class to define methods to for the endpoint access patterns.
Some of the api doc json files have small naming issues, therefore the @api_path wraps functions to make explicit
the path and name to search, where the name corresponds to the api.description or the api.operation.nickname, but
method names are corrected for misnaming in the underlying documentation.
These methods return a StatsAPIFileObject which is endpoint/api aware, and can get, save, and load itself.
"""
_methods = None
_fmt_rel_path = 'stats-api-{api_version}/{name}.json'
apis: fields.List(EndpointAPIModel)
api_path: fields.Str()
basePath: fields.Str()
consumes: fields.List(fields.Str)
# models: fields.Dict(fields.Str, ) # very complex serde and *not* necessary for stashing responses in a data store
produces: fields.List(fields.Str)
resourcePath: fields.Str()
class Meta:
tag = tags.Internal(tag='endpoint')
@property
def _api_path_name_map(self):
return {(api.path, api.description): api for api in self.apis}
@property
def _api_description_map(self):
return {api.description: api for api in self.apis}
def get_api_file_object(self, **kwargs):
path, name = kwargs['path'], kwargs['name']
api = self._api_path_name_map[path, name]
operation = api.get_operations_map[name]
path_params, query_params = kwargs.get('path_params'), kwargs.get('query_params')
return StatsAPIObject(
endpoint=self,
api=api,
operation=operation,
path_params=path_params,
query_params=query_params
)
@property
def methods(self):
assert self._methods is not None, 'please define methods for %s' % self.get_name()
return self._methods
| 29.468208 | 120 | 0.686151 |
e98ad22acb6cd11944636abb785d93855c517c8a | 1,362 | py | Python | src/api-service/__app__/timer_tasks/__init__.py | CuteCutePanda/onefuzz | c71ce580bd6ef37e6e8f8ee7c9413a13c0abd695 | ["MIT"] | 1 | 2021-12-20T14:48:40.000Z | 2021-12-20T14:48:40.000Z | src/api-service/__app__/timer_tasks/__init__.py | CuteCutePanda/onefuzz | c71ce580bd6ef37e6e8f8ee7c9413a13c0abd695 | ["MIT"] | null | null | null | src/api-service/__app__/timer_tasks/__init__.py | CuteCutePanda/onefuzz | c71ce580bd6ef37e6e8f8ee7c9413a13c0abd695 | ["MIT"] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import logging
import azure.functions as func
from onefuzztypes.enums import JobState, TaskState
from ..onefuzzlib.events import get_events
from ..onefuzzlib.jobs import Job
from ..onefuzzlib.orm import process_state_updates
from ..onefuzzlib.tasks.main import Task
from ..onefuzzlib.tasks.scheduler import schedule_tasks
def main(mytimer: func.TimerRequest, dashboard: func.Out[str]) -> None: # noqa: F841
expired_tasks = Task.search_expired()
for task in expired_tasks:
logging.info(
"stopping expired task. job_id:%s task_id:%s", task.job_id, task.task_id
)
task.mark_stopping()
expired_jobs = Job.search_expired()
for job in expired_jobs:
logging.info("stopping expired job. job_id:%s", job.job_id)
job.stopping()
jobs = Job.search_states(states=JobState.needs_work())
for job in jobs:
logging.info("update job: %s", job.job_id)
process_state_updates(job)
tasks = Task.search_states(states=TaskState.needs_work())
for task in tasks:
logging.info("update task: %s", task.task_id)
process_state_updates(task)
schedule_tasks()
Job.stop_never_started_jobs()
events = get_events()
if events:
dashboard.set(events)
| 28.375 | 85 | 0.701175 |
8d6f7a991c630b16af45b76704f7e73f8579a6db | 4,487 | py | Python | contribute_point_calculator.py | iknoom/LeetCode-Solutions | 85c034dfaf1455bcd69c19a2009197934d83f08e | ["MIT"] | 2 | 2021-01-26T04:15:33.000Z | 2021-01-26T04:15:46.000Z | contribute_point_calculator.py | iknoom/LeetCode-Solutions | 85c034dfaf1455bcd69c19a2009197934d83f08e | ["MIT"] | null | null | null | contribute_point_calculator.py | iknoom/LeetCode-Solutions | 85c034dfaf1455bcd69c19a2009197934d83f08e | ["MIT"] | null | null | null |
import os, datetime, pickle, pytz
from collections import defaultdict
class point_calculator:
def __init__(self):
self.parsed_logs = []
with open('problems_info', 'rb') as f:
self.problems_info = pickle.load(f)
def parse_git_log(self):
git_logs = os.popen("bash -c 'git fetch --all > /dev/null; git log --pretty=format:\"# %an | %cd\" --name-status --reverse'").read()
cur_committer = ""
cur_time = ""
for log in git_logs.split('\n'):
if not log: continue
if log[0] == "#":
commitor, time = log.split(' | ')
cur_committer = commitor[2:]
t = datetime.datetime.strptime(time[:-6], "%a %b %d %H:%M:%S %Y")
cur_time = str(t)
else:
stat, file = log.split()
if "solutions/" not in file:
continue
if stat == "A":
self.parsed_logs.append((cur_committer, cur_time, file))
elif stat == "D":
for i in range(len(self.parsed_logs)):
if self.parsed_logs[i][-1] == file:
self.parsed_logs.pop(i)
break
def print_logs(self):
for committer, time, file in self.parsed_logs:
print(committer, time, file)
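    # Example of the `git log --pretty=format:"# %an | %cd" --name-status` output
    # that parse_git_log() consumes (the commit data below is hypothetical):
    #
    #   # iknoom | Mon Jan 25 21:10:03 2021 +0900
    #   A       solutions/0001/solution.py
    #   D       solutions/0002/solution.cpp
    #
    # Lines starting with "#" carry the author and date; the name-status lines
    # that follow mark files as (A)dded or (D)eleted, which is what the parser tracks.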
def event1(self):
dic = defaultdict(set)
for committer, time, _ in self.parsed_logs:
dic[committer].add(time.split()[0])
participants = list(dic.keys())
participants.sort(key=lambda x: len(dic[x]), reverse=True)
contents = []
contents.append("### (Event 1) 가장 꾸준히 기여한 사람")
contents.append("| # | User Name | Points |")
contents.append("| :---: | :---: | :---: |")
for i, participant in enumerate(participants, 1):
contents.append(f"| {i} | {participant} | {len(dic[participant])} |")
return "\n".join(contents)
def event2(self):
dic = defaultdict(int)
vst = set()
for committer, time, file in self.parsed_logs:
_, number, name = file.split('/')
fmt = name.split('.')[1]
if fmt == "c":
fmt = "cpp"
fullname = number + "." + fmt
if fullname not in vst:
vst.add(fullname)
dic[committer] += 1
participants = list(dic.keys())
participants.sort(key=lambda x: dic[x], reverse=True)
contents = []
contents.append("### (Event 2) 모든 난이도에 가장 많이 기여한 사람")
contents.append("| # | User Name | Points |")
contents.append("| :---: | :---: | :---: |")
for i, participant in enumerate(participants, 1):
contents.append(f"| {i} | {participant} | {dic[participant]} |")
return "\n".join(contents)
def event3(self):
dic = defaultdict(int)
vst = set()
for committer, time, file in self.parsed_logs:
_, number, name = file.split('/')
if self.problems_info[int(number)]['Difficulty'] == "Easy":
continue
fmt = name.split('.')[1]
if fmt == "c":
fmt = "cpp"
fullname = number + "." + fmt
if fullname not in vst:
vst.add(fullname)
dic[committer] += 1
participants = list(dic.keys())
participants.sort(key=lambda x: dic[x], reverse=True)
contents = []
contents.append("### (Event 3) Medium, Hard 난이도에 가장 많이 기여한 사람")
contents.append("| # | User Name | Points |")
contents.append("| :---: | :---: | :---: |")
for i, participant in enumerate(participants, 1):
contents.append(f"| {i} | {participant} | {dic[participant]} |")
return "\n".join(contents)
def get_current_time():
date_format = "**%Y년 %m월 %d일 %H시 %M분**"
now = datetime.datetime.now(pytz.timezone("Asia/Seoul"))
return now.strftime(date_format)
def get_contents():
calculator = point_calculator()
calculator.parse_git_log()
contents = []
contents.append("## Ranking")
contents.append(f"{get_current_time()}에 마지막으로 업데이트된 순위입니다.\n")
contents.append(calculator.event1() + "\n")
contents.append(calculator.event2() + "\n")
contents.append(calculator.event3())
return "\n".join(contents)
if __name__ == "__main__":
    print(get_contents())
| 36.185484 | 140 | 0.519724 |
82730263acaf100bb98478b51ef86a67f8a385bc | 25,837 | py | Python | cadquery/sketch.py | Hecatron-Forks/cadquery | db457616f0a2cfbdaa836c9b9083b5d635753b7c | ["Apache-2.0"] | null | null | null | cadquery/sketch.py | Hecatron-Forks/cadquery | db457616f0a2cfbdaa836c9b9083b5d635753b7c | ["Apache-2.0"] | null | null | null | cadquery/sketch.py | Hecatron-Forks/cadquery | db457616f0a2cfbdaa836c9b9083b5d635753b7c | ["Apache-2.0"] | null | null | null |
from typing import (
Union,
Optional,
List,
Dict,
Callable,
Tuple,
Iterable,
Iterator,
Any,
Sequence,
TypeVar,
cast as tcast,
)
from typing_extensions import Literal
from math import tan, sin, cos, pi, radians
from itertools import product, chain
from multimethod import multimethod
from typish import instance_of, get_type
from .hull import find_hull
from .selectors import StringSyntaxSelector, Selector
from .types import Real
from .occ_impl.shapes import Shape, Face, Edge, Wire, Compound, Vertex, edgesToWires
from .occ_impl.geom import Location, Vector
from .occ_impl.importers.dxf import _importDXF
from .occ_impl.sketch_solver import (
SketchConstraintSolver,
ConstraintKind,
ConstraintInvariants,
DOF,
arc_first,
arc_last,
arc_point,
)
Modes = Literal["a", "s", "i", "c"] # add, subtract, intersect, construct
Point = Union[Vector, Tuple[Real, Real]]
T = TypeVar("T", bound="Sketch")
SketchVal = Union[Shape, Location]
class Constraint(object):
tags: Tuple[str, ...]
args: Tuple[Edge, ...]
kind: ConstraintKind
param: Any
def __init__(
self,
tags: Tuple[str, ...],
args: Tuple[Edge, ...],
kind: ConstraintKind,
param: Any = None,
):
# validate based on the solver provided spec
if kind not in ConstraintInvariants:
raise ValueError(f"Unknown constraint {kind}.")
arity, types, param_type, converter = ConstraintInvariants[kind]
if arity != len(tags):
raise ValueError(
f"Invalid number of entities for constraint {kind}. Provided {len(tags)}, required {arity}."
)
if any(e.geomType() not in types for e in args):
raise ValueError(
f"Unsupported geometry types {[e.geomType() for e in args]} for constraint {kind}."
)
if not instance_of(param, param_type):
raise ValueError(
f"Unsupported argument types {get_type(param)}, required {param_type}."
)
# if all is fine store everything and possibly convert the params
self.tags = tags
self.args = args
self.kind = kind
self.param = tcast(Any, converter)(param) if converter else param
class Sketch(object):
"""
2D sketch. Supports faces, edges and edges with constraints based construction.
"""
parent: Any
locs: List[Location]
_faces: Compound
_wires: List[Wire]
_edges: List[Edge]
_selection: List[SketchVal]
_constraints: List[Constraint]
_tags: Dict[str, Sequence[SketchVal]]
_solve_status: Optional[Dict[str, Any]]
def __init__(self: T, parent: Any = None, locs: Iterable[Location] = (Location(),)):
"""
Construct an empty sketch.
"""
self.parent = parent
self.locs = list(locs)
self._faces = Compound.makeCompound(())
self._wires = []
self._edges = []
self._selection = []
self._constraints = []
self._tags = {}
self._solve_status = None
def __iter__(self) -> Iterator[Face]:
"""
Iterate over faces-locations combinations.
"""
return iter(f for l in self.locs for f in self._faces.moved(l).Faces())
def _tag(self: T, val: Sequence[Union[Shape, Location]], tag: str):
self._tags[tag] = val
# face construction
def face(
self: T,
b: Union[Wire, Iterable[Edge], Compound, T],
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
ignore_selection: bool = False,
) -> T:
"""
Construct a face from a wire or edges.
"""
res: Union[Face, Sketch, Compound]
if isinstance(b, Wire):
res = Face.makeFromWires(b)
elif isinstance(b, (Sketch, Compound)):
res = b
elif isinstance(b, Iterable):
wires = edgesToWires(tcast(Iterable[Edge], b))
res = Face.makeFromWires(*(wires[0], wires[1:]))
else:
raise ValueError(f"Unsupported argument {b}")
if angle != 0:
res = res.moved(Location(Vector(), Vector(0, 0, 1), angle))
return self.each(lambda l: res.moved(l), mode, tag, ignore_selection)
def importDXF(
self: T,
filename: str,
tol: float = 1e-6,
exclude: List[str] = [],
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
) -> T:
"""
Import a DXF file and construct face(s)
"""
res = Compound.makeCompound(_importDXF(filename, tol, exclude))
return self.face(res, angle, mode, tag)
def rect(
self: T,
w: Real,
h: Real,
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
) -> T:
"""
Construct a rectangular face.
"""
res = Face.makePlane(h, w).rotate(Vector(), Vector(0, 0, 1), angle)
return self.each(lambda l: res.located(l), mode, tag)
def circle(self: T, r: Real, mode: Modes = "a", tag: Optional[str] = None) -> T:
"""
Construct a circular face.
"""
res = Face.makeFromWires(Wire.makeCircle(r, Vector(), Vector(0, 0, 1)))
return self.each(lambda l: res.located(l), mode, tag)
def ellipse(
self: T,
a1: Real,
a2: Real,
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
) -> T:
"""
Construct an elliptical face.
"""
res = Face.makeFromWires(
Wire.makeEllipse(
a1, a2, Vector(), Vector(0, 0, 1), Vector(1, 0, 0), rotation_angle=angle
)
)
return self.each(lambda l: res.located(l), mode, tag)
def trapezoid(
self: T,
w: Real,
h: Real,
a1: Real,
a2: Optional[float] = None,
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
) -> T:
"""
Construct a trapezoidal face.
"""
v1 = Vector(-w / 2, -h / 2)
v2 = Vector(w / 2, -h / 2)
v3 = Vector(-w / 2 + h / tan(radians(a1)), h / 2)
v4 = Vector(w / 2 - h / tan(radians(a2) if a2 else radians(a1)), h / 2)
return self.polygon((v1, v2, v4, v3, v1), angle, mode, tag)
def slot(
self: T,
w: Real,
h: Real,
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
) -> T:
"""
Construct a slot-shaped face.
"""
p1 = Vector(-w / 2, h / 2)
p2 = Vector(w / 2, h / 2)
p3 = Vector(-w / 2, -h / 2)
p4 = Vector(w / 2, -h / 2)
p5 = Vector(-w / 2 - h / 2, 0)
p6 = Vector(w / 2 + h / 2, 0)
e1 = Edge.makeLine(p1, p2)
e2 = Edge.makeThreePointArc(p2, p6, p4)
e3 = Edge.makeLine(p4, p3)
e4 = Edge.makeThreePointArc(p3, p5, p1)
wire = Wire.assembleEdges((e1, e2, e3, e4))
return self.face(wire, angle, mode, tag)
def regularPolygon(
self: T,
r: Real,
n: int,
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
) -> T:
"""
Construct a regular polygonal face.
"""
pts = [
Vector(r * sin(i * 2 * pi / n), r * cos(i * 2 * pi / n))
for i in range(n + 1)
]
return self.polygon(pts, angle, mode, tag)
def polygon(
self: T,
pts: Iterable[Point],
angle: Real = 0,
mode: Modes = "a",
tag: Optional[str] = None,
) -> T:
"""
Construct a polygonal face.
"""
w = Wire.makePolygon(p if isinstance(p, Vector) else Vector(*p) for p in pts)
return self.face(w, angle, mode, tag)
# distribute locations
def rarray(self: T, xs: Real, ys: Real, nx: int, ny: int) -> T:
"""
Generate a rectangular array of locations.
"""
if nx < 1 or ny < 1:
raise ValueError(f"At least 1 elements required, requested {nx}, {ny}")
locs = []
offset = Vector((nx - 1) * xs, (ny - 1) * ys) * 0.5
for i, j in product(range(nx), range(ny)):
locs.append(Location(Vector(i * xs, j * ys) - offset))
if self._selection:
selection: Sequence[Union[Shape, Location, Vector]] = self._selection
else:
selection = [Vector()]
return self.push(
(l * el if isinstance(el, Location) else l * Location(el.Center()))
for l in locs
for el in selection
)
def parray(self: T, r: Real, a1: Real, a2: Real, n: int, rotate: bool = True) -> T:
"""
Generate a polar array of locations.
"""
if n < 1:
raise ValueError(f"At least 1 elements required, requested {n}")
x = r * sin(radians(a1))
y = r * cos(radians(a1))
if rotate:
loc = Location(Vector(x, y), Vector(0, 0, 1), -a1)
else:
loc = Location(Vector(x, y))
locs = [loc]
angle = (a2 - a1) / (n - 1)
for i in range(1, n):
phi = a1 + (angle * i)
x = r * sin(radians(phi))
y = r * cos(radians(phi))
if rotate:
loc = Location(Vector(x, y), Vector(0, 0, 1), -phi)
else:
loc = Location(Vector(x, y))
locs.append(loc)
if self._selection:
selection: Sequence[Union[Shape, Location, Vector]] = self._selection
else:
selection = [Vector()]
return self.push(
(l * el if isinstance(el, Location) else l * Location(el.Center()))
for l in locs
for el in selection
)
def distribute(
self: T, n: int, start: Real = 0, stop: Real = 1, rotate: bool = True
) -> T:
"""
Distribute locations along selected edges or wires.
"""
if not self._selection:
raise ValueError("Nothing selected to distirbute over")
params = [start + i * (stop - start) / n for i in range(n + 1)]
locs = []
for el in self._selection:
if isinstance(el, (Wire, Edge)):
if rotate:
locs.extend(el.locations(params, planar=True))
else:
locs.extend(Location(v) for v in el.positions(params))
else:
raise ValueError(f"Unsupported selection: {el}")
return self.push(locs)
def push(
self: T, locs: Iterable[Union[Location, Point]], tag: Optional[str] = None,
) -> T:
"""
Set current selection to given locations or points.
"""
self._selection = [
l if isinstance(l, Location) else Location(Vector(l)) for l in locs
]
if tag:
self._tag(self._selection[:], tag)
return self
def each(
self: T,
callback: Callable[[Location], Union[Face, "Sketch", Compound]],
mode: Modes = "a",
tag: Optional[str] = None,
ignore_selection: bool = False,
) -> T:
"""
Apply a callback on all applicable entities.
"""
res: List[Face] = []
locs: List[Location] = []
if self._selection and not ignore_selection:
for el in self._selection:
if isinstance(el, Location):
loc = el
else:
loc = Location(el.Center())
locs.append(loc)
else:
locs.append(Location())
for loc in locs:
tmp = callback(loc)
if isinstance(tmp, Sketch):
res.extend(tmp._faces.Faces())
elif isinstance(tmp, Compound):
res.extend(tmp.Faces())
else:
res.append(tmp)
if tag:
self._tag(res, tag)
if mode == "a":
self._faces = self._faces.fuse(*res)
elif mode == "s":
self._faces = self._faces.cut(*res)
elif mode == "i":
self._faces = self._faces.intersect(*res)
elif mode == "c":
if not tag:
raise ValueError("No tag specified - the geometry will be unreachable")
else:
raise ValueError(f"Invalid mode: {mode}")
return self
# modifiers
def hull(self: T, mode: Modes = "a", tag: Optional[str] = None) -> T:
"""
Generate a convex hull from current selection or all objects.
"""
if self._selection:
rv = find_hull(el for el in self._selection if isinstance(el, Edge))
elif self._faces:
rv = find_hull(el for el in self._faces.Edges())
elif self._edges or self._wires:
rv = find_hull(
chain(self._edges, chain.from_iterable(w.Edges() for w in self._wires))
)
else:
raise ValueError("No objects available for hull construction")
self.face(rv, mode=mode, tag=tag, ignore_selection=bool(self._selection))
return self
def offset(self: T, d: Real, mode: Modes = "a", tag: Optional[str] = None) -> T:
"""
Offset selected wires or edges.
"""
rv = (el.offset2D(d) for el in self._selection if isinstance(el, Wire))
for el in chain.from_iterable(rv):
self.face(el, mode=mode, tag=tag, ignore_selection=bool(self._selection))
return self
def _matchFacesToVertices(self) -> Dict[Face, List[Vertex]]:
rv = {}
for f in self._faces.Faces():
f_vertices = f.Vertices()
rv[f] = [
v for v in self._selection if isinstance(v, Vertex) and v in f_vertices
]
return rv
def fillet(self: T, d: Real) -> T:
"""
Add a fillet based on current selection.
"""
f2v = self._matchFacesToVertices()
self._faces = Compound.makeCompound(
k.fillet2D(d, v) if v else k for k, v in f2v.items()
)
return self
def chamfer(self: T, d: Real) -> T:
"""
Add a chamfer based on current selection.
"""
f2v = self._matchFacesToVertices()
self._faces = Compound.makeCompound(
k.chamfer2D(d, v) if v else k for k, v in f2v.items()
)
return self
def clean(self: T) -> T:
"""
Remove internal wires.
"""
self._faces = self._faces.clean()
return self
# selection
def _unique(self: T, vals: List[SketchVal]) -> List[SketchVal]:
tmp = {hash(v): v for v in vals}
return list(tmp.values())
def _select(
self: T,
s: Optional[Union[str, Selector]],
kind: Literal["Faces", "Wires", "Edges", "Vertices"],
tag: Optional[str] = None,
) -> T:
rv = []
if tag:
for el in self._tags[tag]:
rv.extend(getattr(el, kind)())
elif self._selection:
for el in self._selection:
if not isinstance(el, Location):
rv.extend(getattr(el, kind)())
else:
rv.extend(getattr(self._faces, kind)())
for el in self._edges:
rv.extend(getattr(el, kind)())
if s and isinstance(s, Selector):
filtered = s.filter(rv)
elif s and isinstance(s, str):
filtered = StringSyntaxSelector(s).filter(rv)
else:
filtered = rv
self._selection = self._unique(filtered)
return self
def tag(self: T, tag: str) -> T:
"""
Tag current selection.
"""
self._tags[tag] = list(self._selection)
return self
def select(self: T, *tags: str) -> T:
"""
Select based on tags.
"""
self._selection = []
for tag in tags:
self._selection.extend(self._tags[tag])
return self
def faces(
self: T, s: Optional[Union[str, Selector]] = None, tag: Optional[str] = None
) -> T:
"""
Select faces.
"""
return self._select(s, "Faces", tag)
def wires(
self: T, s: Optional[Union[str, Selector]] = None, tag: Optional[str] = None
) -> T:
"""
Select wires.
"""
return self._select(s, "Wires", tag)
def edges(
self: T, s: Optional[Union[str, Selector]] = None, tag: Optional[str] = None
) -> T:
"""
Select edges.
"""
return self._select(s, "Edges", tag)
def vertices(
self: T, s: Optional[Union[str, Selector]] = None, tag: Optional[str] = None
) -> T:
"""
Select vertices.
"""
return self._select(s, "Vertices", tag)
def reset(self: T) -> T:
"""
Reset current selection.
"""
self._selection = []
return self
def delete(self: T) -> T:
"""
Delete selected object.
"""
for obj in self._selection:
if isinstance(obj, Face):
self._faces.remove(obj)
elif isinstance(obj, Wire):
self._wires.remove(obj)
elif isinstance(obj, Edge):
self._edges.remove(obj)
self._selection = []
return self
# edge based interface
def _startPoint(self) -> Vector:
if not self._edges:
raise ValueError("No free edges available")
e = self._edges[0]
return e.startPoint()
def _endPoint(self) -> Vector:
if not self._edges:
raise ValueError("No free edges available")
e = self._edges[-1]
return e.endPoint()
def edge(
self: T, val: Edge, tag: Optional[str] = None, forConstruction: bool = False
) -> T:
"""
Add an edge to the sketch.
"""
val.forConstruction = forConstruction
self._edges.append(val)
if tag:
self._tag([val], tag)
return self
@multimethod
def segment(
self: T,
p1: Point,
p2: Point,
tag: Optional[str] = None,
forConstruction: bool = False,
) -> T:
"""
Construct a segment.
"""
val = Edge.makeLine(Vector(p1), Vector(p2))
return self.edge(val, tag, forConstruction)
@segment.register
def segment(
self: T, p2: Point, tag: Optional[str] = None, forConstruction: bool = False
) -> T:
p1 = self._endPoint()
val = Edge.makeLine(p1, Vector(p2))
return self.edge(val, tag, forConstruction)
@segment.register
def segment(
self: T,
l: Real,
a: Real,
tag: Optional[str] = None,
forConstruction: bool = False,
) -> T:
p1 = self._endPoint()
d = Vector(l * cos(radians(a)), l * sin(radians(a)))
val = Edge.makeLine(p1, p1 + d)
return self.edge(val, tag, forConstruction)
@multimethod
def arc(
self: T,
p1: Point,
p2: Point,
p3: Point,
tag: Optional[str] = None,
forConstruction: bool = False,
) -> T:
"""
Construct an arc.
"""
val = Edge.makeThreePointArc(Vector(p1), Vector(p2), Vector(p3))
return self.edge(val, tag, forConstruction)
@arc.register
def arc(
self: T,
p2: Point,
p3: Point,
tag: Optional[str] = None,
forConstruction: bool = False,
) -> T:
p1 = self._endPoint()
val = Edge.makeThreePointArc(Vector(p1), Vector(p2), Vector(p3))
return self.edge(val, tag, forConstruction)
@arc.register
def arc(
self: T,
c: Point,
r: Real,
a: Real,
da: Real,
tag: Optional[str] = None,
forConstruction: bool = False,
) -> T:
if abs(da) >= 360:
val = Edge.makeCircle(r, Vector(c), angle1=a, angle2=a, orientation=da > 0)
else:
p0 = Vector(c)
p1 = p0 + r * Vector(cos(radians(a)), sin(radians(a)))
p2 = p0 + r * Vector(cos(radians(a + da / 2)), sin(radians(a + da / 2)))
p3 = p0 + r * Vector(cos(radians(a + da)), sin(radians(a + da)))
val = Edge.makeThreePointArc(p1, p2, p3)
return self.edge(val, tag, forConstruction)
@multimethod
def spline(
self: T,
pts: Iterable[Point],
tangents: Optional[Iterable[Point]],
periodic: bool,
tag: Optional[str] = None,
forConstruction: bool = False,
) -> T:
"""
Construct a spline edge.
"""
val = Edge.makeSpline(
[Vector(*p) for p in pts],
[Vector(*t) for t in tangents] if tangents else None,
periodic,
)
return self.edge(val, tag, forConstruction)
@spline.register
def spline(
self: T,
pts: Iterable[Point],
tag: Optional[str] = None,
forConstruction: bool = False,
) -> T:
return self.spline(pts, None, False, tag, forConstruction)
def close(self: T, tag: Optional[str] = None) -> T:
"""
Connect last edge to the first one.
"""
self.segment(self._endPoint(), self._startPoint(), tag)
return self
def assemble(self: T, mode: Modes = "a", tag: Optional[str] = None) -> T:
"""
Assemble edges into faces.
"""
return self.face(
(e for e in self._edges if not e.forConstruction), 0, mode, tag
)
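    # Edge-based construction sketch (assumed usage, for illustration):
    #
    #   s = (
    #       Sketch()
    #       .segment((0.0, 0.0), (1.0, 0.0))
    #       .segment((1.0, 1.0))
    #       .close()
    #       .assemble()
    #   )
    #
    # segment()/arc()/spline() accumulate free edges, close() joins the open end
    # back to the start point, and assemble() converts the loop into a face.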
# constraints
@multimethod
def constrain(self: T, tag: str, constraint: ConstraintKind, arg: Any) -> T:
"""
Add a constraint.
"""
self._constraints.append(
Constraint((tag,), (self._tags[tag][0],), constraint, arg)
)
return self
@constrain.register
def constrain(
self: T, tag1: str, tag2: str, constraint: ConstraintKind, arg: Any
) -> T:
self._constraints.append(
Constraint(
(tag1, tag2),
(self._tags[tag1][0], self._tags[tag2][0]),
constraint,
arg,
)
)
return self
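    # Hypothetical constraint-driven workflow (tag and constraint names are assumptions):
    #
    #   s = (
    #       Sketch()
    #       .segment((0.0, 0.0), (0.0, 3.0), "s1")
    #       .arc((0.0, 3.0), (1.5, 4.5), (3.0, 3.0), "a1")
    #       .constrain("s1", "Fixed", None)
    #       .constrain("s1", "a1", "Coincident", None)
    #       .solve()
    #   )
    #
    # Tagged edges become solver entities; solve() runs SketchConstraintSolver
    # and writes the optimized geometry back into the tagged edges.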
def solve(self: T) -> T:
"""
Solve current constraints and update edge positions.
"""
entities = [] # list with all degrees of freedom
e2i = {} # mapping from tags to indices of entities
geoms = [] # geometry types
# fill entities, e2i and geoms
for i, (k, v) in enumerate(
filter(lambda kv: isinstance(kv[1][0], Edge), self._tags.items())
):
v0 = tcast(Edge, v[0])
# dispatch on geom type
if v0.geomType() == "LINE":
p1 = v0.startPoint()
p2 = v0.endPoint()
ent: DOF = (p1.x, p1.y, p2.x, p2.y)
elif v0.geomType() == "CIRCLE":
p = v0.arcCenter()
p1 = v0.startPoint() - p
p2 = v0.endPoint() - p
pm = v0.positionAt(0.5) - p
a1 = Vector(0, 1).getSignedAngle(p1)
a2 = p1.getSignedAngle(p2)
a3 = p1.getSignedAngle(pm)
if a3 > 0 and a2 < 0:
a2 += 2 * pi
elif a3 < 0 and a2 > 0:
a2 -= 2 * pi
radius = v0.radius()
ent = (p.x, p.y, radius, a1, a2)
else:
continue
entities.append(ent)
e2i[k] = i
geoms.append(v0.geomType())
# build the POD constraint list
constraints = []
for c in self._constraints:
ix = (e2i[c.tags[0]], e2i[c.tags[1]] if len(c.tags) == 2 else None)
constraints.append((ix, c.kind, c.param))
# optimize
solver = SketchConstraintSolver(entities, constraints, geoms)
res, self._solve_status = solver.solve()
self._solve_status["x"] = res
# translate back the solution - update edges
for g, (k, i) in zip(geoms, e2i.items()):
el = res[i]
# dispatch on geom type
if g == "LINE":
p1 = Vector(el[0], el[1])
p2 = Vector(el[2], el[3])
e = Edge.makeLine(p1, p2)
elif g == "CIRCLE":
p1 = Vector(*arc_first(el))
p2 = Vector(*arc_point(el, 0.5))
p3 = Vector(*arc_last(el))
e = Edge.makeThreePointArc(p1, p2, p3)
# overwrite the low level object
self._tags[k][0].wrapped = e.wrapped
return self
# misc
def copy(self: T) -> T:
"""
Create a partial copy of the sketch.
"""
rv = self.__class__()
rv._faces = self._faces.copy()
return rv
def moved(self: T, loc: Location) -> T:
"""
Create a partial copy of the sketch with moved _faces.
"""
rv = self.__class__()
rv._faces = self._faces.moved(loc)
return rv
def located(self: T, loc: Location) -> T:
"""
Create a partial copy of the sketch with a new location.
"""
rv = self.__class__(locs=(loc,))
rv._faces = self._faces.copy()
return rv
def finalize(self) -> Any:
"""
Finish sketch construction and return the parent
"""
return self.parent
| 25.862863 | 108 | 0.51074 |
975110d0dfc627cc8697c02a98961542107d6ad9 | 4,917 | py | Python | train.py | juliagong/sketch2face | 40b7f1ee129dc0ff14c4d3a4e3479a7ee5439296 | ["BSD-3-Clause"] | 10 | 2019-07-12T15:13:13.000Z | 2021-11-25T22:28:25.000Z | train.py | juliagong/sketch2face | 40b7f1ee129dc0ff14c4d3a4e3479a7ee5439296 | ["BSD-3-Clause"] | 5 | 2020-11-13T18:15:47.000Z | 2022-02-10T00:15:29.000Z | train.py | juliagong/sketch2face | 40b7f1ee129dc0ff14c4d3a4e3479a7ee5439296 | ["BSD-3-Clause"] | 2 | 2021-01-08T02:59:15.000Z | 2021-01-22T04:52:28.000Z |
"""General-purpose training script for image-to-image translation.
This script works for various models (with option '--model': e.g., pix2pix, cyclegan, colorization) and
different datasets (with option '--dataset_mode': e.g., aligned, unaligned, single, colorization).
You need to specify the dataset ('--dataroot'), experiment name ('--name'), and model ('--model').
It first creates model, dataset, and visualizer given the option.
It then does standard network training. During the training, it also visualize/save the images, print/save the loss plot, and save models.
The script supports continue/resume training. Use '--continue_train' to resume your previous training.
Example:
Train a CycleGAN model:
python train.py --dataroot ./datasets/maps --name maps_cyclegan --model cycle_gan
Train a pix2pix model:
python train.py --dataroot ./datasets/facades --name facades_pix2pix --model pix2pix --direction BtoA
See options/base_options.py and options/train_options.py for more training options.
See training and test tips at: https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/docs/tips.md
See frequently asked questions at: https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/docs/qa.md
"""
import time
from options.train_options import TrainOptions
from data import create_dataset
from models import create_model
from util.visualizer import Visualizer
def train(opt):
dataset = create_dataset(opt) # create a dataset given opt.dataset_mode and other options
dataset_size = len(dataset) # get the number of images in the dataset.
print('The number of training images = %d' % dataset_size)
model = create_model(opt) # create a model given opt.model and other options
model.setup(opt) # regular setup: load and print networks; create schedulers
visualizer = Visualizer(opt) # create a visualizer that display/save images and plots
total_iters = 0 # the total number of training iterations
max_epochs = min(opt.max_epochs, opt.niter + opt.niter_decay)+1
for epoch in range(opt.epoch_count, max_epochs): # outer loop for different epochs; we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>
epoch_start_time = time.time() # timer for entire epoch
iter_data_time = time.time() # timer for data loading per iteration
epoch_iter = 0 # the number of training iterations in current epoch, reset to 0 every epoch
for i, data in enumerate(dataset): # inner loop within one epoch
iter_start_time = time.time() # timer for computation per iteration
if total_iters % opt.print_freq == 0:
t_data = iter_start_time - iter_data_time
visualizer.reset()
total_iters += opt.batch_size
epoch_iter += opt.batch_size
model.set_input(data) # unpack data from dataset and apply preprocessing
model.optimize_parameters() # calculate loss functions, get gradients, update network weights
if total_iters % opt.display_freq == 0: # display images on visdom and save images to a HTML file
save_result = total_iters % opt.update_html_freq == 0
model.compute_visuals()
visualizer.display_current_results(model.get_current_visuals(), epoch, save_result)
if total_iters % opt.print_freq == 0: # print training losses and save logging information to the disk
losses = model.get_current_losses()
t_comp = (time.time() - iter_start_time) / opt.batch_size
visualizer.print_current_losses(epoch, epoch_iter, losses, t_comp, t_data)
if opt.display_id > 0:
visualizer.plot_current_losses(epoch, float(epoch_iter) / dataset_size, losses)
if total_iters % opt.save_latest_freq == 0: # cache our latest model every <save_latest_freq> iterations
print('saving the latest model (epoch %d, total_iters %d)' % (epoch, total_iters))
save_suffix = 'iter_%d' % total_iters if opt.save_by_iter else 'latest'
model.save_networks(save_suffix)
iter_data_time = time.time()
if epoch % opt.save_epoch_freq == 0: # cache our model every <save_epoch_freq> epochs
print('saving the model at the end of epoch %d, iters %d' % (epoch, total_iters))
model.save_networks('latest')
model.save_networks(epoch)
print('End of epoch %d / %d \t Time Taken: %d sec' % (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time))
model.update_learning_rate() # update learning rates at the end of every epoch.
if __name__ == '__main__':
opt = TrainOptions().parse() # get training options
    train(opt)
| 59.963415 | 159 | 0.68253 |
1e160ea206c276854600191e2edf0c93535990cb | 183 | py | Python | Week 1 In-out, int and bool/Task14.py | retverd/python_hse | cb9bfb092c1cf68ae0c53b9919ca24a71a8cbf88 | ["MIT"] | null | null | null | Week 1 In-out, int and bool/Task14.py | retverd/python_hse | cb9bfb092c1cf68ae0c53b9919ca24a71a8cbf88 | ["MIT"] | null | null | null | Week 1 In-out, int and bool/Task14.py | retverd/python_hse | cb9bfb092c1cf68ae0c53b9919ca24a71a8cbf88 | ["MIT"] | null | null | null |
# Compute the result of evaluating the expression 213169^{123} and write it out 10 times in a row.
#
# Hint: use the str function and string multiplication.
print(str(213169 ** 123) * 10)
| 30.5 | 87 | 0.765027 |
e5139d9499e708ca47d5e9e33d2ebd3e13589d95 | 10,307 | py | Python | apps/courses/views.py | open-source-uc/ramos-uc | 9b8e9151b46a86752910d3bf51bc4267449e8460 | ["MIT"] | 7 | 2021-07-14T18:13:35.000Z | 2021-11-21T20:10:54.000Z | apps/courses/views.py | nico-mac/ramos-uc | 9b8e9151b46a86752910d3bf51bc4267449e8460 | ["MIT"] | 57 | 2021-07-10T01:31:56.000Z | 2022-01-14T02:02:58.000Z | apps/courses/views.py | nico-mac/ramos-uc | 9b8e9151b46a86752910d3bf51bc4267449e8460 | ["MIT"] | 4 | 2021-07-23T16:51:55.000Z | 2021-08-31T02:41:41.000Z |
from django.shortcuts import render, get_object_or_404
from django.http import JsonResponse
from django.urls import reverse
from .models import Course, Section
from .serializers import PlannerSearchSerializer
from django.views.decorators.cache import cache_control, never_cache
from django.core.cache import cache
from django.core.paginator import Paginator
import re
# Home
def home(request):
return render(request, "index.html")
def get_fields_values():
return {
"mods": ["8:30", "10:00", "11:30", "2:00", "3:30", "5:00", "6:30", "8:00"],
"schools": Course.objects.available("school"),
"campuses": Section.objects.available("campus"),
"formats": Section.objects.available("format"),
"categories": Course.objects.available("category"),
"areas": Course.objects.available("area"),
}
# Planner index
@cache_control(private=True, max_age=3600 * 12)
def planner(request):
data = cache.get_or_set("possible_values", get_fields_values, 3600 * 12)
return render(request, "courses/planner.html", data)
# Shared schedule
@cache_control(private=True, max_age=3600 * 24)
def share(request):
return render(
request,
"courses/share.html",
{
"mods": ["8:30", "10:00", "11:30", "2:00", "3:30", "5:00", "6:30", "8:00"],
"ids": request.GET.get("h", []),
},
)
# Banner
@never_cache
def banner(request, id):
try:
section = Section.objects.get(pk=id)
except Section.DoesNotExist:
return JsonResponse({"error": "Error 404"}, status=404)
return JsonResponse(
{
"initials": str(section),
"name": section.course.name,
"quota": section.quota_list(),
"total_quota": section.total_quota,
}
)
# Planner search
@cache_control(must_revalidate=True)
def planner_search(request):
# parse QueryDict to data dict
data = {}
for key in request.GET.keys():
v = request.GET.getlist(key)
if key[-2:] == "[]":
key = key[:-2]
elif v[0]:
v = v[0]
else:
continue
data[key] = v
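    # e.g. a (hypothetical) query string ?q=calculo&campus[]=SJ&campus[]=CC becomes
    # {"q": "calculo", "campus": ["SJ", "CC"]}: "[]" keys keep the whole value list,
    # while plain keys keep only their first non-empty value.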
serializer = PlannerSearchSerializer(data=data)
if not serializer.is_valid():
return JsonResponse({"error": serializer.errors})
params = serializer.validated_data
# Base filter
sections = Section.objects.filter(period=params["period"])
# Query filter
if len(params["q"]):
sections = sections.search(params["q"])
# Credits filter
if params["credits"] is not None:
sections = sections.filter(course__credits=params["credits"])
# Campus filter
if len(params["campus"]):
sections = sections.filter(campus__in=params["campus"])
# Format filter
if len(params["format"]):
sections = sections.filter(format__in=params["format"])
# School filter
if len(params["school"]):
sections = sections.filter(course__school__in=params["school"])
# Area filter
if len(params["area"]):
sections = sections.filter(course__area__in=params["area"])
# Req
if params["without_req"]:
sections = sections.filter(course__req="No tiene")
# Max_mod
if params["max_mod"]:
sections = sections.exclude(scheduleinfo__total__gt=params["max_mod"])
# Mod_types
if params["mod_types"]:
disallowed_types = ["AYU", "CLAS", "LAB", "PRA", "SUP", "TAL", "TER", "TES"]
for mod_type in params["mod_types"]:
if mod_type == "OTRO":
disallowed_types.remove("PRA")
disallowed_types.remove("SUP")
disallowed_types.remove("TES")
else:
disallowed_types.remove(mod_type)
for mod_type in disallowed_types:
column = "scheduleinfo__" + mod_type.lower() + "__gt"
sections = sections.exclude(**{column: 0})
# Schedule
if params["overlap"]:
if not params["overlap_except"]:
for module in params["schedule"].strip().split(","):
if module:
column = "fullschedule__" + module.lower()
sections = sections.filter(**{column: "FREE"})
else:
for module in params["schedule"].strip().split(","):
if module:
column = "fullschedule__" + module.lower() + "__in"
sections = sections.filter(**{column: ["FREE", "AYU", "TER"]})
# Category
if len(params["category"]):
sections = sections.filter(course__category__in=params["category"])
# Quota
if params["free_quota"]:
sections = sections.exclude(available_quota=0)
    # Paginate and create response
results = []
start = (params["page"] - 1) * 25
for s in sections[start : start + 25]:
results.append(
{
"id": s.id,
"initials": str(s),
"name": s.course.name,
"teachers": s.teachers,
"format": s.format,
"available_quota": s.available_quota,
"schedule": s.schedule,
"course_initials": str(s).split("-")[0],
"section": int(str(s).split("-")[1]),
}
)
return JsonResponse({"results": results})
# Course profile
@cache_control(private=True, max_age=3600 * 12)
def single_course(request, initials):
initials = initials.upper()
period = request.GET.get("period")
# Get course data
cached_course = cache.get("c_" + initials)
if cached_course is None:
course = get_object_or_404(Course, initials=initials)
# Link requirements
requirements = (
re.sub(
r"([a-zA-Z]{3}\d{3,4}[a-zA-Z]?)",
r'<a href="/ramo/\1">\1</a>',
course.req,
)
if course.req
else "No tiene"
)
program = (
course.program.replace("\\", "<br>").replace("\n", "<br>")
if course.program
else "No disponible"
)
cached_course = {
"course": course,
"program": program,
"description": course.get_description(),
"requirements": requirements,
"periods": course.section_set.available("period", desc=True),
"calification": course.get_calification(),
"comments": course.get_comments(),
}
cache.set("c_" + initials, cached_course, 3600 * 24)
# Get sections data
if period is None:
period = cached_course["periods"][0] if len(cached_course["periods"]) else ""
cached_sections = cache.get(f"s_{initials}_{period}")
if cached_sections is None:
course = get_object_or_404(Course, initials=initials)
cached_sections = {
"sections": course.section_set.filter(period=period).order_by("section"),
"period": period,
}
cache.set(f"s_{initials}_{period}", cached_sections, 3600 * 12)
# Send response
return render(request, "courses/course.html", {**cached_course, **cached_sections})
# Section detail on planner
@cache_control(must_revalidate=True)
def single_section(request, id):
try:
section = Section.objects.get(pk=id)
except Section.DoesNotExist:
return JsonResponse({"error": "Error 404"}, status=404)
course = section.course
quota = section.last_quota()
return JsonResponse(
{
"initials": str(section),
"name": course.name,
"is_removable": section.is_removable,
"is_english": section.is_english,
"is_special": section.is_special,
"campus": section.campus,
"credits": course.credits,
"school": course.school,
"area": course.area,
"category": course.category,
"format": section.format,
"teachers": section.teachers,
"req": course.req,
"con": course.con,
"restr": course.restr,
"url": reverse("courses:course", args=[course.initials]),
"quota": list(quota),
"total_quota": section.total_quota,
"available_quota": section.available_quota,
}
)
# Data to add section to schedule
@cache_control(private=True, max_age=3600 * 24)
def schedule(request, id):
try:
section = Section.objects.get(pk=id)
except Section.DoesNotExist:
return JsonResponse({"error": "Error 404"}, status=404)
schedule = section.fullschedule.__dict__
schedule.pop("_state")
schedule.pop("section_id")
clean_schedule = {}
for key in schedule:
if schedule[key] != "FREE":
clean_schedule[key] = schedule[key]
return JsonResponse(
{
"initials": str(section),
"name": section.course.name,
"period": section.period,
"schedule": clean_schedule,
}
)
# Browse
@cache_control(private=True, max_age=3600 * 24)
def browse(request):
school_name = request.GET.get("escuela", None)
# Case single school
if school_name is not None:
courses = Course.objects.filter(school=school_name).order_by("initials")
paginator = Paginator(courses, 50)
page_number = request.GET.get("page")
return render(
request,
"courses/school.html",
{
"courses_page": paginator.get_page(page_number),
"school_name": school_name,
},
)
data = cache.get_or_set("possible_values", get_fields_values, 3600 * 12)
return render(request, "courses/browse.html", data)
# Search
@cache_control(private=True, max_age=3600)
def search(request):
q = request.GET.get("q", "")
results = Section.objects.all().search(q).distinct("course_id")[:20]
return render(
request,
"courses/search.html",
{
"results": results,
"q": q,
"results_count": len(results),
},
)
# Create
@cache_control(must_revalidate=True)
def create(request):
data = cache.get_or_set("possible_values", get_fields_values, 3600 * 12)
return render(request, "courses/create.html", data)
| 31.233333 | 87 | 0.582808 |
e7cea4180de4096a0b3dc78f32c63d7d9923592d | 2,047 | py | Python | tests/fiaas_skipper/deploy/test_cluster.py | rarruda/skipper | a5ed39eac102116ef49dc952d6f1f4017acaa11c | ["Apache-2.0"] | null | null | null | tests/fiaas_skipper/deploy/test_cluster.py | rarruda/skipper | a5ed39eac102116ef49dc952d6f1f4017acaa11c | ["Apache-2.0"] | null | null | null | tests/fiaas_skipper/deploy/test_cluster.py | rarruda/skipper | a5ed39eac102116ef49dc952d6f1f4017acaa11c | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8
# Copyright 2017-2019 The FIAAS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from k8s.models.common import ObjectMeta
from k8s.models.configmap import ConfigMap
from mock import mock
from fiaas_skipper.deploy.cluster import Cluster
NAME = "deployment_target"
def _create_configmap(namespace, tag=None):
metadata = ObjectMeta(name=NAME, namespace=namespace)
data = {}
if tag:
data["tag"] = tag
return ConfigMap(metadata=metadata, data=data)
def _assert_deployment_config(result, namespace, tag):
assert namespace == result.namespace
assert tag == result.tag
class TestCluster(object):
@pytest.fixture
def config_map_find(self):
with mock.patch("k8s.models.configmap.ConfigMap.find") as finder:
finder.return_value = (
_create_configmap("ns1", "stable"),
_create_configmap("ns2", "latest"),
_create_configmap("ns3"),
_create_configmap("ns4", "latest"),
_create_configmap("ns5", "stable"),
)
yield finder
@pytest.mark.usefixtures("config_map_find")
def test_finds_deployment_configs(self):
cluster = Cluster()
results = cluster.find_deployment_configs(NAME)
assert len(results) == 5
_assert_deployment_config(results[0], "ns1", "stable")
_assert_deployment_config(results[1], "ns2", "latest")
_assert_deployment_config(results[2], "ns3", "stable")
| 31.984375 | 74 | 0.685393 |
f51ffcfb554fd822f6c63157eaac4d87854073f1 | 2,094 | py | Python | envido.py | cabustillo13/Truco-Argentino | 84700759176cdfe1c9d7a4852abf63503fa22371 | ["MIT"] | null | null | null | envido.py | cabustillo13/Truco-Argentino | 84700759176cdfe1c9d7a4852abf63503fa22371 | ["MIT"] | null | null | null | envido.py | cabustillo13/Truco-Argentino | 84700759176cdfe1c9d7a4852abf63503fa22371 | ["MIT"] | null | null | null |
def extraerDatos(dato):
    palo = dato[0:1] # Extract the first character, which corresponds to the suit
    aux = dato.split(palo)
    valor = int(aux[1]) # Extract the numeric value of the card
    if (valor >= 10): # In envido, cards of 10 and above count as zero
        valor = 0
    return palo, valor
def contarEnvido(carta1,carta2,carta3):
palo1, aux1 = extraerDatos(carta1)
palo2, aux2 = extraerDatos(carta2)
palo3, aux3 = extraerDatos(carta3)
if ((palo1 == palo2) and (palo2 == palo3)):
#print("Flor") # Flor significa que las 3 cartas son del mismo palo
suma = 20 + aux1+aux2+aux3
if suma > 33: # La maxima suma que se puede realizar con la flor es 33
auxMax = max([aux1,aux2,aux3])
auxMin = min([aux1,aux2,aux3])
if ((aux1 > auxMin) and (aux1 < auxMax)): suma = 20 + aux1 + auxMax
if ((aux2 > auxMin) and (aux2 < auxMax)): suma = 20 + aux2 + auxMax
if ((aux3 > auxMin) and (aux3 < auxMax)): suma = 20 + aux3 + auxMax
texto = "FLOR. La suma de la flor es: " + str(suma)
#print("La suma de la flor es: {}".format(suma))
elif ((palo1 == palo2) and (palo2 != palo3)):
suma = 20 + aux1 + aux2
texto = "La suma del envido es: " + str(suma)
#print("La suma del envido es: {}".format(suma))
elif ((palo1 == palo3) and (palo1 != palo2)):
suma = 20 + aux1 + aux3
texto = "La suma del envido es: " + str(suma)
#print("La suma del envido es: {}".format(suma))
elif ((palo2 == palo3) and (palo1 != palo2)):
suma = 20 + aux2 + aux3
texto = "La suma del envido es: " + str(suma)
#print("La suma del envido es: {}".format(suma))
else:
#print("Mentiste. No tenias nada para el envido")
suma = max([aux1,aux2,aux3])
texto = "La suma del envido es: " + str(suma)
#print("La suma del envido es: {}".format(suma))
return texto
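# --- Added usage sketch (not part of the original envido.py) ---
# contarEnvido expects each card as a suit letter followed by the card number
# (e.g. "E7" for the 7 of espadas); this encoding is inferred from how
# extraerDatos parses its argument and is an assumption for illustration only.
if __name__ == "__main__":
    # Two cards of the same suit: 20 + 7 + 6 = 33
    print(contarEnvido("E7", "E6", "B2"))
    # Three different suits: only the highest envido card counts (5)
    print(contarEnvido("E1", "B5", "C12"))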
| 36.103448 | 88 | 0.536772 |
d8c38457a7da16f828a105c1a3d40f34d7640064 | 8,481 | py | Python | habitat_sim/agent/agent.py | rakeshshrestha31/habitat-sim | 5a304163319053ce47e9e31026323df3262f2f64 | ["MIT"] | 1 | 2019-04-22T06:04:48.000Z | 2019-04-22T06:04:48.000Z | habitat_sim/agent/agent.py | rakeshshrestha31/habitat-sim | 5a304163319053ce47e9e31026323df3262f2f64 | ["MIT"] | null | null | null | habitat_sim/agent/agent.py | rakeshshrestha31/habitat-sim | 5a304163319053ce47e9e31026323df3262f2f64 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import attr
import numpy as np
from .controls import ObjectControls, ActuationSpec
from habitat_sim import utils
from habitat_sim.sensors import SensorSuite
import habitat_sim.bindings as hsim
import habitat_sim.errors
from typing import Dict, Any, List, Union
__all__ = ["ActionSpec", "SixDOFPose", "AgentState", "AgentConfiguration", "Agent"]
BodyActions = {
"move_right",
"move_left",
"move_forward",
"move_backward",
"turn_left",
"turn_right",
}
def _default_action_space():
return dict(
move_forward=ActionSpec("move_forward", ActuationSpec(amount=0.25)),
turn_left=ActionSpec("turn_left", ActuationSpec(amount=10.0)),
turn_right=ActionSpec("turn_right", ActuationSpec(amount=10.0)),
)
@attr.s(auto_attribs=True, slots=True)
class ActionSpec(object):
r"""Defines how a specific action is implemented
Args:
name (str): Name of the function implementing the action in the move_func_map
        actuation (ActuationSpec): Arguments that will be passed to the function
"""
name: str
actuation: ActuationSpec = None
@attr.s(auto_attribs=True, slots=True)
class SixDOFPose(object):
r"""Specifies a position with 6 degrees of freedom
Args:
position (np.array): xyz position
rotation (np.quaternion): unit quaternion rotation
"""
position: np.array = np.zeros(3)
rotation: Union[np.quaternion, List] = np.quaternion(1, 0, 0, 0)
@attr.s(auto_attribs=True, slots=True)
class AgentState(object):
position: np.array = np.zeros(3)
rotation: Union[np.quaternion, List] = np.quaternion(1, 0, 0, 0)
velocity: np.array = np.zeros(3)
angular_velocity: np.array = np.zeros(3)
force: np.array = np.zeros(3)
torque: np.array = np.zeros(3)
sensor_states: Dict[str, SixDOFPose] = attr.Factory(dict)
@attr.s(auto_attribs=True, slots=True)
class AgentConfiguration(object):
height: float = 1.5
radius: float = 0.1
mass: float = 32.0
linear_acceleration: float = 20.0
angular_acceleration: float = 4 * np.pi
linear_friction: float = 0.5
angular_friction: float = 1.0
coefficient_of_restitution: float = 0.0
sensor_specifications: List[hsim.SensorSpec] = attr.Factory(
lambda: [hsim.SensorSpec()]
)
action_space: Dict[Any, ActionSpec] = attr.Factory(_default_action_space)
body_type: str = "cylinder"
@attr.s(auto_attribs=True)
class Agent(object):
r"""Implements an agent with multiple sensors
Args:
agent_config (AgentConfiguration): The configuration of the agent
Warning:
Agents are given controls over a node in the scene graph, but do **not**
own this node. This means that errors will occur if the owner of the scene graph
is deallocated. Generally the owner of the scene graph is the Simulator.
If you'd like to have an agent to control without loading up the simulator,
see unit tests for the agent in `tests/test_agent.py`. We recommend letting the
simulator create the agent and own the scene graph in almost all cases. Using the scene
graph in python is dangerous due to differences in c++ and python memory management
"""
agent_config: AgentConfiguration = attr.Factory(AgentConfiguration)
sensors: SensorSuite = attr.Factory(SensorSuite)
controls: ObjectControls = attr.Factory(ObjectControls)
body: hsim.AttachedObject = attr.Factory(hsim.AttachedObject)
def __attrs_post_init__(self):
self.body.object_type = hsim.AttachedObjectType.AGENT
self.reconfigure(self.agent_config)
def reconfigure(
self, agent_config: AgentConfiguration, reconfigure_sensors: bool = True
):
r"""Re-create the agent with a new configuration
Args:
agent_config (AgentConfiguration): New config
reconfigure_sensors (bool): Whether or not to also reconfigure the sensors, there
are specific cases where false makes sense, but most cases are covered by true
"""
self.agent_config = agent_config
if reconfigure_sensors:
self.sensors.clear()
for spec in self.agent_config.sensor_specifications:
self.sensors.add(hsim.PinholeCamera(spec))
if self.body.is_valid:
for _, v in self.sensors.items():
v.attach(self.scene_node.create_child())
def attach(self, scene_node: hsim.SceneNode):
r"""Gives the agent control over the specified scene node (but **not** ownership)
The agent will recursively call attach for the sensors
Args:
scene_node (hsim.SceneNode)
"""
self.body.attach(scene_node)
for _, v in self.sensors.items():
v.attach(self.scene_node.create_child())
def detach(self):
r"""Detaches the agent from the its current scene_node
Recursively calls detach on any sensors
"""
self.body.detach()
for _, v in self.sensors.items():
v.detach()
def act(self, action_id: Any):
r"""Take the action specified by action_id
Args:
action_id (Any): ID of the action.
                Retrieves the action from agent_config.action_space
"""
habitat_sim.errors.assert_obj_valid(self.body)
assert (
action_id in self.agent_config.action_space
), f"No action {action_id} in action space"
action = self.agent_config.action_space[action_id]
if action.name in BodyActions:
self.controls.action(
self.scene_node, action.name, action.actuation, apply_filter=True
)
else:
for _, v in self.sensors.items():
habitat_sim.errors.assert_obj_valid(v)
self.controls.action(
v.get_scene_node(),
action.name,
action.actuation,
apply_filter=False,
)
def get_state(self) -> AgentState:
habitat_sim.errors.assert_obj_valid(self.body)
state = AgentState(
self.body.get_absolute_position(),
utils.quat_from_coeffs(self.body.get_rotation()),
)
for k, v in self.sensors.items():
habitat_sim.errors.assert_obj_valid(v)
state.sensor_states[k] = SixDOFPose(
v.get_absolute_position(),
state.rotation * utils.quat_from_coeffs(v.get_rotation()),
)
return state
def set_state(self, state: AgentState, reset_sensors: bool = True):
r"""Sets the agents state
Args:
state (AgentState): The state to set the agent to
reset_sensors (bool): Whether or not to reset the sensors to their default intrinsic/extrinsic parameters
before setting their extrinsic state
"""
habitat_sim.errors.assert_obj_valid(self.body)
if isinstance(state.rotation, list):
state.rotation = utils.quat_from_coeffs(state.rotation)
self.body.reset_transformation()
self.body.translate(state.position)
self.body.set_rotation(utils.quat_to_coeffs(state.rotation))
if reset_sensors:
for _, v in self.sensors.items():
v.set_transformation_from_spec()
for k, v in state.sensor_states.items():
assert k in self.sensors
if isinstance(v.rotation, list):
v.rotation = utils.quat_from_coeffs(v.rotation)
s = self.sensors[k]
s.reset_transformation()
s.translate(
utils.quat_rotate_vector(
state.rotation.inverse(), v.position - state.position
)
)
s.set_rotation(utils.quat_to_coeffs(state.rotation.inverse() * v.rotation))
@property
def scene_node(self):
habitat_sim.errors.assert_obj_valid(self.body)
return self.body.get_scene_node()
@property
def state(self):
return self.get_state()
@state.setter
def state(self, new_state):
self.set_state(new_state, reset_sensors=True)
def __del__(self):
self.detach()
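# --- Added configuration sketch (not part of the original agent.py) ---
# Mirrors the pattern of _default_action_space above: each action id maps to an
# ActionSpec naming a registered control function plus its ActuationSpec. The
# height, radius, and actuation amounts below are illustrative values only.
def _example_agent_configuration() -> AgentConfiguration:
    custom_action_space = dict(
        move_forward=ActionSpec("move_forward", ActuationSpec(amount=0.5)),
        turn_left=ActionSpec("turn_left", ActuationSpec(amount=30.0)),
        turn_right=ActionSpec("turn_right", ActuationSpec(amount=30.0)),
    )
    return AgentConfiguration(
        height=1.2, radius=0.2, action_space=custom_action_space
    )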
| 32.872093 | 117 | 0.645561 |
e42cede256ece7910db430da56e82c7e048c0f50 | 19,245 | py | Python | deeprobust/graph/defense/node_embedding.py | Louise-LuLin/DeepRobust | a91b2d321f45cd7b24873220bd62a60911829d2c | ["MIT"] | null | null | null | deeprobust/graph/defense/node_embedding.py | Louise-LuLin/DeepRobust | a91b2d321f45cd7b24873220bd62a60911829d2c | ["MIT"] | null | null | null | deeprobust/graph/defense/node_embedding.py | Louise-LuLin/DeepRobust | a91b2d321f45cd7b24873220bd62a60911829d2c | ["MIT"] | null | null | null |
"""
Code in this file is modified from https://github.com/abojchevski/node_embedding_attack
'Adversarial Attacks on Node Embeddings via Graph Poisoning'
Aleksandar Bojchevski and Stephan Günnemann, ICML 2019
http://proceedings.mlr.press/v97/bojchevski19a.html
Copyright (C) owned by the authors, 2019
"""
import numba
import numpy as np
import scipy.sparse as sp
from gensim.models import Word2Vec
import networkx as nx
from gensim.models import KeyedVectors
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import normalize
from sklearn.metrics import f1_score, roc_auc_score, average_precision_score, accuracy_score
class BaseEmbedding:
"""Base class for node embedding methods such as DeepWalk and Node2Vec.
"""
def __init__(self):
self.embedding = None
self.model = None
def evaluate_node_classification(self, labels, idx_train, idx_test,
normalize_embedding=True, lr_params=None):
"""Evaluate the node embeddings on the node classification task..
Parameters
---------
labels: np.ndarray, shape [n_nodes]
The ground truth labels
normalize_embedding: bool
Whether to normalize the embeddings
idx_train: np.array
Indices of training nodes
idx_test: np.array
Indices of test nodes
lr_params: dict
Parameters for the LogisticRegression model
Returns
-------
[numpy.array, float, float] :
Predictions from LR, micro F1 score and macro F1 score
"""
embedding_matrix = self.embedding
if normalize_embedding:
embedding_matrix = normalize(embedding_matrix)
features_train = embedding_matrix[idx_train]
features_test = embedding_matrix[idx_test]
labels_train = labels[idx_train]
labels_test = labels[idx_test]
if lr_params is None:
lr = LogisticRegression(solver='lbfgs', max_iter=1000, multi_class='auto')
else:
lr = LogisticRegression(**lr_params)
lr.fit(features_train, labels_train)
lr_z_predict = lr.predict(features_test)
f1_micro = f1_score(labels_test, lr_z_predict, average='micro')
f1_macro = f1_score(labels_test, lr_z_predict, average='macro')
test_acc = accuracy_score(labels_test, lr_z_predict)
print('Micro F1:', f1_micro)
print('Macro F1:', f1_macro)
return lr_z_predict, f1_micro, f1_macro
def evaluate_link_prediction(self, adj, node_pairs, normalize_embedding=True):
"""Evaluate the node embeddings on the link prediction task.
adj: sp.csr_matrix, shape [n_nodes, n_nodes]
Adjacency matrix of the graph
node_pairs: numpy.array, shape [n_pairs, 2]
Node pairs
normalize_embedding: bool
Whether to normalize the embeddings
Returns
-------
[numpy.array, float, float]
Inner product of embeddings, Area under ROC curve (AUC) score and average precision (AP) score
"""
embedding_matrix = self.embedding
if normalize_embedding:
embedding_matrix = normalize(embedding_matrix)
true = adj[node_pairs[:, 0], node_pairs[:, 1]].A1
scores = (embedding_matrix[node_pairs[:, 0]] * embedding_matrix[node_pairs[:, 1]]).sum(1)
# print(np.unique(true, return_counts=True))
try:
auc_score = roc_auc_score(true, scores)
except Exception as e:
auc_score = 0.00
print('ROC error')
ap_score = average_precision_score(true, scores)
print("AUC:", auc_score)
print("AP:", ap_score)
return scores, auc_score, ap_score
class Node2Vec(BaseEmbedding):
"""node2vec: Scalable Feature Learning for Networks. KDD'15.
To use this model, you need to "pip install node2vec" first.
Examples
----
>>> from deeprobust.graph.data import Dataset
>>> from deeprobust.graph.global_attack import NodeEmbeddingAttack
>>> from deeprobust.graph.defense import Node2Vec
>>> data = Dataset(root='/tmp/', name='cora_ml', seed=15)
>>> adj, features, labels = data.adj, data.features, data.labels
>>> idx_train, idx_val, idx_test = data.idx_train, data.idx_val, data.idx_test
>>> # set up attack model
>>> attacker = NodeEmbeddingAttack()
>>> attacker.attack(adj, attack_type="remove", n_perturbations=1000)
>>> modified_adj = attacker.modified_adj
>>> print("Test Node2vec on clean graph")
>>> model = Node2Vec()
>>> model.fit(adj)
>>> model.evaluate_node_classification(labels, idx_train, idx_test)
>>> print("Test Node2vec on attacked graph")
>>> model = Node2Vec()
>>> model.fit(modified_adj)
>>> model.evaluate_node_classification(labels, idx_train, idx_test)
"""
def __init__(self):
# self.fit = self.node2vec_snap
super(Node2Vec, self).__init__()
self.fit = self.node2vec
def node2vec(self, adj, embedding_dim=64, walk_length=30, walks_per_node=10,
workers=8, window_size=10, num_neg_samples=1, p=4, q=1):
"""Compute Node2Vec embeddings for the given graph.
Parameters
----------
adj : sp.csr_matrix, shape [n_nodes, n_nodes]
Adjacency matrix of the graph
embedding_dim : int, optional
Dimension of the embedding
walks_per_node : int, optional
Number of walks sampled from each node
walk_length : int, optional
Length of each random walk
workers : int, optional
Number of threads (see gensim.models.Word2Vec process)
window_size : int, optional
Window size (see gensim.models.Word2Vec)
num_neg_samples : int, optional
Number of negative samples (see gensim.models.Word2Vec)
p : float
The hyperparameter p in node2vec
q : float
The hyperparameter q in node2vec
"""
walks = sample_n2v_random_walks(adj, walk_length, walks_per_node, p=p, q=q)
walks = [list(map(str, walk)) for walk in walks]
self.model = Word2Vec(walks, vector_size=embedding_dim, window=window_size, min_count=0, sg=1, workers=workers,
negative=num_neg_samples, hs=0, compute_loss=True)
self.embedding = self.model.wv.vectors[np.fromiter(map(int, self.model.wv.index_to_key), np.int32).argsort()]
class DeepWalk(BaseEmbedding):
"""DeepWalk: Online Learning of Social Representations. KDD'14. The implementation is
modified from https://github.com/abojchevski/node_embedding_attack
Examples
----
>>> from deeprobust.graph.data import Dataset
>>> from deeprobust.graph.global_attack import NodeEmbeddingAttack
>>> from deeprobust.graph.defense import DeepWalk
>>> data = Dataset(root='/tmp/', name='cora_ml', seed=15)
>>> adj, features, labels = data.adj, data.features, data.labels
>>> idx_train, idx_val, idx_test = data.idx_train, data.idx_val, data.idx_test
>>> # set up attack model
>>> attacker = NodeEmbeddingAttack()
>>> attacker.attack(adj, attack_type="remove", n_perturbations=1000)
>>> modified_adj = attacker.modified_adj
>>> print("Test DeepWalk on clean graph")
>>> model = DeepWalk()
>>> model.fit(adj)
>>> model.evaluate_node_classification(labels, idx_train, idx_test)
>>> print("Test DeepWalk on attacked graph")
>>> model.fit(modified_adj)
>>> model.evaluate_node_classification(labels, idx_train, idx_test)
>>> print("Test DeepWalk SVD")
>>> model = DeepWalk(type="svd")
>>> model.fit(modified_adj)
>>> model.evaluate_node_classification(labels, idx_train, idx_test)
"""
def __init__(self, type="skipgram"):
super(DeepWalk, self).__init__()
if type == "skipgram":
self.fit = self.deepwalk_skipgram
elif type == "svd":
self.fit = self.deepwalk_svd
else:
raise NotImplementedError
def deepwalk_skipgram(self, adj, embedding_dim=64, walk_length=80, walks_per_node=10,
workers=8, window_size=10, num_neg_samples=1):
"""Compute DeepWalk embeddings for the given graph using the skip-gram formulation.
Parameters
----------
adj : sp.csr_matrix, shape [n_nodes, n_nodes]
Adjacency matrix of the graph
embedding_dim : int, optional
Dimension of the embedding
walks_per_node : int, optional
Number of walks sampled from each node
walk_length : int, optional
Length of each random walk
workers : int, optional
Number of threads (see gensim.models.Word2Vec process)
window_size : int, optional
Window size (see gensim.models.Word2Vec)
num_neg_samples : int, optional
Number of negative samples (see gensim.models.Word2Vec)
"""
walks = sample_random_walks(adj, walk_length, walks_per_node)
walks = [list(map(str, walk)) for walk in walks]
self.model = Word2Vec(walks, vector_size=embedding_dim, window=window_size, min_count=0, sg=1, workers=workers,
negative=num_neg_samples, hs=0, compute_loss=True)
self.embedding = self.model.wv.vectors[np.fromiter(map(int, self.model.wv.index_to_key), np.int32).argsort()]
def deepwalk_svd(self, adj, window_size=10, embedding_dim=64, num_neg_samples=1, sparse=True):
"""Compute DeepWalk embeddings for the given graph using the matrix factorization formulation.
adj: sp.csr_matrix, shape [n_nodes, n_nodes]
Adjacency matrix of the graph
window_size: int
Size of the window
embedding_dim: int
Size of the embedding
num_neg_samples: int
Number of negative samples
sparse: bool
Whether to perform sparse operations
Returns
------
np.ndarray, shape [num_nodes, embedding_dim]
Embedding matrix.
"""
sum_powers_transition = sum_of_powers_of_transition_matrix(adj, window_size)
deg = adj.sum(1).A1
deg[deg == 0] = 1
deg_matrix = sp.diags(1 / deg)
volume = adj.sum()
M = sum_powers_transition.dot(deg_matrix) * volume / (num_neg_samples * window_size)
log_M = M.copy()
log_M[M > 1] = np.log(log_M[M > 1])
log_M = log_M.multiply(M > 1)
if not sparse:
log_M = log_M.toarray()
Fu, Fv = self.svd_embedding(log_M, embedding_dim, sparse)
loss = np.linalg.norm(Fu.dot(Fv.T) - log_M, ord='fro')
self.embedding = Fu
return Fu, Fv, loss, log_M
def svd_embedding(self, x, embedding_dim, sparse=False):
"""Computes an embedding by selection the top (embedding_dim) largest singular-values/vectors.
:param x: sp.csr_matrix or np.ndarray
The matrix that we want to embed
:param embedding_dim: int
Dimension of the embedding
:param sparse: bool
Whether to perform sparse operations
:return: np.ndarray, shape [?, embedding_dim], np.ndarray, shape [?, embedding_dim]
Embedding matrices.
"""
if sparse:
U, s, V = sp.linalg.svds(x, embedding_dim)
else:
U, s, V = np.linalg.svd(x)
S = np.diag(s)
Fu = U.dot(np.sqrt(S))[:, :embedding_dim]
Fv = np.sqrt(S).dot(V)[:embedding_dim, :].T
return Fu, Fv
def sample_random_walks(adj, walk_length, walks_per_node, seed=None):
"""Sample random walks of fixed length from each node in the graph in parallel.
Parameters
----------
adj : sp.csr_matrix, shape [n_nodes, n_nodes]
Sparse adjacency matrix
walk_length : int
Random walk length
walks_per_node : int
Number of random walks per node
seed : int or None
Random seed
Returns
-------
walks : np.ndarray, shape [num_walks * num_nodes, walk_length]
The sampled random walks
"""
if seed is None:
seed = np.random.randint(0, 100000)
adj = sp.csr_matrix(adj)
random_walks = _random_walk(adj.indptr,
adj.indices,
walk_length,
walks_per_node,
seed).reshape([-1, walk_length])
return random_walks
@numba.jit(nopython=True, parallel=True)
def _random_walk(indptr, indices, walk_length, walks_per_node, seed):
"""Sample r random walks of length l per node in parallel from the graph.
Parameters
----------
indptr : array-like
Pointer for the edges of each node
indices : array-like
Edges for each node
walk_length : int
Random walk length
walks_per_node : int
Number of random walks per node
seed : int
Random seed
Returns
-------
walks : array-like, shape [r*N*l]
The sampled random walks
"""
np.random.seed(seed)
N = len(indptr) - 1
walks = []
for ir in range(walks_per_node):
for n in range(N):
for il in range(walk_length):
walks.append(n)
n = np.random.choice(indices[indptr[n]:indptr[n + 1]])
return np.array(walks)
def sample_n2v_random_walks(adj, walk_length, walks_per_node, p, q, seed=None):
"""Sample node2vec random walks of fixed length from each node in the graph in parallel.
Parameters
----------
adj : sp.csr_matrix, shape [n_nodes, n_nodes]
Sparse adjacency matrix
walk_length : int
Random walk length
walks_per_node : int
Number of random walks per node
p: float
The probability to go back
q: float,
The probability to go explore undiscovered parts of the graphs
seed : int or None
Random seed
Returns
-------
walks : np.ndarray, shape [num_walks * num_nodes, walk_length]
The sampled random walks
"""
if seed is None:
seed = np.random.randint(0, 100000)
adj = sp.csr_matrix(adj)
random_walks = _n2v_random_walk(adj.indptr,
adj.indices,
walk_length,
walks_per_node,
p,
q,
seed)
return random_walks
@numba.jit(nopython=True)
def random_choice(arr, p):
"""Similar to `numpy.random.choice` and it suppors p=option in numba.
refer to <https://github.com/numba/numba/issues/2539#issuecomment-507306369>
Parameters
----------
arr : 1-D array-like
p : 1-D array-like
The probabilities associated with each entry in arr
Returns
-------
samples : ndarray
The generated random samples
"""
return arr[np.searchsorted(np.cumsum(p), np.random.random(), side="right")]
@numba.jit(nopython=True)
def _n2v_random_walk(indptr,
indices,
walk_length,
walks_per_node,
p,
q,
seed):
"""Sample r random walks of length l per node in parallel from the graph.
Parameters
----------
indptr : array-like
Pointer for the edges of each node
indices : array-like
Edges for each node
walk_length : int
Random walk length
walks_per_node : int
Number of random walks per node
p: float
The probability to go back
q: float,
The probability to go explore undiscovered parts of the graphs
seed : int
Random seed
Returns
-------
walks : list generator, shape [r, N*l]
The sampled random walks
"""
np.random.seed(seed)
N = len(indptr) - 1
for _ in range(walks_per_node):
for n in range(N):
walk = [n]
current_node = n
previous_node = N
previous_node_neighbors = np.empty(0, dtype=np.int32)
for _ in range(walk_length - 1):
neighbors = indices[indptr[current_node]:indptr[current_node + 1]]
if neighbors.size == 0:
break
probability = np.array([1 / q] * neighbors.size)
probability[previous_node == neighbors] = 1 / p
for i, nbr in enumerate(neighbors):
if np.any(nbr == previous_node_neighbors):
probability[i] = 1.
norm_probability = probability / np.sum(probability)
current_node = random_choice(neighbors, norm_probability)
walk.append(current_node)
previous_node_neighbors = neighbors
previous_node = current_node
yield walk
def sum_of_powers_of_transition_matrix(adj, pow):
"""Computes \sum_{r=1}^{pow) (D^{-1}A)^r.
Parameters
-----
adj: sp.csr_matrix, shape [n_nodes, n_nodes]
Adjacency matrix of the graph
pow: int
Power exponent
Returns
----
sp.csr_matrix
Sum of powers of the transition matrix of a graph.
"""
deg = adj.sum(1).A1
deg[deg == 0] = 1
transition_matrix = sp.diags(1 / deg).dot(adj)
sum_of_powers = transition_matrix
last = transition_matrix
for i in range(1, pow):
last = last.dot(transition_matrix)
sum_of_powers += last
return sum_of_powers
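# --- Added sanity-check sketch (not part of the original module) ---
# For a 2-node path graph the transition matrix D^{-1}A simply swaps the two
# nodes, so the sum of its first two powers is the all-ones matrix.
def _check_sum_of_powers_of_transition_matrix():
    adj = sp.csr_matrix(np.array([[0., 1.], [1., 0.]]))
    total = sum_of_powers_of_transition_matrix(adj, 2)
    assert np.allclose(total.toarray(), np.ones((2, 2)))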
if __name__ == "__main__":
from deeprobust.graph.data import Dataset
from deeprobust.graph.global_attack import NodeEmbeddingAttack
dataset_str = 'cora_ml'
data = Dataset(root='/tmp/', name=dataset_str, seed=15)
adj, features, labels = data.adj, data.features, data.labels
idx_train, idx_val, idx_test = data.idx_train, data.idx_val, data.idx_test
model = NodeEmbeddingAttack()
model.attack(adj, attack_type="add_by_remove", n_perturbations=1000, n_candidates=10000)
modified_adj = model.modified_adj
# train defense model
print("Test DeepWalk on clean graph")
model = DeepWalk()
model.fit(adj)
model.evaluate_node_classification(labels, idx_train, idx_test)
# model.evaluate_node_classification(labels, idx_train, idx_test, lr_params={"max_iter": 10})
print("Test DeepWalk on attacked graph")
model.fit(modified_adj)
model.evaluate_node_classification(labels, idx_train, idx_test)
print("\t link prediciton...")
model.evaluate_link_prediction(modified_adj, np.array(adj.nonzero()).T)
print("Test DeepWalk SVD")
model = DeepWalk(type="svd")
model.fit(modified_adj)
model.evaluate_node_classification(labels, idx_train, idx_test)
# train defense model
print("Test Node2vec on clean graph")
model = Node2Vec()
model.fit(adj)
model.evaluate_node_classification(labels, idx_train, idx_test)
print("Test Node2vec on attacked graph")
model = Node2Vec()
model.fit(modified_adj)
model.evaluate_node_classification(labels, idx_train, idx_test)
| 35.705009 | 119 | 0.625201 |
0d540995bc1eb278c39115a8b1780acf01fb4167 | 3,193 | py | Python | Lib/test/test_contains.py | arvindm95/unladen-swallow | 8175e37eaea7ca66ed03283b46bc1d2db0d3f9c3 | ["PSF-2.0"] | 2,293 | 2015-01-02T12:46:10.000Z | 2022-03-29T09:45:43.000Z | python/src/Lib/test/test_contains.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | ["Apache-2.0"] | 315 | 2015-05-31T11:55:46.000Z | 2022-01-12T08:36:37.000Z | python/src/Lib/test/test_contains.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | ["Apache-2.0"] | 1,033 | 2015-01-04T07:48:40.000Z | 2022-03-24T09:34:37.000Z |
from test.test_support import have_unicode, run_unittest
import unittest
class base_set:
def __init__(self, el):
self.el = el
class set(base_set):
def __contains__(self, el):
return self.el == el
class seq(base_set):
def __getitem__(self, n):
return [self.el][n]
class TestContains(unittest.TestCase):
def test_common_tests(self):
a = base_set(1)
b = set(1)
c = seq(1)
self.assert_(1 in b)
self.assert_(0 not in b)
self.assert_(1 in c)
self.assert_(0 not in c)
self.assertRaises(TypeError, lambda: 1 in a)
self.assertRaises(TypeError, lambda: 1 not in a)
# test char in string
self.assert_('c' in 'abc')
self.assert_('d' not in 'abc')
self.assert_('' in '')
self.assert_('' in 'abc')
self.assertRaises(TypeError, lambda: None in 'abc')
if have_unicode:
def test_char_in_unicode(self):
self.assert_('c' in unicode('abc'))
self.assert_('d' not in unicode('abc'))
self.assert_('' in unicode(''))
self.assert_(unicode('') in '')
self.assert_(unicode('') in unicode(''))
self.assert_('' in unicode('abc'))
self.assert_(unicode('') in 'abc')
self.assert_(unicode('') in unicode('abc'))
self.assertRaises(TypeError, lambda: None in unicode('abc'))
# test Unicode char in Unicode
self.assert_(unicode('c') in unicode('abc'))
self.assert_(unicode('d') not in unicode('abc'))
# test Unicode char in string
self.assert_(unicode('c') in 'abc')
self.assert_(unicode('d') not in 'abc')
def test_builtin_sequence_types(self):
# a collection of tests on builtin sequence types
a = range(10)
for i in a:
self.assert_(i in a)
self.assert_(16 not in a)
self.assert_(a not in a)
a = tuple(a)
for i in a:
self.assert_(i in a)
self.assert_(16 not in a)
self.assert_(a not in a)
class Deviant1:
"""Behaves strangely when compared
This class is designed to make sure that the contains code
works when the list is modified during the check.
"""
aList = range(15)
def __cmp__(self, other):
if other == 12:
self.aList.remove(12)
self.aList.remove(13)
self.aList.remove(14)
return 1
self.assert_(Deviant1() not in Deviant1.aList)
class Deviant2:
"""Behaves strangely when compared
This class raises an exception during comparison. That in
turn causes the comparison to fail with a TypeError.
"""
def __cmp__(self, other):
if other == 4:
raise RuntimeError, "gotcha"
try:
self.assert_(Deviant2() not in a)
except TypeError:
pass
def test_main():
run_unittest(TestContains)
if __name__ == '__main__':
test_main()
| 28.508929 | 72 | 0.547134 |
ab2c24c18597b4eb464cbd5e62f979cfb48ab3f8 | 1,853 | py | Python | input_configs.py | centrify/centrify-hparcsight-integration-sample | d18f2d35fba13a7941cc10a4c9790f071f49864f | ["Apache-2.0"] | 1 | 2021-03-17T16:42:46.000Z | 2021-03-17T16:42:46.000Z | input_configs.py | centrify/centrify-hparcsight-integration-sample | d18f2d35fba13a7941cc10a4c9790f071f49864f | ["Apache-2.0"] | null | null | null | input_configs.py | centrify/centrify-hparcsight-integration-sample | d18f2d35fba13a7941cc10a4c9790f071f49864f | ["Apache-2.0"] | null | null | null |
# Copyright 2018 Centrify Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import configparser
import os
config_file = './config.ini'
config = configparser.RawConfigParser()
def read_config():
if not os.path.isfile(config_file):
raise Exception("\'config.ini\' file is missing.")
else:
try:
config.read(config_file)
validate_config()
except Exception as e:
raise Exception("Error while reading cef mappings file. Details: %s" % (e))
def validate_config():
if not config.has_section('common'):
raise Exception("\'common\' section in config file is missing.")
if not config.has_option('common', 'tenant') or not config.has_option('common',
'siem_username') or not config.has_option(
'common', 'siem_password'):
raise Exception(
"\'Make sure that following 3 configs are present in config.ini - tenant, siem_username and siem_password.")
if len(config['common']['tenant']) == 0 or len(config['common']['siem_username']) == 0 or len(
config['common']['siem_password']) == 0:
raise Exception(
"\'Make sure that following 3 configs have values in config.ini - tenant, siem_username and siem_password.")
read_config()
 | 39.425532 | 120 | 0.660011 |
e76c4b9335e67250430eed22780115afa827fe36 | 3,421 | py | Python | examples/agents/cem.py | afernandezcanosa/gym | 11c3b77ea0afd467b51e4f2c73e47e103ef128e9 | ["Python-2.0", "OLDAP-2.7"] | 94 | 2018-01-19T05:31:39.000Z | 2020-10-27T15:11:47.000Z | examples/agents/cem.py | huangjiancong1/gym_baxter | 7534d9504b4678a3b09a4e17466f54eaeaf23ccc | ["Apache-2.0"] | 14 | 2020-01-28T23:09:45.000Z | 2022-03-12T00:06:59.000Z | examples/agents/cem.py | huangjiancong1/gym_baxter | 7534d9504b4678a3b09a4e17466f54eaeaf23ccc | ["Apache-2.0"] | 17 | 2018-02-17T17:14:16.000Z | 2019-04-28T05:49:04.000Z |
from __future__ import print_function
import gym
from gym import wrappers, logger
import numpy as np
from six.moves import cPickle as pickle
import json, sys, os
from os import path
from _policies import BinaryActionLinearPolicy # Different file so it can be unpickled
import argparse
def cem(f, th_mean, batch_size, n_iter, elite_frac, initial_std=1.0):
"""
Generic implementation of the cross-entropy method for maximizing a black-box function
f: a function mapping from vector -> scalar
th_mean: initial mean over input distribution
batch_size: number of samples of theta to evaluate per batch
n_iter: number of batches
elite_frac: each batch, select this fraction of the top-performing samples
initial_std: initial standard deviation over parameter vectors
"""
n_elite = int(np.round(batch_size*elite_frac))
th_std = np.ones_like(th_mean) * initial_std
for _ in range(n_iter):
ths = np.array([th_mean + dth for dth in th_std[None,:]*np.random.randn(batch_size, th_mean.size)])
ys = np.array([f(th) for th in ths])
elite_inds = ys.argsort()[::-1][:n_elite]
elite_ths = ths[elite_inds]
th_mean = elite_ths.mean(axis=0)
th_std = elite_ths.std(axis=0)
yield {'ys' : ys, 'theta_mean' : th_mean, 'y_mean' : ys.mean()}
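# --- Added usage sketch (not part of the original example) ---
# Drives the cem() generator on a toy quadratic objective, independent of gym;
# the target vector below is arbitrary illustrative data.
def _cem_on_toy_objective():
    target = np.array([1.0, -2.0, 0.5])
    f = lambda th: -np.sum((th - target) ** 2)
    for iterdata in cem(f, np.zeros(3), batch_size=50, n_iter=20, elite_frac=0.2):
        pass
    return iterdata['theta_mean']  # should end up close to `target`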
def do_rollout(agent, env, num_steps, render=False):
total_rew = 0
ob = env.reset()
for t in range(num_steps):
a = agent.act(ob)
(ob, reward, done, _info) = env.step(a)
total_rew += reward
if render and t%3==0: env.render()
if done: break
return total_rew, t+1
if __name__ == '__main__':
logger.set_level(logger.INFO)
parser = argparse.ArgumentParser()
parser.add_argument('--display', action='store_true')
parser.add_argument('target', nargs="?", default="CartPole-v0")
args = parser.parse_args()
env = gym.make(args.target)
env.seed(0)
np.random.seed(0)
params = dict(n_iter=10, batch_size=25, elite_frac=0.2)
num_steps = 200
# You provide the directory to write to (can be an existing
# directory, but can't contain previous monitor results. You can
# also dump to a tempdir if you'd like: tempfile.mkdtemp().
outdir = '/tmp/cem-agent-results'
env = wrappers.Monitor(env, outdir, force=True)
# Prepare snapshotting
# ----------------------------------------
def writefile(fname, s):
with open(path.join(outdir, fname), 'w') as fh: fh.write(s)
info = {}
info['params'] = params
info['argv'] = sys.argv
info['env_id'] = env.spec.id
# ------------------------------------------
def noisy_evaluation(theta):
agent = BinaryActionLinearPolicy(theta)
rew, T = do_rollout(agent, env, num_steps)
return rew
# Train the agent, and snapshot each stage
for (i, iterdata) in enumerate(
cem(noisy_evaluation, np.zeros(env.observation_space.shape[0]+1), **params)):
print('Iteration %2i. Episode mean reward: %7.3f'%(i, iterdata['y_mean']))
agent = BinaryActionLinearPolicy(iterdata['theta_mean'])
if args.display: do_rollout(agent, env, 200, render=True)
writefile('agent-%.4i.pkl'%i, str(pickle.dumps(agent, -1)))
# Write out the env at the end so we store the parameters of this
# environment.
writefile('info.json', json.dumps(info))
env.close()
| 36.393617 | 108 | 0.64981 |
a7b68ede6eefacfdd43fef9883a61abd674325d0 | 16,942 | py | Python | arxiv_latex_cleaner/tests/arxiv_latex_cleaner_test.py | aditya95sriram/arxiv-latex-cleaner | 00918f146a54b0edd011b63a1bab3bef380faa4e | ["Apache-2.0"] | 1 | 2021-09-14T04:35:01.000Z | 2021-09-14T04:35:01.000Z | arxiv_latex_cleaner/tests/arxiv_latex_cleaner_test.py | aditya95sriram/arxiv-latex-cleaner | 00918f146a54b0edd011b63a1bab3bef380faa4e | ["Apache-2.0"] | null | null | null | arxiv_latex_cleaner/tests/arxiv_latex_cleaner_test.py | aditya95sriram/arxiv-latex-cleaner | 00918f146a54b0edd011b63a1bab3bef380faa4e | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# Copyright 2018 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import filecmp
from os import path
import shutil
import unittest
from absl.testing import parameterized
from arxiv_latex_cleaner import arxiv_latex_cleaner
from PIL import Image
def make_args(
input_folder='foo/bar',
resize_images=False,
im_size=500,
compress_pdf=False,
pdf_im_resolution=500,
images_whitelist=None,
commands_to_delete=None,
use_external_tikz='foo/bar/tikz',
):
if images_whitelist is None:
images_whitelist = {}
if commands_to_delete is None:
commands_to_delete = []
args = {
'input_folder': input_folder,
'resize_images': resize_images,
'im_size': im_size,
'compress_pdf': compress_pdf,
'pdf_im_resolution': pdf_im_resolution,
'images_whitelist': images_whitelist,
'commands_to_delete': commands_to_delete,
'use_external_tikz': use_external_tikz,
}
return args
def make_contents():
contents = (r'& \figcompfigures{'
'\n\timage1.jpg'
'\n}{'
'\n\t'
r'\ww'
'\n}{'
'\n\t1.0'
'\n\t}'
'\n& '
r'\figcompfigures{image2.jpg}{\ww}{1.0}')
return contents
def make_patterns():
pattern = r'(?:\\figcompfigures{\s*)(?P<first>.*?)\s*}\s*{\s*(?P<second>.*?)\s*}\s*{\s*(?P<third>.*?)\s*}'
insertion = r"""\parbox[c]{{
{second}\linewidth
}}{{
\includegraphics[
width={third}\linewidth
]{{
figures/{first}
}}
}} """
description = 'Replace figcompfigures'
output = {
'pattern': pattern,
'insertion': insertion,
'description': description
}
return [output]
def make_search_reference_tests():
return ({
'testcase_name': 'with_extension_prefix1',
'filenames': ['include_image_yes.png', 'include_image.png'],
'contents': '\\include{include_image_yes.png}',
'extension_optional': False,
'true_outputs': ['include_image_yes.png']
}, {
'testcase_name': 'with_extension_prefix2',
'filenames': ['include_image_yes.png', 'include_image.png'],
'contents': '\\include{include_image.png}',
'extension_optional': False,
'true_outputs': ['include_image.png']
}, {
'testcase_name': 'with_extension_nested_more_specific',
'filenames': [
'images/im_included.png', 'images/include/images/im_included.png'
],
'contents': '\\include{images/include/images/im_included.png}',
'extension_optional': False,
'true_outputs': ['images/include/images/im_included.png']
}, {
'testcase_name':
'with_extension_nested_less_specific',
'filenames': [
'images/im_included.png', 'images/include/images/im_included.png'
],
'contents':
'\\include{images/im_included.png}',
'extension_optional':
False,
'true_outputs': [
'images/im_included.png', 'images/include/images/im_included.png'
]
}, {
'testcase_name': 'with_extension_nested_substring',
'filenames': ['images/im_included.png', 'im_included.png'],
'contents': '\\include{images/im_included.png}',
'extension_optional': False,
'true_outputs': ['images/im_included.png']
}, {
'testcase_name': 'with_extension_nested_diffpath',
'filenames': ['images/im_included.png', 'figures/im_included.png'],
'contents': '\\include{images/im_included.png}',
'extension_optional': False,
'true_outputs': ['images/im_included.png']
})
class UnitTests(parameterized.TestCase):
@parameterized.named_parameters(
{
'testcase_name': 'empty config',
'args': make_args(),
'config_params': {},
'final_args': make_args(),
},
{
'testcase_name': 'empty args',
'args': {},
'config_params': make_args(),
'final_args': make_args(),
},
{
'testcase_name':
'args and config provided',
'args':
make_args(
images_whitelist={'path1/': 1000},
commands_to_delete=[r'\todo1']),
'config_params':
make_args(
'foo_/bar_',
True,
1000,
True,
1000,
images_whitelist={'path2/': 1000},
commands_to_delete=[r'\todo2'],
use_external_tikz='foo_/bar_/tikz_',
),
'final_args':
make_args(
images_whitelist={
'path1/': 1000,
'path2/': 1000
},
commands_to_delete=[r'\todo1', r'\todo2'],
),
},
)
def test_merge_args_into_config(self, args, config_params, final_args):
self.assertEqual(
arxiv_latex_cleaner.merge_args_into_config(args, config_params),
final_args)
@parameterized.named_parameters(
{
'testcase_name': 'no_comment',
'line_in': 'Foo\n',
'true_output': 'Foo\n'
}, {
'testcase_name': 'auto_ignore',
'line_in': '%auto-ignore\n',
'true_output': '%auto-ignore\n'
}, {
'testcase_name': 'percent',
'line_in': r'100\% accurate\n',
'true_output': r'100\% accurate\n'
}, {
'testcase_name': 'comment',
'line_in': ' % Comment\n',
'true_output': ''
}, {
'testcase_name': 'comment_inline',
'line_in': 'Foo %Comment\n',
'true_output': 'Foo %\n'
})
def test_remove_comments_inline(self, line_in, true_output):
self.assertEqual(
arxiv_latex_cleaner._remove_comments_inline(line_in), true_output)
@parameterized.named_parameters(
{
'testcase_name': 'no_command',
'text_in': 'Foo\nFoo2\n',
'true_output': 'Foo\nFoo2\n'
}, {
'testcase_name': 'command_not_removed',
'text_in': '\\textit{Foo\nFoo2}\n',
'true_output': '\\textit{Foo\nFoo2}\n'
}, {
'testcase_name': 'command_no_end_line_removed',
'text_in': 'A\\todo{B\nC}D\nE\n\\end{document}',
'true_output': 'AD\nE\n\\end{document}'
}, {
'testcase_name': 'command_with_end_line_removed',
'text_in': 'A\n\\todo{B\nC}\nD\n\\end{document}',
'true_output': 'A\n%\nD\n\\end{document}'
})
def test_remove_command(self, text_in, true_output):
self.assertEqual(
arxiv_latex_cleaner._remove_command(text_in, 'todo'), true_output)
@parameterized.named_parameters(
{
'testcase_name': 'no_environment',
'text_in': 'Foo\n',
'true_output': 'Foo\n'
}, {
'testcase_name': 'environment_not_removed',
'text_in': 'Foo\n\\begin{equation}\n3x+2\n\\end{equation}\nFoo',
'true_output': 'Foo\n\\begin{equation}\n3x+2\n\\end{equation}\nFoo'
}, {
'testcase_name': 'environment_removed',
'text_in': 'Foo\\begin{comment}\n3x+2\n\\end{comment}\nFoo',
'true_output': 'Foo\nFoo'
})
def test_remove_environment(self, text_in, true_output):
self.assertEqual(
arxiv_latex_cleaner._remove_environment(text_in, 'comment'),
true_output)
@parameterized.named_parameters(
{
'testcase_name': 'no_iffalse',
'text_in': 'Foo\n',
'true_output': 'Foo\n'
}, {
'testcase_name': 'if_not_removed',
'text_in': '\\ifvar\n\\ifvar\nFoo\n\\fi\n\\fi\n',
'true_output': '\\ifvar\n\\ifvar\nFoo\n\\fi\n\\fi\n'
}, {
'testcase_name': 'if_removed_with_nested_ifvar',
'text_in': '\\ifvar\n\\iffalse\n\\ifvar\nFoo\n\\fi\n\\fi\n\\fi\n',
'true_output': '\\ifvar\n\\fi\n'
}, {
'testcase_name': 'if_removed_with_nested_iffalse',
'text_in': '\\ifvar\n\\iffalse\n\\iffalse\nFoo\n\\fi\n\\fi\n\\fi\n',
'true_output': '\\ifvar\n\\fi\n'
}, {
'testcase_name': 'if_removed_eof',
'text_in': '\\iffalse\nFoo\n\\fi',
'true_output': ''
}, {
'testcase_name': 'if_removed_space',
'text_in': '\\iffalse\nFoo\n\\fi ',
'true_output': ''
}, {
'testcase_name': 'if_removed_backslash',
'text_in': '\\iffalse\nFoo\n\\fi\\end{document}',
'true_output': '\\end{document}'
})
def test_remove_iffalse_block(self, text_in, true_output):
self.assertEqual(
arxiv_latex_cleaner._remove_iffalse_block(text_in), true_output)
@parameterized.named_parameters(
{
'testcase_name': 'all_pass',
'inputs': ['abc', 'bca'],
'patterns': ['a'],
'true_outputs': ['abc', 'bca'],
}, {
'testcase_name': 'not_all_pass',
'inputs': ['abc', 'bca'],
'patterns': ['a$'],
'true_outputs': ['bca'],
})
def test_keep_pattern(self, inputs, patterns, true_outputs):
self.assertEqual(
list(arxiv_latex_cleaner._keep_pattern(inputs, patterns)), true_outputs)
@parameterized.named_parameters(
{
'testcase_name': 'all_pass',
'inputs': ['abc', 'bca'],
'patterns': ['a'],
'true_outputs': [],
}, {
'testcase_name': 'not_all_pass',
'inputs': ['abc', 'bca'],
'patterns': ['a$'],
'true_outputs': ['abc'],
})
def test_remove_pattern(self, inputs, patterns, true_outputs):
self.assertEqual(
list(arxiv_latex_cleaner._remove_pattern(inputs, patterns)),
true_outputs)
@parameterized.named_parameters(
{
'testcase_name':
'replace_contents',
'content':
make_contents(),
'patterns_and_insertions':
make_patterns(),
'true_outputs': (
r'& \parbox[c]{\ww\linewidth}{\includegraphics[width=1.0\linewidth]{figures/image1.jpg}}'
'\n'
r'& \parbox[c]{\ww\linewidth}{\includegraphics[width=1.0\linewidth]{figures/image2.jpg}}'
),
},)
def test_find_and_replace_patterns(self, content, patterns_and_insertions,
true_outputs):
output = arxiv_latex_cleaner._find_and_replace_patterns(
content, patterns_and_insertions)
output = arxiv_latex_cleaner.strip_whitespace(output)
true_outputs = arxiv_latex_cleaner.strip_whitespace(true_outputs)
self.assertEqual(output, true_outputs)
@parameterized.named_parameters(
{
'testcase_name': 'no_tikz',
'text_in': 'Foo\n',
'figures_in': ['ext_tikz/test1.pdf', 'ext_tikz/test2.pdf'],
'true_output': 'Foo\n'
}, {
'testcase_name':
'tikz_no_match',
'text_in':
'Foo\\tikzsetnextfilename{test_no_match}\n\\begin{tikzpicture}\n\\node (test) at (0,0) {Test1};\n\\end{tikzpicture}\nFoo',
'figures_in': ['ext_tikz/test1.pdf', 'ext_tikz/test2.pdf'],
'true_output':
'Foo\\tikzsetnextfilename{test_no_match}\n\\begin{tikzpicture}\n\\node (test) at (0,0) {Test1};\n\\end{tikzpicture}\nFoo'
}, {
'testcase_name':
'tikz_match',
'text_in':
'Foo\\tikzsetnextfilename{test2}\n\\begin{tikzpicture}\n\\node (test) at (0,0) {Test1};\n\\end{tikzpicture}\nFoo',
'figures_in': ['ext_tikz/test1.pdf', 'ext_tikz/test2.pdf'],
'true_output':
'Foo\\includegraphics{ext_tikz/test2.pdf}\nFoo'
})
def test_replace_tikzpictures(self, text_in, figures_in, true_output):
self.assertEqual(
arxiv_latex_cleaner._replace_tikzpictures(text_in, figures_in),
true_output)
@parameterized.named_parameters(*make_search_reference_tests())
def test_search_reference_weak(self, filenames, contents, extension_optional,
true_outputs):
cleaner_outputs = []
for filename in filenames:
reference = arxiv_latex_cleaner._search_reference(filename, contents,
extension_optional)
if reference is not None:
cleaner_outputs.append(filename)
# weak check (passes as long as cleaner includes a superset of the true_output)
for true_output in true_outputs:
self.assertIn(true_output, cleaner_outputs)
@parameterized.named_parameters(*make_search_reference_tests())
def test_search_reference_strict(self, filenames, contents,
extension_optional, true_outputs):
cleaner_outputs = []
for filename in filenames:
reference = arxiv_latex_cleaner._search_reference(filename, contents,
extension_optional)
if reference is not None:
cleaner_outputs.append(filename)
# strict check (set of files must match exactly)
self.assertEqual(cleaner_outputs, true_outputs)
@parameterized.named_parameters({
'testcase_name': 'basic',
'filename': 'path/to/img.ext',
'content_strs': [
# match
'{img.ext}',
'{to/img.ext}',
'{path/to/img.ext}',
'{%\nimg.ext }',
'{to/img.ext % \n}',
'{ \npath/to/img.ext\n}',
'{img}',
'{to/img}',
'{path/to/img}',
# dont match
'{from/img.ext}',
'{from/img}',
'{imgoext}',
'{from/imgo}',
'{path/img.ext}'
],
'true_outputs': [True] * 9 + [False] * 5
})
def test_search_reference_filewise(self, filename, content_strs,
true_outputs):
for content, true_output in zip(content_strs, true_outputs):
reference = arxiv_latex_cleaner._search_reference(filename, content,
False)
matched = reference is not None
msg_not = ' ' if true_output else ' not '
msg = '{} should have{}matched {}'.format(filename, msg_not, content)
self.assertEqual(matched, true_output, msg)
class IntegrationTests(unittest.TestCase):
def setUp(self):
super(IntegrationTests, self).setUp()
self.out_path = 'tex_arXiv'
def _compare_files(self, filename, filename_true):
if path.splitext(filename)[1].lower() in ['.jpg', '.jpeg', '.png']:
with Image.open(filename) as im, Image.open(filename_true) as im_true:
# We check only the sizes of the images, checking pixels would be too
# complicated in case the resize implementations change.
self.assertEqual(
im.size, im_true.size,
'Images {:s} was not resized properly.'.format(filename))
else:
self.assertTrue(
filecmp.cmp(filename, filename_true),
'{:s} and {:s} are not equal.'.format(filename, filename_true))
def test_complete(self):
out_path_true = 'tex_arXiv_true'
# Make sure the folder does not exist, since we erase it in the test.
if path.isdir(self.out_path):
raise RuntimeError('The folder {:s} should not exist.'.format(
self.out_path))
arxiv_latex_cleaner.run_arxiv_cleaner({
'input_folder': 'tex',
'images_whitelist': {
'images/im2_included.jpg': 200,
'images/im3_included.png': 400,
},
'resize_images': True,
'im_size': 100,
'compress_pdf': False,
'pdf_im_resolution': 500,
'commands_to_delete': ['mytodo'],
'use_external_tikz': 'ext_tikz',
'keep_bib': False
})
# Checks the set of files is the same as in the true folder.
out_files = set(arxiv_latex_cleaner._list_all_files(self.out_path))
out_files_true = set(arxiv_latex_cleaner._list_all_files(out_path_true))
self.assertEqual(out_files, out_files_true)
# Compares the contents of each file against the true value.
for f1 in out_files:
self._compare_files(
path.join(self.out_path, f1), path.join(out_path_true, f1))
def tearDown(self):
shutil.rmtree(self.out_path)
super(IntegrationTests, self).tearDown()
if __name__ == '__main__':
unittest.main()
| 34.717213 | 136 | 0.584347 |
05de24cb02e30a490964a400c195e67101eac565 | 3,288 | py | Python | ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/python/keras/api/_v1/keras/applications/mobilenet/__init__.py | Lube-Project/ProgettoLube | cbf33971e2c2e865783ec1a2302625539186a338 | ["MIT"] | null | null | null | ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/python/keras/api/_v1/keras/applications/mobilenet/__init__.py | Lube-Project/ProgettoLube | cbf33971e2c2e865783ec1a2302625539186a338 | ["MIT"] | null | null | null | ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/python/keras/api/_v1/keras/applications/mobilenet/__init__.py | Lube-Project/ProgettoLube | cbf33971e2c2e865783ec1a2302625539186a338 | ["MIT"] | 1 | 2021-01-28T01:57:41.000Z | 2021-01-28T01:57:41.000Z |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""MobileNet v1 models for Keras.
MobileNet is a general architecture and can be used for multiple use cases.
Depending on the use case, it can use different input layer size and
different width factors. This allows different width models to reduce
the number of multiply-adds and thereby
reduce inference cost on mobile devices.
MobileNets support any input size greater than 32 x 32, with larger image sizes
offering better performance.
The number of parameters and number of multiply-adds
can be modified by using the `alpha` parameter,
which increases/decreases the number of filters in each layer.
By altering the image size and `alpha` parameter,
all 16 models from the paper can be built, with ImageNet weights provided.
The paper demonstrates the performance of MobileNets using `alpha` values of
1.0 (also called 100 % MobileNet), 0.75, 0.5 and 0.25.
For each of these `alpha` values, weights for 4 different input image sizes
are provided (224, 192, 160, 128).
The following table describes the size and accuracy of the 100% MobileNet
on size 224 x 224:
----------------------------------------------------------------------------
Width Multiplier (alpha) | ImageNet Acc | Multiply-Adds (M) | Params (M)
----------------------------------------------------------------------------
| 1.0 MobileNet-224 | 70.6 % | 529 | 4.2 |
| 0.75 MobileNet-224 | 68.4 % | 325 | 2.6 |
| 0.50 MobileNet-224 | 63.7 % | 149 | 1.3 |
| 0.25 MobileNet-224 | 50.6 % | 41 | 0.5 |
----------------------------------------------------------------------------
The following table describes the performance of
the 100 % MobileNet on various input sizes:
------------------------------------------------------------------------
Resolution | ImageNet Acc | Multiply-Adds (M) | Params (M)
------------------------------------------------------------------------
| 1.0 MobileNet-224 | 70.6 % | 529 | 4.2 |
| 1.0 MobileNet-192 | 69.1 % | 529 | 4.2 |
| 1.0 MobileNet-160 | 67.2 % | 529 | 4.2 |
| 1.0 MobileNet-128 | 64.4 % | 529 | 4.2 |
------------------------------------------------------------------------
Reference paper:
- [MobileNets: Efficient Convolutional Neural Networks for
Mobile Vision Applications](https://arxiv.org/abs/1704.04861)
"""
from __future__ import print_function as _print_function
import sys as _sys
from tensorflow.python.keras.applications.mobilenet import MobileNet
from tensorflow.python.keras.applications.mobilenet import decode_predictions
from tensorflow.python.keras.applications.mobilenet import preprocess_input
del _print_function
from tensorflow.python.util import module_wrapper as _module_wrapper
if not isinstance(_sys.modules[__name__], _module_wrapper.TFModuleWrapper):
_sys.modules[__name__] = _module_wrapper.TFModuleWrapper(
_sys.modules[__name__], "keras.applications.mobilenet", public_apis=None, deprecation=True,
has_lite=False)
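# --- Added usage sketch (not part of the generated module) ---
# Illustrates the `alpha` / input-size knobs described in the docstring above;
# pretrained weights exist only for the alpha values and resolutions listed
# there, and 0.5 at 160x160 is one such published combination.
def _example_build_mobilenet():
    return MobileNet(input_shape=(160, 160, 3), alpha=0.5, weights="imagenet")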
| 48.352941 | 97 | 0.593978 |
aa6876250b6fbbd5b3b30fb6dc4d705bbb7791a0 | 97,217 | py | Python | rapid7vmconsole/api/tag_api.py | kiblik/vm-console-client-python | 038f6d33e8b2654a558326c6eb87f09ee23e0e22 | ["MIT"] | 61 | 2018-05-17T05:57:09.000Z | 2022-03-08T13:59:21.000Z | rapid7vmconsole/api/tag_api.py | kiblik/vm-console-client-python | 038f6d33e8b2654a558326c6eb87f09ee23e0e22 | ["MIT"] | 33 | 2018-06-26T16:21:14.000Z | 2022-03-03T20:55:47.000Z | rapid7vmconsole/api/tag_api.py | kiblik/vm-console-client-python | 038f6d33e8b2654a558326c6eb87f09ee23e0e22 | ["MIT"] | 43 | 2018-02-24T05:45:53.000Z | 2022-03-31T22:15:16.000Z |
# coding: utf-8
"""
Python InsightVM API Client
OpenAPI spec version: 3
Contact: support@rapid7.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from rapid7vmconsole.api_client import ApiClient
class TagApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_tag(self, **kwargs): # noqa: E501
"""Tags # noqa: E501
Creates a new tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_tag(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Tag tag: The details of the tag.
:return: ReferenceWithTagIDLink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_tag_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_tag_with_http_info(**kwargs) # noqa: E501
return data
def create_tag_with_http_info(self, **kwargs): # noqa: E501
"""Tags # noqa: E501
Creates a new tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_tag_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param Tag tag: The details of the tag.
:return: ReferenceWithTagIDLink
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['tag'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_tag" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'tag' in params:
body_params = params['tag']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReferenceWithTagIDLink', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
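    # Usage sketch (kept as a comment so this generated module stays importable):
    # creating a tag synchronously. The ApiClient authentication setup and the exact
    # Tag fields are assumptions for illustration, not part of this generated file.
    #
    #   from rapid7vmconsole.api_client import ApiClient
    #   from rapid7vmconsole.models.tag import Tag   # model path assumed (standard codegen layout)
    #
    #   api_client = ApiClient()                     # host/credential configuration omitted here
    #   tag_api = TagApi(api_client)
    #   new_tag = Tag(name="High Value Servers", type="custom", color="default")  # fields assumed
    #   created = tag_api.create_tag(tag=new_tag)    # returns a ReferenceWithTagIDLink
    #   print(created.id)                            # identifier of the new tag (attribute assumed)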
def delete_tag(self, id, **kwargs): # noqa: E501
"""Tag # noqa: E501
Deletes the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tag(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_tag_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_tag_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_tag_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag # noqa: E501
Deletes the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_tag_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_tag" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `delete_tag`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tag(self, id, **kwargs): # noqa: E501
"""Tag # noqa: E501
Returns a tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tag(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tag_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tag_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tag_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag # noqa: E501
Returns a tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tag_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Tag
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tag" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tag`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Tag', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
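    # Usage sketch (comment only), assuming a `tag_api` instance built as in the earlier
    # sketch: a synchronous call returns the model directly, while async_req=True returns
    # a thread-like handle whose .get() yields the same result, as the docstrings describe.
    #
    #   tag = tag_api.get_tag(42)                     # synchronous; tag id 42 is a placeholder
    #   print(tag.name)                               # Tag model attribute (assumed)
    #
    #   thread = tag_api.get_tag(42, async_req=True)  # asynchronous variant
    #   tag = thread.get()                            # blocks until the response is available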
def get_tag_asset_groups(self, id, **kwargs): # noqa: E501
"""Tag Asset Groups # noqa: E501
Returns the asset groups associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tag_asset_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: ReferencesWithAssetGroupIDLink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tag_asset_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tag_asset_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tag_asset_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Asset Groups # noqa: E501
Returns the asset groups associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tag_asset_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: ReferencesWithAssetGroupIDLink
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tag_asset_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tag_asset_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/asset_groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReferencesWithAssetGroupIDLink', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tag_search_criteria(self, id, **kwargs): # noqa: E501
"""Tag Search Criteria # noqa: E501
Returns the search criteria associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tag_search_criteria(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: SearchCriteria
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tag_search_criteria_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tag_search_criteria_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tag_search_criteria_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Search Criteria # noqa: E501
Returns the search criteria associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tag_search_criteria_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: SearchCriteria
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tag_search_criteria" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tag_search_criteria`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/search_criteria', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SearchCriteria', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tagged_assets(self, id, **kwargs): # noqa: E501
"""Tag Assets # noqa: E501
Returns the assets tagged with a tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tagged_assets(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: TaggedAssetReferences
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tagged_assets_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tagged_assets_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tagged_assets_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Assets # noqa: E501
Returns the assets tagged with a tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tagged_assets_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: TaggedAssetReferences
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tagged_assets" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tagged_assets`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/assets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TaggedAssetReferences', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tagged_sites(self, id, **kwargs): # noqa: E501
"""Tag Sites # noqa: E501
Returns the sites associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tagged_sites(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: ReferencesWithSiteIDLink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tagged_sites_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_tagged_sites_with_http_info(id, **kwargs) # noqa: E501
return data
def get_tagged_sites_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Sites # noqa: E501
Returns the sites associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tagged_sites_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: ReferencesWithSiteIDLink
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tagged_sites" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_tagged_sites`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/sites', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ReferencesWithSiteIDLink', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_tags(self, **kwargs): # noqa: E501
"""Tags # noqa: E501
Returns all tags. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tags(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: The name of the tag (used to filter the returned tags).
:param str type: The type of the tag (used to filter the returned tags).
:param int page: The index of the page (zero-based) to retrieve.
:param int size: The number of records per page to retrieve.
:param list[str] sort: The criteria to sort the records by, in the format: `property[,ASC|DESC]`. The default sort order is ascending. Multiple sort criteria can be specified using multiple sort query parameters.
:return: PageOfTag
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_tags_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_tags_with_http_info(**kwargs) # noqa: E501
return data
def get_tags_with_http_info(self, **kwargs): # noqa: E501
"""Tags # noqa: E501
Returns all tags. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_tags_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: The name of the tag (used to filter the returned tags).
:param str type: The type of the tag (used to filter the returned tags).
:param int page: The index of the page (zero-based) to retrieve.
:param int size: The number of records per page to retrieve.
:param list[str] sort: The criteria to sort the records by, in the format: `property[,ASC|DESC]`. The default sort order is ascending. Multiple sort criteria can be specified using multiple sort query parameters.
:return: PageOfTag
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'type', 'page', 'size', 'sort'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_tags" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'name' in params:
query_params.append(('name', params['name'])) # noqa: E501
if 'type' in params:
query_params.append(('type', params['type'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'size' in params:
query_params.append(('size', params['size'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
collection_formats['sort'] = 'multi' # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PageOfTag', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
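    # Usage sketch (comment only), assuming an existing `tag_api` instance: paging through
    # tags filtered by type and sorted with the documented `property[,ASC|DESC]` format.
    # The PageOfTag attribute names (resources, page.total_pages) follow the usual codegen
    # layout and are assumptions here.
    #
    #   page_index = 0
    #   while True:
    #       page = tag_api.get_tags(type="custom", sort=["name,ASC"], page=page_index, size=100)
    #       for tag in page.resources:
    #           print(tag.id, tag.name)
    #       page_index += 1
    #       if page_index >= page.page.total_pages:   # pagination metadata assumed
    #           break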
def remove_tag_search_criteria(self, id, **kwargs): # noqa: E501
"""Tag Search Criteria # noqa: E501
Removes the search criteria associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_tag_search_criteria(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_tag_search_criteria_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.remove_tag_search_criteria_with_http_info(id, **kwargs) # noqa: E501
return data
def remove_tag_search_criteria_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Search Criteria # noqa: E501
Removes the search criteria associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_tag_search_criteria_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_tag_search_criteria" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_tag_search_criteria`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/search_criteria', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_tagged_sites(self, id, **kwargs): # noqa: E501
"""Tag Sites # noqa: E501
Removes the associations between the tag and the sites. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_tagged_sites(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_tagged_sites_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.remove_tagged_sites_with_http_info(id, **kwargs) # noqa: E501
return data
def remove_tagged_sites_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Sites # noqa: E501
Removes the associations between the tag and the sites. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_tagged_sites_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_tagged_sites" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `remove_tagged_sites`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/sites', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_tagged_asset_groups(self, id, **kwargs): # noqa: E501
"""Tag Asset Groups # noqa: E501
Sets the asset groups associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_tagged_asset_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param list[int] asset_group_ids: The asset groups to add to the tag.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_tagged_asset_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_tagged_asset_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def set_tagged_asset_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Asset Groups # noqa: E501
Sets the asset groups associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_tagged_asset_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param list[int] asset_group_ids: The asset groups to add to the tag.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_group_ids'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_tagged_asset_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_tagged_asset_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'asset_group_ids' in params:
body_params = params['asset_group_ids']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/asset_groups', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def set_tagged_sites(self, id, **kwargs): # noqa: E501
"""Tag Sites # noqa: E501
Sets the sites associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_tagged_sites(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param list[int] sites: The sites to add to the tag.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.set_tagged_sites_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.set_tagged_sites_with_http_info(id, **kwargs) # noqa: E501
return data
def set_tagged_sites_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Sites # noqa: E501
Sets the sites associated with the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.set_tagged_sites_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param list[int] sites: The sites to add to the tag.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'sites'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method set_tagged_sites" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `set_tagged_sites`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'sites' in params:
body_params = params['sites']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/sites', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
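    # Usage sketch (comment only), assuming an existing `tag_api` instance: the set_* methods
    # accept plain lists of identifiers as the request body, as their signatures above show.
    # The ids used are placeholders.
    #
    #   tag_api.set_tagged_sites(42, sites=[1, 2, 3])
    #   tag_api.set_tagged_asset_groups(42, asset_group_ids=[10, 11])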
def tag_asset(self, id, asset_id, **kwargs): # noqa: E501
"""Tag Asset # noqa: E501
Adds an asset to the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tag_asset(id, asset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_id: The identifier of the asset. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.tag_asset_with_http_info(id, asset_id, **kwargs) # noqa: E501
else:
(data) = self.tag_asset_with_http_info(id, asset_id, **kwargs) # noqa: E501
return data
def tag_asset_with_http_info(self, id, asset_id, **kwargs): # noqa: E501
"""Tag Asset # noqa: E501
Adds an asset to the tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tag_asset_with_http_info(id, asset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_id: The identifier of the asset. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method tag_asset" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `tag_asset`") # noqa: E501
# verify the required parameter 'asset_id' is set
if ('asset_id' not in params or
params['asset_id'] is None):
raise ValueError("Missing the required parameter `asset_id` when calling `tag_asset`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'asset_id' in params:
path_params['assetId'] = params['asset_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/assets/{assetId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def tag_asset_group(self, id, asset_group_id, **kwargs): # noqa: E501
"""Tag Asset Group # noqa: E501
Adds an asset group to this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tag_asset_group(id, asset_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_group_id: The asset group identifier. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.tag_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
else:
(data) = self.tag_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
return data
def tag_asset_group_with_http_info(self, id, asset_group_id, **kwargs): # noqa: E501
"""Tag Asset Group # noqa: E501
Adds an asset group to this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tag_asset_group_with_http_info(id, asset_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_group_id: The asset group identifier. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method tag_asset_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `tag_asset_group`") # noqa: E501
# verify the required parameter 'asset_group_id' is set
if ('asset_group_id' not in params or
params['asset_group_id'] is None):
raise ValueError("Missing the required parameter `asset_group_id` when calling `tag_asset_group`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'asset_group_id' in params:
path_params['assetGroupId'] = params['asset_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/asset_groups/{assetGroupId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def tag_site(self, id, site_id, **kwargs): # noqa: E501
"""Tag Site # noqa: E501
Adds a site to this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tag_site(id, site_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int site_id: The identifier of the site. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.tag_site_with_http_info(id, site_id, **kwargs) # noqa: E501
else:
(data) = self.tag_site_with_http_info(id, site_id, **kwargs) # noqa: E501
return data
def tag_site_with_http_info(self, id, site_id, **kwargs): # noqa: E501
"""Tag Site # noqa: E501
Adds a site to this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.tag_site_with_http_info(id, site_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int site_id: The identifier of the site. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'site_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method tag_site" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `tag_site`") # noqa: E501
# verify the required parameter 'site_id' is set
if ('site_id' not in params or
params['site_id'] is None):
raise ValueError("Missing the required parameter `site_id` when calling `tag_site`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'site_id' in params:
path_params['siteId'] = params['site_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/sites/{siteId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def untag_all_asset_groups(self, id, **kwargs): # noqa: E501
"""Tag Asset Groups # noqa: E501
Removes the associations between the tag and all asset groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_all_asset_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.untag_all_asset_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.untag_all_asset_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def untag_all_asset_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Asset Groups # noqa: E501
Removes the associations between the tag and all asset groups. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_all_asset_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method untag_all_asset_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `untag_all_asset_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/asset_groups', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def untag_asset(self, id, asset_id, **kwargs): # noqa: E501
"""Tag Asset # noqa: E501
Removes an asset from the tag. Note: the asset must have been added directly through the asset or the tag; if the asset was associated via a site, asset group, or search criteria, this call will not remove it. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_asset(id, asset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_id: The identifier of the asset. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.untag_asset_with_http_info(id, asset_id, **kwargs) # noqa: E501
else:
(data) = self.untag_asset_with_http_info(id, asset_id, **kwargs) # noqa: E501
return data
def untag_asset_with_http_info(self, id, asset_id, **kwargs): # noqa: E501
"""Tag Asset # noqa: E501
Removes an asset from the tag. Note: the asset must have been added directly through the asset or the tag; if the asset was associated via a site, asset group, or search criteria, this call will not remove it. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_asset_with_http_info(id, asset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_id: The identifier of the asset. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method untag_asset" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `untag_asset`") # noqa: E501
# verify the required parameter 'asset_id' is set
if ('asset_id' not in params or
params['asset_id'] is None):
raise ValueError("Missing the required parameter `asset_id` when calling `untag_asset`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'asset_id' in params:
path_params['assetId'] = params['asset_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/assets/{assetId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
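    # Usage sketch (comment only), assuming an existing `tag_api` instance: directly tagging
    # and untagging a single asset. As noted in the docstring above, untag_asset only removes
    # associations that were added directly to the asset or tag; assets picked up via a site,
    # asset group, or search criteria are not affected. Identifiers are placeholders.
    #
    #   tag_api.tag_asset(42, 1001)     # associate asset 1001 with tag 42
    #   tag_api.untag_asset(42, 1001)   # remove the direct association again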
def untag_asset_group(self, id, asset_group_id, **kwargs): # noqa: E501
"""Tag Asset Group # noqa: E501
Removes an asset group from this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_asset_group(id, asset_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_group_id: The asset group identifier. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.untag_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
else:
(data) = self.untag_asset_group_with_http_info(id, asset_group_id, **kwargs) # noqa: E501
return data
def untag_asset_group_with_http_info(self, id, asset_group_id, **kwargs): # noqa: E501
"""Tag Asset Group # noqa: E501
Removes an asset group from this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_asset_group_with_http_info(id, asset_group_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int asset_group_id: The asset group identifier. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'asset_group_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method untag_asset_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `untag_asset_group`") # noqa: E501
# verify the required parameter 'asset_group_id' is set
if ('asset_group_id' not in params or
params['asset_group_id'] is None):
raise ValueError("Missing the required parameter `asset_group_id` when calling `untag_asset_group`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'asset_group_id' in params:
path_params['assetGroupId'] = params['asset_group_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/asset_groups/{assetGroupId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def untag_site(self, id, site_id, **kwargs): # noqa: E501
"""Tag Site # noqa: E501
Removes a site from this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_site(id, site_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int site_id: The identifier of the site. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.untag_site_with_http_info(id, site_id, **kwargs) # noqa: E501
else:
(data) = self.untag_site_with_http_info(id, site_id, **kwargs) # noqa: E501
return data
def untag_site_with_http_info(self, id, site_id, **kwargs): # noqa: E501
"""Tag Site # noqa: E501
Removes a site from this tag. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.untag_site_with_http_info(id, site_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param int site_id: The identifier of the site. (required)
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'site_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method untag_site" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `untag_site`") # noqa: E501
# verify the required parameter 'site_id' is set
if ('site_id' not in params or
params['site_id'] is None):
raise ValueError("Missing the required parameter `site_id` when calling `untag_site`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'site_id' in params:
path_params['siteId'] = params['site_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/sites/{siteId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_tag(self, id, **kwargs): # noqa: E501
"""Tag # noqa: E501
Updates the details of a tag. For more information about accepted fields for the tag search criteria see the PUT /search_criteria documentation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_tag(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param Tag tag: The details of the tag.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_tag_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_tag_with_http_info(id, **kwargs) # noqa: E501
return data
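    # Illustrative usage sketch (not part of the generated client): assuming this
    # API class is instantiated as `api` with a configured ApiClient, a tag could
    # be renamed with something like:
    #
    #     api.update_tag(42, tag={'name': 'High Risk', 'type': 'custom'})
    #
    # The dict fields shown are assumptions about the Tag model, not values taken
    # from this file.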
def update_tag_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag # noqa: E501
Updates the details of a tag. For more information about accepted fields for the tag search criteria see the PUT /search_criteria documentation. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_tag_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param Tag tag: The details of the tag.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'tag'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_tag" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_tag`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'tag' in params:
body_params = params['tag']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_tag_search_criteria(self, id, **kwargs): # noqa: E501
"""Tag Search Criteria # noqa: E501
Updates the search criteria associated with the tag. The following table outlines the search criteria fields and the available operators: | Field | Operators | | ---------- | ---------------- | | ip-address | is, is-not, in-range, not-in-range, is-like, not-like | | ip-address-type | in, not-in | | alternate-address-type | in | | host-name | is, is-not, starts-with, ends-with, contains, does-not-contain, is-empty, is-not-empty, is-like, not-like | | host-type | in, not-in | | operating-system | contains, does-not-contain, is-empty, is-not-empty | | software | contains, does-not-contain| | open-ports | is, is-not, in-range | | service-name | contains, does-not-contain | | risk-score | is, is-not, in-range, is-greater-than,is-less-than | | last-scan-date | is-on-or-before, is-on-or-after, is-between, is-earlier-than, is-within-the-last | | vulnerability-assessed | is-on-or-before, is-on-or-after, is-between, is-earlier-than, is-within-the-last | | vulnerability-category | is, is-not, starts-with, ends-with, contains, does-not-contain| | vulnerability-cvss-score | is, is-not, in-range, is-greater-than, is-less-than | | vulnerability-cvss-v3-score | is, is-not, in-range, is-greater-than, is-less-than | | vulnerability-exposures | includes, does not-include | | vulnerability-title | contains, does-not-contain, is, is-not, starts-with, ends-with | | cve | is, is-not, contains, does-not-contain | | cvss-access-complexity | is, is-not | | cvss-authentication-required | is, is-not | | cvss-access-vector | is, is-not | | cvss-availability-impact | is, is-not | | cvss-confidentiality-impact | is, is-not | | cvss-integrity-impact | is, is-not | | cvss-v3-confidentiality-impact | is, is-not | | cvss-v3-integrity-impact | is, is-not | | cvss-v3-availability-impact | is, is-not | | cvss-v3-attack-vector | is, is-not | | cvss-v3-attack-complexity | is, is-not | | cvss-v3-user-interaction | is, is-not | | cvss-v3-privileges-required | is, is-not | | mobile-device-last-sync | is-within-the-last, is-earlier-than | | pci-compliance | is | | site-id | in, not-in | | criticality-tag | is, is-not, is-greater-than, is-less-than, is-applied, is-not-applied | | custom-tag | is, is-not, starts-with, ends-with, contains, does-not-contain, is-applied, is-not-applied | | location-tag | is, is-not, starts-with, ends-with, contains, does-not-contain, is-applied, is-not-applied | | owner-tag | is, is-not, starts-with, ends-with, contains, does-not-contain, is-applied, is-not-applied | | vulnerability-validated-status | are | | vasset-cluster | is, is-not, contains, does-not-contain, starts-with | | vasset-datacenter | is, is-not | | vasset-host name | is, is-not, contains, does-not-contain, starts-with | | vasset-power state | in, not-in | | vasset-resource pool path | contains, does-not-contain | | container-image | is, is-not, starts-with, ends-with, contains, does-not-contain, is-like, not-like | | container-status | is, is-not | | containers | are | The following table outlines the operators and the values associated with them: | Operator | Values | | -------- | ------ | | are | A single string property named \"value\" | | is-between | A number property named \"lower\" and a number property named \"upper\" | | contains | A single string property named \"value\" | | does-not-contain | A single string property named \"value\" | | is-earlier-than | A single number property named \"value\" | | ends-with | A single string property named \"value\" | | is-greater-than | A single number property named \"value\" | | in | An 
array property named \"values\" | | not-in | An array property named \"values\" | | in-range | A number property named \"lower\" and a number property named \"upper\" | | includes | An array property named \"values\" | | is | A single string property named \"value\" | | is-not | A single string property named \"value\" | | is-applied | No value | | is-not-applied | No value | | is-empty | No value | | is-not-empty | No value | | is-less-than | A single number property named \"value\" | | is-like | A single string property named \"value\" | | does-not-contain | A single string property named \"value\" | | not-in-range | A number property named \"lower\" and a number property named \"upper\" | | not-like | A single string property named \"value\" | | is-on-or-after | A single string property named \"value\", which is the date in ISO8601 format (yyyy-MM-dd) | | is-on-or-before | A single string property named \"value\", which is the date in ISO8601 format (yyyy-MM-dd) | | starts-with | A single string property named \"value\" | | is-within-the-last | A single number property named \"value\" | The following fields have enumerated values: | Field | Acceptable Values | | ----- | ----------------- | | containers | 0=present, 1=not present | | vulnerability-validated-status | 0=present, 1=not present | | pci-compliance | 0=fail, 1=pass | | alternate-address-type | 0=IPv4, 1=IPv6 | | ip-address-type | 0=IPv4, 1=IPv6 | | host-type | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | cvss-access-complexity | L=Low, M=Medium, H=High | | cvss-integrity-impact | N=None, P=Partial, C=Complete | | cvss-confidentiality-impact | N=None, P=Partial, C=Complete | | cvss-availability-impact | N=None, P=Partial, C=Complete | | cvss-access-vector | L=Local, A=Adjacent, N=Network | | cvss-authentication-required | N=None, S=Single, M=Multiple | | cvss-access-complexity | L=Low, M=Medium, H=High | | cvss-v3-confidentiality-impact | N=None, L=Low, H=High | | cvss-v3-integrity-impact | N=None, L=Low, H=High | | cvss-v3-availability-impact | N=None, L=Low, H=High | | cvss-v3-attack-vector | N=Network, A=Adjacent, L=Local, P=Physical | | cvss-v3-attack-complexity | L=Low, H=High | | cvss-v3-user-interaction | N=None, R=Required | | cvss-v3-privileges-required | N=None, L=Low, H=High | | container-status | created, running, paused, restarting, exited, dead, unknown | # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_tag_search_criteria(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param SearchCriteria criterial: The details of the search criteria.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_tag_search_criteria_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_tag_search_criteria_with_http_info(id, **kwargs) # noqa: E501
return data
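    # Illustrative usage sketch (assumed payload shape, based on the field and
    # operator tables in the docstring above):
    #
    #     criteria = {
    #         'match': 'all',
    #         'filters': [
    #             {'field': 'host-name', 'operator': 'contains', 'value': 'web'},
    #             {'field': 'risk-score', 'operator': 'is-greater-than', 'value': 10000},
    #         ],
    #     }
    #     api.update_tag_search_criteria(42, criterial=criteria)
    #
    # Note the keyword is spelled `criterial`, matching the parameter name used by
    # this generated method.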
def update_tag_search_criteria_with_http_info(self, id, **kwargs): # noqa: E501
"""Tag Search Criteria # noqa: E501
Updates the search criteria associated with the tag. The following table outlines the search criteria fields and the available operators: | Field | Operators | | ---------- | ---------------- | | ip-address | is, is-not, in-range, not-in-range, is-like, not-like | | ip-address-type | in, not-in | | alternate-address-type | in | | host-name | is, is-not, starts-with, ends-with, contains, does-not-contain, is-empty, is-not-empty, is-like, not-like | | host-type | in, not-in | | operating-system | contains, does-not-contain, is-empty, is-not-empty | | software | contains, does-not-contain| | open-ports | is, is-not, in-range | | service-name | contains, does-not-contain | | risk-score | is, is-not, in-range, is-greater-than,is-less-than | | last-scan-date | is-on-or-before, is-on-or-after, is-between, is-earlier-than, is-within-the-last | | vulnerability-assessed | is-on-or-before, is-on-or-after, is-between, is-earlier-than, is-within-the-last | | vulnerability-category | is, is-not, starts-with, ends-with, contains, does-not-contain| | vulnerability-cvss-score | is, is-not, in-range, is-greater-than, is-less-than | | vulnerability-cvss-v3-score | is, is-not, in-range, is-greater-than, is-less-than | | vulnerability-exposures | includes, does not-include | | vulnerability-title | contains, does-not-contain, is, is-not, starts-with, ends-with | | cve | is, is-not, contains, does-not-contain | | cvss-access-complexity | is, is-not | | cvss-authentication-required | is, is-not | | cvss-access-vector | is, is-not | | cvss-availability-impact | is, is-not | | cvss-confidentiality-impact | is, is-not | | cvss-integrity-impact | is, is-not | | cvss-v3-confidentiality-impact | is, is-not | | cvss-v3-integrity-impact | is, is-not | | cvss-v3-availability-impact | is, is-not | | cvss-v3-attack-vector | is, is-not | | cvss-v3-attack-complexity | is, is-not | | cvss-v3-user-interaction | is, is-not | | cvss-v3-privileges-required | is, is-not | | mobile-device-last-sync | is-within-the-last, is-earlier-than | | pci-compliance | is | | site-id | in, not-in | | criticality-tag | is, is-not, is-greater-than, is-less-than, is-applied, is-not-applied | | custom-tag | is, is-not, starts-with, ends-with, contains, does-not-contain, is-applied, is-not-applied | | location-tag | is, is-not, starts-with, ends-with, contains, does-not-contain, is-applied, is-not-applied | | owner-tag | is, is-not, starts-with, ends-with, contains, does-not-contain, is-applied, is-not-applied | | vulnerability-validated-status | are | | vasset-cluster | is, is-not, contains, does-not-contain, starts-with | | vasset-datacenter | is, is-not | | vasset-host name | is, is-not, contains, does-not-contain, starts-with | | vasset-power state | in, not-in | | vasset-resource pool path | contains, does-not-contain | | container-image | is, is-not, starts-with, ends-with, contains, does-not-contain, is-like, not-like | | container-status | is, is-not | | containers | are | The following table outlines the operators and the values associated with them: | Operator | Values | | -------- | ------ | | are | A single string property named \"value\" | | is-between | A number property named \"lower\" and a number property named \"upper\" | | contains | A single string property named \"value\" | | does-not-contain | A single string property named \"value\" | | is-earlier-than | A single number property named \"value\" | | ends-with | A single string property named \"value\" | | is-greater-than | A single number property named \"value\" | | in | An 
array property named \"values\" | | not-in | An array property named \"values\" | | in-range | A number property named \"lower\" and a number property named \"upper\" | | includes | An array property named \"values\" | | is | A single string property named \"value\" | | is-not | A single string property named \"value\" | | is-applied | No value | | is-not-applied | No value | | is-empty | No value | | is-not-empty | No value | | is-less-than | A single number property named \"value\" | | is-like | A single string property named \"value\" | | does-not-contain | A single string property named \"value\" | | not-in-range | A number property named \"lower\" and a number property named \"upper\" | | not-like | A single string property named \"value\" | | is-on-or-after | A single string property named \"value\", which is the date in ISO8601 format (yyyy-MM-dd) | | is-on-or-before | A single string property named \"value\", which is the date in ISO8601 format (yyyy-MM-dd) | | starts-with | A single string property named \"value\" | | is-within-the-last | A single number property named \"value\" | The following fields have enumerated values: | Field | Acceptable Values | | ----- | ----------------- | | containers | 0=present, 1=not present | | vulnerability-validated-status | 0=present, 1=not present | | pci-compliance | 0=fail, 1=pass | | alternate-address-type | 0=IPv4, 1=IPv6 | | ip-address-type | 0=IPv4, 1=IPv6 | | host-type | 0=Unknown, 1=Guest, 2=Hypervisor, 3=Physical, 4=Mobile | | cvss-access-complexity | L=Low, M=Medium, H=High | | cvss-integrity-impact | N=None, P=Partial, C=Complete | | cvss-confidentiality-impact | N=None, P=Partial, C=Complete | | cvss-availability-impact | N=None, P=Partial, C=Complete | | cvss-access-vector | L=Local, A=Adjacent, N=Network | | cvss-authentication-required | N=None, S=Single, M=Multiple | | cvss-access-complexity | L=Low, M=Medium, H=High | | cvss-v3-confidentiality-impact | N=None, L=Low, H=High | | cvss-v3-integrity-impact | N=None, L=Low, H=High | | cvss-v3-availability-impact | N=None, L=Low, H=High | | cvss-v3-attack-vector | N=Network, A=Adjacent, L=Local, P=Physical | | cvss-v3-attack-complexity | L=Low, H=High | | cvss-v3-user-interaction | N=None, R=Required | | cvss-v3-privileges-required | N=None, L=Low, H=High | | container-status | created, running, paused, restarting, exited, dead, unknown | # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_tag_search_criteria_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: The identifier of the tag. (required)
:param SearchCriteria criterial: The details of the search criteria.
:return: Links
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'criterial'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_tag_search_criteria" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `update_tag_search_criteria`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'criterial' in params:
body_params = params['criterial']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json;charset=UTF-8']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/3/tags/{id}/search_criteria', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Links', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 44.492906 | 6,055 | 0.606499 |
d2f5fb62298227e3bba5b81d818b5952b0b3ce7a | 5,386 | py | Python | 2018_02_15_cryptocurrencies_trading/algorithms/shared/mr_btc-1500963590682.py | jiricejchan/AnonymniAnalytici | e4e96f943d0b2232d9099c6e7bb690a3d25ea422 | [
"MIT"
] | 10 | 2017-03-28T06:52:22.000Z | 2017-11-21T17:41:11.000Z | 2018_02_15_cryptocurrencies_trading/algorithms/shared/mr_btc-1500963590682.py | jiricejchan/AnonymniAnalytici | e4e96f943d0b2232d9099c6e7bb690a3d25ea422 | [
"MIT"
] | 1 | 2017-07-21T08:27:01.000Z | 2017-07-21T08:27:01.000Z | 2018_02_15_cryptocurrencies_trading/algorithms/shared/mr_btc-1500963590682.py | jiricejchan/AnonymniAnalytici | e4e96f943d0b2232d9099c6e7bb690a3d25ea422 | [
"MIT"
] | 8 | 2017-03-05T17:21:40.000Z | 2019-12-01T18:46:39.000Z | from catalyst.api import (
order_target_percent,
record,
symbol,
get_open_orders,
set_max_leverage,
schedule_function,
date_rules,
attach_pipeline,
pipeline_output,
)
from catalyst.pipeline import Pipeline
from catalyst.pipeline.data import CryptoPricing
from catalyst.pipeline.factors.crypto import SimpleMovingAverage
from catalyst.pipeline.factors.crypto import AnnualizedVolatility
import math
def initialize(context):
context.ASSET_NAME = 'USDT_BTC'
    context.WINDOW = 30
# For all trading pairs in the poloniex bundle, the default denomination
# currently supported by Catalyst is 1/1000th of a full coin. Use this
# constant to scale the price of up to that of a full coin if desired.
context.TICK_SIZE = 1000.0
context.i = 0
context.asset = symbol(context.ASSET_NAME)
attach_pipeline(make_pipeline(context), 'mr_pipeline')
schedule_function(
rebalance,
date_rules.every_day(),
)
def before_trading_start(context, data):
context.pipeline_data = pipeline_output('mr_pipeline')
def make_pipeline(context):
return Pipeline(
columns={
'price': CryptoPricing.open.latest,
'sma': SimpleMovingAverage(
inputs=[CryptoPricing.close],
window_length=context.WINDOW,
),
'std': AnnualizedVolatility(
inputs=[CryptoPricing.close],
window_length=context.WINDOW,
annualization_factor=1,
),
}
)
def rebalance(context, data):
context.i += 1
# Skip first LONG_WINDOW bars to fill windows
if context.i < context.WINDOW:
return
# Get pipeline data for asset of interest
pipeline_data = context.pipeline_data
pipeline_data = pipeline_data[pipeline_data.index == context.asset].iloc[0]
# Compute the necessary statistics
sma = pipeline_data.sma
    # 'std' is a pipeline column; use item access so it is not shadowed by the
    # Series.std() method, which would compute a statistic over the row instead.
    std = pipeline_data['std']
price = pipeline_data.price
# Compute buy and sell thresholds
    # Buy threshold is the simple moving average value minus one standard dev.
    # Sell threshold is the simple moving average value plus one standard dev.
buy_threshold = sma-std/math.sqrt(context.WINDOW)
sell_threshold = sma+std/math.sqrt(context.WINDOW)
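    # Worked example with made-up numbers: for WINDOW=30, sma=4000 and std=600,
    # std/math.sqrt(30) is roughly 109.5, giving a buy threshold near 3890.5 and
    # a sell threshold near 4109.5.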
# Check that the order has not already been placed
open_orders = get_open_orders()
if context.asset not in open_orders:
# check that the asset of interest can currently be traded
if data.can_trade(context.asset):
# Trading logic: if price is less than the buy threshold, mean
# reversion should drive price up. Algorithm invests 100% in the
# asset. In the opposite case, mean reversion should drive price
# down. Algorithm invests 50% in cash and 50% in the asset. If
# price is between buy and sell thresholds, algorithm invests 25%
# in cash and 75% in the asset.
if price < buy_threshold:
order_target_percent(
context.asset,
1.0,
)
elif price > sell_threshold:
order_target_percent(
context.asset,
0.5,
)
else:
order_target_percent(
context.asset,
0.75,
)
record(
price=price,
leverage=context.account.leverage,
sma=sma,
std=std,
buy_threshold=buy_threshold,
sell_threshold=sell_threshold,
)
def analyze(context=None, results=None):
import matplotlib.pyplot as plt
# Plot the portfolio and asset data.
ax1 = plt.subplot(411)
results[['portfolio_value']].plot(ax=ax1)
ax1.set_ylabel('Portfolio value (USD)')
ax2 = plt.subplot(412, sharex=ax1)
ax2.set_ylabel('{asset} (USD)'.format(asset=context.ASSET_NAME))
(context.TICK_SIZE*results[['price', 'sma', 'buy_threshold','sell_threshold']]).plot(ax=ax2)
trans = results.ix[[t != [] for t in results.transactions]]
amounts = [t[0]['amount'] for t in trans.transactions]
buys = trans.ix[
[t[0]['amount'] > 0 for t in trans.transactions]
]
sells = trans.ix[
[t[0]['amount'] < 0 for t in trans.transactions]
]
ax2.plot(
buys.index,
context.TICK_SIZE * results.price[buys.index],
'^',
markersize=10,
color='g',
)
ax2.plot(
sells.index,
context.TICK_SIZE * results.price[sells.index],
'v',
markersize=10,
color='r',
)
ax3 = plt.subplot(413, sharex=ax1)
results[['leverage']].plot(ax=ax3)
ax3.set_ylabel('Leverage (USD)')
results[[
'algorithm',
'benchmark',
]] = results[[
'algorithm_period_return',
'benchmark_period_return',
]]
ax4 = plt.subplot(414, sharex=ax1)
results[[
'algorithm',
'benchmark',
]].plot(ax=ax4)
ax4.set_ylabel('Percent Change')
plt.legend(loc=3)
# Show the plot.
plt.gcf().set_size_inches(18, 8)
plt.show() | 30.602273 | 97 | 0.593205 |
7a670ec0e5e713180e6f0399a08e497a847f932a | 2,660 | py | Python | mldiag/descriptors.py | S-AI-F/MLDiag-1 | 67d79f7b16b112ad9dadefa63818db21ef3e259c | [
"MIT"
] | null | null | null | mldiag/descriptors.py | S-AI-F/MLDiag-1 | 67d79f7b16b112ad9dadefa63818db21ef3e259c | [
"MIT"
] | null | null | null | mldiag/descriptors.py | S-AI-F/MLDiag-1 | 67d79f7b16b112ad9dadefa63818db21ef3e259c | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from functools import partial
from typing import List
import numpy as np
# TODO: reproduce the Augmenter base class of nlpaug
class Description(ABC):
def __init__(self,
name,
verbose):
self.name = name
self.verbose = verbose
@abstractmethod
def describe(self,
data):
raise NotImplementedError
def describes(self,
data: List):
out = []
for d in data:
out.append(self.describe(d))
return out
def gen_describe(
dataset: list,
description: Description,
):
"""
    a generic describe process over a batched dataset generator
    :param dataset: dataset generator of (data, label) batches
    :param description: Description applied to each element of a batch
    :return: yields one array of descriptors per batch
"""
for data_point in dataset:
data, label = data_point
descriptors = np.empty(len(list(data)))
if description is not None:
descriptors = description.describes(list(data))
else:
descriptors = np.nan
yield np.asarray(descriptors)
class TextCharCountDescription(Description):
def __init__(self, name="text_char_count", verbose=0):
super().__init__(
name=name,
verbose=verbose)
def describe(self,
data):
if not isinstance(data, str):
raise TypeError("data should be string and not {}".format(type(data)))
return len(data)
class TextSentenceCountDescription(Description):
def __init__(self, name="text_sentence_count", verbose=0):
super().__init__(
name=name,
verbose=verbose)
def describe(self,
data):
if not isinstance(data, str):
raise TypeError("data should be string and not {}".format(type(data)))
return len(data.split("."))
class TextWordCountDescription(Description):
def __init__(self, name="text_word_count", verbose=0):
super().__init__(
name=name,
verbose=verbose)
def describe(self,
data):
if not isinstance(data, str):
raise TypeError("data should be string and not {}".format(type(data)))
return sum([len(x.split(" ")) for x in data.split(".")])
# text char count description
text_char_count_descriptor = partial(gen_describe, description=TextCharCountDescription())
# text sentence count description
text_sentence_count_descriptor = partial(gen_describe, description=TextSentenceCountDescription())
# text word count description
text_word_count_descriptor = partial(gen_describe, description=TextWordCountDescription())
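# Illustrative usage sketch (assumed, not part of the original module): each
# dataset element is a (data, label) batch, so a small in-memory list can be
# described like this:
#
#     dataset = [(["hello world", "foo. bar baz"], [0, 1])]
#     for counts in text_char_count_descriptor(dataset):
#         print(counts)  # -> array([11, 12])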
| 28 | 98 | 0.627444 |
ed9b7f1b949b47ed7db483aff0b8cb8b0b385325 | 3,142 | py | Python | nicodl/core.py | yusei-wy/nicodl | 91311610c6590519b31a8a307c7ee9bf698d0e7a | [
"MIT"
] | null | null | null | nicodl/core.py | yusei-wy/nicodl | 91311610c6590519b31a8a307c7ee9bf698d0e7a | [
"MIT"
] | null | null | null | nicodl/core.py | yusei-wy/nicodl | 91311610c6590519b31a8a307c7ee9bf698d0e7a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import argparse
import configparser
import subprocess
import sys
import requests
from bs4 import BeautifulSoup
from selenium import webdriver
class Crawler(object):
_driver = None
def run(self, user, password, url):
if url is None:
self.error("Error: url is not specified")
print(f"run target: {url}")
options = webdriver.ChromeOptions()
options.add_argument("--headless")
self._driver = webdriver.Chrome(chrome_options=options)
# empty check
if not user or not password:
config = configparser.ConfigParser()
config.read("./config.ini")
password = config["account"]["password"]
user = config["account"]["user"]
# login
if not self.login(user, password):
self.error("Error: login failure")
print("logined")
# m3u8
video_data = self.get_video_data(url)
print(f"get video data: {video_data.get('title')}")
# download
ret = self.download(video_data.get("url"), video_data.get("title"))
print("ret", ret)
self.quit()
def login(self, user, password):
url = "https://account.nicovideo.jp/login"
self._driver.get(url)
self._driver.find_element_by_id("input__mailtel").send_keys(user)
self._driver.find_element_by_id("input__password").send_keys(password)
self._driver.find_element_by_id("login__submit").click()
return self._driver.title == "niconico(ニコニコ)"
def get_video_data(self, url):
self._driver.get(url)
title = self._driver.find_element_by_css_selector(".VideoTitle").text
networks = self._driver.execute_script(
"return window.performance.getEntries();"
)
# get m3u8 url
m3u8_url = ""
for n in reversed(networks):
url = n.get("name")
if url.find("master.m3u8?") > 0:
m3u8_url = url
break
return dict(title=title, url=m3u8_url)
    def download(self, url, title):
        # Build the argument list directly (instead of splitting a formatted
        # string) so titles containing spaces do not break the ffmpeg call.
        cmd = [
            "ffmpeg",
            "-protocol_whitelist", "file,http,https,tcp,tls,crypto",
            "-i", url,
            "-movflags", "faststart",
            "-c", "copy",
            f"{title}.mp4",
        ]
        return subprocess.call(cmd)
def error(self, msg):
print(f"{msg}", file=sys.stderr)
self.quit()
exit(1)
def quit(self):
if self._driver is not None:
self._driver.quit()
if __name__ == "__main__":
parser = argparse.ArgumentParser(
prog="nicodl",
usage="nicodl \
-u [email or tel] \
-p [password] \
--target [target video page url]",
description="Nico Nico video download software",
add_help=True,
)
parser.add_argument("-u", "--user", help="email or tel number")
parser.add_argument("-p", "--password", help="password")
parser.add_argument("target", help="target video page url")
args = parser.parse_args()
cw = Crawler()
cw.run(args.user, args.password, args.target)
| 29.092593 | 78 | 0.586251 |
0a3fff22b5dfb008da0ef487f5b1992412c3e257 | 1,328 | py | Python | desafio103.py | marcelocmedeiros/RevisaoPython | 04c602bf17e8ab37c9660337a8f8497eb498e10d | [
"MIT"
] | null | null | null | desafio103.py | marcelocmedeiros/RevisaoPython | 04c602bf17e8ab37c9660337a8f8497eb498e10d | [
"MIT"
] | null | null | null | desafio103.py | marcelocmedeiros/RevisaoPython | 04c602bf17e8ab37c9660337a8f8497eb498e10d | [
"MIT"
] | null | null | null | # Marcelo Campos de Medeiros
# ADS UNIFIP
# PYTHON REVIEW
# LESSON 21 Functions (Def), part 2 ---> GUSTAVO GUANABARA
'''
Write a program with a function called ficha() that takes two optional
parameters: the first indicates the player's name and the other, called gols,
gives how many goals the player scored. The program must be able to show the
player's record even when some data is not entered correctly.
'''
print('='*30)
print('{:*^30}'.format(' Def ficha() '))
print('='*30)
print()
# ficha() function: prints the player's record
def ficha(jog = '<desconhecido>', gol = 0):
    '''
    -> Shows a player's record
    :param jog: the player's name
    :param gol: the number of goals scored
    :return: nothing
    '''
print(f'O jogador {jog} fez {gol} gol(s) no campeonato')
# Main program
# parameters/arguments: nome and gols
nome = str(input('Digite o nome do jogador: '))
# read gols as a string instead of an int so invalid input can be handled
gols = str(input('Quantos gols o jogador fez: '))
# condition to convert the string into a number
# if gols is numeric, convert the str to an int
if gols.isnumeric():
    gols = int(gols)
# otherwise gols = 0
else:
    gols = 0
# if the name is empty, call ficha(gol = gols)
if nome.strip() == '':
    ficha(gol = gols)
# otherwise call ficha(nome, gols)
else:
    ficha(nome, gols)
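# Example run (illustrative): a non-numeric goal count falls back to 0, so the
# name "Romario" with goals "dez" prints:
#   O jogador Romario fez 0 gol(s) no campeonato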
| 27.102041 | 80 | 0.685994 |
8a4f59b82ca9a6904490638924a6b16be400d1e2 | 98 | py | Python | apps/sage_intacct/apps.py | fylein/fyle-intacct-api | 16e45538ec3a2b7af396742a42302704c33a7bd7 | [
"MIT"
] | null | null | null | apps/sage_intacct/apps.py | fylein/fyle-intacct-api | 16e45538ec3a2b7af396742a42302704c33a7bd7 | [
"MIT"
] | 3 | 2020-07-20T10:54:15.000Z | 2022-02-23T17:13:49.000Z | apps/sage_intacct/apps.py | fylein/fyle-intacct-api | 16e45538ec3a2b7af396742a42302704c33a7bd7 | [
"MIT"
] | 2 | 2020-07-25T14:50:56.000Z | 2020-08-02T13:48:37.000Z | from django.apps import AppConfig
class SageIntacctConfig(AppConfig):
name = 'sage_intacct'
| 16.333333 | 35 | 0.77551 |
4c194f1c2e6799f196aefe434556298263b3cfef | 25,673 | py | Python | colour/utilities/tests/test_array.py | Saransh-cpp/colour | 02612fef79cddbada324def766b8a93283430908 | [
"BSD-3-Clause"
] | 6 | 2019-06-18T18:53:29.000Z | 2021-09-10T21:02:45.000Z | colour/utilities/tests/test_array.py | Saransh-cpp/colour | 02612fef79cddbada324def766b8a93283430908 | [
"BSD-3-Clause"
] | null | null | null | colour/utilities/tests/test_array.py | Saransh-cpp/colour | 02612fef79cddbada324def766b8a93283430908 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Defines unit tests for :mod:`colour.utilities.array` module.
"""
import numpy as np
import unittest
from collections import namedtuple
from colour.constants import DEFAULT_FLOAT_DTYPE, DEFAULT_INT_DTYPE
from colour.utilities import (
as_array, as_int_array, as_float_array, as_numeric, as_int, as_float,
set_float_precision, set_int_precision, as_namedtuple, closest_indexes,
closest, interval, is_uniform, in_array, tstack, tsplit, row_as_diagonal,
orient, centroid, fill_nan, ndarray_write, zeros, ones, full,
index_along_last_axis)
from colour.utilities import is_networkx_installed
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2021 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = [
'TestAsArray', 'TestAsIntArray', 'TestAsFloatArray', 'TestAsNumeric',
'TestAsInt', 'TestAsFloat', 'TestSetFloatPrecision', 'TestSetIntPrecision',
'TestAsNametuple', 'TestClosestIndexes', 'TestClosest', 'TestInterval',
'TestIsUniform', 'TestInArray', 'TestTstack', 'TestTsplit',
'TestRowAsDiagonal', 'TestOrient', 'TestCentroid', 'TestFillNan',
'TestNdarrayWrite', 'TestZeros', 'TestOnes', 'TestFull',
'TestIndexAlongLastAxis'
]
class TestAsArray(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.as_array` definition unit tests
methods.
"""
def test_as_array(self):
"""
Tests :func:`colour.utilities.array.as_array` definition.
"""
np.testing.assert_equal(as_array([1, 2, 3]), np.array([1, 2, 3]))
self.assertEqual(
as_array([1, 2, 3], DEFAULT_FLOAT_DTYPE).dtype,
DEFAULT_FLOAT_DTYPE)
self.assertEqual(
as_array([1, 2, 3], DEFAULT_INT_DTYPE).dtype, DEFAULT_INT_DTYPE)
np.testing.assert_equal(
as_array(dict(zip('abc', [1, 2, 3])).values()), np.array([1, 2,
3]))
class TestAsIntArray(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.as_int_array` definition unit tests
methods.
"""
def test_as_int_array(self):
"""
Tests :func:`colour.utilities.array.as_int_array` definition.
"""
np.testing.assert_equal(
as_int_array([1.0, 2.0, 3.0]), np.array([1, 2, 3]))
self.assertEqual(as_int_array([1, 2, 3]).dtype, DEFAULT_INT_DTYPE)
class TestAsFloatArray(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.as_float_array` definition unit tests
methods.
"""
def test_as_float_array(self):
"""
Tests :func:`colour.utilities.array.as_float_array` definition.
"""
np.testing.assert_equal(as_float_array([1, 2, 3]), np.array([1, 2, 3]))
self.assertEqual(as_float_array([1, 2, 3]).dtype, DEFAULT_FLOAT_DTYPE)
class TestAsNumeric(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.as_numeric` definition unit tests
methods.
"""
def test_as_numeric(self):
"""
Tests :func:`colour.utilities.array.as_numeric` definition.
"""
self.assertEqual(as_numeric(1), 1.0)
self.assertEqual(as_numeric(np.array([1])), 1.0)
np.testing.assert_almost_equal(
as_numeric(np.array([1, 2, 3])), np.array([1.0, 2.0, 3.0]))
self.assertIsInstance(as_numeric(1), DEFAULT_FLOAT_DTYPE)
self.assertIsInstance(as_numeric(1, int), int)
self.assertListEqual(as_numeric(['John', 'Doe']), ['John', 'Doe'])
self.assertEqual(as_numeric('John Doe'), 'John Doe')
class TestAsInt(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.as_int` definition unit tests
methods.
"""
def test_as_int(self):
"""
Tests :func:`colour.utilities.array.as_int` definition.
"""
self.assertEqual(as_int(1), 1)
self.assertEqual(as_int(np.array([1])), 1)
np.testing.assert_almost_equal(
as_int(np.array([1.0, 2.0, 3.0])), np.array([1, 2, 3]))
self.assertEqual(
as_int(np.array([1.0, 2.0, 3.0])).dtype, DEFAULT_INT_DTYPE)
self.assertIsInstance(as_int(1), int)
class TestAsFloat(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.as_float` definition unit tests
methods.
"""
def test_as_float(self):
"""
Tests :func:`colour.utilities.array.as_float` definition.
"""
self.assertEqual(as_float(1), 1.0)
self.assertEqual(as_float(np.array([1])), 1.0)
np.testing.assert_almost_equal(
as_float(np.array([1, 2, 3])), np.array([1.0, 2.0, 3.0]))
self.assertEqual(
as_float(np.array([1, 2, 3])).dtype, DEFAULT_FLOAT_DTYPE)
self.assertIsInstance(as_float(1), DEFAULT_FLOAT_DTYPE)
class TestSetFloatPrecision(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.set_float_precision` definition units
tests methods.
"""
def test_set_float_precision(self):
"""
Tests :func:`colour.utilities.array.set_float_precision` definition.
"""
self.assertEqual(as_float_array(np.ones(3)).dtype, np.float64)
set_float_precision(np.float16)
self.assertEqual(as_float_array(np.ones(3)).dtype, np.float16)
set_float_precision(np.float64)
self.assertEqual(as_float_array(np.ones(3)).dtype, np.float64)
def test_set_float_precision_enforcement(self):
"""
Tests whether :func:`colour.utilities.array.set_float_precision` effect
is applied through most of *Colour* public API.
"""
if not is_networkx_installed():
return
from colour.appearance import (CAM_Specification_CAM16,
CAM_Specification_CIECAM02)
from colour.graph.conversion import (CONVERSION_SPECIFICATIONS_DATA,
convert)
dtype = np.float32
set_float_precision(dtype)
for source, target, _callable in CONVERSION_SPECIFICATIONS_DATA:
if target in ('Hexadecimal', 'Munsell Colour'):
continue
# Spectral distributions are instantiated with float64 data and
# spectral up-sampling optimization fails.
if ('Spectral Distribution' in (source, target) or
target == 'Complementary Wavelength' or
target == 'Dominant Wavelength'):
continue
a = np.array([(0.25, 0.5, 0.25), (0.25, 0.5, 0.25)])
if source == 'CAM16':
a = CAM_Specification_CAM16(J=0.25, M=0.5, h=0.25)
if source == 'CIECAM02':
a = CAM_Specification_CIECAM02(J=0.25, M=0.5, h=0.25)
if source == 'CMYK':
a = np.array([(0.25, 0.5, 0.25, 0.5), (0.25, 0.5, 0.25, 0.5)])
if source == 'Hexadecimal':
a = np.array(['#FFFFFF', '#FFFFFF'])
if source == 'Munsell Colour':
a = ['4.2YR 8.1/5.3', '4.2YR 8.1/5.3']
if source == 'Wavelength':
a = 555
if source.endswith(' xy') or source.endswith(' uv'):
a = np.array([(0.25, 0.5), (0.25, 0.5)])
def dtype_getter(x):
"""
dtype getter callable.
"""
for specification in ('ATD95', 'CIECAM02', 'CAM16', 'Hunt',
'LLAB', 'Nayatani95', 'RLAB'):
if target.endswith(specification):
return x[0].dtype
return x.dtype
self.assertEqual(dtype_getter(convert(a, source, target)), dtype)
def tearDown(self):
"""
After tests actions.
"""
set_float_precision(np.float64)
class TestSetIntPrecision(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.set_int_precision` definition units
tests methods.
"""
def test_set_int_precision(self):
"""
Tests :func:`colour.utilities.array.set_int_precision` definition.
"""
self.assertEqual(as_int_array(np.ones(3)).dtype, np.int64)
set_int_precision(np.int32)
self.assertEqual(as_int_array(np.ones(3)).dtype, np.int32)
set_int_precision(np.int64)
self.assertEqual(as_int_array(np.ones(3)).dtype, np.int64)
def tearDown(self):
"""
After tests actions.
"""
set_int_precision(np.int64)
class TestAsNametuple(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.as_namedtuple` definition unit tests
methods.
"""
def test_as_namedtuple(self):
"""
Tests :func:`colour.utilities.array.as_namedtuple` definition.
"""
NamedTuple = namedtuple('NamedTuple', 'a b c')
a_a = np.ones(3)
a_b = np.ones(3) + 1
a_c = np.ones(3) + 2
named_tuple = NamedTuple(a_a, a_b, a_c)
self.assertEqual(named_tuple, as_namedtuple(named_tuple, NamedTuple))
self.assertEqual(
named_tuple,
as_namedtuple({
'a': a_a,
'b': a_b,
'c': a_c
}, NamedTuple))
self.assertEqual(named_tuple, as_namedtuple([a_a, a_b, a_c],
NamedTuple))
a_r = np.array(
[tuple(a) for a in np.transpose((a_a, a_b, a_c)).tolist()],
dtype=[(str('a'), str('f8')),
(str('b'), str('f8')),
(str('c'), str('f8'))]) # yapf: disable
np.testing.assert_array_equal(
np.array(named_tuple), np.array(as_namedtuple(a_r, NamedTuple)))
class TestClosestIndexes(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.closest_indexes` definition unit
tests methods.
"""
def test_closest_indexes(self):
"""
Tests :func:`colour.utilities.array.closest_indexes` definition.
"""
a = np.array([
24.31357115,
63.62396289,
55.71528816,
62.70988028,
46.84480573,
25.40026416,
])
self.assertEqual(closest_indexes(a, 63.05), 3)
self.assertEqual(closest_indexes(a, 51.15), 4)
self.assertEqual(closest_indexes(a, 24.90), 5)
np.testing.assert_array_equal(
closest_indexes(a, np.array([63.05, 51.15, 24.90])),
np.array([3, 4, 5]))
class TestClosest(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.closest` definition unit tests
methods.
"""
def test_closest(self):
"""
Tests :func:`colour.utilities.array.closest` definition.
"""
a = np.array([
24.31357115,
63.62396289,
55.71528816,
62.70988028,
46.84480573,
25.40026416,
])
self.assertEqual(closest(a, 63.05), 62.70988028)
self.assertEqual(closest(a, 51.15), 46.84480573)
self.assertEqual(closest(a, 24.90), 25.40026416)
np.testing.assert_almost_equal(
closest(a, np.array([63.05, 51.15, 24.90])),
np.array([62.70988028, 46.84480573, 25.40026416]),
decimal=7)
class TestInterval(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.interval` definition unit tests
methods.
"""
def test_interval(self):
"""
Tests :func:`colour.utilities.array.interval` definition.
"""
np.testing.assert_almost_equal(
interval(range(0, 10, 2)), np.array([2]))
np.testing.assert_almost_equal(
interval(range(0, 10, 2), False), np.array([2, 2, 2, 2]))
np.testing.assert_almost_equal(
interval([1, 2, 3, 4, 6, 6.5]), np.array([0.5, 1.0, 2.0]))
np.testing.assert_almost_equal(
interval([1, 2, 3, 4, 6, 6.5], False),
np.array([1.0, 1.0, 1.0, 2.0, 0.5]))
class TestIsUniform(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.is_uniform` definition unit tests
methods.
"""
def test_is_uniform(self):
"""
Tests :func:`colour.utilities.array.is_uniform` definition.
"""
self.assertTrue(is_uniform(range(0, 10, 2)))
self.assertFalse(is_uniform([1, 2, 3, 4, 6]))
class TestInArray(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.in_array` definition unit tests
methods.
"""
def test_in_array(self):
"""
Tests :func:`colour.utilities.array.in_array` definition.
"""
self.assertTrue(
np.array_equal(
in_array(np.array([0.50, 0.60]), np.linspace(0, 10, 101)),
np.array([True, True])))
self.assertFalse(
np.array_equal(
in_array(np.array([0.50, 0.61]), np.linspace(0, 10, 101)),
np.array([True, True])))
self.assertTrue(
np.array_equal(
in_array(np.array([[0.50], [0.60]]), np.linspace(0, 10, 101)),
np.array([[True], [True]])))
def test_n_dimensional_in_array(self):
"""
Tests :func:`colour.utilities.array.in_array` definition n-dimensional
support.
"""
np.testing.assert_almost_equal(
in_array(np.array([0.50, 0.60]), np.linspace(0, 10, 101)).shape,
np.array([2]))
np.testing.assert_almost_equal(
in_array(np.array([[0.50, 0.60]]), np.linspace(0, 10, 101)).shape,
np.array([1, 2]))
np.testing.assert_almost_equal(
in_array(np.array([[0.50], [0.60]]), np.linspace(0, 10,
101)).shape,
np.array([2, 1]))
class TestTstack(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.tstack` definition unit tests
methods.
"""
def test_tstack(self):
"""
Tests :func:`colour.utilities.array.tstack` definition.
"""
a = 0
np.testing.assert_almost_equal(tstack([a, a, a]), np.array([0, 0, 0]))
a = np.arange(0, 6)
np.testing.assert_almost_equal(
tstack([a, a, a]),
np.array([
[0, 0, 0],
[1, 1, 1],
[2, 2, 2],
[3, 3, 3],
[4, 4, 4],
[5, 5, 5],
]))
a = np.reshape(a, (1, 6))
np.testing.assert_almost_equal(
tstack([a, a, a]),
np.array([[
[0, 0, 0],
[1, 1, 1],
[2, 2, 2],
[3, 3, 3],
[4, 4, 4],
[5, 5, 5],
]]))
a = np.reshape(a, (1, 2, 3))
np.testing.assert_almost_equal(
tstack([a, a, a]),
np.array([[
[[0, 0, 0], [1, 1, 1], [2, 2, 2]],
[[3, 3, 3], [4, 4, 4], [5, 5, 5]],
]]))
class TestTsplit(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.tsplit` definition unit tests
methods.
"""
def test_tsplit(self):
"""
Tests :func:`colour.utilities.array.tsplit` definition.
"""
a = np.array([0, 0, 0])
np.testing.assert_almost_equal(tsplit(a), np.array([0, 0, 0]))
a = np.array([
[0, 0, 0],
[1, 1, 1],
[2, 2, 2],
[3, 3, 3],
[4, 4, 4],
[5, 5, 5],
])
np.testing.assert_almost_equal(
tsplit(a),
np.array([
[0, 1, 2, 3, 4, 5],
[0, 1, 2, 3, 4, 5],
[0, 1, 2, 3, 4, 5],
]))
a = np.array([
[[0, 0, 0], [1, 1, 1], [2, 2, 2], [3, 3, 3], [4, 4, 4], [5, 5, 5]],
])
np.testing.assert_almost_equal(
tsplit(a),
np.array([
[[0, 1, 2, 3, 4, 5]],
[[0, 1, 2, 3, 4, 5]],
[[0, 1, 2, 3, 4, 5]],
]))
a = np.array([[
[[0, 0, 0], [1, 1, 1], [2, 2, 2]],
[[3, 3, 3], [4, 4, 4], [5, 5, 5]],
]])
np.testing.assert_almost_equal(
tsplit(a),
np.array([
[[[0, 1, 2], [3, 4, 5]]],
[[[0, 1, 2], [3, 4, 5]]],
[[[0, 1, 2], [3, 4, 5]]],
]))
class TestRowAsDiagonal(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.row_as_diagonal` definition unit
tests methods.
"""
def test_row_as_diagonal(self):
"""
Tests :func:`colour.utilities.array.row_as_diagonal` definition.
"""
np.testing.assert_almost_equal(
row_as_diagonal(np.array(
[[0.25891593, 0.07299478, 0.36586996],
[0.30851087, 0.37131459, 0.16274825],
[0.71061831, 0.67718718, 0.09562581],
[0.71588836, 0.76772047, 0.15476079],
[0.92985142, 0.22263399, 0.88027331]])
),
np.array(
[[[0.25891593, 0.00000000, 0.00000000],
[0.00000000, 0.07299478, 0.00000000],
[0.00000000, 0.00000000, 0.36586996]],
[[0.30851087, 0.00000000, 0.00000000],
[0.00000000, 0.37131459, 0.00000000],
[0.00000000, 0.00000000, 0.16274825]],
[[0.71061831, 0.00000000, 0.00000000],
[0.00000000, 0.67718718, 0.00000000],
[0.00000000, 0.00000000, 0.09562581]],
[[0.71588836, 0.00000000, 0.00000000],
[0.00000000, 0.76772047, 0.00000000],
[0.00000000, 0.00000000, 0.15476079]],
[[0.92985142, 0.00000000, 0.00000000],
[0.00000000, 0.22263399, 0.00000000],
[0.00000000, 0.00000000, 0.88027331]]]
)
) # yapf: disable
class TestOrient(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.orient` definition unit tests
methods.
"""
def test_orient(self):
"""
Tests :func:`colour.utilities.array.orient` definition.
"""
a = np.tile(np.arange(5), (5, 1))
np.testing.assert_almost_equal(orient(a, 'Null'), a, decimal=7)
np.testing.assert_almost_equal(
orient(a, 'Flip'),
np.array([
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
]),
decimal=7)
np.testing.assert_almost_equal(
orient(a, 'Flop'),
np.array([
[0, 1, 2, 3, 4],
[0, 1, 2, 3, 4],
[0, 1, 2, 3, 4],
[0, 1, 2, 3, 4],
[0, 1, 2, 3, 4],
]),
decimal=7)
np.testing.assert_almost_equal(
orient(a, '90 CW'),
np.array([
[0, 0, 0, 0, 0],
[1, 1, 1, 1, 1],
[2, 2, 2, 2, 2],
[3, 3, 3, 3, 3],
[4, 4, 4, 4, 4],
]),
decimal=7)
np.testing.assert_almost_equal(
orient(a, '90 CCW'),
np.array([
[4, 4, 4, 4, 4],
[3, 3, 3, 3, 3],
[2, 2, 2, 2, 2],
[1, 1, 1, 1, 1],
[0, 0, 0, 0, 0],
]),
decimal=7)
np.testing.assert_almost_equal(
orient(a, '180'),
np.array([
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
[4, 3, 2, 1, 0],
]),
decimal=7)
class TestCentroid(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.centroid` definition unit tests
methods.
"""
def test_centroid(self):
"""
Tests :func:`colour.utilities.array.centroid` definition.
"""
a = np.arange(5)
np.testing.assert_array_equal(centroid(a), np.array([3]))
a = np.tile(a, (5, 1))
np.testing.assert_array_equal(centroid(a), np.array([2, 3]))
a = np.tile(np.linspace(0, 1, 10), (10, 1))
np.testing.assert_array_equal(centroid(a), np.array([4, 6]))
a = tstack([a, a, a])
np.testing.assert_array_equal(centroid(a), np.array([4, 6, 1]))
class TestFillNan(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.fill_nan` definition unit tests
methods.
"""
def test_fill_nan(self):
"""
Tests :func:`colour.utilities.array.fill_nan` definition.
"""
a = np.array([0.1, 0.2, np.nan, 0.4, 0.5])
np.testing.assert_almost_equal(
fill_nan(a), np.array([0.1, 0.2, 0.3, 0.4, 0.5]), decimal=7)
np.testing.assert_almost_equal(
fill_nan(a, method='Constant', default=8.0),
np.array([0.1, 0.2, 8.0, 0.4, 0.5]),
decimal=7)
class TestNdarrayWrite(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.ndarray_write` definition unit tests
methods.
"""
def test_ndarray_write(self):
"""
Tests :func:`colour.utilities.array.ndarray_write` definition.
"""
a = np.linspace(0, 1, 10)
a.setflags(write=False)
with self.assertRaises(ValueError):
a += 1
with ndarray_write(a):
a += 1
class TestZeros(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.zeros` definition unit tests
methods.
"""
def test_zeros(self):
"""
Tests :func:`colour.utilities.array.zeros` definition.
"""
np.testing.assert_equal(zeros(3), np.zeros(3))
class TestOnes(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.ones` definition unit tests
methods.
"""
def test_ones(self):
"""
Tests :func:`colour.utilities.array.ones` definition.
"""
np.testing.assert_equal(ones(3), np.ones(3))
class TestFull(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.full` definition unit tests
methods.
"""
def test_full(self):
"""
Tests :func:`colour.utilities.array.full` definition.
"""
np.testing.assert_equal(full(3, 0.5), np.full(3, 0.5))
class TestIndexAlongLastAxis(unittest.TestCase):
"""
Defines :func:`colour.utilities.array.index_along_last_axis` definition
unit tests methods.
"""
def test_index_along_last_axis(self):
"""
Tests :func:`colour.utilities.array.index_along_last_axis` definition.
"""
a = np.array([[[[0.51090627, 0.86191718, 0.8687926],
[0.82738158, 0.80587656, 0.28285687]],
[[0.84085977, 0.03851814, 0.06057988],
[0.94659267, 0.79308353, 0.30870888]]],
[[[0.50758436, 0.24066455, 0.20199051],
[0.4507304, 0.84189245, 0.81160878]],
[[0.75421871, 0.88187494, 0.01612045],
[0.38777511, 0.58905552, 0.32970469]]],
[[[0.99285824, 0.738076, 0.0716432],
[0.35847844, 0.0367514, 0.18586322]],
[[0.72674561, 0.0822759, 0.9771182],
[0.90644279, 0.09689787, 0.93483977]]]])
indexes = np.array([[[0, 1], [0, 1]], [[2, 1], [2, 1]], [[2, 1],
[2, 0]]])
np.testing.assert_equal(
index_along_last_axis(a, indexes),
np.array([[[0.51090627, 0.80587656], [0.84085977, 0.79308353]],
[[0.20199051, 0.84189245], [0.01612045, 0.58905552]],
[[0.0716432, 0.0367514], [0.9771182, 0.90644279]]]))
def test_compare_with_argmin_argmax(self):
"""
Tests :func:`colour.utilities.array.index_along_last_axis` definition
by comparison with :func:`argmin` and :func:`argmax`.
"""
a = np.random.random((2, 3, 4, 5, 6, 7))
np.testing.assert_equal(
index_along_last_axis(a, np.argmin(a, axis=-1)), np.min(
a, axis=-1))
np.testing.assert_equal(
index_along_last_axis(a, np.argmax(a, axis=-1)), np.max(
a, axis=-1))
def test_exceptions(self):
"""
Tests :func:`colour.utilities.array.index_along_last_axis` definition
handling of invalid inputs.
"""
a = as_float_array([[11, 12], [21, 22]])
# Bad shape
with self.assertRaises(ValueError):
indexes = np.array([0])
index_along_last_axis(a, indexes)
# Indexes out of range
with self.assertRaises(IndexError):
indexes = np.array([123, 456])
index_along_last_axis(a, indexes)
# Non-integer indexes
with self.assertRaises(IndexError):
indexes = np.array([0., 0.])
index_along_last_axis(a, indexes)
if __name__ == '__main__':
unittest.main()
| 29.307078 | 79 | 0.526234 |
9eb1ac5aa7a6d105980b6dd2742d2749fce9a4e0 | 425 | py | Python | pos/webpos/migrations/0011_auto_20150706_2005.py | NonnEmilia/OpenGenfri | 7061957fb13ef824763922e1891cb72f7d51bb0f | [
"MIT"
] | null | null | null | pos/webpos/migrations/0011_auto_20150706_2005.py | NonnEmilia/OpenGenfri | 7061957fb13ef824763922e1891cb72f7d51bb0f | [
"MIT"
] | null | null | null | pos/webpos/migrations/0011_auto_20150706_2005.py | NonnEmilia/OpenGenfri | 7061957fb13ef824763922e1891cb72f7d51bb0f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('webpos', '0010_auto_20141230_1634'),
]
operations = [
migrations.AlterField(
model_name='item',
name='quantity',
field=models.PositiveSmallIntegerField(null=True, blank=True),
),
]
| 21.25 | 74 | 0.618824 |
05bf6c644a73b0151eb400f9eedfada28335db79 | 8,235 | py | Python | src/cocoa/toga_cocoa/widgets/internal/refresh.py | freespace/toga | 2ae96ddede34b5164b1be3d80a18aa87336f28f0 | [
"BSD-3-Clause"
] | 2 | 2022-01-08T11:18:20.000Z | 2022-01-26T13:41:34.000Z | src/cocoa/toga_cocoa/widgets/internal/refresh.py | freespace/toga | 2ae96ddede34b5164b1be3d80a18aa87336f28f0 | [
"BSD-3-Clause"
] | 4 | 2022-01-05T09:16:30.000Z | 2022-03-29T09:32:44.000Z | src/cocoa/toga_cocoa/widgets/internal/refresh.py | freespace/toga | 2ae96ddede34b5164b1be3d80a18aa87336f28f0 | [
"BSD-3-Clause"
] | 1 | 2022-01-05T08:56:59.000Z | 2022-01-05T08:56:59.000Z | from toga_cocoa.libs import (
SEL,
NSClipView,
NSEvent,
NSEventPhaseEnded,
NSLayoutAttributeCenterX,
NSLayoutAttributeCenterY,
NSLayoutAttributeHeight,
NSLayoutAttributeNotAnAttribute,
NSLayoutAttributeTop,
NSLayoutAttributeWidth,
NSLayoutConstraint,
NSLayoutRelationEqual,
NSMakePoint,
NSMakeRect,
NSNotificationCenter,
NSPoint,
NSProgressIndicator,
NSProgressIndicatorSpinningStyle,
NSRect,
NSScrollElasticityAllowed,
NSScrollView,
NSView,
NSViewBoundsDidChangeNotification,
ObjCInstance,
core_graphics,
kCGScrollEventUnitLine,
objc_method,
c_void_p,
send_super
)
HEADER_HEIGHT = 45.0
class RefreshableClipView(NSClipView):
@objc_method
def constrainScrollPoint_(self, proposedNewOrigin: NSPoint) -> NSPoint:
constrained = send_super(
__class__, self, 'constrainScrollPoint:', proposedNewOrigin,
restype=NSPoint, argtypes=[NSPoint]
)
if self.superview and self.superview.refreshTriggered:
return NSMakePoint(
constrained.x,
max(proposedNewOrigin.y, -self.superview.refreshView.frame.size.height)
)
return constrained
@objc_method
def documentRect(self) -> NSRect:
rect = send_super(__class__, self, 'documentRect', restype=NSRect, argtypes=[])
if self.superview and self.superview.refreshTriggered:
return NSMakeRect(
rect.origin.x, rect.origin.y - self.superview.refreshView.frame.size.height,
rect.size.width, rect.size.height + self.superview.refreshView.frame.size.height
)
return rect
class RefreshableScrollView(NSScrollView):
# Create Header View
@objc_method
def viewDidMoveToWindow(self) -> None:
self.refreshTriggered = False
self.isRefreshing = False
self.refreshView = None
self.refreshIndicator = None
self.createRefreshView()
@objc_method
def createContentView(self):
superClipView = ObjCInstance(send_super(__class__, self, 'contentView'))
if not isinstance(superClipView, RefreshableClipView):
# create new clipview
documentView = superClipView.documentView
clipView = RefreshableClipView.alloc().initWithFrame(superClipView.frame)
clipView.documentView = documentView
clipView.copiesOnScroll = False
clipView.drawsBackground = False
self.setContentView(clipView)
superClipView = ObjCInstance(send_super(__class__, self, 'contentView'))
return superClipView
@objc_method
def createRefreshView(self) -> None:
# delete old stuff if any
if self.refreshView:
self.refreshView.removeFromSuperview()
self.refreshView.release()
self.refreshView = None
self.verticalScrollElasticity = NSScrollElasticityAllowed
# create new content view
self.createContentView()
self.contentView.postsFrameChangedNotifications = True
self.contentView.postsBoundsChangedNotifications = True
NSNotificationCenter.defaultCenter.addObserver(
self,
selector=SEL('viewBoundsChanged:'),
name=NSViewBoundsDidChangeNotification,
object=self.contentView,
)
# Create view to hold the refresh widgets refreshview
contentRect = self.contentView.documentView.frame
self.refreshView = NSView.alloc().init()
self.refreshView.translatesAutoresizingMaskIntoConstraints = False
# Create spinner
self.refreshIndicator = NSProgressIndicator.alloc().init()
self.refreshIndicator.style = NSProgressIndicatorSpinningStyle
self.refreshIndicator.translatesAutoresizingMaskIntoConstraints = False
self.refreshIndicator.displayedWhenStopped = True
self.refreshIndicator.usesThreadedAnimation = True
self.refreshIndicator.indeterminate = True
self.refreshIndicator.bezeled = False
self.refreshIndicator.sizeToFit()
# Center the spinner in the header
self.refreshIndicator.setFrame(
NSMakeRect(
self.refreshView.bounds.size.width / 2 - self.refreshIndicator.frame.size.width / 2,
self.refreshView.bounds.size.height / 2 - self.refreshIndicator.frame.size.height / 2,
self.refreshIndicator.frame.size.width,
self.refreshIndicator.frame.size.height
)
)
# Put everything in place
self.refreshView.addSubview(self.refreshIndicator)
# self.refreshView.addSubview(self.refreshArrow)
self.contentView.addSubview(self.refreshView)
# set layout constraints
indicatorHCenter = NSLayoutConstraint.constraintWithItem_attribute_relatedBy_toItem_attribute_multiplier_constant_( # noqa: E501
self.refreshIndicator, NSLayoutAttributeCenterX,
NSLayoutRelationEqual,
self.refreshView, NSLayoutAttributeCenterX,
1.0, 0,
)
self.refreshView.addConstraint(indicatorHCenter)
indicatorVCenter = NSLayoutConstraint.constraintWithItem_attribute_relatedBy_toItem_attribute_multiplier_constant_( # noqa: E501
self.refreshIndicator, NSLayoutAttributeCenterY,
NSLayoutRelationEqual,
self.refreshView, NSLayoutAttributeCenterY,
1.0, 0,
)
self.refreshView.addConstraint(indicatorVCenter)
refreshWidth = NSLayoutConstraint.constraintWithItem_attribute_relatedBy_toItem_attribute_multiplier_constant_( # noqa: E501
self.refreshView, NSLayoutAttributeWidth,
NSLayoutRelationEqual,
self.contentView, NSLayoutAttributeWidth,
1.0, 0,
)
self.contentView.addConstraint(refreshWidth)
refreshHeight = NSLayoutConstraint.constraintWithItem_attribute_relatedBy_toItem_attribute_multiplier_constant_( # noqa: E501
self.refreshView, NSLayoutAttributeHeight,
NSLayoutRelationEqual,
None, NSLayoutAttributeNotAnAttribute,
1.0, HEADER_HEIGHT,
)
self.contentView.addConstraint(refreshHeight)
refreshHeight = NSLayoutConstraint.constraintWithItem_attribute_relatedBy_toItem_attribute_multiplier_constant_( # noqa: E501
self.refreshView, NSLayoutAttributeTop,
NSLayoutRelationEqual,
self.contentView, NSLayoutAttributeTop,
1.0, -HEADER_HEIGHT,
)
self.contentView.addConstraint(refreshHeight)
# Scroll to top
self.contentView.scrollToPoint(NSMakePoint(contentRect.origin.x, 0))
self.reflectScrolledClipView(self.contentView)
# Detecting scroll
@objc_method
def scrollWheel_(self, event) -> None:
if event.phase == NSEventPhaseEnded:
if self.refreshTriggered and not self.isRefreshing:
self.reload()
send_super(__class__, self, 'scrollWheel:', event, argtypes=[c_void_p])
@objc_method
def viewBoundsChanged_(self, note) -> None:
if self.isRefreshing:
return
if self.contentView.bounds.origin.y <= -self.refreshView.frame.size.height:
self.refreshTriggered = True
# Reload
@objc_method
def reload(self) -> None:
"""Start a reload, starting the reload spinner"""
self.isRefreshing = True
self.refreshIndicator.startAnimation(self)
self.interface.on_refresh(self.interface)
@objc_method
def finishedLoading(self):
"""Invoke to mark the end of a reload, stopping and hiding the reload spinner"""
self.isRefreshing = False
self.refreshTriggered = False
self.refreshIndicator.stopAnimation(self)
self.detailedlist.reloadData()
# Force a scroll event to make the scroll hide the reload
cgEvent = core_graphics.CGEventCreateScrollWheelEvent(None, kCGScrollEventUnitLine, 2, 1, 0)
scrollEvent = NSEvent.eventWithCGEvent(cgEvent)
self.scrollWheel(scrollEvent)
| 36.438053 | 137 | 0.681481 |
bee102b73f3323c3426cce65951f060a710d9be4 | 3,545 | py | Python | scikits/crab/base.py | 3cgg/crab | 81a51f03836291f561c2e1f7966ed11c5c5226d7 | [
"BSD-3-Clause"
] | null | null | null | scikits/crab/base.py | 3cgg/crab | 81a51f03836291f561c2e1f7966ed11c5c5226d7 | [
"BSD-3-Clause"
] | null | null | null | scikits/crab/base.py | 3cgg/crab | 81a51f03836291f561c2e1f7966ed11c5c5226d7 | [
"BSD-3-Clause"
] | 1 | 2021-12-20T07:42:11.000Z | 2021-12-20T07:42:11.000Z | #-*- coding:utf-8 -*-
"""
Base Recommender Models.
"""
# Authors: Marcel Caraciolo <marcel@muricoca.com>
# Bruno Melo <bruno@muricoca.com>
# License: BSD Style.
#from scikits.learn.base import BaseEstimator
from sklearn.base import BaseEstimator
class BaseRecommender(BaseEstimator):
"""
Base Class for Recommenders that suggest items for users.
Should not be used directly, use derived classes instead
Attributes
----------
model: DataModel
Defines the data model where data is fetched.
with_preference: bool
Defines if the recommendations come along with the
estimated preferences. (default=False)
"""
def __init__(self, model, with_preference=False):
self.model = model
self.with_preference = with_preference
def recommend(self, user_id, how_many, **params):
'''
Parameters
----------
user_id: int or string
User for which recommendations are to be computed.
how_many: int
Desired number of recommendations
rescorer: function, optional
Rescoring function to apply before final list of
recommendations.
Returns
---------
Return a list of recommended items, ordered from most strongly
recommended to least.
'''
raise NotImplementedError("BaseRecommender is an abstract class.")
def estimate_preference(self, user_id, item_id, **params):
'''
Parameters
----------
user_id: int or string
User for which recommendations are to be computed.
item_id: int or string
Item for which recommendations are to be computed.
Returns
-------
Return an estimated preference if the user has not expressed a
preference for the item, or else the user's actual preference for the
item. If a preference cannot be estimated, returns None.
'''
raise NotImplementedError("BaseRecommender is an abstract class.")
def all_other_items(self, user_id, **params):
'''
Parameters
----------
user_id: int or string
User for which recommendations are to be computed.
Returns
--------
Return all items in the `model` for which the user has not expressed
a preference and that could possibly be recommended to the user.
'''
raise NotImplementedError("BaseRecommender is an abstract class.")
def set_preference(self, user_id, item_id, value):
'''
Set a new preference of a user for a specific item with a certain
magnitude.
Parameters
----------
user_id: int or string
User for which the preference will be updated.
item_id: int or string
Item that will be updated.
value: The new magnitude for the preference of an item_id from a
user_id.
'''
self.model.set_preference(user_id, item_id, value)
def remove_preference(self, user_id, item_id):
'''
Remove a preference of a user for a specific item
Parameters
----------
user_id: int or string
User whose preference will be removed.
item_id: int or string
Item for which the preference of user_id will be removed.
'''
self.model.remove_preference(user_id, item_id)
| 29.297521 | 77 | 0.602821 |
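The BaseRecommender record above only defines the abstract interface. As an illustration of how the hooks fit together, here is a minimal sketch of a concrete subclass; the data-model accessors it calls (item_ids, has_preference, preference_value) are placeholders I made up, not the real crab DataModel API, and the import path simply mirrors this record's file location.

```python
from scikits.crab.base import BaseRecommender  # import path assumed from the record above


class ConstantRecommender(BaseRecommender):
    """Toy recommender: ranks unseen items by estimate_preference."""

    def recommend(self, user_id, how_many, **params):
        candidates = self.all_other_items(user_id)
        scored = sorted(
            ((item, self.estimate_preference(user_id, item)) for item in candidates),
            key=lambda pair: pair[1],
            reverse=True,
        )
        if self.with_preference:
            return scored[:how_many]
        return [item for item, _ in scored[:how_many]]

    def estimate_preference(self, user_id, item_id, **params):
        # preference_value() is a hypothetical data-model accessor.
        value = self.model.preference_value(user_id, item_id)
        return value if value is not None else 1.0

    def all_other_items(self, user_id, **params):
        # item_ids() and has_preference() are likewise hypothetical.
        return [i for i in self.model.item_ids()
                if not self.model.has_preference(user_id, i)]
```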
071e73a0f4e0f903cd9a8aff2d3a87febf36288f | 32,345 | py | Python | chc/app/CDictionary.py | Databean/CodeHawk-C | 98720753beb51e0bf5105f8f6838618292fbf55c | [
"MIT"
] | null | null | null | chc/app/CDictionary.py | Databean/CodeHawk-C | 98720753beb51e0bf5105f8f6838618292fbf55c | [
"MIT"
] | null | null | null | chc/app/CDictionary.py | Databean/CodeHawk-C | 98720753beb51e0bf5105f8f6838618292fbf55c | [
"MIT"
] | null | null | null | # ------------------------------------------------------------------------------
# CodeHawk C Analyzer
# Author: Henny Sipma
# ------------------------------------------------------------------------------
# The MIT License (MIT)
#
# Copyright (c) 2017-2020 Kestrel Technology LLC
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
from typing import cast, Any, Callable, Dict, List, Optional, Tuple, TYPE_CHECKING
import xml.etree.ElementTree as ET
import chc.app.CDictionaryRecord as CD
import chc.util.fileutil as UF
import chc.util.IndexedTable as IT
from chc.app.CAttributes import CAttrBase, CAttribute, CAttributes, CAttrInt, CAttrStr, CAttrCons
from chc.app.CConstExp import CConstBase, CConstInt, CConstStr, CConstChr, CConstReal
from chc.app.CExp import (
CExpBase,
CExpConst,
CExpSizeOf,
CExpSizeOfE,
CExpSizeOfStr,
CExpUnOp,
CExpBinOp,
CExpCastE,
CExpAddrOf,
CExpStartOf,
CExpLval,
)
from chc.app.CLHost import CLHostBase, CLHostVar, CLHostMem
from chc.app.CLval import CLval
from chc.app.COffsetExp import COffsetBase, CNoOffset, CFieldOffset, CIndexOffset
from chc.app.CTyp import (
CTypBase,
CFunArg,
CFunArgs,
CTypVoid,
CTypInt,
CTypFloat,
CTypComp,
CTypEnum,
CTypNamed,
CTypArray,
CTypPtr,
CTypFun,
CTypBuiltinVaargs,
)
from chc.app.CTypsig import CTypsigTSBase, CTypsigList
from chc.util.IndexedTable import IndexedTable, IndexedTableSuperclass, IndexedTableValue
from chc.util.StringIndexedTable import StringIndexedTable
if TYPE_CHECKING:
from chc.api.STerm import STerm, STNumConstant, STArgValue
from chc.app.CCompInfo import CCompInfo
from chc.app.CFileDictionary import CFileDictionary
from chc.app.CVarInfo import CVarInfo
class CDictionary(object):
"""Indexed types."""
def __init__(self) -> None:
self.attrparam_table: IndexedTable[CAttrBase] = IndexedTable("attrparam-table")
self.attribute_table: IndexedTable[CAttribute] = IndexedTable("attribute-table")
self.attributes_table: IndexedTable[CAttributes] = IndexedTable("attributes-table")
self.constant_table: IndexedTable[CConstBase] = IndexedTable("constant-table")
self.exp_table: IndexedTable[CExpBase] = IndexedTable("exp-table")
self.funarg_table: IndexedTable[CFunArg] = IndexedTable("funarg-table")
self.funargs_table: IndexedTable[CFunArgs] = IndexedTable("funargs-table")
self.lhost_table: IndexedTable[CLHostBase] = IndexedTable("lhost-table")
self.lval_table: IndexedTable[CLval] = IndexedTable("lval-table")
self.offset_table: IndexedTable[COffsetBase] = IndexedTable("offset-table")
self.typ_table: IndexedTable[CTypBase] = IndexedTable("typ-table")
self.typsig_table: IndexedTable[CTypsigTSBase] = IndexedTable("typsig-table")
self.typsiglist_table: IndexedTable[CTypsigList] = IndexedTable("typsiglist-table")
self.string_table = StringIndexedTable("string-table")
self.tables: List[Tuple[IndexedTableSuperclass, Callable[[ET.Element], None]]] = [
(self.attrparam_table, self._read_xml_attrparam_table),
(self.attribute_table, self._read_xml_attribute_table),
(self.attributes_table, self._read_xml_attributes_table),
(self.constant_table, self._read_xml_constant_table),
(self.exp_table, self._read_xml_exp_table),
(self.funarg_table, self._read_xml_funarg_table),
(self.funargs_table, self._read_xml_funargs_table),
(self.lhost_table, self._read_xml_lhost_table),
(self.lval_table, self._read_xml_lval_table),
(self.offset_table, self._read_xml_offset_table),
(self.typ_table, self._read_xml_typ_table),
(self.typsig_table, self._read_xml_typsig_table),
(self.typsiglist_table, self._read_xml_typsiglist_table),
]
self.string_tables: List[Tuple[IndexedTableSuperclass, Callable[[ET.Element], None]]] = [
(self.string_table, self._read_xml_string_table)
]
# --------------- Statistics ---------------------------------------------
def get_stats(self) -> str:
lines = []
for (t, _) in self.tables + self.string_tables:
if t.size() > 0:
lines.append(t.name.ljust(25) + str(t.size()).rjust(4))
return "\n".join(lines)
def get_table(self, n: str) -> Optional[IndexedTableSuperclass]:
return next(x[0] for x in (self.tables + self.string_tables) if x[0].name == (n + "-table"))
# create a count distribution for the objects in the table with name tname
# that satisfy the respective case predicates
def get_distribution(
self,
tname: str,
cases: Dict[str, Callable[[object], bool]],
) -> Dict[str, int]:
table = cast(Optional[IndexedTable[IndexedTableValue]], self.get_table(tname))
if table is None:
raise Exception("No table found for " + tname)
result = {}
for c in cases:
result[c] = len([v for v in table.values() if cases[c](v)])
return result
# -------------- Retrieve items from dictionary tables -------------------
def get_attrparam(self, ix: int) -> CAttrBase:
return self.attrparam_table.retrieve(ix)
def get_attribute(self, ix: int) -> CAttribute:
return self.attribute_table.retrieve(ix)
def get_attributes(self, ix: int) -> CAttributes:
return self.attributes_table.retrieve(ix)
def get_constant(self, ix: int) -> CConstBase:
return self.constant_table.retrieve(ix)
def get_funarg(self, ix: int) -> CFunArg:
return self.funarg_table.retrieve(ix)
def get_funargs(self, ix: int) -> CFunArgs:
return self.funargs_table.retrieve(ix)
def get_funargs_opt(self, ix: int) -> Optional[CFunArgs]:
return self.get_funargs(ix) if ix >= 0 else None
def get_lhost(self, ix: int) -> CLHostBase:
return self.lhost_table.retrieve(ix)
def get_lval(self, ix: int) -> CLval:
return self.lval_table.retrieve(ix)
def get_offset(self, ix: int) -> COffsetBase:
return self.offset_table.retrieve(ix)
def get_typ(self, ix: int) -> CTypBase:
return self.typ_table.retrieve(ix)
def get_exp(self, ix: int) -> CExpBase:
return self.exp_table.retrieve(ix)
def get_exp_opt(self, ix: int) -> Optional[CExpBase]:
return self.get_exp(ix) if ix >= 0 else None
def get_typsig(self, ix: int) -> CTypsigTSBase:
return self.typsig_table.retrieve(ix)
def get_typesig_list(self, ix: int) -> CTypsigList:
return self.typsiglist_table.retrieve(ix)
def get_string(self, ix: int) -> str:
return self.string_table.retrieve(ix)
# --------Provide read_xml/write_xml service for semantics files ----------
def read_xml_funargs(self, node: ET.Element, tag: str = "iargs") -> CFunArgs:
xml_value = node.get(tag)
if xml_value:
return self.get_funargs(int(xml_value))
else:
raise Exception('xml node was missing the tag "' + tag + '"')
def write_xml_exp(self, node: ET.Element, exp: CExpBase, tag: str = "iexp") -> None:
node.set(tag, str(self.index_exp(exp)))
def read_xml_exp(self, node: ET.Element, tag: str = "iexp") -> CExpBase:
xml_value = node.get(tag)
if xml_value:
return self.get_exp(int(xml_value))
else:
raise Exception('xml node was missing the tag "' + tag + '"')
def write_xml_exp_opt(
self,
node: ET.Element,
exp: Optional[CExpBase],
tag: str = "iexp",
) -> None:
if exp is None:
return
self.write_xml_exp(node, exp)
def read_xml_exp_opt(self, node: ET.Element, tag: str = "iexp") -> Optional[CExpBase]:
xml_tag = node.get(tag)
if xml_tag is None:
return None
else:
return self.get_exp_opt(int(xml_tag))
# ----------------------- Initialize dictionary from file ----------------
def initialize(self, xnode, force=False):
if xnode is None:
return
for (t, f) in self.tables + self.string_tables:
t.reset()
node = xnode.find(t.name)
if node is None:
raise Exception("Missing node `" + t.name + "`")
f(node)
# --------------- stubs, overridden in file/global dictionary -------------
def index_compinfo_key(self, compinfo: "CCompInfo", _: object) -> int:
return compinfo.get_ckey()
def index_varinfo_vid(self, vid: int, _: object) -> int:
return vid
def convert_ckey(self, ckey: int, fid: int = -1) -> int:
return ckey
# -------------------- Index items by category ---------------------------
def index_attrparam(self, a: CAttrBase) -> int:
if a.is_int():
def f_int(index: int, key: Tuple[str, str]) -> CAttrInt:
return CAttrInt(self, index, a.tags, a.args)
return self.attrparam_table.add(IT.get_key(a.tags, a.args), f_int)
if a.is_str():
def f_str(index: int, key: Tuple[str, str]) -> CAttrStr:
return CAttrStr(self, index, a.tags, a.args)
return self.attrparam_table.add(IT.get_key(a.tags, a.args), f_str)
if a.is_cons():
args = [self.index_attrparam(p) for p in cast(CAttrCons, a).get_params()]
def f_cons(index: int, key: Tuple[str, str]) -> CAttrCons:
return CAttrCons(self, index, a.tags, args)
return self.attrparam_table.add(IT.get_key(a.tags, a.args), f_cons)
raise Exception('No case yet for attrparam "' + str(a) + '"')
def index_attribute(self, a: CAttribute) -> int:
args = [self.index_attrparam(p) for p in a.get_params()]
def f(index: int, key: Tuple[str, str]) -> CAttribute:
return CAttribute(self, index, a.tags, args)
return self.attribute_table.add(IT.get_key(a.tags, a.args), f)
def index_attributes(self, aa: CAttributes) -> int:
args = [self.index_attribute(a) for a in aa.get_attributes()]
def f(index: int, key: Tuple[str, str]) -> CAttributes:
return CAttributes(self, index, aa.tags, args)
return self.attributes_table.add(IT.get_key(aa.tags, aa.args), f)
def index_constant(self, c: CConstBase) -> int: # TBF
if c.is_int():
def f_int(index: int, key: Tuple[str, str]) -> CConstInt:
return CConstInt(self, index, c.tags, c.args)
return self.constant_table.add(IT.get_key(c.tags, c.args), f_int)
if c.is_str():
args = [self.index_string(cast(CConstStr, c).get_string())]
def f_str(index: int, key: Tuple[str, str]) -> CConstStr:
return CConstStr(self, index, c.tags, args)
return self.constant_table.add(IT.get_key(c.tags, c.args), f_str)
if c.is_chr():
def f_chr(index: int, key: Tuple[str, str]) -> CConstChr:
return CConstChr(self, index, c.tags, c.args)
return self.constant_table.add(IT.get_key(c.tags, c.args), f_chr)
if c.is_real():
def f_real(index: int, key: Tuple[str, str]) -> CConstReal:
return CConstReal(self, index, c.tags, c.args)
return self.constant_table.add(IT.get_key(c.tags, c.args), f_real)
raise Exception('No case yet for const "' + str(c) + '"')
def mk_exp_index(self, tags: List[str], args: List[int]) -> int:
def f(index: int, key: Tuple[str, str]) -> CExpBase:
return CD.construct_c_dictionary_record(self, index, tags, args, CExpBase)
return self.exp_table.add(IT.get_key(tags, args), f)
def mk_constant_index(self, tags: List[str], args: List[int]) -> int:
def f(index: int, key: Tuple[str, str]) -> CConstBase:
return CD.construct_c_dictionary_record(self, index, tags, args, CConstBase)
return self.constant_table.add(IT.get_key(tags, args), f)
def mk_typ_index(self, tags: List[str], args: List[int]) -> int:
def f(index: int, key: Tuple[str, str]) -> CTypBase:
return CD.construct_c_dictionary_record(self, index, tags, args, CTypBase)
return self.typ_table.add(IT.get_key(tags, args), f)
def mk_lhost_index(self, tags: List[str], args: List[int]) -> int:
def f(index: int, key: Tuple[str, str]) -> CLHostBase:
return CD.construct_c_dictionary_record(self, index, tags, args, CLHostBase)
return self.lhost_table.add(IT.get_key(tags, args), f)
def mk_lval_index(self, tags: List[str], args: List[int]) -> int:
def f(index: int, key: Tuple[str, str]) -> CLval:
return CLval(self, index, tags, args)
return self.lval_table.add(IT.get_key(tags, args), f)
def varinfo_to_exp_index(self, vinfo: "CVarInfo") -> int:
lhostix = self.mk_lhost_index(["var", vinfo.vname], [vinfo.get_real_vid()])
offsetix = self.mk_offset_index(["n"], [])
lvalix = self.mk_lval_index([], [lhostix, offsetix])
return self.mk_exp_index(["lval"], [lvalix])
def s_term_to_exp_index(self, t: "STerm", subst: Dict[Any, Any] = {}, fid: int = -1) -> int:
"""Create exp index from interface s_term"""
if t.is_return_value():
if "return" in subst:
return self.index_exp(subst["return"])
else:
raise Exception("Error in index_s_term: no return found")
if t.is_num_constant():
c = cast("STNumConstant", t).get_constant()
ctags = ["int", str(c), "iint"]
tags = ["const"]
args = [self.mk_constant_index(ctags, [])]
return self.mk_exp_index(tags, args)
if t.is_arg_value():
par = cast("STArgValue", t).get_parameter()
if par.is_global():
gname = par.get_name()
if gname in subst:
return self.index_exp(subst[gname])
else:
raise Exception(
"Error in index_s_term: global variable " + gname + " not found"
)
raise Exception("cdict missing:index_s_term: " + t.tags[0])
def s_term_bool_expr_to_exp_index(
self,
op: str,
t1: "STerm",
t2: "STerm",
subst: Dict[Any, Any] = {},
) -> int:
"""Create exp index from interface s_term expression"""
typtags = ["tint", "ibool"]
typix = self.mk_typ_index(typtags, [])
tags = ["binop", op]
args = [
self.s_term_to_exp_index(t1, subst),
self.s_term_to_exp_index(t2, subst),
typix,
]
return self.mk_exp_index(tags, args)
def index_exp(self, e: CExpBase, subst: Dict[Any, Any] = {}, fid: int = -1) -> int: # TBF
if e.is_constant():
args = [self.index_constant(cast(CExpConst, e).get_constant())]
def f_cexpconst(index: int, key: object) -> CExpConst:
return CExpConst(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpconst)
if e.is_sizeof():
args = [self.index_typ(cast(CExpSizeOf, e).get_type())]
def f_cexpsizeof(index: int, key: object) -> CExpSizeOf:
return CExpSizeOf(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpsizeof)
if e.is_sizeofe():
args = [self.index_exp(cast(CExpSizeOfE, e).get_exp(), subst=subst, fid=fid)]
def f_cexpsizeofe(index: int, key: object) -> CExpSizeOfE:
return CExpSizeOfE(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpsizeofe)
if e.is_sizeofstr():
args = [self.index_string(cast(CExpSizeOfStr, e).get_string())]
def f_cexpsizeofstr(index: int, key: object) -> CExpSizeOfStr:
return CExpSizeOfStr(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpsizeofstr)
if e.is_unop():
args = [
self.index_exp(cast(CExpUnOp, e).get_exp(), subst=subst, fid=fid),
self.index_typ(cast(CExpUnOp, e).get_type()),
]
def f_cexpunop(index: int, key: object) -> CExpUnOp:
return CExpUnOp(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpunop)
if e.is_binop():
args = [
self.index_exp(cast(CExpBinOp, e).get_exp1(), subst=subst, fid=fid),
self.index_exp(cast(CExpBinOp, e).get_exp2(), subst=subst, fid=fid),
self.index_typ(cast(CExpBinOp, e).get_type()),
]
def f_cexpbinop(index: int, key: object) -> CExpBinOp:
return CExpBinOp(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpbinop)
if e.is_caste():
args = [
self.index_typ(cast(CExpCastE, e).get_type()),
self.index_exp(cast(CExpCastE, e).get_exp(), subst=subst, fid=fid),
]
def f(index: int, key: object) -> CExpCastE:
return CExpCastE(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f)
if e.is_addrof():
args = [self.index_lval(cast(CExpAddrOf, e).get_lval(), subst=subst, fid=fid)]
def f_cexpaddrof(index: int, key: object) -> CExpAddrOf:
return CExpAddrOf(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpaddrof)
if e.is_startof():
args = [self.index_lval(cast(CExpStartOf, e).get_lval(), subst=subst, fid=fid)]
def f_cexpstartof(index: int, key: object) -> CExpStartOf:
return CExpStartOf(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexpstartof)
if e.is_lval():
args = [self.index_lval(cast(CExpLval, e).get_lval(), subst=subst, fid=fid)]
def f_cexplval(index: int, key: object) -> CExpLval:
return CExpLval(self, index, e.tags, args)
return self.exp_table.add(IT.get_key(e.tags, args), f_cexplval)
raise Exception("cdict:no case yet for exp " + str(e))
def index_funarg(self, funarg: CFunArg) -> int:
tags: List[str] = [funarg.get_name()]
args: List[int] = [self.index_typ(funarg.get_type())]
def f(index: int, key: Tuple[str, str]) -> CFunArg:
return CFunArg(self, index, tags, args)
return self.funarg_table.add(IT.get_key(tags, args), f)
def index_funargs_opt(self, opt_funargs: Optional[CFunArgs]) -> Optional[int]:
if opt_funargs is None:
return None
tags: List[str] = []
args = [self.index_funarg(f) for f in opt_funargs.get_args()]
def f(index: int, key: Tuple[str, str]) -> CFunArgs:
return CFunArgs(self, index, tags, args)
return self.funargs_table.add(IT.get_key(tags, args), f)
def index_lhost(self, h: CLHostBase, subst: Dict[Any, Any] = {}, fid: int = -1) -> int:
if h.is_var():
args = [self.index_varinfo_vid(cast(CLHostVar, h).get_vid(), fid)]
def f_clhostvar(index: int, key: object) -> CLHostVar:
return CLHostVar(self, index, h.tags, args)
return self.lhost_table.add(IT.get_key(h.tags, args), f_clhostvar)
if h.is_mem():
args = [self.index_exp(cast(CLHostMem, h).get_exp(), subst=subst, fid=fid)]
def f(index: int, key: object) -> CLHostMem:
return CLHostMem(self, index, h.tags, args)
return self.lhost_table.add(IT.get_key(h.tags, args), f)
raise Exception("Unknown type of lhost: \"" + str(h) + "\"")
def index_lval(self, lval: CLval, subst: Dict[Any, Any] = {}, fid: int = -1) -> int:
args = [
self.index_lhost(lval.get_lhost(), subst=subst, fid=fid),
self.index_offset(lval.get_offset()),
]
def f(index: int, key: object) -> CLval:
return CLval(self, index, [], args)
return self.lval_table.add(IT.get_key([], args), f)
def mk_offset_index(self, tags: List[str], args: List[int]) -> int:
def f(index: int, key: Tuple[str, str]) -> COffsetBase:
return CD.construct_c_dictionary_record(self, index, tags, args, COffsetBase)
return self.offset_table.add(IT.get_key(tags, args), f)
def index_offset(self, o: COffsetBase, fid: int = -1) -> int:
if not o.has_offset():
def f_no_offset(index: int, key: Tuple[str, str]) -> CNoOffset:
return CNoOffset(self, index, o.tags, o.args)
return self.offset_table.add(IT.get_key(o.tags, o.args), f_no_offset)
if o.is_field():
ckey = self.convert_ckey(cast(CFieldOffset, o).get_ckey(), cast(Any, o.cd).cfile.index)
args = [ckey, self.index_offset(cast(CFieldOffset, o).get_offset(), fid)]
def f_field(index: int, key: Tuple[str, str]) -> CFieldOffset:
return CFieldOffset(self, index, o.tags, args)
return self.offset_table.add(IT.get_key(o.tags, args), f_field)
if o.is_index():
args = [
self.index_exp(cast(CIndexOffset, o).get_index_exp()),
self.index_offset(cast(CIndexOffset, o).get_offset(), fid),
]
def f_index(index: int, key: Tuple[str, str]) -> CIndexOffset:
return CIndexOffset(self, index, o.tags, args)
return self.offset_table.add(IT.get_key(o.tags, args), f_index)
raise UF.CHError("cdict: no case yet for " + str(o))
def mk_typ(self, tags: List[str], args: List[int]) -> int:
def f(index: int, key: Tuple[str, str]) -> CTypBase:
return CD.construct_c_dictionary_record(self, index, tags, args, CTypBase)
return self.typ_table.add(IT.get_key(tags, args), f)
def index_typ(self, t: CTypBase) -> int: # TBF
# omit attributes argument if there are no attributes
def ia(attrs: CAttributes) -> List[int]:
return [] if len(attrs.get_attributes()) == 0 else [self.index_attributes(attrs)]
if t.is_void():
tags = ["tvoid"]
args = ia(t.get_attributes())
def f_void(index: int, key: Tuple[str, str]) -> CTypVoid:
return CTypVoid(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_void)
elif t.is_int():
tags = ["tint", cast(CTypInt, t).get_kind()]
args = ia(t.get_attributes())
def f_int(index: int, key: Tuple[str, str]) -> CTypInt:
return CTypInt(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_int)
elif t.is_float():
tags = ["tfloat", cast(CTypFloat, t).get_kind()]
args = ia(t.get_attributes())
def f_float(index: int, key: Tuple[str, str]) -> CTypFloat:
return CTypFloat(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_float)
elif t.is_pointer():
tags = ["tptr"]
args = [self.index_typ(cast(CTypPtr, t).get_pointedto_type())] + ia(t.get_attributes())
def f_ptr(index: int, key: Tuple[str, str]) -> CTypPtr:
return CTypPtr(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_ptr)
elif t.is_named_type():
tags = ["tnamed", cast(CTypNamed, t).get_name()]
args = ia(t.get_attributes())
def f_named(index: int, key: Tuple[str, str]) -> CTypNamed:
return CTypNamed(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_named)
elif t.is_comp():
tags = ["tcomp"]
ckey = self.index_compinfo_key(
cast(CTypComp, t).get_struct(), cast(Any, t.cd).cfile.index
)
args = [ckey] + ia(t.get_attributes())
def f_comp(index: int, key: Tuple[str, str]) -> CTypComp:
return CTypComp(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_comp)
elif t.is_enum():
tags = t.tags
args = ia(t.get_attributes())
def f_enum(index: int, key: Tuple[str, str]) -> CTypEnum:
return CTypEnum(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_enum)
elif t.is_array():
tags = ["tarray"]
arraysize = (
self.index_exp(cast(CTypArray, t).get_array_size_expr())
if cast(CTypArray, t).has_array_size_expr()
else (-1)
)
args = [self.index_typ(cast(CTypArray, t).get_array_basetype()), arraysize] + ia(
t.get_attributes()
)
def f_array(index: int, key: Tuple[str, str]) -> CTypArray:
return CTypArray(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_array)
elif t.is_function():
index_funargs_opt = self.index_funargs_opt(cast(CTypFun, t).get_args())
ixfunargs = -1 if index_funargs_opt is None else index_funargs_opt
tags = ["tfun"]
args = [
self.index_typ(cast(CTypFun, t).get_return_type()),
ixfunargs,
(1 if cast(CTypFun, t).is_vararg() else 0),
] + ia(t.get_attributes())
def f_fun(index: int, key: Tuple[str, str]) -> CTypFun:
return CTypFun(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_fun)
elif t.is_builtin_vaargs():
tags = ["tbuiltinvaargs"]
args = ia(t.get_attributes())
def f_builtin_varargs(index: int, key: Tuple[str, str]) -> CTypBuiltinVaargs:
return CTypBuiltinVaargs(self, index, tags, args)
return self.typ_table.add(IT.get_key(tags, args), f_builtin_varargs)
else:
print("cdict: no case yet for " + str(t))
exit(1)
def index_typsig(self, t: object) -> None:
return None # TBD
def index_typsiglist(self, t: object) -> None:
return None # TBD
def index_string(self, s: str) -> int:
return self.string_table.add(s)
def write_xml(self, node: ET.Element) -> None:
def f(n: ET.Element, r: Any) -> None:
r.write_xml(n)
for (t, _) in self.tables:
tnode = ET.Element(t.name)
cast(IndexedTable[IndexedTableValue], t).write_xml(tnode, f)
node.append(tnode)
for (t, _) in self.string_tables:
tnode = ET.Element(t.name)
cast(StringIndexedTable, t).write_xml(tnode)
node.append(tnode)
def __str__(self) -> str:
lines = []
for (t, _) in self.tables:
if t.size() > 0:
lines.append(str(t))
return "\n".join(lines)
def _read_xml_attrparam_table(self, txnode: ET.Element) -> None:
def get_value(n: ET.Element) -> CAttrBase:
return CD.construct_c_dictionary_record(*((self,) + IT.get_rep(n)), CAttrBase)
self.attrparam_table.read_xml(txnode, "n", get_value)
def _read_xml_attribute_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> CAttribute:
rep = IT.get_rep(node)
args = (self,) + rep
return CAttribute(*args)
self.attribute_table.read_xml(txnode, "n", get_value)
def _read_xml_attributes_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> CAttributes:
rep = IT.get_rep(node)
args = (self,) + rep
return CAttributes(*args)
self.attributes_table.read_xml(txnode, "n", get_value)
def _read_xml_constant_table(self, txnode: ET.Element) -> None:
def get_value(n: ET.Element) -> CConstBase:
return CD.construct_c_dictionary_record(*((self,) + IT.get_rep(n)), CConstBase)
self.constant_table.read_xml(txnode, "n", get_value)
def _read_xml_exp_table(self, txnode: ET.Element) -> None:
def get_value(n: ET.Element) -> CExpBase:
return CD.construct_c_dictionary_record(*((self,) + IT.get_rep(n)), CExpBase)
self.exp_table.read_xml(txnode, "n", get_value)
def _read_xml_funarg_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> CFunArg:
rep = IT.get_rep(node)
args = (self,) + rep
return CFunArg(*args)
self.funarg_table.read_xml(txnode, "n", get_value)
def _read_xml_funargs_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> CFunArgs:
rep = IT.get_rep(node)
args = (self,) + rep
return CFunArgs(*args)
self.funargs_table.read_xml(txnode, "n", get_value)
def _read_xml_lhost_table(self, txnode: ET.Element) -> None:
def get_value(n: ET.Element) -> CLHostBase:
return CD.construct_c_dictionary_record(*((self,) + IT.get_rep(n)), CLHostBase)
self.lhost_table.read_xml(txnode, "n", get_value)
def _read_xml_lval_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> CLval:
rep = IT.get_rep(node)
args = (self,) + rep
return CLval(*args)
self.lval_table.read_xml(txnode, "n", get_value)
def _read_xml_offset_table(self, txnode: ET.Element) -> None:
def get_value(n: ET.Element) -> COffsetBase:
return CD.construct_c_dictionary_record(*((self,) + IT.get_rep(n)), COffsetBase)
self.offset_table.read_xml(txnode, "n", get_value)
def _read_xml_typ_table(self, txnode: ET.Element) -> None:
def get_value(n: ET.Element) -> CTypBase:
return CD.construct_c_dictionary_record(*((self,) + IT.get_rep(n)), CTypBase)
self.typ_table.read_xml(txnode, "n", get_value)
def _read_xml_typsig_table(self, txnode: ET.Element) -> None:
def get_value(n: ET.Element) -> CTypsigTSBase:
return CD.construct_c_dictionary_record(*((self,) + IT.get_rep(n)), CTypsigTSBase)
self.typsig_table.read_xml(txnode, "n", get_value)
def _read_xml_typsiglist_table(self, txnode: ET.Element) -> None:
def get_value(node: ET.Element) -> CTypsigList:
rep = IT.get_rep(node)
args = (self,) + rep
return CTypsigList(*args)
self.typsiglist_table.read_xml(txnode, "n", get_value)
def _read_xml_string_table(self, txnode: ET.Element) -> None:
self.string_table.read_xml(txnode)
| 40.685535 | 100 | 0.601608 |
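A small, hedged sketch of how the indexing tables above behave, exercising only the string table and the statistics helper; it assumes the chc package from this record is importable and that a CDictionary can be instantiated on its own, outside a full analysis run.

```python
from chc.app.CDictionary import CDictionary  # import path taken from the record above

cdict = CDictionary()

# index_string() adds the value to the string table and returns its index;
# get_string() retrieves it again.
ix = cdict.index_string("argv")
assert cdict.get_string(ix) == "argv"

# get_stats() reports one line per non-empty table with its entry count.
print(cdict.get_stats())
```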
fa7f8bf6b5630522b83f99f2a37422c8bf7af9d5 | 765 | py | Python | src/ch2_http_services/githib_api_from_python.py | OblongCheese/ConsumingServicesWithPython | a8057720f110c13a5f1e1256e4cc5d5f14386d15 | [
"MIT"
] | 89 | 2017-01-27T22:38:58.000Z | 2021-12-20T14:20:12.000Z | src/ch2_http_services/github_api_from_python.py | imbi7py/consuming_services_python_demos | de2359cc980b283df40ed79364df2651c493173e | [
"MIT"
] | 15 | 2017-02-22T17:51:10.000Z | 2021-06-25T19:02:52.000Z | src/ch2_http_services/github_api_from_python.py | imbi7py/consuming_services_python_demos | de2359cc980b283df40ed79364df2651c493173e | [
"MIT"
] | 47 | 2017-02-16T15:21:05.000Z | 2021-12-20T14:20:15.000Z | import requests
def main():
# mikeckennedy
# consuming_services_python_demos
user, repo = get_repo_info()
url = 'https://api.github.com/repos/{}/{}'.format(
user, repo
)
resp = requests.get(url)
if resp.status_code != 200:
print("Error accessing repo: {}".format(resp.status_code))
return
repo_data = resp.json()
clone = repo_data.get('clone_url', 'ERROR: NO DATA')
print("To clone {}'s repo named {}".format(user, repo))
print("The command is: ")
print()
print("git clone {}".format(clone))
def get_repo_info():
user = input("What is the username? ").strip()
repo = input("What is the repo name? ").strip()
return user, repo
if __name__ == '__main__':
main()
| 20.131579 | 66 | 0.603922 |
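The script in the record above reads the user and repository interactively. A non-interactive variant might look like the sketch below; the request timeout, the Accept header, and raise_for_status() are my additions for robustness, not part of the original example.

```python
import requests


def clone_url(user: str, repo: str) -> str:
    # Same GitHub endpoint as in the record above, parameterized.
    url = 'https://api.github.com/repos/{}/{}'.format(user, repo)
    resp = requests.get(
        url,
        headers={'Accept': 'application/vnd.github+json'},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json().get('clone_url', 'ERROR: NO DATA')


if __name__ == '__main__':
    # Repository taken from the comments in the original script.
    print(clone_url('mikeckennedy', 'consuming_services_python_demos'))
```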
67659bddaf9fe77098282411ba061db90e1a5da7 | 68 | py | Python | examples/02-on-board-components/01-led-blink/benchlib-sd/bench.py | fragmuffin/upytester | 09b213ddcadff27ab72715024a3500a3569640ef | [
"MIT"
] | 4 | 2019-02-04T04:47:20.000Z | 2021-12-08T02:37:08.000Z | examples/02-on-board-components/01-led-blink/benchlib-sd/bench.py | fragmuffin/upytester | 09b213ddcadff27ab72715024a3500a3569640ef | [
"MIT"
] | 2 | 2019-04-22T22:09:24.000Z | 2020-09-08T04:46:38.000Z | examples/02-on-board-components/01-led-blink/benchlib-sd/bench.py | fragmuffin/upytester | 09b213ddcadff27ab72715024a3500a3569640ef | [
"MIT"
] | null | null | null | # import all libraries that define an @instruction
import customled
| 22.666667 | 50 | 0.823529 |
b74787c2f6e6157730bd582bb734ad1a12fec323 | 807 | py | Python | utilities/utilities.py | nissant/dgi-portfolio-tracker | 7c50bf622ba580a474e6183f886bca43b061b2ba | [
"MIT"
] | null | null | null | utilities/utilities.py | nissant/dgi-portfolio-tracker | 7c50bf622ba580a474e6183f886bca43b061b2ba | [
"MIT"
] | null | null | null | utilities/utilities.py | nissant/dgi-portfolio-tracker | 7c50bf622ba580a474e6183f886bca43b061b2ba | [
"MIT"
] | null | null | null | from datetime import datetime
def convert_sqlalchemy_list_to_dict_list(obj_list):
dict_list = []
for row in obj_list:
row_as_dict = {column: str(getattr(row, column)) for column in row.__table__.c.keys()}
dict_list.append(row_as_dict)
return dict_list
def set_records_update_time(db_records: list, created=False):
time_stamp = datetime.now()
for record in db_records:
if created:
record.created_at = time_stamp
record.last_updated = time_stamp
def insert_dictionary(conn, table_name, object_dict):
qmarks = ', '.join('?' * len(object_dict))
keys = object_dict.keys()
labels = ', '.join(keys)
qry = "Insert Into %s (%s) Values (%s)" % (table_name, labels, qmarks)
conn.execute(qry, [object_dict[key] for key in keys])
| 31.038462 | 94 | 0.67658 |
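insert_dictionary() above builds a qmark-style parameterized INSERT, so it works with any DB-API connection that accepts '?' placeholders, such as the standard-library sqlite3 module. A sketch follows; the table and columns are invented for illustration, and the import path mirrors the record's file location.

```python
import sqlite3

from utilities.utilities import insert_dictionary  # assumed import path

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE holdings (ticker TEXT, shares INTEGER)")

insert_dictionary(conn, "holdings", {"ticker": "VTI", "shares": 10})
conn.commit()

print(conn.execute("SELECT * FROM holdings").fetchall())  # [('VTI', 10)]
```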
93dc4c9c3b8c1ade106e731315c8d9dbe64a1e74 | 216 | py | Python | api/api/db.py | rikonor/vanguard-api | 5462b2327cacad68bedb945dc323d534ebbdfeee | [
"MIT"
] | 61 | 2016-04-19T00:14:37.000Z | 2022-03-14T03:49:05.000Z | api/api/db.py | swordfish6975/vanguard-api | 46452e1ffbe175fa82c41f87d9a299be95a2008b | [
"MIT"
] | 4 | 2017-07-10T01:30:33.000Z | 2018-08-07T05:07:54.000Z | api/api/db.py | swordfish6975/vanguard-api | 46452e1ffbe175fa82c41f87d9a299be95a2008b | [
"MIT"
] | 17 | 2017-07-10T23:26:27.000Z | 2022-02-25T01:46:21.000Z | import os
from pymongo import MongoClient
MONGO_URL = os.environ.get("MONGO_URL")
if MONGO_URL is None:
raise EnvironmentError("Please provide a MONGO_URL")
CLIENT = MongoClient(MONGO_URL)
DB = CLIENT.vanguard
| 21.6 | 56 | 0.782407 |
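db.py above requires MONGO_URL to be set before the module is imported, otherwise it raises EnvironmentError. A usage sketch; the connection string, the import path, and the 'funds' collection are placeholders, not taken from the record.

```python
import os

# Must be set before the import below, since db.py reads it at import time.
os.environ.setdefault("MONGO_URL", "mongodb://localhost:27017")

from api.api.db import DB  # import path assumed from the record's file location

# DB is the 'vanguard' database; the collection name here is hypothetical.
doc = DB.funds.find_one({"symbol": "VTI"})
print(doc)
```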
fa70a6a489a1d0edcce0e8780913795cddad9ee8 | 4,490 | py | Python | mindspore/python/mindspore/ops/_op_impl/_custom_op/bessel_k0e.py | httpsgithu/mindspore | c29d6bb764e233b427319cb89ba79e420f1e2c64 | [
"Apache-2.0"
] | 1 | 2022-02-23T09:13:43.000Z | 2022-02-23T09:13:43.000Z | mindspore/python/mindspore/ops/_op_impl/_custom_op/bessel_k0e.py | 949144093/mindspore | c29d6bb764e233b427319cb89ba79e420f1e2c64 | [
"Apache-2.0"
] | null | null | null | mindspore/python/mindspore/ops/_op_impl/_custom_op/bessel_k0e.py | 949144093/mindspore | c29d6bb764e233b427319cb89ba79e420f1e2c64 | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""BesseK0e op"""
from tbe import dsl
from te import tvm
from te.platform.fusion_manager import fusion_manager
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
from .bessel_k0 import A, AA, B, BB, MAXNUM, TWO
bessel_k0e_op_info = TBERegOp("BesselK0e") \
.fusion_type("ELEMWISE") \
.async_flag(False) \
.binfile_name("bessel_k0e.so") \
.compute_cost(10) \
.kernel_name("bessel_k0e") \
.partial_flag(True) \
.op_pattern("formatAgnostic") \
.input(0, "x", False, "required", "all") \
.output(0, "y", False, "required", "all") \
.dtype_format(DataType.F16_None, DataType.F16_None) \
.dtype_format(DataType.F32_None, DataType.F32_None) \
.get_op_info()
@op_info_register(bessel_k0e_op_info)
def _bessel_k0e_tbe():
"""BesselK0e TBE register"""
return
def chebevl(x, n, coef, shape, dtype):
"""chebevl"""
broad_coef = dsl.broadcast(coef[0], shape, dtype)
broad_zero = dsl.broadcast(0, shape, dtype)
none_signal = None
for i in range(1, n):
none_signal = broad_zero
broad_zero = broad_coef
coef_i = dsl.broadcast(coef[i], shape, dtype)
broad_coef = dsl.vsub(dsl.vadd(dsl.vmul(x, broad_zero), coef_i), none_signal)
return dsl.vmuls(dsl.vsub(broad_coef, none_signal), 0.5)
def bessel_i0_compute(input_x):
"""bessel_i0_compute"""
dtype = input_x.dtype
shape = input_x.shape
k0e_has_improve_precision = False
if dtype != "float32":
input_x = dsl.cast_to(input_x, "float32")
dtype = "float32"
k0e_has_improve_precision = True
y = dsl.vabs(input_x)
y_le_eight_in = dsl.vmuls(y, 0.5)
y_le_eight_in = dsl.vadds(y_le_eight_in, -2.0)
y_le_eight = chebevl(y_le_eight_in, 30, AA, shape, dtype)
y_gt_eight_in = dsl.vadds(dsl.vmuls(dsl.vrec(y), 32.0), -2.0)
y_gt_eight = chebevl(y_gt_eight_in, 25, BB, shape, dtype)
y_gt_eight = dsl.vmul(y_gt_eight, dsl.vrsqrt(y))
res = dsl.vcmpsel(y, 8.0, 'le', y_le_eight, y_gt_eight)
res = dsl.vmul(res, dsl.vexp(y))
if k0e_has_improve_precision:
res = dsl.cast_to(res, "float16")
return res
@fusion_manager.register("bessel_k0e")
def bessel_k0e_compute(input_x, output_y, kernel_name="bessel_k0e"):
"""bessel_k0e_compute"""
shape = input_x.shape
dtype = input_x.dtype
has_improve_precision = False
if dtype != "float32":
input_x = dsl.cast_to(input_x, "float32")
dtype = "float32"
has_improve_precision = True
x_le_two = chebevl(dsl.vadds(dsl.vmul(input_x, input_x), -2.0), 10, A, shape, dtype)
x_le_two = dsl.vadd(dsl.vmul(bessel_i0_compute(input_x), dsl.vmuls(dsl.vlog(dsl.vmuls(input_x, 0.5)), -1.0)),
x_le_two)
x_le_two = dsl.vmul(dsl.vexp(input_x), x_le_two)
x_le_two = dsl.vcmpsel(input_x, 0.0, 'le', MAXNUM, x_le_two)
x_gt_two_in = dsl.vadds(dsl.vmuls(dsl.vrec(input_x), 8.0), -2.0)
x_gt_two = dsl.vmul(dsl.vexp(dsl.vmuls(input_x, -1.0)), chebevl(x_gt_two_in, 25, B, shape, dtype))
x_gt_two = dsl.vmul(x_gt_two, dsl.vrsqrt(input_x))
res = dsl.vcmpsel(input_x, TWO, 'le', x_le_two, x_gt_two)
if has_improve_precision:
res = dsl.cast_to(res, "float16")
return res
def bessel_k0e(x, output, kernel_name="bessel_k0e"):
"""bessel_k0e"""
data_x = tvm.placeholder(x.get("shape"), dtype=x.get("dtype"), name="data_x")
res = bessel_k0e_compute(data_x, output, kernel_name)
# auto schedule
with tvm.target.cce():
schedule = dsl.auto_schedule(res)
# operator build
config = {"name": kernel_name,
"tensor_list": [data_x, res]}
dsl.build(schedule, config)
| 34.806202 | 116 | 0.638307 |
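For reference (my summary, not text from the record): the quantity this TBE kernel approximates is the exponentially scaled modified Bessel function of the second kind of order zero, evaluated with Cephes-style Chebyshev expansions split at x = 2 via the chebevl helper above.

```latex
\operatorname{BesselK0e}(x) \;=\; e^{x}\,K_{0}(x),
\qquad
K_{0}(x) \;=\; \int_{0}^{\infty} e^{-x\cosh t}\,\mathrm{d}t, \quad x > 0 .
```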
871a2a80ed1942db0d167c2f7cb9c5dff9bcd76b | 568 | py | Python | test/tensorflow/iris-classifier/predictor.py | ourobouros/cortex | 1b3aaf909816b93f6a6e3edd0da8c10891e05be9 | [
"Apache-2.0"
] | 1 | 2022-02-23T08:45:19.000Z | 2022-02-23T08:45:19.000Z | test/tensorflow/iris-classifier/predictor.py | ourobouros/cortex | 1b3aaf909816b93f6a6e3edd0da8c10891e05be9 | [
"Apache-2.0"
] | null | null | null | test/tensorflow/iris-classifier/predictor.py | ourobouros/cortex | 1b3aaf909816b93f6a6e3edd0da8c10891e05be9 | [
"Apache-2.0"
] | null | null | null | # WARNING: you are on the master branch; please refer to examples on the branch corresponding to your `cortex version` (e.g. for version 0.24.*, run `git checkout -b 0.24` or switch to the `0.24` branch on GitHub)
labels = ["setosa", "versicolor", "virginica"]
class TensorFlowPredictor:
def __init__(self, tensorflow_client, config):
self.client = tensorflow_client
def predict(self, payload):
prediction = self.client.predict(payload)
predicted_class_id = int(prediction["class_ids"][0])
return labels[predicted_class_id]
| 40.571429 | 213 | 0.709507 |
9565e8fe9979221baeefad3d18dfe4174aeaf666 | 9,462 | py | Python | tests/test_parse/test_tree.py | xingjianleng/cogent3 | a85d08a948f6903e4e04eea8292f588cc0b4907e | [
"BSD-3-Clause"
] | null | null | null | tests/test_parse/test_tree.py | xingjianleng/cogent3 | a85d08a948f6903e4e04eea8292f588cc0b4907e | [
"BSD-3-Clause"
] | null | null | null | tests/test_parse/test_tree.py | xingjianleng/cogent3 | a85d08a948f6903e4e04eea8292f588cc0b4907e | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
"""Unit tests for tree parsers.
"""
from unittest import TestCase, main
from cogent3.core.tree import PhyloNode
from cogent3.parse.tree import DndParser, DndTokenizer, RecordError
# from cogent3.parse.newick import parse_string, TreeParseError as RecordError
# def DndParser(data, NodeClass=PhyloNode, unescape_name=True):
# if not unescape_name:
# raise NotImplementedError
# def constructor(children, name, attribs):
# return NodeClass(children = list(children or []), name=name, params=attribs)
# return parse_string(data, constructor)
__author__ = "Rob Knight"
__copyright__ = "Copyright 2007-2022, The Cogent Project"
__credits__ = ["Rob Knight", "Peter Maxwell", "Daniel McDonald"]
__license__ = "BSD-3"
__version__ = "2022.4.20a1"
__maintainer__ = "Gavin Huttley"
__email__ = "Gavin.Huttley@anu.edu.au"
__status__ = "Production"
sample = """
(
(
xyz:0.28124,
(
def:0.24498,
mno:0.03627)
:0.17710)
:0.04870,
abc:0.05925,
(
ghi:0.06914,
jkl:0.13776)
:0.09853);
"""
node_data_sample = """
(
(
xyz:0.28124,
(
def:0.24498,
mno:0.03627)
'A':0.17710)
B:0.04870,
abc:0.05925,
(
ghi:0.06914,
jkl:0.13776)
C:0.09853);
"""
minimal = "();"
no_names = "((,),(,));"
missing_tip_name = "((a,b),(c,));"
empty = "();"
single = "(abc:3);"
double = "(abc:3, def:4);"
onenest = "(abc:3, (def:4, ghi:5):6 );"
nodedata = "(abc:3, (def:4, ghi:5)jkl:6 );"
class DndTokenizerTests(TestCase):
"""Tests of the DndTokenizer factory function."""
def test_gdata(self):
"""DndTokenizer should work as expected on real data"""
exp = [
"(",
"(",
"xyz",
":",
"0.28124",
",",
"(",
"def",
":",
"0.24498",
",",
"mno",
":",
"0.03627",
")",
":",
"0.17710",
")",
":",
"0.04870",
",",
"abc",
":",
"0.05925",
",",
"(",
"ghi",
":",
"0.06914",
",",
"jkl",
":",
"0.13776",
")",
":",
"0.09853",
")",
";",
]
# split it up for debugging on an item-by-item basis
obs = list(DndTokenizer(sample))
self.assertEqual(len(obs), len(exp))
for i, j in zip(obs, exp):
self.assertEqual(i, j)
# try it all in one go
self.assertEqual(list(DndTokenizer(sample)), exp)
def test_nonames(self):
"""DndTokenizer should work as expected on trees with no names"""
exp = ["(", "(", ",", ")", ",", "(", ",", ")", ")", ";"]
obs = list(DndTokenizer(no_names))
self.assertEqual(obs, exp)
def test_missing_tip_name(self):
"""DndTokenizer should work as expected on trees with a missing name"""
exp = ["(", "(", "a", ",", "b", ")", ",", "(", "c", ",", ")", ")", ";"]
obs = list(DndTokenizer(missing_tip_name))
self.assertEqual(obs, exp)
def test_minimal(self):
"""DndTokenizer should work as expected a minimal tree without names"""
exp = ["(", ")", ";"]
obs = list(DndTokenizer(minimal))
self.assertEqual(obs, exp)
class DndParserTests(TestCase):
"""Tests of the DndParser factory function."""
def test_nonames(self):
"""DndParser should produce the correct tree when there are no names"""
obs = DndParser(no_names)
exp = PhyloNode()
exp.append(PhyloNode())
exp.append(PhyloNode())
exp.children[0].append(PhyloNode())
exp.children[0].append(PhyloNode())
exp.children[1].append(PhyloNode())
exp.children[1].append(PhyloNode())
self.assertEqual(str(obs), str(exp))
def test_minimal(self):
"""DndParser should produce the correct minimal tree"""
obs = DndParser(minimal)
exp = PhyloNode()
exp.append(PhyloNode())
self.assertEqual(str(obs), str(exp))
def test_missing_tip_name(self):
"""DndParser should produce the correct tree when missing a name"""
obs = DndParser(missing_tip_name)
exp = PhyloNode()
exp.append(PhyloNode())
exp.append(PhyloNode())
exp.children[0].append(PhyloNode(name="a"))
exp.children[0].append(PhyloNode(name="b"))
exp.children[1].append(PhyloNode(name="c"))
exp.children[1].append(PhyloNode())
self.assertEqual(str(obs), str(exp))
def test_gsingle(self):
"""DndParser should produce a single-child PhyloNode on minimal data"""
t = DndParser(single)
self.assertEqual(len(t), 1)
child = t[0]
self.assertEqual(child.name, "abc")
self.assertEqual(child.length, 3)
self.assertEqual(str(t), "(abc:3.0);")
def test_gdouble(self):
"""DndParser should produce a double-child PhyloNode from data"""
t = DndParser(double)
self.assertEqual(len(t), 2)
self.assertEqual(str(t), "(abc:3.0,def:4.0);")
def test_gonenest(self):
"""DndParser should work correctly with nested data"""
t = DndParser(onenest)
self.assertEqual(len(t), 2)
self.assertEqual(len(t[0]), 0) # first child is terminal
self.assertEqual(len(t[1]), 2) # second child has two children
self.assertEqual(str(t), "(abc:3.0,(def:4.0,ghi:5.0):6.0);")
def test_gnodedata(self):
"""DndParser should assign name to internal nodes correctly"""
t = DndParser(nodedata)
self.assertEqual(len(t), 2)
self.assertEqual(len(t[0]), 0) # first child is terminal
self.assertEqual(len(t[1]), 2) # second child has two children
self.assertEqual(str(t), "(abc:3.0,(def:4.0,ghi:5.0)jkl:6.0);")
info_dict = {}
for node in t.traverse():
info_dict[node.name] = node.length
self.assertEqual(info_dict["abc"], 3.0)
self.assertEqual(info_dict["def"], 4.0)
self.assertEqual(info_dict["ghi"], 5.0)
self.assertEqual(info_dict["jkl"], 6.0)
def test_data(self):
"""DndParser should work as expected on real data"""
t = DndParser(sample)
self.assertEqual(
str(t),
"((xyz:0.28124,(def:0.24498,mno:0.03627):0.1771):0.0487,abc:0.05925,(ghi:0.06914,jkl:0.13776):0.09853);",
)
tdata = DndParser(node_data_sample, unescape_name=True)
self.assertEqual(
str(tdata),
"((xyz:0.28124,(def:0.24498,mno:0.03627)A:0.1771)B:0.0487,abc:0.05925,(ghi:0.06914,jkl:0.13776)C:0.09853);",
)
def test_gbad(self):
"""DndParser should fail if parens unbalanced"""
left = "((abc:3)"
right = "(abc:3))"
self.assertRaises(RecordError, DndParser, left)
self.assertRaises(RecordError, DndParser, right)
def test_DndParser(self):
"""DndParser tests"""
t_str = "(A_a,(B:1.0,C),'D_e':0.5)E;"
tree_unesc = DndParser(t_str, PhyloNode, unescape_name=True)
tree_esc = DndParser(t_str, PhyloNode, unescape_name=False)
self.assertEqual(tree_unesc.name, "E")
self.assertEqual(tree_unesc.children[0].name, "A a")
self.assertEqual(tree_unesc.children[1].children[0].name, "B")
self.assertEqual(tree_unesc.children[1].children[0].length, 1.0)
self.assertEqual(tree_unesc.children[1].children[1].name, "C")
self.assertEqual(tree_unesc.children[2].name, "D_e")
self.assertEqual(tree_unesc.children[2].length, 0.5)
self.assertEqual(tree_esc.name, "E")
self.assertEqual(tree_esc.children[0].name, "A_a")
self.assertEqual(tree_esc.children[1].children[0].name, "B")
self.assertEqual(tree_esc.children[1].children[0].length, 1.0)
self.assertEqual(tree_esc.children[1].children[1].name, "C")
self.assertEqual(tree_esc.children[2].name, "'D_e'")
self.assertEqual(tree_esc.children[2].length, 0.5)
reload_test = tree_esc.get_newick(with_distances=True, escape_name=False)
obs = DndParser(reload_test, unescape_name=False)
self.assertEqual(
obs.get_newick(with_distances=True),
tree_esc.get_newick(with_distances=True),
)
reload_test = tree_unesc.get_newick(with_distances=True, escape_name=False)
obs = DndParser(reload_test, unescape_name=False)
self.assertEqual(
obs.get_newick(with_distances=True),
tree_unesc.get_newick(with_distances=True),
)
class PhyloNodeTests(TestCase):
"""Check that PhyloNode works the way I think"""
def test_gops(self):
"""Basic PhyloNode operations should work as expected"""
p = PhyloNode()
self.assertEqual(str(p), ";")
p.name = "abc"
self.assertEqual(str(p), "abc;")
p.length = 3
self.assertEqual(str(p), "abc:3;") # don't suppress branch from root
q = PhyloNode()
p.append(q)
self.assertEqual(str(p), "()abc:3;")
r = PhyloNode()
q.append(r)
self.assertEqual(str(p), "(())abc:3;")
r.name = "xyz"
self.assertEqual(str(p), "((xyz))abc:3;")
q.length = 2
self.assertEqual(str(p), "((xyz):2)abc:3;")
if __name__ == "__main__":
main()
| 31.858586 | 120 | 0.57398 |
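The test module above exercises cogent3's DndParser; outside the test harness, the same calls can be used directly. A short sketch, assuming cogent3 is installed (the Newick string is borrowed from the `nodedata` fixture above):

```python
from cogent3.core.tree import PhyloNode
from cogent3.parse.tree import DndParser

# Parse a Newick string into a PhyloNode tree, as the tests above do.
tree = DndParser("(abc:3, (def:4, ghi:5)jkl:6 );", PhyloNode)

# Walk the tree and show each node's name and branch length.
for node in tree.traverse():
    print(node.name, node.length)

# Round-trip back to Newick with branch lengths;
# the tests above expect "(abc:3.0,(def:4.0,ghi:5.0)jkl:6.0);" for this input.
print(tree.get_newick(with_distances=True))
```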
6f619e914d2e4b6e8554ba2d9cbac7fd58779dd4 | 11,570 | py | Python | nikola/plugins/task/archive.py | ivanyschen/nikola | c75adc76f008af7ac37b009f71764f5bf0c48e87 | [
"MIT"
] | 1,901 | 2015-01-02T02:49:51.000Z | 2022-03-30T23:31:35.000Z | nikola/plugins/task/archive.py | ivanyschen/nikola | c75adc76f008af7ac37b009f71764f5bf0c48e87 | [
"MIT"
] | 1,755 | 2015-01-01T08:17:16.000Z | 2022-03-24T18:02:22.000Z | nikola/plugins/task/archive.py | ivanyschen/nikola | c75adc76f008af7ac37b009f71764f5bf0c48e87 | [
"MIT"
] | 421 | 2015-01-02T18:06:37.000Z | 2022-03-28T23:18:54.000Z | # -*- coding: utf-8 -*-
# Copyright © 2012-2021 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Classify the posts in archives."""
import datetime
from collections import defaultdict
import natsort
import nikola.utils
from nikola.plugin_categories import Taxonomy
class Archive(Taxonomy):
"""Classify the post archives."""
name = "classify_archive"
classification_name = "archive"
overview_page_variable_name = "archive"
more_than_one_classifications_per_post = False
has_hierarchy = True
include_posts_from_subhierarchies = True
include_posts_into_hierarchy_root = True
subcategories_list_template = "list.tmpl"
template_for_classification_overview = None
always_disable_rss = True
always_disable_atom = True
apply_to_posts = True
apply_to_pages = False
minimum_post_count_per_classification_in_overview = 1
omit_empty_classifications = False
add_other_languages_variable = True
path_handler_docstrings = {
'archive_index': False,
'archive': """Link to archive path, name is the year.
Example:
link://archive/2013 => /archives/2013/index.html""",
'archive_atom': False,
'archive_rss': False,
}
def set_site(self, site):
"""Set Nikola site."""
# Sanity checks
if (site.config['CREATE_MONTHLY_ARCHIVE'] and site.config['CREATE_SINGLE_ARCHIVE']) and not site.config['CREATE_FULL_ARCHIVES']:
raise Exception('Cannot create monthly and single archives at the same time.')
# Finish setup
self.show_list_as_subcategories_list = not site.config['CREATE_FULL_ARCHIVES']
self.show_list_as_index = site.config['ARCHIVES_ARE_INDEXES']
self.template_for_single_list = "archiveindex.tmpl" if site.config['ARCHIVES_ARE_INDEXES'] else "archive.tmpl"
# Determine maximum hierarchy height
if site.config['CREATE_DAILY_ARCHIVE'] or site.config['CREATE_FULL_ARCHIVES']:
self.max_levels = 3
elif site.config['CREATE_MONTHLY_ARCHIVE']:
self.max_levels = 2
elif site.config['CREATE_SINGLE_ARCHIVE']:
self.max_levels = 0
else:
self.max_levels = 1
return super().set_site(site)
def get_implicit_classifications(self, lang):
"""Return a list of classification strings which should always appear in posts_per_classification."""
return ['']
def classify(self, post, lang):
"""Classify the given post for the given language."""
levels = [str(post.date.year).zfill(4), str(post.date.month).zfill(2), str(post.date.day).zfill(2)]
return ['/'.join(levels[:self.max_levels])]
def sort_classifications(self, classifications, lang, level=None):
"""Sort the given list of classification strings."""
if level in (0, 1):
# Years or months: sort descending
classifications.sort()
classifications.reverse()
def get_classification_friendly_name(self, classification, lang, only_last_component=False):
"""Extract a friendly name from the classification."""
classification = self.extract_hierarchy(classification)
if len(classification) == 0:
return self.site.MESSAGES[lang]['Archive']
elif len(classification) == 1:
return classification[0]
elif len(classification) == 2:
if only_last_component:
date_str = "{month}"
else:
date_str = "{month_year}"
return nikola.utils.LocaleBorg().format_date_in_string(
date_str,
datetime.date(int(classification[0]), int(classification[1]), 1),
lang)
else:
if only_last_component:
return str(classification[2])
return nikola.utils.LocaleBorg().format_date_in_string(
"{month_day_year}",
datetime.date(int(classification[0]), int(classification[1]), int(classification[2])),
lang)
def get_path(self, classification, lang, dest_type='page'):
"""Return a path for the given classification."""
components = [self.site.config['ARCHIVE_PATH'](lang)]
if classification:
components.extend(classification)
add_index = 'always'
else:
components.append(self.site.config['ARCHIVE_FILENAME'](lang))
add_index = 'never'
return [_f for _f in components if _f], add_index
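    # Illustrative example for get_path() above (assuming ARCHIVE_PATH resolves
    # to 'archives' and ARCHIVE_FILENAME to 'archive.html'; both are site
    # settings, not defined here): the hierarchy ['2013', '05'] yields
    # (['archives', '2013', '05'], 'always'), while an empty classification
    # yields (['archives', 'archive.html'], 'never').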
def extract_hierarchy(self, classification):
"""Given a classification, return a list of parts in the hierarchy."""
return classification.split('/') if classification else []
def recombine_classification_from_hierarchy(self, hierarchy):
"""Given a list of parts in the hierarchy, return the classification string."""
return '/'.join(hierarchy)
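    # Illustrative round trip for the two helpers above (not part of the
    # original plugin): extract_hierarchy('2013/05') returns ['2013', '05'],
    # and recombine_classification_from_hierarchy(['2013', '05']) returns
    # '2013/05'.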
def provide_context_and_uptodate(self, classification, lang, node=None):
"""Provide data for the context and the uptodate list for the list of the given classifiation."""
hierarchy = self.extract_hierarchy(classification)
kw = {
"messages": self.site.MESSAGES,
}
page_kind = "list"
if self.show_list_as_index:
if not self.show_list_as_subcategories_list or len(hierarchy) == self.max_levels:
page_kind = "index"
if len(hierarchy) == 0:
title = kw["messages"][lang]["Archive"]
elif len(hierarchy) == 1:
title = kw["messages"][lang]["Posts for year %s"] % hierarchy[0]
elif len(hierarchy) == 2:
title = nikola.utils.LocaleBorg().format_date_in_string(
kw["messages"][lang]["Posts for {month_year}"],
datetime.date(int(hierarchy[0]), int(hierarchy[1]), 1),
lang)
elif len(hierarchy) == 3:
title = nikola.utils.LocaleBorg().format_date_in_string(
kw["messages"][lang]["Posts for {month_day_year}"],
datetime.date(int(hierarchy[0]), int(hierarchy[1]), int(hierarchy[2])),
lang)
else:
raise Exception("Cannot interpret classification {}!".format(repr(classification)))
context = {
"title": title,
"pagekind": [page_kind, "archive_page"],
"create_archive_navigation": self.site.config["CREATE_ARCHIVE_NAVIGATION"],
"archive_name": classification
}
# Generate links for hierarchies
if context["create_archive_navigation"]:
if hierarchy:
# Up level link makes sense only if this is not the top-level
# page (hierarchy is empty)
parent = '/'.join(hierarchy[:-1])
context["up_archive"] = self.site.link('archive', parent, lang)
context["up_archive_name"] = self.get_classification_friendly_name(parent, lang)
else:
context["up_archive"] = None
context["up_archive_name"] = None
nodelevel = len(hierarchy)
flat_samelevel = self.archive_navigation[lang][nodelevel]
idx = flat_samelevel.index(classification)
if idx == -1:
raise Exception("Cannot find classification {0} in flat hierarchy!".format(classification))
previdx, nextidx = idx - 1, idx + 1
            # If the previous index is -1, or the next index equals len(flat_samelevel), the previous/next archive does not exist.
context["previous_archive"] = self.site.link('archive', flat_samelevel[previdx], lang) if previdx != -1 else None
context["previous_archive_name"] = self.get_classification_friendly_name(flat_samelevel[previdx], lang) if previdx != -1 else None
context["next_archive"] = self.site.link('archive', flat_samelevel[nextidx], lang) if nextidx != len(flat_samelevel) else None
context["next_archive_name"] = self.get_classification_friendly_name(flat_samelevel[nextidx], lang) if nextidx != len(flat_samelevel) else None
context["archive_nodelevel"] = nodelevel
context["has_archive_navigation"] = bool(context["previous_archive"] or context["up_archive"] or context["next_archive"])
else:
context["has_archive_navigation"] = False
kw.update(context)
return context, kw
def postprocess_posts_per_classification(self, posts_per_classification_per_language, flat_hierarchy_per_lang=None, hierarchy_lookup_per_lang=None):
"""Rearrange, modify or otherwise use the list of posts per classification and per language."""
# Build a lookup table for archive navigation, if we’ll need one.
if self.site.config['CREATE_ARCHIVE_NAVIGATION']:
if flat_hierarchy_per_lang is None:
raise ValueError('Archives need flat_hierarchy_per_lang')
self.archive_navigation = {}
for lang, flat_hierarchy in flat_hierarchy_per_lang.items():
self.archive_navigation[lang] = defaultdict(list)
for node in flat_hierarchy:
if not self.site.config["SHOW_UNTRANSLATED_POSTS"]:
if not [x for x in posts_per_classification_per_language[lang][node.classification_name] if x.is_translation_available(lang)]:
continue
self.archive_navigation[lang][len(node.classification_path)].append(node.classification_name)
# We need to sort it. Natsort means it’s year 10000 compatible!
for k, v in self.archive_navigation[lang].items():
self.archive_navigation[lang][k] = natsort.natsorted(v, alg=natsort.ns.F | natsort.ns.IC)
return super().postprocess_posts_per_classification(posts_per_classification_per_language, flat_hierarchy_per_lang, hierarchy_lookup_per_lang)
def should_generate_classification_page(self, classification, post_list, lang):
"""Only generates list of posts for classification if this function returns True."""
return classification == '' or len(post_list) > 0
def get_other_language_variants(self, classification, lang, classifications_per_language):
"""Return a list of variants of the same classification in other languages."""
return [(other_lang, classification) for other_lang, lookup in classifications_per_language.items() if classification in lookup and other_lang != lang]
| 48.208333 | 159 | 0.661452 |
0d680df750990f4998b69715135786deee5970f9 | 4,006 | py | Python | AxfProject/axf/models.py | aeasringnar/axf_project | e62289c9ddd60345ba881c6f03039aae5ec42861 | ["BSD-2-Clause"] | 1 | 2019-11-26T09:31:49.000Z | 2019-11-26T09:31:49.000Z | AxfProject/axf/models.py | aeasringnar/axf_project | e62289c9ddd60345ba881c6f03039aae5ec42861 | ["BSD-2-Clause"] | 10 | 2020-03-24T17:15:32.000Z | 2022-03-11T23:52:17.000Z | AxfProject/axf/models.py | aeasringnar/axf_project | e62289c9ddd60345ba881c6f03039aae5ec42861 | ["BSD-2-Clause"] | null | null | null | from django.db import models
# Create your models here.
# Homepage carousel data
class Wheel(models.Model):
img = models.CharField(max_length=150)
name = models.CharField(max_length=20)
trackid = models.CharField(max_length=20)
# Homepage navigation data
class Nav(models.Model):
img = models.CharField(max_length=150)
name = models.CharField(max_length=20)
trackid = models.CharField(max_length=20)
# Homepage small carousel
class Mustbuy(models.Model):
img = models.CharField(max_length=150)
name = models.CharField(max_length=20)
trackid = models.CharField(max_length=20)
# Homepage convenience store and block data
class Shop(models.Model):
img = models.CharField(max_length=150)
name = models.CharField(max_length=20)
trackid = models.CharField(max_length=20)
# Main information
class MainShow(models.Model):
trackid = models.CharField(max_length=10)
name = models.CharField(max_length=20)
img = models.CharField(max_length=100)
categoryid = models.CharField(max_length=10)
brandname = models.CharField(max_length=20)
img1 = models.CharField(max_length=100)
childcid1 = models.CharField(max_length=10)
productid1 = models.CharField(max_length=10)
longname1 = models.CharField(max_length=50)
price1 = models.CharField(max_length=10)
marketprice1 = models.CharField(max_length=10)
img2 = models.CharField(max_length=100)
childcid2 = models.CharField(max_length=10)
productid2 = models.CharField(max_length=10)
longname2 = models.CharField(max_length=50)
price2 = models.CharField(max_length=10)
marketprice2 = models.CharField(max_length=10)
img3 = models.CharField(max_length=100)
childcid3 = models.CharField(max_length=10)
productid3 = models.CharField(max_length=10)
longname3 = models.CharField(max_length=50)
price3 = models.CharField(max_length=10)
marketprice3 = models.CharField(max_length=10)
# Category model
class FoodTypes(models.Model):
typeid = models.CharField(max_length=10)
typename = models.CharField(max_length=20)
typesort = models.IntegerField()
childtypenames = models.CharField(max_length=150)
# Goods model class
class Goods(models.Model):
    # Goods id
productid = models.CharField(max_length=10)
    # Goods image
productimg = models.CharField(max_length=150)
    # Goods name
productname = models.CharField(max_length=50)
    # Goods long name
productlongname = models.CharField(max_length=100)
    # Whether it is featured
isxf = models.NullBooleanField(default=False)
    # Whether it is buy-one-get-one-free
pmdesc = models.CharField(max_length=10)
    # Specification
specifics = models.CharField(max_length=20)
    # Price
price = models.CharField(max_length=10)
    # Supermarket price
marketprice = models.CharField(max_length=10)
    # Category group id
categoryid = models.CharField(max_length=10)
    # Subcategory group id
childcid = models.CharField(max_length=10)
    # Subcategory group name
childcidname = models.CharField(max_length=10)
    # Detail page id
dealerid = models.CharField(max_length=10)
    # Stock
storenums = models.IntegerField()
    # Sales volume
productnum = models.IntegerField()
    # Buyer
# Simple user model
class userinfo(models.Model):
    # User account, must be unique
useraccount = models.CharField(max_length=20,unique=True)
    # Password
upassword = models.CharField(max_length=300)
    # Nickname
username = models.CharField(max_length=20)
    # Phone number
userphone = models.CharField(max_length=20)
    # User address
    useradderss = models.CharField(max_length=200,null=False) # null=False here means this field cannot be null
# This one is wrong
class cart(models.Model):
userccount = models.CharField(max_length=20)
usergoods = models.CharField(max_length=20)
# Shopping cart version 1.0
class NewCart(models.Model):
nccount = models.CharField(max_length=20)
ngoodsid = models.CharField(max_length=20)
# Shopping cart version 2.0
class TwoCart(models.Model):
tccount = models.CharField(max_length=20)
tgoodid = models.CharField(max_length=20)
    # Newly added quantity field
tgoodnum = models.IntegerField()
# Shopping cart version 3.0
class Xcart(models.Model):
tccount = models.CharField(max_length=20)
tgoodid = models.ForeignKey(Goods)
    # Newly added quantity field
tgoodnum = models.IntegerField()
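# Illustrative note on the cart iterations above (a sketch, not part of the
# original app; `some_goods` is a placeholder Goods instance): Xcart stores a
# real ForeignKey, so Xcart.objects.create(tccount='user1', tgoodid=some_goods,
# tgoodnum=2) can reach the product directly via row.tgoodid.productname,
# whereas the earlier carts only keep the goods id as plain text.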
| 31.543307 | 88 | 0.71967 |
e84b62ff767745bfb4534c9182794001f5100a0d | 453 | py | Python | diventi/accounts/migrations/0312_auto_20200503_1142.py | flavoi/diven | 3173ca3ca3fbedc191b8eab3639a6bceb3c442c4 | ["Apache-2.0"] | 2 | 2019-06-27T16:00:17.000Z | 2020-08-14T07:46:05.000Z | diventi/accounts/migrations/0312_auto_20200503_1142.py | flavoi/diven | 3173ca3ca3fbedc191b8eab3639a6bceb3c442c4 | ["Apache-2.0"] | 26 | 2020-02-15T22:39:35.000Z | 2022-02-19T21:09:01.000Z | diventi/accounts/migrations/0312_auto_20200503_1142.py | flavoi/diven | 3173ca3ca3fbedc191b8eab3639a6bceb3c442c4 | ["Apache-2.0"] | 1 | 2021-11-12T22:30:15.000Z | 2021-11-12T22:30:15.000Z | # Generated by Django 2.2.12 on 2020-05-03 09:42
import diventi.accounts.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0311_auto_20200502_1924'),
]
operations = [
migrations.AlterModelManagers(
name='diventiuser',
managers=[
('objects', diventi.accounts.models.DiventiUserManager()),
],
),
]
| 21.571429 | 74 | 0.602649 |
4c7d390a78a24056d20c1ab25c005d36922ca254 | 4,659 | py | Python | google/ads/google_ads/v1/services/transports/paid_organic_search_term_view_service_grpc_transport.py | jiulongw/google-ads-python | 6f5256eb1eeb5a9a95c8cdb9b97988d3a676282e | ["Apache-2.0"] | 1 | 2019-11-30T23:42:39.000Z | 2019-11-30T23:42:39.000Z | google/ads/google_ads/v1/services/transports/paid_organic_search_term_view_service_grpc_transport.py | jiulongw/google-ads-python | 6f5256eb1eeb5a9a95c8cdb9b97988d3a676282e | ["Apache-2.0"] | null | null | null | google/ads/google_ads/v1/services/transports/paid_organic_search_term_view_service_grpc_transport.py | jiulongw/google-ads-python | 6f5256eb1eeb5a9a95c8cdb9b97988d3a676282e | ["Apache-2.0"] | 1 | 2020-03-13T00:14:31.000Z | 2020-03-13T00:14:31.000Z | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import google.api_core.grpc_helpers
from google.ads.google_ads.v1.proto.services import paid_organic_search_term_view_service_pb2_grpc
class PaidOrganicSearchTermViewServiceGrpcTransport(object):
"""gRPC transport class providing stubs for
google.ads.googleads.v1.services PaidOrganicSearchTermViewService API.
The transport provides access to the raw gRPC stubs,
which can be used to take advantage of advanced
features of gRPC.
"""
# The scopes needed to make gRPC calls to all of the methods defined
# in this service.
_OAUTH_SCOPES = ()
def __init__(self,
channel=None,
credentials=None,
address='googleads.googleapis.com:443'):
"""Instantiate the transport class.
Args:
channel (grpc.Channel): A ``Channel`` instance through
which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
address (str): The address where the service is hosted.
"""
# If both `channel` and `credentials` are specified, raise an
# exception (channels come with credentials baked in already).
if channel is not None and credentials is not None:
raise ValueError(
'The `channel` and `credentials` arguments are mutually '
'exclusive.', )
# Create the channel.
if channel is None:
channel = self.create_channel(
address=address,
credentials=credentials,
)
self._channel = channel
# gRPC uses objects called "stubs" that are bound to the
# channel and provide a basic method for each RPC.
self._stubs = {
'paid_organic_search_term_view_service_stub': paid_organic_search_term_view_service_pb2_grpc.PaidOrganicSearchTermViewServiceStub(channel),
}
@classmethod
def create_channel(
cls,
address='googleads.googleapis.com:443',
credentials=None,
**kwargs):
"""Create and return a gRPC channel object.
Args:
address (str): The host for the channel to use.
credentials (~.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
kwargs (dict): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
"""
return google.api_core.grpc_helpers.create_channel(
address,
credentials=credentials,
scopes=cls._OAUTH_SCOPES,
**kwargs
)
@property
def channel(self):
"""The gRPC channel used by the transport.
Returns:
grpc.Channel: A gRPC channel object.
"""
return self._channel
@property
def get_paid_organic_search_term_view(self):
"""Return the gRPC stub for :meth:`PaidOrganicSearchTermViewServiceClient.get_paid_organic_search_term_view`.
Returns the requested paid organic search term view in full detail.
Returns:
Callable: A callable which accepts the appropriate
deserialized request object and returns a
deserialized response object.
"""
return self._stubs[
'paid_organic_search_term_view_service_stub'].GetPaidOrganicSearchTermView
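# Illustrative usage (a sketch, not part of the generated transport; `creds`
# and `request` are placeholder objects assumed to exist):
#
#   transport = PaidOrganicSearchTermViewServiceGrpcTransport(credentials=creds)
#   response = transport.get_paid_organic_search_term_view(request)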
| 37.572581 | 151 | 0.644559 |
78e1781053bd81d72b18f4e80f4fb2f496b65c4f | 754 | py | Python | src/WeatherUnits/derived/wind.py | noblecloud/WeatherUnits | b5401f10355b4e89a2f07faa2f459d61764b38f0 | ["MIT"] | 1 | 2021-04-20T22:58:57.000Z | 2021-04-20T22:58:57.000Z | src/WeatherUnits/derived/wind.py | noblecloud/WeatherUnits | b5401f10355b4e89a2f07faa2f459d61764b38f0 | ["MIT"] | null | null | null | src/WeatherUnits/derived/wind.py | noblecloud/WeatherUnits | b5401f10355b4e89a2f07faa2f459d61764b38f0 | ["MIT"] | null | null | null | from ..length import Length
from ..time import Time
from .rate import DistanceOverTime
from ..base import NamedType
from ..others import Direction
__all__ = ['Wind']
@NamedType
class Wind(DistanceOverTime):
_numerator: Length
_denominator: Time
# TODO: Add support for setting both speed and direction
__direction: Direction = None
# def __init__(self, speed: DistanceOverTime = None, direction: Direction = None):
# if direction is not None:
# self.__direction = direction
# if
# super(Wind, self).__init__(DistanceOverTime)
@property
def direction(self):
return self.__direction
@direction.setter
def direction(self, value):
if isinstance(value, Direction):
self.__direction |= value
else:
self.__direction = value
| 22.176471 | 83 | 0.742706 |
23ba64e254be845a615ed1295237a77db5db92b2 | 230 | py | Python | Python/Mundo 2/ex048.py | henrique-tavares/Coisas | f740518b1bedec5b0ea8c12ae07a2cac21eb51ae | ["MIT"] | 1 | 2020-02-07T20:39:26.000Z | 2020-02-07T20:39:26.000Z | Python/Mundo 2/ex048.py | neptune076/Coisas | 85c064cc0e134465aaf6ef41acf747d47f108fc9 | ["MIT"] | null | null | null | Python/Mundo 2/ex048.py | neptune076/Coisas | 85c064cc0e134465aaf6ef41acf747d47f108fc9 | ["MIT"] | null | null | null | print("")
total = 0
cont = 0
for i in range(1, 500, 2):
if (i % 3 == 0):
total += i
cont += 1
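# Worked check (not part of the original exercise): the loop keeps the odd
# multiples of 3 below 500 (3, 9, 15, ..., 495), so cont ends at 83 and
# total at 20667.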
print("A soma dos primeiros {} números impares múltiplos de 3 é igual a {}".format(cont, total), end="\n\n") | 19.166667 | 108 | 0.534783 |
84a1c0618384f921815f6116909b938426758691 | 42,116 | py | Python | tests/unit/states/dockerng_test.py | felixhummel/salt | b6d640da0db7a5a57709b48d8dc1d19509901e32 | ["Apache-2.0"] | null | null | null | tests/unit/states/dockerng_test.py | felixhummel/salt | b6d640da0db7a5a57709b48d8dc1d19509901e32 | ["Apache-2.0"] | null | null | null | tests/unit/states/dockerng_test.py | felixhummel/salt | b6d640da0db7a5a57709b48d8dc1d19509901e32 | ["Apache-2.0"] | null | null | null | # -*- coding: utf-8 -*-
'''
Unit tests for the dockerng state
'''
# Import Python Libs
from __future__ import absolute_import
# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
MagicMock,
Mock,
NO_MOCK,
NO_MOCK_REASON,
patch
)
ensure_in_syspath('../../')
# Import Salt Libs
from salt.exceptions import CommandExecutionError
from salt.modules import dockerng as dockerng_mod
from salt.states import dockerng as dockerng_state
dockerng_mod.__context__ = {'docker.docker_version': ''}
dockerng_mod.__salt__ = {}
dockerng_state.__context__ = {}
dockerng_state.__opts__ = {'test': False}
@skipIf(NO_MOCK, NO_MOCK_REASON)
class DockerngTestCase(TestCase):
'''
Validate dockerng state
'''
def test_running_with_no_predifined_volume(self):
'''
Test dockerng.running function with an image
        that doesn't have VOLUME defined.
        The ``binds`` argument should create a container
        with respective volumes extracted from ``binds``.
'''
dockerng_create = Mock()
dockerng_start = Mock()
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.inspect_image': MagicMock(),
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
dockerng_state.running(
'cont',
image='image:latest',
binds=['/host-0:/container-0:ro'])
dockerng_create.assert_called_with(
'image:latest',
validate_input=False,
name='cont',
binds={'/host-0': {'bind': '/container-0', 'ro': True}},
volumes=['/container-0'],
validate_ip_addrs=False,
client_timeout=60)
dockerng_start.assert_called_with('cont')
def test_running_with_predifined_volume(self):
'''
Test dockerng.running function with an image
        that already has VOLUME defined.
        The ``binds`` argument should create a container
        with ``volumes`` extracted from ``binds``.
'''
dockerng_create = Mock()
dockerng_start = Mock()
dockerng_inspect_image = Mock(return_value={
'Id': 'abcd',
'Config': {'Config': {'Volumes': ['/host-1']}},
})
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
dockerng_state.running(
'cont',
image='image:latest',
binds=['/host-0:/container-0:ro'])
dockerng_create.assert_called_with(
'image:latest',
validate_input=False,
binds={'/host-0': {'bind': '/container-0', 'ro': True}},
volumes=['/container-0'],
validate_ip_addrs=False,
name='cont',
client_timeout=60)
dockerng_start.assert_called_with('cont')
def test_running_with_no_predifined_ports(self):
'''
Test dockerng.running function with an image
        that doesn't have EXPOSE defined.
        The ``port_bindings`` argument should create a container
        with ``ports`` extracted from ``port_bindings``.
'''
dockerng_create = Mock()
dockerng_start = Mock()
dockerng_inspect_image = Mock(return_value={
'Id': 'abcd',
'Config': {'Config': {'ExposedPorts': {}}},
})
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
dockerng_state.running(
'cont',
image='image:latest',
port_bindings=['9090:9797/tcp'])
dockerng_create.assert_called_with(
'image:latest',
validate_input=False,
name='cont',
ports=[9797],
port_bindings={9797: [9090]},
validate_ip_addrs=False,
client_timeout=60)
dockerng_start.assert_called_with('cont')
def test_running_with_predifined_ports(self):
'''
Test dockerng.running function with an image
        that exposes ports (via the Dockerfile EXPOSE statement).
        Check that `ports` contains ports defined on the image and by the
        `port_bindings` argument.
Inside Dockerfile:
.. code-block::
EXPOSE 9898
In sls:
.. code-block:: yaml
container:
dockerng.running:
- port_bindings:
- '9090:9797/tcp'
'''
dockerng_create = Mock()
dockerng_start = Mock()
dockerng_inspect_image = Mock(return_value={
'Id': 'abcd',
'Config': {'ExposedPorts': {'9898/tcp': {}}}
})
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
dockerng_state.running(
'cont',
image='image:latest',
port_bindings=['9090:9797/tcp'])
dockerng_create.assert_called_with(
'image:latest',
validate_input=False,
name='cont',
ports=[9797],
port_bindings={9797: [9090]},
validate_ip_addrs=False,
client_timeout=60)
dockerng_start.assert_called_with('cont')
def test_running_with_udp_bindings(self):
'''
Check that `ports` contains ports defined from `port_bindings` with
        protocol declaration passed as a tuple, as stated by the docker-py
        documentation:
https://docker-py.readthedocs.org/en/latest/port-bindings/
In sls:
.. code-block:: yaml
container:
dockerng.running:
- port_bindings:
- '9090:9797/udp'
is equivalent of:
.. code-block:: yaml
container:
dockerng.running:
- ports:
- 9797/udp
- port_bindings:
- '9090:9797/udp'
'''
dockerng_create = Mock()
dockerng_start = Mock()
dockerng_inspect_image = Mock(return_value={
'Id': 'abcd',
'Config': {'ExposedPorts': {}}
})
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
dockerng_state.running(
'cont',
image='image:latest',
port_bindings=['9090:9797/udp'])
dockerng_create.assert_called_with(
'image:latest',
validate_input=False,
name='cont',
ports=[(9797, 'udp')],
port_bindings={'9797/udp': [9090]},
validate_ip_addrs=False,
client_timeout=60)
dockerng_start.assert_called_with('cont')
def test_running_compare_images_by_id(self):
'''
        Make sure the container is running
        against the expected image.
        Here the local image is named 'image:latest' and the container
        is also running against an image called 'image:latest'.
        Therefore the image ids diverge because the tag 'image:latest'
        moved to a fresher image.
        Thus this test makes sure the old container is dropped and recreated.
'''
new_fake_image_id = 'abcdefgh'
old_fake_image_id = '123456789'
dockerng_inspect_image = Mock(return_value={'Id': new_fake_image_id})
dockerng_inspect_container = Mock(
return_value={'Image': old_fake_image_id,
'Config': {'Image': 'image:latest'}})
dockerng_list_containers = Mock(return_value=['cont'])
dockerng__state = Mock(return_value='running')
dockerng_stop = Mock(return_value={'result': True})
dockerng_rm = Mock(return_value=['container-id'])
__salt__ = {'dockerng.list_containers': dockerng_list_containers,
'dockerng.inspect_container': dockerng_inspect_container,
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.list_tags': MagicMock(),
'dockerng.state': dockerng__state,
'dockerng.pull': MagicMock(return_value=new_fake_image_id),
'dockerng.create': MagicMock(return_value='new_container'),
'dockerng.start': MagicMock(),
'dockerng.stop': dockerng_stop,
'dockerng.rm': dockerng_rm,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.running(
'cont',
image='image:latest',
)
dockerng_stop.assert_called_with('cont', timeout=10, unpause=True)
dockerng_rm.assert_called_with('cont')
self.assertEqual(ret, {'name': 'cont',
'comment': "Container 'cont' was replaced",
'result': True,
'changes': {'added': 'new_container',
'image': new_fake_image_id,
'removed': ['container-id']}
})
def test_image_present_already_local(self):
'''
        According to the following sls,
.. code-block:: yaml
image:latest:
dockerng.image_present:
- force: true
if ``image:latest`` is already downloaded locally the state
should not report changes.
'''
dockerng_inspect_image = Mock(
return_value={'Id': 'abcdefghijk'})
dockerng_pull = Mock(
return_value={'Layers':
{'Already_Pulled': ['abcdefghijk'],
'Pulled': []},
'Status': 'Image is up to date for image:latest',
'Time_Elapsed': 1.1})
dockerng_list_tags = Mock(
return_value=['image:latest']
)
__salt__ = {'dockerng.list_tags': dockerng_list_tags,
'dockerng.pull': dockerng_pull,
'dockerng.inspect_image': dockerng_inspect_image,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.image_present('image:latest', force=True)
self.assertEqual(ret,
{'changes': {},
'result': True,
'comment': "Image 'image:latest' was pulled, "
"but there were no changes",
'name': 'image:latest',
})
def test_image_present_and_force(self):
'''
        According to the following sls,
.. code-block:: yaml
image:latest:
dockerng.image_present:
- force: true
        if ``image:latest`` is not downloaded and force is true,
        the state should pull a new image successfully.
'''
dockerng_inspect_image = Mock(
side_effect=CommandExecutionError(
'Error 404: No such image/container: image:latest'))
dockerng_pull = Mock(
return_value={'Layers':
{'Already_Pulled': ['abcdefghijk'],
'Pulled': ['abcdefghijk']},
'Status': "Image 'image:latest' was pulled",
'Time_Elapsed': 1.1})
dockerng_list_tags = Mock(
side_effect=[[], ['image:latest']]
)
__salt__ = {'dockerng.list_tags': dockerng_list_tags,
'dockerng.pull': dockerng_pull,
'dockerng.inspect_image': dockerng_inspect_image,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.image_present('image:latest', force=True)
self.assertEqual(ret,
{'changes': {
'Layers': {'Already_Pulled': ['abcdefghijk'],
'Pulled': ['abcdefghijk']},
'Status': "Image 'image:latest' was pulled",
'Time_Elapsed': 1.1},
'result': True,
'comment': "Image 'image:latest' was pulled",
'name': 'image:latest',
})
def test_check_start_false(self):
'''
If start is False, then dockerng.running will not try
to start a container that is stopped.
'''
image_id = 'abcdefg'
dockerng_create = Mock()
dockerng_start = Mock()
dockerng_list_containers = Mock(return_value=['cont'])
dockerng_inspect_container = Mock(
return_value={
'Config': {
'Image': 'image:latest',
'Tty': False,
'Labels': {},
'Domainname': '',
'User': '',
'AttachStderr': True,
'AttachStdout': True,
'Hostname': 'saltstack-container',
'Env': [],
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {},
'Entrypoint': None,
'ExposedPorts': {},
'OpenStdin': False,
},
'HostConfig': {
'PublishAllPorts': False,
'Dns': [],
'Links': None,
'CpusetCpus': '',
'RestartPolicy': {'MaximumRetryCount': 0, 'Name': ''},
'CapAdd': None,
'NetworkMode': 'default',
'PidMode': '',
'MemorySwap': 0,
'ExtraHosts': None,
'PortBindings': None,
'LxcConf': None,
'DnsSearch': [],
'Privileged': False,
'Binds': None,
'Memory': 0,
'VolumesFrom': None,
'CpuShares': 0,
'CapDrop': None,
},
'NetworkSettings': {
'MacAddress': '00:00:00:00:00:01',
},
'Image': image_id})
dockerng_inspect_image = MagicMock(
return_value={
'Id': image_id,
'Config': {
'Hostname': 'saltstack-container',
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {},
'Entrypoint': None,
'ExposedPorts': {},
},
})
__salt__ = {'dockerng.list_containers': dockerng_list_containers,
'dockerng.inspect_container': dockerng_inspect_container,
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.list_tags': MagicMock(
return_value=['image:latest']),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(side_effect=['stopped',
'running']),
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.running(
'cont',
image='image:latest',
start=False,
)
self.assertEqual(ret, {'name': 'cont',
'comment': "Container 'cont' is already "
"configured as specified",
'changes': {},
'result': True,
})
def test_check_start_true(self):
'''
If start is True, then dockerng.running will try
to start a container that is stopped.
'''
image_id = 'abcdefg'
dockerng_create = Mock()
dockerng_start = Mock()
dockerng_list_containers = Mock(return_value=['cont'])
dockerng_inspect_container = Mock(
return_value={
'Config': {
'Image': 'image:latest',
'Tty': False,
'Labels': {},
'Domainname': '',
'User': '',
'AttachStderr': True,
'AttachStdout': True,
'Hostname': 'saltstack-container',
'Env': [],
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {},
'Entrypoint': None,
'ExposedPorts': {},
'OpenStdin': False,
},
'HostConfig': {
'PublishAllPorts': False,
'Dns': [],
'Links': None,
'CpusetCpus': '',
'RestartPolicy': {'MaximumRetryCount': 0, 'Name': ''},
'CapAdd': None,
'NetworkMode': 'default',
'PidMode': '',
'MemorySwap': 0,
'ExtraHosts': None,
'PortBindings': None,
'LxcConf': None,
'DnsSearch': [],
'Privileged': False,
'Binds': None,
'Memory': 0,
'VolumesFrom': None,
'CpuShares': 0,
'CapDrop': None,
},
'NetworkSettings': {
'MacAddress': '00:00:00:00:00:01',
},
'Image': image_id})
dockerng_inspect_image = MagicMock(
return_value={
'Id': image_id,
'Config': {
'Hostname': 'saltstack-container',
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {},
'Entrypoint': None,
'ExposedPorts': {},
},
})
__salt__ = {'dockerng.list_containers': dockerng_list_containers,
'dockerng.inspect_container': dockerng_inspect_container,
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(return_value=True),
'dockerng.state': MagicMock(side_effect=['stopped',
'running']),
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.running(
'cont',
image='image:latest',
start=True,
)
self.assertEqual(ret, {'name': 'cont',
'comment': "Container 'cont' changed state.",
'changes': {'state': {'new': 'running',
'old': 'stopped'},
'image': True},
'result': True,
})
def test_running_discard_wrong_environemnt_values(self):
'''
        Environment values should be strings.
        It is easy to write a wrong sls this way
.. code-block:: yaml
container:
dockerng.running:
- environment:
- KEY: 1
instead of:
.. code-block:: yaml
container:
dockerng.running:
- environment:
- KEY: "1"
'''
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(
return_value=['image:latest']),
'dockerng.inspect_image': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.create': MagicMock(),
'dockerng.start': MagicMock(),
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
for wrong_value in (1, .2, (), [], {}):
ret = dockerng_state.running(
'cont',
image='image:latest',
environment=[{'KEY': wrong_value}])
self.assertEqual(ret,
{'changes': {},
'comment': 'Environment values must'
' be strings KEY=\'{0}\''.format(wrong_value),
'name': 'cont',
'result': False})
def test_running_with_labels(self):
'''
Test dockerng.running with labels parameter.
'''
dockerng_create = Mock()
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.inspect_image': MagicMock(),
'dockerng.create': dockerng_create,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
dockerng_state.running(
'cont',
image='image:latest',
labels=['LABEL1', 'LABEL2'],
)
dockerng_create.assert_called_with(
'image:latest',
validate_input=False,
validate_ip_addrs=False,
name='cont',
labels=['LABEL1', 'LABEL2'],
client_timeout=60)
def test_running_with_labels_from_image(self):
'''
        Test that dockerng.running with the labels parameter also supports
        labels carried by the image.
'''
dockerng_create = Mock()
image_id = 'a' * 128
dockerng_inspect_image = MagicMock(
return_value={
'Id': image_id,
'Config': {
'Hostname': 'saltstack-container',
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {'/path': {}},
'Entrypoint': None,
'ExposedPorts': {},
'Labels': {'IMAGE_LABEL': 'image_foo',
'LABEL1': 'label1'},
},
})
__salt__ = {'dockerng.list_containers': MagicMock(),
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(),
'dockerng.state': MagicMock(),
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.create': dockerng_create,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
dockerng_state.running(
'cont',
image='image:latest',
labels=[{'LABEL1': 'foo1'}, {'LABEL2': 'foo2'}],
)
dockerng_create.assert_called_with(
'image:latest',
validate_input=False,
validate_ip_addrs=False,
name='cont',
labels={'LABEL1': 'foo1', 'LABEL2': 'foo2'},
client_timeout=60)
def test_network_present(self):
'''
Test dockerng.network_present
'''
dockerng_create_network = Mock(return_value='created')
dockerng_connect_container_to_network = Mock(return_value='connected')
dockerng_inspect_container = Mock(return_value={'Id': 'abcd'})
__salt__ = {'dockerng.create_network': dockerng_create_network,
'dockerng.inspect_container': dockerng_inspect_container,
'dockerng.connect_container_to_network': dockerng_connect_container_to_network,
'dockerng.networks': Mock(return_value=[]),
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.network_present(
'network_foo',
containers=['container'],
)
dockerng_create_network.assert_called_with('network_foo', driver=None)
dockerng_connect_container_to_network.assert_called_with('abcd',
'network_foo')
self.assertEqual(ret, {'name': 'network_foo',
'comment': '',
'changes': {'connected': 'connected',
'created': 'created'},
'result': True})
def test_network_absent(self):
'''
Test dockerng.network_absent
'''
dockerng_remove_network = Mock(return_value='removed')
dockerng_disconnect_container_from_network = Mock(return_value='disconnected')
__salt__ = {'dockerng.remove_network': dockerng_remove_network,
'dockerng.disconnect_container_from_network': dockerng_disconnect_container_from_network,
'dockerng.networks': Mock(return_value=[{'Containers': {'container': {}}}]),
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.network_absent(
'network_foo',
)
dockerng_disconnect_container_from_network.assert_called_with('container',
'network_foo')
dockerng_remove_network.assert_called_with('network_foo')
self.assertEqual(ret, {'name': 'network_foo',
'comment': '',
'changes': {'disconnected': 'disconnected',
'removed': 'removed'},
'result': True})
def test_volume_present(self):
'''
Test dockerng.volume_present
'''
volumes = []
default_driver = 'dummy_default'
def create_volume(name, driver=None, driver_opts=None):
for v in volumes:
# volume_present should never try to add a conflicting
# volume
self.assertNotEqual(v['Name'], name)
if driver is None:
driver = default_driver
new = {'Name': name, 'Driver': driver}
volumes.append(new)
return new
def remove_volume(name):
old_len = len(volumes)
removed = [v for v in volumes if v['Name'] == name]
# volume_present should not have tried to remove a volume
# that didn't exist
self.assertEqual(1, len(removed))
volumes.remove(removed[0])
return removed[0]
dockerng_create_volume = Mock(side_effect=create_volume)
__salt__ = {'dockerng.create_volume': dockerng_create_volume,
'dockerng.volumes': Mock(return_value={'Volumes': volumes}),
'dockerng.remove_volume': Mock(side_effect=remove_volume),
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.volume_present(
'volume_foo',
)
dockerng_create_volume.assert_called_with('volume_foo',
driver=None,
driver_opts=None)
self.assertEqual(
{
'name': 'volume_foo',
'comment': '',
'changes': {
'created': {
'Driver': default_driver,
'Name': 'volume_foo',
},
},
'result': True,
},
ret)
self.assertEqual(len(volumes), 1)
self.assertEqual(volumes[0]['Name'], 'volume_foo')
self.assertIs(volumes[0]['Driver'], default_driver)
# run it again with the same arguments
orig_volumes = [volumes[0].copy()]
ret = dockerng_state.volume_present('volume_foo')
self.assertEqual(
{
'name': 'volume_foo',
'comment': "Volume 'volume_foo' already exists.",
'changes': {},
'result': True,
},
ret)
self.assertEqual(orig_volumes, volumes)
# run it again with a different driver but don't force
ret = dockerng_state.volume_present('volume_foo', driver='local')
self.assertEqual(
{
'name': 'volume_foo',
'comment': ("Driver for existing volume 'volume_foo'"
" ('dummy_default') does not match specified"
" driver ('local') and force is False"),
'changes': {},
'result': False,
},
ret)
self.assertEqual(orig_volumes, volumes)
# run it again with a different driver and force
ret = dockerng_state.volume_present(
'volume_foo', driver='local', force=True)
self.assertEqual(
{
'name': 'volume_foo',
'comment': "",
'changes': {
'removed': {
'Driver': default_driver,
'Name': 'volume_foo',
},
'created': {
'Driver': 'local',
'Name': 'volume_foo',
},
},
'result': True,
},
ret)
mod_orig_volumes = [orig_volumes[0].copy()]
mod_orig_volumes[0]['Driver'] = 'local'
self.assertEqual(mod_orig_volumes, volumes)
def test_volume_present_with_another_driver(self):
'''
Test dockerng.volume_present
'''
dockerng_create_volume = Mock(return_value='created')
dockerng_remove_volume = Mock(return_value='removed')
__salt__ = {'dockerng.create_volume': dockerng_create_volume,
'dockerng.remove_volume': dockerng_remove_volume,
'dockerng.volumes': Mock(return_value={
'Volumes': [{'Name': 'volume_foo',
'Driver': 'foo'}]}),
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.volume_present(
'volume_foo',
driver='bar',
force=True,
)
dockerng_remove_volume.assert_called_with('volume_foo')
dockerng_create_volume.assert_called_with('volume_foo',
driver='bar',
driver_opts=None)
self.assertEqual(ret, {'name': 'volume_foo',
'comment': '',
'changes': {'created': 'created',
'removed': 'removed'},
'result': True})
def test_volume_absent(self):
'''
Test dockerng.volume_absent
'''
dockerng_remove_volume = Mock(return_value='removed')
__salt__ = {'dockerng.remove_volume': dockerng_remove_volume,
'dockerng.volumes': Mock(return_value={
'Volumes': [{'Name': 'volume_foo'}]}),
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.volume_absent(
'volume_foo',
)
dockerng_remove_volume.assert_called_with('volume_foo')
self.assertEqual(ret, {'name': 'volume_foo',
'comment': '',
'changes': {'removed': 'removed'},
'result': True})
def test_removal_of_parameter_is_detected(self):
'''
        Test dockerng.running with a deleted parameter.
1. define your sls
.. code-block:: yaml
container:
dockerng.running:
- name: super-container
- binds:
- /path:/path:ro
2. run state.highstate
3. modify your sls by removing `- binds:`
.. code-block:: yaml
container:
dockerng.running:
- name: super-container
        4. enjoy your newly created container without mounted volumes.
'''
image_id = 'abcdefg'
dockerng_create = Mock(return_value=True)
dockerng_start = Mock()
dockerng_list_containers = Mock(return_value=['cont'])
dockerng_inspect_container = Mock(
side_effect=[{
'Config': {
'Image': 'image:latest',
'Tty': False,
'Labels': {},
'Domainname': '',
'User': '',
'AttachStderr': True,
'AttachStdout': True,
'Hostname': 'saltstack-container',
'Env': [],
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {'/path': {}},
'Entrypoint': None,
'ExposedPorts': {},
'OpenStdin': False,
},
'HostConfig': {
'PublishAllPorts': False,
'Dns': [],
'Links': None,
'CpusetCpus': '',
'RestartPolicy': {'MaximumRetryCount': 0, 'Name': ''},
'CapAdd': None,
'NetworkMode': 'default',
'PidMode': '',
'MemorySwap': 0,
'ExtraHosts': None,
'PortBindings': None,
'LxcConf': None,
'DnsSearch': [],
'Privileged': False,
'Binds': ['/path:/path:ro'],
'Memory': 0,
'VolumesFrom': None,
'CpuShares': 0,
'CapDrop': None,
},
'NetworkSettings': {
'MacAddress': '00:00:00:00:00:01',
},
'Image': image_id},
{'Config': {
'Image': 'image:latest',
'Tty': False,
'Labels': {},
'Domainname': '',
'User': '',
'AttachStderr': True,
'AttachStdout': True,
'Hostname': 'saltstack-container',
'Env': [],
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {'/path': {}},
'Entrypoint': None,
'ExposedPorts': {},
'OpenStdin': False,
},
'HostConfig': {
'PublishAllPorts': False,
'Dns': [],
'Links': None,
'CpusetCpus': '',
'RestartPolicy': {'MaximumRetryCount': 0, 'Name': ''},
'CapAdd': None,
'NetworkMode': 'default',
'PidMode': '',
'MemorySwap': 0,
'ExtraHosts': None,
'PortBindings': None,
'LxcConf': None,
'DnsSearch': [],
'Privileged': False,
'Binds': None,
'Memory': 0,
'VolumesFrom': None,
'CpuShares': 0,
'CapDrop': None,
},
'NetworkSettings': {
'MacAddress': '00:00:00:00:00:01',
},
'Image': image_id}]
)
dockerng_inspect_image = MagicMock(
return_value={
'Id': image_id,
'Config': {
'Hostname': 'saltstack-container',
'WorkingDir': '/',
'Cmd': ['bash'],
'Volumes': {'/path': {}},
'Entrypoint': None,
'ExposedPorts': {},
},
})
__salt__ = {'dockerng.list_containers': dockerng_list_containers,
'dockerng.inspect_container': dockerng_inspect_container,
'dockerng.inspect_image': dockerng_inspect_image,
'dockerng.list_tags': MagicMock(),
'dockerng.pull': MagicMock(return_value=True),
'dockerng.state': MagicMock(side_effect=['stopped',
'running']),
'dockerng.rm': MagicMock(return_value='cont'),
'dockerng.create': dockerng_create,
'dockerng.start': dockerng_start,
}
with patch.dict(dockerng_state.__dict__,
{'__salt__': __salt__}):
ret = dockerng_state.running(
'cont',
image='image:latest',
)
self.assertEqual(ret, {'name': 'cont',
'comment': "Container 'cont' changed state.."
" Container 'cont' was replaced.",
'changes': {
'diff': {'binds':
{'new': [],
'old': ['/path:/path:ro']}},
'image': True,
'removed': 'cont',
'state': {'new': 'running',
'old': 'stopped'},
'added': True,
},
'result': True,
})
dockerng_create.assert_called_with('image:latest',
validate_ip_addrs=False,
validate_input=False,
name='cont',
client_timeout=60)
if __name__ == '__main__':
from integration import run_tests
run_tests(DockerngTestCase, needs_daemon=False)
| 39.54554 | 109 | 0.453106 |
4de0a2257498e3996aece9200dd773e064f0acb3 | 1,076 | py | Python | setup.py | gibbsie/cdk-graviton2-alb-aga-route53 | c9d99a533acce6db3131ad68a623405a9fc0fe9b | ["MIT-0"] | 2 | 2021-06-09T01:50:35.000Z | 2021-06-11T09:20:29.000Z | setup.py | gibbsie/cdk-graviton2-alb-aga-route53 | c9d99a533acce6db3131ad68a623405a9fc0fe9b | ["MIT-0"] | null | null | null | setup.py | gibbsie/cdk-graviton2-alb-aga-route53 | c9d99a533acce6db3131ad68a623405a9fc0fe9b | ["MIT-0"] | 3 | 2021-06-09T03:37:54.000Z | 2021-11-29T04:55:09.000Z | import setuptools
with open("README.md") as fp:
long_description = fp.read()
setuptools.setup(
name="ec2_ialb_aga_custom_r53",
version="0.0.1",
description="An empty CDK Python app",
long_description=long_description,
long_description_content_type="text/markdown",
author="author",
package_dir={"": "ec2_ialb_aga_custom_r53"},
packages=setuptools.find_packages(where="ec2_ialb_aga_custom_r53"),
install_requires=[
"aws-cdk.core>=1.83.0",
],
python_requires=">=3.7",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: JavaScript",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Software Development :: Code Generators",
"Topic :: Utilities",
"Typing :: Typed",
],
)
| 23.391304 | 71 | 0.618959 |
2ef19ebba558931765c69248099b13949ca76bc4 | 1,639 | py | Python | api/base/tests.py | neosergio/library_tracker | ab7cb28a55ebf14f2b94eadab3defed03fd66e07 | ["MIT"] | null | null | null | api/base/tests.py | neosergio/library_tracker | ab7cb28a55ebf14f2b94eadab3defed03fd66e07 | ["MIT"] | null | null | null | api/base/tests.py | neosergio/library_tracker | ab7cb28a55ebf14f2b94eadab3defed03fd66e07 | ["MIT"] | null | null | null | from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from library.models import Library, Book
class BookTestCase(TestCase):
def test_book_creation(self):
client = APIClient()
        book = {'title': 'A great book',
                'author_name': 'A great author',
'isbn_num': '123456',
'genre': 'drama',
'description': 'A great description for a great book'}
response = client.post(
'/api/v1/books/',
book,
format='json'
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
class LibraryBookTestCase(TestCase):
def setUp(self):
library = Library(
name='A amazing library',
city='NY',
state='NY',
postal_code='10001'
)
library.save()
book = Book(
title='Another great book',
author_name='A great author',
isbn_num='123123',
genre='novel',
description='A great description'
)
book.save()
self.library = library
self.book = book
def test_library_book_creation(self):
client = APIClient()
record_library_book = {
"library": self.library.pk,
"book": self.book.pk
}
response = client.post(
'/api/v1/library_books/',
record_library_book,
format='json'
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
| 28.258621 | 71 | 0.562538 |
2c07fc5d6f76602ae25a3db78d124b4961fd4787 | 6,321 | py | Python | handlers/pet.py | byn9826/Thousand-Day | cd1396cfb6110d1560aea705f2e24fec0b72d258 | ["BSD-3-Clause"] | 2 | 2017-04-30T02:39:13.000Z | 2017-05-05T13:11:54.000Z | handlers/pet.py | byn9826/Thousand-Day | cd1396cfb6110d1560aea705f2e24fec0b72d258 | ["BSD-3-Clause"] | null | null | null | handlers/pet.py | byn9826/Thousand-Day | cd1396cfb6110d1560aea705f2e24fec0b72d258 | ["BSD-3-Clause"] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
import mysql.connector
import datetime
#transfer owner to relative
#return 1 for success
#return 0 for error
def transferPet(petId, ownerId, relativeId, cnx):
petQuery = 'UPDATE pet set owner_id = %s, relative_id = %s WHERE pet_id = %s'
try:
petCursor = cnx.cursor()
petCursor.execute(petQuery, (relativeId, ownerId, petId))
cnx.commit()
return '1'
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
cnx.rollback()
return '0'
finally:
petCursor.close()
#add new pet
#return new id for success
#return 0 for error
def addPet(userId, petName, petGender, petType, petNature, cnx):
petReg = datetime.datetime.now().date()
addQuery = (
'INSERT INTO pet (pet_name, pet_gender, pet_type, pet_nature, pet_reg, owner_id) '
'VALUES (%s, %s, %s, %s, %s, %s)'
)
try:
addCursor = cnx.cursor()
addCursor.execute(addQuery, (petName, petGender, petType, petNature, petReg, userId))
cnx.commit()
newId = addCursor.lastrowid
return str(newId)
except mysql.connector.Error as err:
cnx.rollback()
print('Something went wrong: {}'.format(err))
return '0'
finally:
addCursor.close()
#get data for one pet
#return 0 for error
def onePet(petId, cnx):
petQuery = 'SELECT * FROM pet WHERE pet_id = %s'
try:
petCursor = cnx.cursor(dictionary=True)
petCursor.execute(petQuery, (petId, ))
return petCursor.fetchone()
#return 0 for db error
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
return '0'
finally:
petCursor.close()
#update pet name
def newName(petId, petName, cnx):
petQuery = 'UPDATE pet set pet_name = %s WHERE pet_id = %s'
try:
petCursor = cnx.cursor()
petCursor.execute(petQuery, (petName, petId))
cnx.commit()
return '1'
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
cnx.rollback()
return '0'
finally:
petCursor.close()
#delete relative of one pet
#return 0 for error
#return 1 for success
def deleteRelative(petId, cnx):
delQuery = 'UPDATE pet SET relative_id = %s WHERE pet_id = %s'
try:
delCursor = cnx.cursor()
delCursor.execute(delQuery, (None, petId))
cnx.commit()
return '1'
except mysql.connector.Error as err:
cnx.rollback()
print('Something went wrong: {}'.format(err))
#return 0 for error
return '0'
finally:
delCursor.close()
#add one user as one pet's relative
#return number of rows affected
def addRelative(relativeId, petId, cnx):
addQuery = 'UPDATE pet SET relative_id = %s WHERE pet_id = %s AND relative_id is %s'
try:
addCursor = cnx.cursor()
addCursor.execute(addQuery, (relativeId, petId, None))
cnx.commit()
return str(addCursor.rowcount)
except mysql.connector.Error as err:
cnx.rollback()
print('Something went wrong: {}'.format(err))
#return 0 for error
return '0'
finally:
addCursor.close()
#get one pet's owner and relative id
#return 0 for error
def getBelong(petId, cnx):
petQuery = 'SELECT owner_id, relative_id FROM pet WHERE pet_id = %s'
try:
#return all pets info
petCursor = cnx.cursor()
petCursor.execute(petQuery, (petId, ))
return petCursor.fetchone()
#return 0 for db error
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
return '0'
finally:
petCursor.close()
#search all pets info belonging to one user
#return pets id, name, gender, nature, type list in array on success
#return 0 for error
def searchPets(userId, cnx):
petsQuery = (
'SELECT pet_id, pet_name, pet_gender, pet_nature, pet_type FROM pet WHERE owner_id = %s OR relative_id = %s'
)
try:
#return all pets info
petsCursor = cnx.cursor(dictionary=True)
petsCursor.execute(petsQuery, (userId, userId))
return petsCursor.fetchall()
#return 0 for db error
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
return '0'
finally:
petsCursor.close()
#find all relatives for one user
#return relative id list for success
#return 0 for error
def findRelative(userId, cnx):
relativeQuery = 'SELECT owner_id, relative_id FROM pet WHERE owner_id = %s OR relative_id = %s'
try:
relativeCursor = cnx.cursor()
relativeCursor.execute(relativeQuery, (userId, userId))
relatives = relativeCursor.fetchall()
#remove duplicate user_id and current user id
s = []
for r in relatives:
s += r
relative = list(set(s))
return [x for x in relative if x != None and x != userId]
#return 0 for db error
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
return '0'
finally:
relativeCursor.close()
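# Illustrative example for findRelative above (not part of the original
# handler): for userId 1, fetched rows [(1, 2), (1, None)] flatten to
# [1, 2, 1, None]; after deduplication and dropping None and the user's own
# id, the result is [2].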
#filter pets based on type and nature
#return 0 for error
def filterPets(type, nature, cnx):
filterQuery = 'SELECT pet_id FROM pet WHERE pet_type = %s AND pet_nature = %s'
try:
filterCursor = cnx.cursor()
filterCursor.execute(filterQuery, (type, nature))
return filterCursor.fetchall()
#return 0 for db error
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
return '0'
finally:
filterCursor.close()
#get list of pet names from a list of pet ids
#return list of pet names and ids
#return 0 for error
def petsName(petsId, cnx):
listQuery = 'SELECT pet_id, pet_name FROM pet WHERE pet_id IN (%s)'
listHolder = ', '.join(list(map(lambda x: '%s', petsId)))
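    # Illustrative expansion (not part of the original handler): for
    # petsId == [3, 7, 9], listHolder becomes '%s, %s, %s', so the final query
    # reads 'SELECT pet_id, pet_name FROM pet WHERE pet_id IN (%s, %s, %s)'.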
try:
#Get all pet info
listQuery = listQuery % (listHolder)
listCursor = cnx.cursor(dictionary=True)
listCursor.execute(listQuery, petsId)
return listCursor.fetchall()
except mysql.connector.Error as err:
print('Something went wrong: {}'.format(err))
return '0'
finally:
listCursor.close()
| 31.763819 | 116 | 0.631387 |
69bad65f1808159b8f76583773033a47cbba4210 | 142 | py | Python | home/urls.py | alissatroiano/Hue | 1251755dfc4a06ea7168e2878bd3409284730141 | ["FTL"] | 1 | 2021-04-10T23:23:37.000Z | 2021-04-10T23:23:37.000Z | home/urls.py | alissatroiano/Hue | 1251755dfc4a06ea7168e2878bd3409284730141 | ["FTL"] | null | null | null | home/urls.py | alissatroiano/Hue | 1251755dfc4a06ea7168e2878bd3409284730141 | ["FTL"] | 1 | 2021-07-28T12:13:08.000Z | 2021-07-28T12:13:08.000Z | from django.contrib import admin
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='home'),
]
| 15.777778 | 39 | 0.704225 |
0e0125a9566208109a7eb595554f37be06cabe03 | 52,771 | py | Python | tensorflow/python/training/monitored_session.py | Zwysilence/tensorflow | b55001be83da044bb21d539d433dec6231eaec55 | ["Apache-2.0"] | 2 | 2018-10-17T21:46:24.000Z | 2020-07-19T16:02:12.000Z | tensorflow/python/training/monitored_session.py | Zwysilence/tensorflow | b55001be83da044bb21d539d433dec6231eaec55 | ["Apache-2.0"] | 1 | 2018-09-17T19:30:27.000Z | 2018-09-17T19:30:27.000Z | tensorflow/python/training/monitored_session.py | Zwysilence/tensorflow | b55001be83da044bb21d539d433dec6231eaec55 | ["Apache-2.0"] | 6 | 2018-12-20T01:35:20.000Z | 2020-07-10T17:29:57.000Z | # pylint: disable=g-bad-file-header
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A wrapper of Session API which runs hooks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import sys
import six
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.distribute import distribute_coordinator_context
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner
from tensorflow.python.training import saver as training_saver
from tensorflow.python.training import session_manager as sm
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import function_utils
from tensorflow.python.util.tf_export import tf_export
# The list of exceptions that we should recover from. Exceptions not in this
# list may terminate the job.
_PREEMPTION_ERRORS = (errors.AbortedError, errors.UnavailableError)
# Value that indicates no value was provided.
USE_DEFAULT = object()
@tf_export('train.Scaffold')
class Scaffold(object):
"""Structure to create or gather pieces commonly needed to train a model.
When you build a model for training you usually need ops to initialize
variables, a `Saver` to checkpoint them, an op to collect summaries for
the visualizer, and so on.
Various libraries built on top of the core TensorFlow library take care of
creating some or all of these pieces and storing them in well known
collections in the graph. The `Scaffold` class helps pick these pieces from
the graph collections, creating and adding them to the collections if needed.
If you call the scaffold constructor without any arguments, it will pick
pieces from the collections, creating default ones if needed when
`scaffold.finalize()` is called. You can pass arguments to the constructor to
provide your own pieces. Pieces that you pass to the constructor are not
added to the graph collections.
The following pieces are directly accessible as attributes of the `Scaffold`
object:
* `saver`: A `tf.train.Saver` object taking care of saving the variables.
Picked from and stored into the `SAVERS` collection in the graph by default.
* `init_op`: An op to run to initialize the variables. Picked from and
stored into the `INIT_OP` collection in the graph by default.
* `ready_op`: An op to verify that the variables are initialized. Picked
from and stored into the `READY_OP` collection in the graph by default.
* `ready_for_local_init_op`: An op to verify that global state has been
initialized and it is alright to run `local_init_op`. Picked from and
stored into the `READY_FOR_LOCAL_INIT_OP` collection in the graph by
default. This is needed when the initialization of local variables depends
on the values of global variables.
* `local_init_op`: An op to initialize the local variables. Picked
from and stored into the `LOCAL_INIT_OP` collection in the graph by default.
* `summary_op`: An op to run and merge the summaries in the graph. Picked
from and stored into the `SUMMARY_OP` collection in the graph by default.
* `global_step`: A tensor containing the global step counter. Picked
from and stored into the `GLOBAL_STEP` collection in the graph by default.
You can also pass the following additional pieces to the constructor:
* `init_feed_dict`: A session feed dictionary that should be used when
running the init op.
* `init_fn`: A callable to run after the init op to perform additional
initializations. The callable will be called as
`init_fn(scaffold, session)`.
"""
def __init__(self,
init_op=None,
init_feed_dict=None,
init_fn=None,
ready_op=None,
ready_for_local_init_op=None,
local_init_op=None,
summary_op=None,
saver=None,
copy_from_scaffold=None):
"""Create a scaffold.
Args:
init_op: Optional op for initializing variables.
init_feed_dict: Optional session feed dictionary to use when running the
init_op.
init_fn: Optional function to use to initialize the model after running
the init_op. Will be called as `init_fn(scaffold, session)`.
ready_op: Optional op to verify that the variables are initialized. Must
return an empty 1D string tensor when the variables are initialized, or
a non-empty 1D string tensor listing the names of the non-initialized
variables.
ready_for_local_init_op: Optional op to verify that the global variables
are initialized and `local_init_op` can be run. Must return an empty
1D string tensor when the global variables are initialized, or a
non-empty 1D string tensor listing the names of the non-initialized
global variables.
local_init_op: Optional op to initialize local variables.
summary_op: Optional op to gather all summaries. Must return a scalar
string tensor containing a serialized `Summary` proto.
saver: Optional `tf.train.Saver` object to use to save and restore
variables.
copy_from_scaffold: Optional scaffold object to copy fields from. Its
fields will be overwritten by the provided fields in this function.
"""
if copy_from_scaffold is not None:
if not isinstance(copy_from_scaffold, Scaffold):
raise TypeError('copy_from_scaffold is not a Scaffold instance.')
# We need _coalesce since Tensor is not converted to bool automatically,
# so the common idiom of (a or b) does not work.
coalesce = lambda a, b: a if a is not None else b
init_op = coalesce(init_op, copy_from_scaffold.init_op)
init_feed_dict = coalesce(init_feed_dict,
copy_from_scaffold.init_feed_dict)
# Use the original init_fn provided by the user to init the new Scaffold.
init_fn = coalesce(init_fn, copy_from_scaffold._user_init_fn) # pylint: disable=protected-access
ready_op = coalesce(ready_op, copy_from_scaffold.ready_op)
ready_for_local_init_op = coalesce(
ready_for_local_init_op, copy_from_scaffold.ready_for_local_init_op)
local_init_op = coalesce(local_init_op, copy_from_scaffold.local_init_op)
summary_op = coalesce(summary_op, copy_from_scaffold.summary_op)
saver = coalesce(saver, copy_from_scaffold.saver)
# NOTE(touts): modifying the init function to be passed the scaffold is a
# hack to make it easy to find the saver. Is there a better way?
self._user_init_fn = init_fn
if init_fn:
self._init_fn = lambda sess: init_fn(self, sess)
else:
self._init_fn = None
self._init_op = init_op
self._init_feed_dict = init_feed_dict
self._ready_op = ready_op
self._ready_for_local_init_op = ready_for_local_init_op
self._local_init_op = local_init_op
self._summary_op = summary_op
self._saver = saver
def finalize(self):
"""Creates operations if needed and finalizes the graph."""
if self._init_op is None:
def default_init_op():
return control_flow_ops.group(
variables.global_variables_initializer(),
resources.initialize_resources(resources.shared_resources()))
self._init_op = Scaffold.get_or_default(
'init_op',
ops.GraphKeys.INIT_OP,
default_init_op)
if self._ready_op is None:
def default_ready_op():
return array_ops.concat([
variables.report_uninitialized_variables(),
resources.report_uninitialized_resources()
], 0)
self._ready_op = Scaffold.get_or_default(
'ready_op', ops.GraphKeys.READY_OP,
default_ready_op)
if self._ready_for_local_init_op is None:
def default_ready_for_local_init_op():
return variables.report_uninitialized_variables(
variables.global_variables())
self._ready_for_local_init_op = Scaffold.get_or_default(
'ready_for_local_init_op', ops.GraphKeys.READY_FOR_LOCAL_INIT_OP,
default_ready_for_local_init_op)
if self._local_init_op is None:
self._local_init_op = Scaffold.get_or_default(
'local_init_op', ops.GraphKeys.LOCAL_INIT_OP,
Scaffold.default_local_init_op)
if self._summary_op is None:
self._summary_op = Scaffold.get_or_default('summary_op',
ops.GraphKeys.SUMMARY_OP,
summary.merge_all)
# pylint: disable=g-long-lambda
if self._saver is None:
self._saver = training_saver._get_saver_or_default() # pylint: disable=protected-access
# pylint: enable=g-long-lambda
self._saver.build()
ops.get_default_graph().finalize()
logging.info('Graph was finalized.')
return self
@property
def init_fn(self):
return self._init_fn
@property
def init_op(self):
return self._init_op
@property
def ready_op(self):
return self._ready_op
@property
def ready_for_local_init_op(self):
return self._ready_for_local_init_op
@property
def local_init_op(self):
return self._local_init_op
@property
def summary_op(self):
return self._summary_op
@property
def saver(self):
return self._saver
@property
def init_feed_dict(self):
return self._init_feed_dict
@staticmethod
def get_or_default(arg_name, collection_key, default_constructor):
"""Get from cache or create a default operation."""
elements = ops.get_collection(collection_key)
if elements:
if len(elements) > 1:
raise RuntimeError('More than one item in the collection "%s". '
'Please indicate which one to use by passing it to '
'the tf.Scaffold constructor as: '
'tf.Scaffold(%s=item to use)', collection_key,
arg_name)
return elements[0]
op = default_constructor()
if op is not None:
ops.add_to_collection(collection_key, op)
return op
@staticmethod
def default_local_init_op():
"""Returns an op that groups the default local init ops.
This op is used during session initialization when a Scaffold is
initialized without specifying the local_init_op arg. It includes
`tf.local_variables_initializer`, `tf.tables_initializer`, and also
initializes local session resources.
Returns:
The default Scaffold local init op.
"""
return control_flow_ops.group(
variables.local_variables_initializer(),
lookup_ops.tables_initializer(),
resources.initialize_resources(resources.local_resources()))
def _create_monitored_session_with_worker_context(worker_context, # pylint: disable=missing-docstring
scaffold,
checkpoint_dir=None,
hooks=None,
chief_only_hooks=None,
save_checkpoint_secs=None,
save_summaries_steps=None,
save_summaries_secs=None,
config=None,
stop_grace_period_secs=120,
log_step_count_steps=100,
max_wait_secs=7200,
save_checkpoint_steps=None,
summary_dir=None):
all_hooks = []
if hooks:
all_hooks.extend(hooks)
if chief_only_hooks and worker_context.is_chief:
all_hooks.extend(chief_only_hooks)
summary_dir = summary_dir or checkpoint_dir
if summary_dir and worker_context.should_save_summary:
if log_step_count_steps and log_step_count_steps > 0:
all_hooks.append(
basic_session_run_hooks.StepCounterHook(
output_dir=summary_dir, every_n_steps=log_step_count_steps))
if (save_summaries_steps and save_summaries_steps > 0) or (
save_summaries_secs and save_summaries_secs > 0):
all_hooks.append(
basic_session_run_hooks.SummarySaverHook(
scaffold=scaffold,
save_steps=save_summaries_steps,
save_secs=save_summaries_secs,
output_dir=summary_dir))
if checkpoint_dir and worker_context.should_checkpoint:
if (save_checkpoint_secs and save_checkpoint_secs > 0) or (
save_checkpoint_steps and save_checkpoint_steps > 0):
all_hooks.append(
basic_session_run_hooks.CheckpointSaverHook(
checkpoint_dir,
save_steps=save_checkpoint_steps,
save_secs=save_checkpoint_secs,
scaffold=scaffold))
session_creator = worker_context.session_creator(
scaffold,
config=config,
checkpoint_dir=checkpoint_dir,
max_wait_secs=max_wait_secs)
return MonitoredSession(
session_creator=session_creator,
hooks=all_hooks,
stop_grace_period_secs=stop_grace_period_secs)
@tf_export('train.MonitoredTrainingSession')
def MonitoredTrainingSession(master='', # pylint: disable=invalid-name
is_chief=True,
checkpoint_dir=None,
scaffold=None,
hooks=None,
chief_only_hooks=None,
save_checkpoint_secs=USE_DEFAULT,
save_summaries_steps=USE_DEFAULT,
save_summaries_secs=USE_DEFAULT,
config=None,
stop_grace_period_secs=120,
log_step_count_steps=100,
max_wait_secs=7200,
save_checkpoint_steps=USE_DEFAULT,
summary_dir=None):
"""Creates a `MonitoredSession` for training.
For a chief, this utility sets proper session initializer/restorer. It also
creates hooks related to checkpoint and summary saving. For workers, this
utility sets proper session creator which waits for the chief to
initialize/restore. Please check `tf.train.MonitoredSession` for more
information.
Args:
master: `String` the TensorFlow master to use.
    is_chief: If `True`, it will take care of initialization and recovery of the
      underlying TensorFlow session. If `False`, it will wait on a chief to
initialize or recover the TensorFlow session.
checkpoint_dir: A string. Optional path to a directory where to restore
variables.
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified, a default one is created. It's used to finalize the graph.
hooks: Optional list of `SessionRunHook` objects.
chief_only_hooks: list of `SessionRunHook` objects. Activate these hooks if
`is_chief==True`, ignore otherwise.
save_checkpoint_secs: The frequency, in seconds, that a checkpoint is saved
using a default checkpoint saver. If both `save_checkpoint_steps` and
`save_checkpoint_secs` are set to `None`, then the default checkpoint
saver isn't used. If both are provided, then only `save_checkpoint_secs`
is used. Default 600.
save_summaries_steps: The frequency, in number of global steps, that the
summaries are written to disk using a default summary saver. If both
`save_summaries_steps` and `save_summaries_secs` are set to `None`, then
the default summary saver isn't used. Default 100.
save_summaries_secs: The frequency, in secs, that the summaries are written
to disk using a default summary saver. If both `save_summaries_steps` and
`save_summaries_secs` are set to `None`, then the default summary saver
isn't used. Default not enabled.
config: an instance of `tf.ConfigProto` proto used to configure the session.
      It's the `config` argument of the constructor of `tf.Session`.
stop_grace_period_secs: Number of seconds given to threads to stop after
`close()` has been called.
log_step_count_steps: The frequency, in number of global steps, that the
global step/sec is logged.
max_wait_secs: Maximum time workers should wait for the session to
become available. This should be kept relatively short to help detect
incorrect code, but sometimes may need to be increased if the chief takes
a while to start up.
save_checkpoint_steps: The frequency, in number of global steps, that a
checkpoint is saved using a default checkpoint saver. If both
`save_checkpoint_steps` and `save_checkpoint_secs` are set to `None`, then
the default checkpoint saver isn't used. If both are provided, then only
`save_checkpoint_secs` is used. Default not enabled.
summary_dir: A string. Optional path to a directory where to
save summaries. If None, checkpoint_dir is used instead.
Returns:
A `MonitoredSession` object.
"""
if save_summaries_steps == USE_DEFAULT and save_summaries_secs == USE_DEFAULT:
save_summaries_steps = 100
save_summaries_secs = None
elif save_summaries_secs == USE_DEFAULT:
save_summaries_secs = None
elif save_summaries_steps == USE_DEFAULT:
save_summaries_steps = None
if (save_checkpoint_steps == USE_DEFAULT and
save_checkpoint_secs == USE_DEFAULT):
save_checkpoint_steps = None
save_checkpoint_secs = 600
elif save_checkpoint_secs == USE_DEFAULT:
save_checkpoint_secs = None
elif save_checkpoint_steps == USE_DEFAULT:
save_checkpoint_steps = None
scaffold = scaffold or Scaffold()
worker_context = distribute_coordinator_context.get_current_worker_context()
if worker_context:
return _create_monitored_session_with_worker_context(
worker_context,
scaffold,
checkpoint_dir=checkpoint_dir,
hooks=hooks,
chief_only_hooks=chief_only_hooks,
save_checkpoint_secs=save_checkpoint_secs,
save_summaries_steps=save_summaries_steps,
save_summaries_secs=save_summaries_secs,
config=config,
stop_grace_period_secs=stop_grace_period_secs,
log_step_count_steps=log_step_count_steps,
max_wait_secs=max_wait_secs,
save_checkpoint_steps=save_checkpoint_steps,
summary_dir=summary_dir)
if not is_chief:
session_creator = WorkerSessionCreator(
scaffold=scaffold,
master=master,
config=config,
max_wait_secs=max_wait_secs)
return MonitoredSession(
session_creator=session_creator,
hooks=hooks or [],
stop_grace_period_secs=stop_grace_period_secs)
all_hooks = []
if chief_only_hooks:
all_hooks.extend(chief_only_hooks)
session_creator = ChiefSessionCreator(
scaffold=scaffold,
checkpoint_dir=checkpoint_dir,
master=master,
config=config)
summary_dir = summary_dir or checkpoint_dir
if summary_dir:
if log_step_count_steps and log_step_count_steps > 0:
all_hooks.append(
basic_session_run_hooks.StepCounterHook(
output_dir=summary_dir, every_n_steps=log_step_count_steps))
if (save_summaries_steps and save_summaries_steps > 0) or (
save_summaries_secs and save_summaries_secs > 0):
all_hooks.append(
basic_session_run_hooks.SummarySaverHook(
scaffold=scaffold,
save_steps=save_summaries_steps,
save_secs=save_summaries_secs,
output_dir=summary_dir))
if checkpoint_dir:
if (save_checkpoint_secs and save_checkpoint_secs > 0) or (
save_checkpoint_steps and save_checkpoint_steps > 0):
all_hooks.append(
basic_session_run_hooks.CheckpointSaverHook(
checkpoint_dir,
save_steps=save_checkpoint_steps,
save_secs=save_checkpoint_secs,
scaffold=scaffold))
if hooks:
all_hooks.extend(hooks)
return MonitoredSession(
session_creator=session_creator,
hooks=all_hooks,
stop_grace_period_secs=stop_grace_period_secs)
@tf_export('train.SessionCreator')
class SessionCreator(object):
"""A factory for tf.Session."""
@abc.abstractmethod
def create_session(self):
raise NotImplementedError(
'create_session is not implemented for {}.'.format(self))
@tf_export('train.ChiefSessionCreator')
class ChiefSessionCreator(SessionCreator):
"""Creates a tf.Session for a chief."""
def __init__(self,
scaffold=None,
master='',
config=None,
checkpoint_dir=None,
checkpoint_filename_with_path=None):
"""Initializes a chief session creator.
Args:
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified a default one is created. It's used to finalize the graph.
master: `String` representation of the TensorFlow master to use.
config: `ConfigProto` proto used to configure the session.
checkpoint_dir: A string. Optional path to a directory where to restore
variables.
checkpoint_filename_with_path: Full file name path to the checkpoint file.
"""
self._checkpoint_dir = checkpoint_dir
self._checkpoint_filename_with_path = checkpoint_filename_with_path
self._scaffold = scaffold or Scaffold()
self._session_manager = None
self._master = master
self._config = config
def _get_session_manager(self):
if self._session_manager:
return self._session_manager
self._session_manager = sm.SessionManager(
local_init_op=self._scaffold.local_init_op,
ready_op=self._scaffold.ready_op,
ready_for_local_init_op=self._scaffold.ready_for_local_init_op,
graph=ops.get_default_graph())
return self._session_manager
def create_session(self):
self._scaffold.finalize()
return self._get_session_manager().prepare_session(
self._master,
saver=self._scaffold.saver,
checkpoint_dir=self._checkpoint_dir,
checkpoint_filename_with_path=self._checkpoint_filename_with_path,
config=self._config,
init_op=self._scaffold.init_op,
init_feed_dict=self._scaffold.init_feed_dict,
init_fn=self._scaffold.init_fn)
@tf_export('train.WorkerSessionCreator')
class WorkerSessionCreator(SessionCreator):
"""Creates a tf.Session for a worker."""
def __init__(self,
scaffold=None,
master='',
config=None,
max_wait_secs=30 * 60):
"""Initializes a worker session creator.
Args:
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified a default one is created. It's used to finalize the graph.
master: `String` representation of the TensorFlow master to use.
config: `ConfigProto` proto used to configure the session.
max_wait_secs: Maximum time to wait for the session to become available.
"""
self._scaffold = scaffold or Scaffold()
self._session_manager = None
self._master = master
self._config = config
self._max_wait_secs = max_wait_secs
def _get_session_manager(self):
if self._session_manager:
return self._session_manager
self._session_manager = sm.SessionManager(
local_init_op=self._scaffold.local_init_op,
ready_op=self._scaffold.ready_op,
ready_for_local_init_op=self._scaffold.ready_for_local_init_op,
graph=ops.get_default_graph())
return self._session_manager
def create_session(self):
self._scaffold.finalize()
return self._get_session_manager().wait_for_session(
self._master, config=self._config,
max_wait_secs=self._max_wait_secs
)
class _MonitoredSession(object):
"""See `MonitoredSession` or `SingularMonitoredSession`."""
def __init__(self, session_creator, hooks, should_recover,
stop_grace_period_secs=120):
"""Sets up a Monitored or Hooked Session.
Args:
session_creator: A factory object to create session. Typically a
`ChiefSessionCreator` or a `WorkerSessionCreator`.
      hooks: An iterable of `SessionRunHook` objects.
should_recover: A bool. Indicates whether to recover from `AbortedError`
and `UnavailableError` or not.
stop_grace_period_secs: Number of seconds given to threads to stop after
`close()` has been called.
"""
self._graph_was_finalized = ops.get_default_graph().finalized
self._hooks = hooks or []
for h in self._hooks:
h.begin()
worker_context = distribute_coordinator_context.get_current_worker_context()
if not session_creator and worker_context:
session_creator = worker_context.session_creator()
# Create the session.
self._coordinated_creator = self._CoordinatedSessionCreator(
session_creator=session_creator or ChiefSessionCreator(),
hooks=self._hooks,
stop_grace_period_secs=stop_grace_period_secs)
if should_recover:
self._sess = _RecoverableSession(self._coordinated_creator)
else:
self._sess = self._coordinated_creator.create_session()
@property
def graph(self):
"""The graph that was launched in this session."""
if self._tf_sess() is None:
return None
return self._tf_sess().graph
def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
"""Run ops in the monitored session.
This method is completely compatible with the `tf.Session.run()` method.
Args:
fetches: Same as `tf.Session.run()`.
feed_dict: Same as `tf.Session.run()`.
options: Same as `tf.Session.run()`.
run_metadata: Same as `tf.Session.run()`.
Returns:
Same as `tf.Session.run()`.
"""
return self._sess.run(fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata)
def run_step_fn(self, step_fn):
"""Run ops using a step function.
Args:
step_fn: A function or a method with a single argument of type
`StepContext`. The function may use methods of the argument to
perform computations with access to a raw session.
The returned value of the `step_fn` will be returned from `run_step_fn`,
unless a stop is requested. In that case, the next `should_stop` call
will return True.
Example usage:
```python
with tf.Graph().as_default():
c = tf.placeholder(dtypes.float32)
v = tf.add(c, 4.0)
w = tf.add(c, 0.5)
def step_fn(step_context):
a = step_context.session.run(fetches=v, feed_dict={c: 0.5})
if a <= 4.5:
step_context.request_stop()
return step_context.run_with_hooks(fetches=w, feed_dict={c: 0.1})
with tf.MonitoredSession() as session:
while not session.should_stop():
a = session.run_step_fn(step_fn)
```
Hooks interact with the `run_with_hooks()` call inside the `step_fn`
as they do with a `MonitoredSession.run` call.
Returns:
      The value returned by `step_fn`.
Raises:
StopIteration: if `step_fn` has called `request_stop()`. It may be
caught by `with tf.MonitoredSession()` to close the session.
ValueError: if `step_fn` doesn't have a single argument called
`step_context`. It may also optionally have `self` for cases when it
belongs to an object.
"""
step_fn_arguments = function_utils.fn_args(step_fn)
if step_fn_arguments != ('step_context',) and step_fn_arguments != (
'self',
'step_context',
):
raise ValueError(
'`step_fn` may either have one `step_context` argument, or'
' `self` and `step_context` arguments if it\'s an instance'
' method. Got {} instead.'.format(step_fn_arguments))
# `self._sess` is either `_RecoverableSession` or a `_CoordinatedSession`.
# Setting `run_with_hooks` to `None` will cause `run_with_hooks` to be
# `_CoordinatedSession.run` downstream in either case. This allows
    # `_PREEMPTION_ERRORS` to propagate from within `step_fn` to
# `_RecoverableSession.run_step_fn`.
return self._sess.run_step_fn(step_fn, self._tf_sess(), run_with_hooks=None)
class StepContext(object):
"""Control flow instrument for the `step_fn` from `run_step_fn()`.
Users of `step_fn` may perform `run()` calls without running hooks
by accessing the `session`. A `run()` call with hooks may be performed
using `run_with_hooks()`. Computation flow can be interrupted using
`request_stop()`.
"""
def __init__(self, session, run_with_hooks_fn):
"""Initializes the `step_context` argument for a `step_fn` invocation.
Args:
session: An instance of `tf.Session`.
run_with_hooks_fn: A function for running fetches and hooks.
"""
self._session = session
self._run_with_hooks_fn = run_with_hooks_fn
@property
def session(self):
return self._session
def run_with_hooks(self, *args, **kwargs):
"""Same as `MonitoredSession.run`. Accepts the same arguments."""
return self._run_with_hooks_fn(*args, **kwargs)
def request_stop(self):
"""Exit the training loop by causing `should_stop()` to return `True`.
Causes `step_fn` to exit by raising an exception.
Raises:
StopIteration
"""
raise StopIteration('step_fn has requested the iterations to stop.')
def should_stop(self):
return self._sess is None or self._sess.should_stop()
def close(self):
self._close_internal()
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
if exception_type in [errors.OutOfRangeError, StopIteration]:
exception_type = None
self._close_internal(exception_type)
# __exit__ should return True to suppress an exception.
return exception_type is None
class _CoordinatedSessionCreator(SessionCreator):
"""Factory for the _RecoverableSession."""
def __init__(self, session_creator, hooks, stop_grace_period_secs):
self._session_creator = session_creator
self._hooks = hooks
self.coord = None
self.tf_sess = None
self._stop_grace_period_secs = stop_grace_period_secs
def create_session(self):
"""Creates a coordinated session."""
# Keep the tf_sess for unit testing.
self.tf_sess = self._session_creator.create_session()
# We don't want coordinator to suppress any exception.
self.coord = coordinator.Coordinator(clean_stop_exception_types=[])
if ops.get_collection(ops.GraphKeys.QUEUE_RUNNERS):
queue_runner.start_queue_runners(sess=self.tf_sess, coord=self.coord)
# Inform the hooks that a new session has been created.
for hook in self._hooks:
hook.after_create_session(self.tf_sess, self.coord)
return _CoordinatedSession(
_HookedSession(self.tf_sess, self._hooks), self.coord,
self._stop_grace_period_secs)
def _close_internal(self, exception_type=None):
try:
if not exception_type:
for h in self._hooks:
h.end(self._coordinated_creator.tf_sess)
finally:
try:
if self._sess is None:
raise RuntimeError('Session is already closed.')
self._sess.close()
finally:
self._sess = None
self._coordinated_creator.tf_sess = None
self._coordinated_creator.coord = None
if not self._graph_was_finalized:
ops.get_default_graph()._unsafe_unfinalize() # pylint: disable=protected-access
def _is_closed(self):
"""Return True if the monitored session is closed. For tests only.
Returns:
A boolean.
"""
return self._coordinated_creator.tf_sess is None
def _tf_sess(self):
return self._coordinated_creator.tf_sess
@tf_export('train.MonitoredSession')
class MonitoredSession(_MonitoredSession):
"""Session-like object that handles initialization, recovery and hooks.
Example usage:
```python
saver_hook = CheckpointSaverHook(...)
summary_hook = SummarySaverHook(...)
with MonitoredSession(session_creator=ChiefSessionCreator(...),
hooks=[saver_hook, summary_hook]) as sess:
while not sess.should_stop():
sess.run(train_op)
```
  Initialization: At creation time the monitored session does the following
  things in the given order:
* calls `hook.begin()` for each given hook
* finalizes the graph via `scaffold.finalize()`
  * creates the session
* initializes the model via initialization ops provided by `Scaffold`
* restores variables if a checkpoint exists
* launches queue runners
* calls `hook.after_create_session()`
  Run: When `run()` is called, the monitored session does the following things:
* calls `hook.before_run()`
* calls TensorFlow `session.run()` with merged fetches and feed_dict
* calls `hook.after_run()`
* returns result of `session.run()` asked by user
* if `AbortedError` or `UnavailableError` occurs, it recovers or
reinitializes the session before executing the run() call again
  Exit: At `close()`, the monitored session does the following things in order:
* calls `hook.end()`
* closes the queue runners and the session
* suppresses `OutOfRange` error which indicates that all inputs have been
processed if the monitored_session is used as a context
How to set `tf.Session` arguments:
* In most cases you can set session arguments as follows:
```python
MonitoredSession(
session_creator=ChiefSessionCreator(master=..., config=...))
```
* In distributed setting for a non-chief worker, you can use following:
```python
MonitoredSession(
session_creator=WorkerSessionCreator(master=..., config=...))
```
See `MonitoredTrainingSession` for an example usage based on chief or worker.
  Note: This is not a `tf.Session`. For example, it cannot do the following:
* it cannot be set as default session.
* it cannot be sent to saver.save.
* it cannot be sent to tf.train.start_queue_runners.
Args:
session_creator: A factory object to create session. Typically a
`ChiefSessionCreator` which is the default one.
    hooks: An iterable of `SessionRunHook` objects.
Returns:
A MonitoredSession object.
"""
def __init__(self, session_creator=None, hooks=None,
stop_grace_period_secs=120):
super(MonitoredSession, self).__init__(
session_creator, hooks, should_recover=True,
stop_grace_period_secs=stop_grace_period_secs)
@tf_export('train.SingularMonitoredSession')
class SingularMonitoredSession(_MonitoredSession):
"""Session-like object that handles initialization, restoring, and hooks.
Please note that this utility is not recommended for distributed settings.
For distributed settings, please use `tf.train.MonitoredSession`. The
differences between `MonitoredSession` and `SingularMonitoredSession` are:
* `MonitoredSession` handles `AbortedError` and `UnavailableError` for
distributed settings, but `SingularMonitoredSession` does not.
* `MonitoredSession` can be created in `chief` or `worker` modes.
`SingularMonitoredSession` is always created as `chief`.
* You can access the raw `tf.Session` object used by
`SingularMonitoredSession`, whereas in MonitoredSession the raw session is
private. This can be used:
- To `run` without hooks.
- To save and restore.
* All other functionality is identical.
Example usage:
```python
saver_hook = CheckpointSaverHook(...)
summary_hook = SummarySaverHook(...)
with SingularMonitoredSession(hooks=[saver_hook, summary_hook]) as sess:
while not sess.should_stop():
sess.run(train_op)
```
  Initialization: At creation time the hooked session does the following
  things in the given order:
* calls `hook.begin()` for each given hook
* finalizes the graph via `scaffold.finalize()`
  * creates the session
* initializes the model via initialization ops provided by `Scaffold`
* restores variables if a checkpoint exists
* launches queue runners
  Run: When `run()` is called, the hooked session does the following things:
* calls `hook.before_run()`
* calls TensorFlow `session.run()` with merged fetches and feed_dict
* calls `hook.after_run()`
* returns result of `session.run()` asked by user
  Exit: At `close()`, the hooked session does the following things in order:
* calls `hook.end()`
* closes the queue runners and the session
* suppresses `OutOfRange` error which indicates that all inputs have been
processed if the `SingularMonitoredSession` is used as a context.
"""
def __init__(self,
hooks=None,
scaffold=None,
master='',
config=None,
checkpoint_dir=None,
stop_grace_period_secs=120,
checkpoint_filename_with_path=None):
"""Creates a SingularMonitoredSession.
Args:
      hooks: An iterable of `SessionRunHook` objects.
scaffold: A `Scaffold` used for gathering or building supportive ops. If
not specified a default one is created. It's used to finalize the graph.
master: `String` representation of the TensorFlow master to use.
config: `ConfigProto` proto used to configure the session.
checkpoint_dir: A string. Optional path to a directory where to restore
variables.
stop_grace_period_secs: Number of seconds given to threads to stop after
`close()` has been called.
checkpoint_filename_with_path: A string. Optional path to a checkpoint
file from which to restore variables.
"""
session_creator = ChiefSessionCreator(
scaffold=scaffold,
master=master,
config=config,
checkpoint_dir=checkpoint_dir,
checkpoint_filename_with_path=checkpoint_filename_with_path)
super(SingularMonitoredSession, self).__init__(
session_creator, hooks, should_recover=False,
stop_grace_period_secs=stop_grace_period_secs)
def raw_session(self):
"""Returns underlying `TensorFlow.Session` object."""
return self._tf_sess()
class _WrappedSession(object):
"""Wrapper around a `tf.Session`.
This wrapper is used as a base class for various session wrappers
that provide additional functionality such as monitoring, coordination,
and recovery.
In addition to the methods exported by `SessionInterface` the wrapper
provides a method to check for stop and never raises exceptions from
calls to `close()`.
"""
def __init__(self, sess):
"""Creates a `_WrappedSession`.
Args:
sess: A `tf.Session` or `_WrappedSession` object. The wrapped session.
"""
self._sess = sess
self._wrapped_is_stoppable = isinstance(self._sess, _WrappedSession)
@property
def graph(self):
return self._sess.graph
@property
def sess_str(self):
return self._sess.sess_str
def should_stop(self):
"""Return true if this session should not be used anymore.
Always return True if the session was closed.
Returns:
True if the session should stop, False otherwise.
"""
if self._check_stop():
return True
if self._sess:
return self._wrapped_is_stoppable and self._sess.should_stop()
return True
def _check_stop(self):
"""Hook for subclasses to provide their own stop condition.
Returns:
True if the session should stop, False otherwise.
"""
return False
def close(self):
if self._sess:
try:
self._sess.close()
except _PREEMPTION_ERRORS:
pass
finally:
self._sess = None
def run(self, *args, **kwargs):
return self._sess.run(*args, **kwargs)
def run_step_fn(self, step_fn, raw_session, run_with_hooks):
# `_RecoverableSession` sets `run_with_hooks` to `_CoordinatedSession.run`.
# It is `None` when called from `_CoordinatedSession`. In that case
# `self.run` is `_CoordinatedSession.run`.
run_with_hooks = run_with_hooks or self.run
return step_fn(_MonitoredSession.StepContext(raw_session, run_with_hooks))
class _RecoverableSession(_WrappedSession):
"""A wrapped session that recreates a session upon certain kinds of errors.
The constructor is passed a SessionCreator object, not a session.
Calls to `run()` are delegated to the wrapped session. If a call raises the
exception `tf.errors.AbortedError` or `tf.errors.UnavailableError`, the
wrapped session is closed, and a new one is created by calling the factory
again.
"""
def __init__(self, sess_creator):
"""Create a new `_RecoverableSession`.
The value returned by calling `sess_creator.create_session()` will be the
session wrapped by this recoverable session.
Args:
sess_creator: A 'SessionCreator' to be wrapped by recoverable.
"""
self._sess_creator = sess_creator
_WrappedSession.__init__(self, self._create_session())
def _create_session(self):
while True:
try:
return self._sess_creator.create_session()
except _PREEMPTION_ERRORS as e:
logging.info('An error was raised while a session was being created. '
'This may be due to a preemption of a connected worker '
'or parameter server. A new session will be created. '
'Error: %s', e)
def _check_stop(self):
try:
if self._sess:
return self._sess._check_stop() # pylint: disable=protected-access
else:
return True
except _PREEMPTION_ERRORS as e:
logging.info('An error was raised while considering whether the '
'session is complete. This may be due to a preemption in '
'a connected worker or parameter server. The current '
'session will be closed and a new session will be '
'created. Error: %s', e)
self.close()
self._sess = self._create_session()
# Since we have just recreated the session, the overall computation should
# not stop:
return False
except Exception: # pylint: disable=broad-except
# `should_stop` should return True instead of raising an exception.
return True
def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
while True:
try:
if not self._sess:
self._sess = self._create_session()
return self._sess.run(fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata)
except _PREEMPTION_ERRORS as e:
logging.info('An error was raised. This may be due to a preemption in '
'a connected worker or parameter server. The current '
'session will be closed and a new session will be '
'created. Error: %s', e)
self.close()
self._sess = None
def run_step_fn(self, step_fn, raw_session, run_with_hooks):
while True:
try:
if not self._sess:
self._sess = self._create_session()
run_with_hooks = self._sess.run
return self._sess.run_step_fn(step_fn, raw_session, run_with_hooks)
except _PREEMPTION_ERRORS as e:
logging.info('An error was raised. This may be due to a preemption in '
'a connected worker or parameter server. The current '
'session will be closed and a new session will be '
'created. Error: %s', e)
self.close()
self._sess = None
class _CoordinatedSession(_WrappedSession):
"""A wrapped session that works with a `tf.Coordinator`.
Calls to `run()` are delegated to the wrapped session. If a call
raises an exception, the exception is reported to the coordinator.
  In addition, after each call to `run()` this session asks the coordinator if
  the session should stop. In that case it will join all the threads
registered with the coordinator before returning.
If the coordinator was requested to stop with an exception, that exception
will be re-raised from the call to `run()`.
"""
def __init__(self, sess, coord, stop_grace_period_secs=120):
"""Create a new `_CoordinatedSession`.
Args:
sess: A `tf.Session` object. The wrapped session.
coord: A `tf.train.Coordinator` object.
stop_grace_period_secs: Number of seconds given to threads to stop after
`close()` has been called.
"""
_WrappedSession.__init__(self, sess)
self._coord = coord
self._stop_grace_period_secs = stop_grace_period_secs
def _check_stop(self):
# If the coordinator was asked to stop due to an exception, then it needs
# to be propagated to this stack.
self._coord.raise_requested_exception()
# At this point, no exceptions are recorded in the coordinator.
return self._coord.should_stop()
def close(self):
self._coord.request_stop()
try:
self._coord.join(
stop_grace_period_secs=self._stop_grace_period_secs,
ignore_live_threads=True)
finally:
try:
_WrappedSession.close(self)
except Exception: # pylint: disable=broad-except
# We intentionally suppress exceptions from the close() here since
# useful exceptions are already reported by join().
pass
def run(self, *args, **kwargs):
try:
return self._sess.run(*args, **kwargs)
except _PREEMPTION_ERRORS:
raise
except Exception: # pylint: disable=broad-except
# A non-preemption error could have been caused by a preemption error
# in the coordinator. If this is the case, raise that exception instead,
# since it's the root cause. Otherwise, stick to the `original_exc_info`.
original_exc_info = sys.exc_info()
try:
self._coord.raise_requested_exception()
except _PREEMPTION_ERRORS:
raise
except Exception: # pylint: disable=broad-except
raise six.reraise(*original_exc_info)
else:
raise six.reraise(*original_exc_info)
class _HookedSession(_WrappedSession):
"""A _WrappedSession that calls hooks during calls to run().
The list of hooks to call is passed in the constructor. Before each call
to `run()` the session calls the `before_run()` method of the hooks, which
can return additional ops or tensors to run. These are added to the arguments
of the call to `run()`.
When the `run()` call finishes, the session calls the `after_run()` methods of
the hooks, passing the values returned by the `run()` call corresponding to
the ops and tensors that each hook requested.
  If any call to the hooks requests stop via run_context, the session will be
marked as needing to stop and its `should_stop()` method will now return
`True`.
"""
def __init__(self, sess, hooks):
"""Initializes a _HookedSession object.
Args:
sess: A `tf.Session` or a `_WrappedSession` object.
      hooks: An iterable of `SessionRunHook` objects.
"""
_WrappedSession.__init__(self, sess)
self._hooks = hooks
self._should_stop = False
def _check_stop(self):
"""See base class."""
return self._should_stop
def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
"""See base class."""
if self.should_stop():
raise RuntimeError('Run called even after should_stop requested.')
actual_fetches = {'caller': fetches}
run_context = session_run_hook.SessionRunContext(
original_args=session_run_hook.SessionRunArgs(fetches, feed_dict),
session=self._sess)
options = options or config_pb2.RunOptions()
feed_dict = self._call_hook_before_run(run_context, actual_fetches,
feed_dict, options)
# Do session run.
run_metadata = run_metadata or config_pb2.RunMetadata()
outputs = _WrappedSession.run(self,
fetches=actual_fetches,
feed_dict=feed_dict,
options=options,
run_metadata=run_metadata)
for hook in self._hooks:
hook.after_run(
run_context,
session_run_hook.SessionRunValues(
results=outputs[hook] if hook in outputs else None,
options=options,
run_metadata=run_metadata))
self._should_stop = self._should_stop or run_context.stop_requested
return outputs['caller']
def _call_hook_before_run(self, run_context, fetch_dict, user_feed_dict,
options):
"""Calls hooks.before_run and handles requests from hooks."""
hook_feeds = {}
for hook in self._hooks:
request = hook.before_run(run_context)
if request is not None:
if request.fetches is not None:
fetch_dict[hook] = request.fetches
if request.feed_dict:
self._raise_if_feeds_intersects(
hook_feeds, request.feed_dict,
'Same tensor is fed by two hooks.')
hook_feeds.update(request.feed_dict)
if request.options:
self._merge_run_options(options, request.options)
if not hook_feeds:
return user_feed_dict
if not user_feed_dict:
return hook_feeds
self._raise_if_feeds_intersects(
user_feed_dict, hook_feeds,
'Same tensor is fed by a SessionRunHook and user.')
hook_feeds.update(user_feed_dict)
return hook_feeds
def _raise_if_feeds_intersects(self, feeds1, feeds2, message):
intersection = set(feeds1.keys()) & set(feeds2.keys())
if intersection:
raise RuntimeError(message + ' Conflict(s): ' + str(list(intersection)))
def _merge_run_options(self, options, incoming_options):
"""Merge two instances of RunOptions into the first one.
During the merger, the numerical fields including trace_level,
timeout_in_ms, inter_op_thread_pool are set to the larger one of the two.
The boolean value is set to the logical OR of the two.
debug_tensor_watch_opts of the original options is extended with that from
the incoming one.
Args:
options: The options to merge into.
incoming_options: The options to be merged into the first argument.
"""
options.trace_level = max(options.trace_level, incoming_options.trace_level)
options.timeout_in_ms = max(options.timeout_in_ms,
incoming_options.timeout_in_ms)
options.inter_op_thread_pool = max(options.inter_op_thread_pool,
incoming_options.inter_op_thread_pool)
options.output_partition_graphs = max(
options.output_partition_graphs,
incoming_options.output_partition_graphs)
options.debug_options.debug_tensor_watch_opts.extend(
incoming_options.debug_options.debug_tensor_watch_opts)
options.debug_options.reset_disk_byte_usage = (
options.debug_options.reset_disk_byte_usage or
incoming_options.debug_options.reset_disk_byte_usage)
| 38.518978 | 103 | 0.686741 |
6bf8b50ed4e88dc3f606d6ee49f836358ed07537 | 1,537 | py | Python | src/tests/test_view.py | aminul91/linktutor_restapi | 2077119f9771b31e2aa49f914ebd76451fe3b53c | [
"MIT"
] | null | null | null | src/tests/test_view.py | aminul91/linktutor_restapi | 2077119f9771b31e2aa49f914ebd76451fe3b53c | [
"MIT"
] | null | null | null | src/tests/test_view.py | aminul91/linktutor_restapi | 2077119f9771b31e2aa49f914ebd76451fe3b53c | [
"MIT"
] | null | null | null | from django.test import TestCase, Client
from django.urls import reverse
from app.models import *
from app.views import *
from rest_framework import status
class TestViews(TestCase):
def setUp(self):
self.client = Client()
self.tutorials_url = reverse('tutorials')
self.tutorials_insert = reverse('tutorial_insert')
def test_link_list_POST(self):
tutorial1 = tutorial_types.objects.create(
type_name = "Movie_hit",
type_value = 15,
)
language1 = language_types.objects.create(
language_name = "eng",
language_value = 31,
)
response = self.client.post('/tutorial_insert/', {'links_name': 'Bongo Academy',
'links_path': 'bvhfggf',
'categ_name': 'Movie',
'type_value': tutorial1.type_value,
'language_value': language1.language_value,
'language_type': 'english'})
print(response.status_code)
self.assertEqual(response.status_code,status.HTTP_201_CREATED)
print("seven")
def test_project_list_GET(self):
response = self.client.get(self.tutorials_url)
self.assertEqual(response.status_code,200)
print("one")
| 32.702128 | 102 | 0.514639 |
89ab10ccbbed64e5be6982c9b90210183f7bff94 | 839 | py | Python | setup.py | ravil-mobile/gemmforge | 6381584c2d1ce77eaa938de02bc4f130f19cb2e4 | [
"MIT"
] | null | null | null | setup.py | ravil-mobile/gemmforge | 6381584c2d1ce77eaa938de02bc4f130f19cb2e4 | [
"MIT"
] | 2 | 2021-02-01T16:31:22.000Z | 2021-05-05T13:44:43.000Z | setup.py | ravil-mobile/gemmforge | 6381584c2d1ce77eaa938de02bc4f130f19cb2e4 | [
"MIT"
] | null | null | null | import setuptools
with open("gemmforge/VERSION", "r") as version_file:
current_version = version_file.read().strip()
with open("README.md", "r") as fh:
long_description = fh.read()
install_requires = ['numpy']
setuptools.setup(
name="gemmforge",
version=current_version,
license="MIT",
author="Ravil Dorozhinskii",
author_email="ravil.aviva.com@gmail.com",
description="GPU-GEMM generator",
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
url="https://github.com/ravil-mobile/gemmforge/wiki",
python_requires='>=3.5',
install_requires=install_requires,
include_package_data=True,
)
| 27.064516 | 55 | 0.725864 |
f2154da2dfd794145bac3af970caae4219bdf09d | 415 | py | Python | Problemset/maximum-subarray/maximum-subarray.py | worldwonderer/algorithm | 083178b2d987de7f6020aceca869a353c0b4b1f3 | [
"MIT"
] | 1 | 2021-01-30T01:52:46.000Z | 2021-01-30T01:52:46.000Z | Problemset/maximum-subarray/maximum-subarray.py | worldwonderer/algorithm | 083178b2d987de7f6020aceca869a353c0b4b1f3 | [
"MIT"
] | 1 | 2021-12-15T14:54:06.000Z | 2021-12-15T14:54:06.000Z | Problemset/maximum-subarray/maximum-subarray.py | worldwonderer/algorithm | 083178b2d987de7f6020aceca869a353c0b4b1f3 | [
"MIT"
] | 2 | 2021-04-19T03:32:18.000Z | 2021-06-22T07:06:01.000Z |
# @Title: 最大子序和 (Maximum Subarray)
# @Author: 18015528893
# @Date: 2021-02-18 20:24:39
# @Runtime: 52 ms
# @Memory: 15.3 MB
class Solution:
def maxSubArray(self, nums: List[int]) -> int:
max_sum = float('-inf')
s = 0
for num in nums:
if s < 0:
s = num
else:
s += num
max_sum = max(max_sum, s)
return max_sum
| 20.75 | 50 | 0.489157 |
9a70fb6db710f49e265a3fa449cd01cec281accb | 10,380 | py | Python | src/transformers/models/hubert/convert_hubert_original_pytorch_checkpoint_to_pytorch.py | bugface/transformers | ba286fe7d51db12ad663effac83bed8199dd7141 | [
"Apache-2.0"
] | 5 | 2020-09-01T09:15:48.000Z | 2020-09-15T03:25:05.000Z | src/transformers/models/hubert/convert_hubert_original_pytorch_checkpoint_to_pytorch.py | bugface/transformers | ba286fe7d51db12ad663effac83bed8199dd7141 | [
"Apache-2.0"
] | 2 | 2022-03-08T04:58:59.000Z | 2022-03-19T03:45:14.000Z | src/transformers/models/hubert/convert_hubert_original_pytorch_checkpoint_to_pytorch.py | bugface/transformers | ba286fe7d51db12ad663effac83bed8199dd7141 | [
"Apache-2.0"
] | 3 | 2020-08-20T04:46:25.000Z | 2020-10-14T08:39:13.000Z | # coding=utf-8
# Copyright 2021 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert Hubert checkpoint."""
import argparse
import json
import os
import fairseq
import torch
from fairseq.data import Dictionary
from transformers import (
HubertConfig,
HubertForCTC,
HubertModel,
Wav2Vec2CTCTokenizer,
Wav2Vec2FeatureExtractor,
Wav2Vec2Processor,
logging,
)
logging.set_verbosity_info()
logger = logging.get_logger(__name__)
MAPPING = {
"post_extract_proj": "feature_projection.projection",
"encoder.pos_conv.0": "encoder.pos_conv_embed.conv",
"self_attn.k_proj": "encoder.layers.*.attention.k_proj",
"self_attn.v_proj": "encoder.layers.*.attention.v_proj",
"self_attn.q_proj": "encoder.layers.*.attention.q_proj",
"self_attn.out_proj": "encoder.layers.*.attention.out_proj",
"self_attn_layer_norm": "encoder.layers.*.layer_norm",
"fc1": "encoder.layers.*.feed_forward.intermediate_dense",
"fc2": "encoder.layers.*.feed_forward.output_dense",
"final_layer_norm": "encoder.layers.*.final_layer_norm",
"encoder.layer_norm": "encoder.layer_norm",
"w2v_model.layer_norm": "feature_projection.layer_norm",
"w2v_encoder.proj": "lm_head",
"mask_emb": "masked_spec_embed",
}
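# Illustrative note (not part of the original file): in
# `recursively_load_weights` below, the `*` in a mapped key is replaced by the
# transformer layer index, so a fairseq parameter name containing
# "encoder.layers.3.fc1.weight" is routed to
# "encoder.layers.3.feed_forward.intermediate_dense.weight" on the Hugging Face
# side (with a leading "hubert." prefix when converting a fine-tuned model).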
def set_recursively(hf_pointer, key, value, full_name, weight_type):
for attribute in key.split("."):
hf_pointer = getattr(hf_pointer, attribute)
if weight_type is not None:
hf_shape = getattr(hf_pointer, weight_type).shape
else:
hf_shape = hf_pointer.shape
assert hf_shape == value.shape, (
f"Shape of hf {key + '.' + weight_type if weight_type is not None else ''} is {hf_shape}, but should be"
f" {value.shape} for {full_name}"
)
if weight_type == "weight":
hf_pointer.weight.data = value
elif weight_type == "weight_g":
hf_pointer.weight_g.data = value
elif weight_type == "weight_v":
hf_pointer.weight_v.data = value
elif weight_type == "bias":
hf_pointer.bias.data = value
else:
hf_pointer.data = value
logger.info(f"{key + '.' + weight_type if weight_type is not None else ''} was initialized from {full_name}.")
def recursively_load_weights(fairseq_model, hf_model, is_finetuned):
unused_weights = []
fairseq_dict = fairseq_model.state_dict()
feature_extractor = hf_model.hubert.feature_extractor if is_finetuned else hf_model.feature_extractor
for name, value in fairseq_dict.items():
is_used = False
if "conv_layers" in name:
load_conv_layer(
name,
value,
feature_extractor,
unused_weights,
hf_model.config.feat_extract_norm == "group",
)
is_used = True
else:
for key, mapped_key in MAPPING.items():
mapped_key = "hubert." + mapped_key if (is_finetuned and mapped_key != "lm_head") else mapped_key
if key in name or (key.split("w2v_model.")[-1] == name.split(".")[0] and not is_finetuned):
is_used = True
if "*" in mapped_key:
layer_index = name.split(key)[0].split(".")[-2]
mapped_key = mapped_key.replace("*", layer_index)
if "weight_g" in name:
weight_type = "weight_g"
elif "weight_v" in name:
weight_type = "weight_v"
elif "weight" in name:
weight_type = "weight"
elif "bias" in name:
weight_type = "bias"
else:
weight_type = None
set_recursively(hf_model, mapped_key, value, name, weight_type)
continue
if not is_used:
unused_weights.append(name)
logger.warning(f"Unused weights: {unused_weights}")
def load_conv_layer(full_name, value, feature_extractor, unused_weights, use_group_norm):
name = full_name.split("conv_layers.")[-1]
items = name.split(".")
layer_id = int(items[0])
type_id = int(items[1])
if type_id == 0:
if "bias" in name:
assert value.shape == feature_extractor.conv_layers[layer_id].conv.bias.data.shape, (
f"{full_name} has size {value.shape}, but"
f" {feature_extractor.conv_layers[layer_id].conv.bias.data.shape} was found."
)
feature_extractor.conv_layers[layer_id].conv.bias.data = value
logger.info(f"Feat extract conv layer {layer_id} was initialized from {full_name}.")
elif "weight" in name:
assert value.shape == feature_extractor.conv_layers[layer_id].conv.weight.data.shape, (
f"{full_name} has size {value.shape}, but"
f" {feature_extractor.conv_layers[layer_id].conv.weight.data.shape} was found."
)
feature_extractor.conv_layers[layer_id].conv.weight.data = value
logger.info(f"Feat extract conv layer {layer_id} was initialized from {full_name}.")
elif (type_id == 2 and not use_group_norm) or (type_id == 2 and layer_id == 0 and use_group_norm):
if "bias" in name:
assert value.shape == feature_extractor.conv_layers[layer_id].layer_norm.bias.data.shape, (
f"{full_name} has size {value.shape}, but {feature_extractor[layer_id].layer_norm.bias.data.shape} was"
" found."
)
feature_extractor.conv_layers[layer_id].layer_norm.bias.data = value
logger.info(f"Feat extract layer norm weight of layer {layer_id} was initialized from {full_name}.")
elif "weight" in name:
assert value.shape == feature_extractor.conv_layers[layer_id].layer_norm.weight.data.shape, (
f"{full_name} has size {value.shape}, but"
f" {feature_extractor[layer_id].layer_norm.weight.data.shape} was found."
)
feature_extractor.conv_layers[layer_id].layer_norm.weight.data = value
logger.info(f"Feat extract layer norm weight of layer {layer_id} was initialized from {full_name}.")
else:
unused_weights.append(full_name)
@torch.no_grad()
def convert_hubert_checkpoint(
checkpoint_path, pytorch_dump_folder_path, config_path=None, dict_path=None, is_finetuned=True
):
"""
Copy/paste/tweak model's weights to transformers design.
"""
if config_path is not None:
config = HubertConfig.from_pretrained(config_path)
else:
config = HubertConfig()
if is_finetuned:
if dict_path:
target_dict = Dictionary.load(dict_path)
            # Important: change the bos & pad token ids since the CTC symbol is
            # <pad> and not <s> as in fairseq.
config.bos_token_id = target_dict.pad_index
config.pad_token_id = target_dict.bos_index
config.eos_token_id = target_dict.eos_index
config.vocab_size = len(target_dict.symbols)
vocab_path = os.path.join(pytorch_dump_folder_path, "vocab.json")
if not os.path.isdir(pytorch_dump_folder_path):
logger.error("--pytorch_dump_folder_path ({}) should be a directory".format(pytorch_dump_folder_path))
return
os.makedirs(pytorch_dump_folder_path, exist_ok=True)
with open(vocab_path, "w", encoding="utf-8") as vocab_handle:
json.dump(target_dict.indices, vocab_handle)
tokenizer = Wav2Vec2CTCTokenizer(
vocab_path,
unk_token=target_dict.unk_word,
pad_token=target_dict.pad_word,
bos_token=target_dict.bos_word,
eos_token=target_dict.eos_word,
word_delimiter_token="|",
do_lower_case=False,
)
return_attention_mask = True if config.feat_extract_norm == "layer" else False
feature_extractor = Wav2Vec2FeatureExtractor(
feature_size=1,
sampling_rate=16000,
padding_value=0,
do_normalize=True,
return_attention_mask=return_attention_mask,
)
processor = Wav2Vec2Processor(feature_extractor=feature_extractor, tokenizer=tokenizer)
processor.save_pretrained(pytorch_dump_folder_path)
hf_wav2vec = HubertForCTC(config)
else:
hf_wav2vec = HubertModel(config)
if is_finetuned:
model, _, _ = fairseq.checkpoint_utils.load_model_ensemble_and_task(
[checkpoint_path], arg_overrides={"data": "/".join(dict_path.split("/")[:-1])}
)
else:
model, _, _ = fairseq.checkpoint_utils.load_model_ensemble_and_task([checkpoint_path])
model = model[0].eval()
recursively_load_weights(model, hf_wav2vec, is_finetuned)
hf_wav2vec.save_pretrained(pytorch_dump_folder_path)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--pytorch_dump_folder_path", default=None, type=str, help="Path to the output PyTorch model.")
parser.add_argument("--checkpoint_path", default=None, type=str, help="Path to fairseq checkpoint")
parser.add_argument("--dict_path", default=None, type=str, help="Path to dict of fine-tuned model")
parser.add_argument("--config_path", default=None, type=str, help="Path to hf config.json of model to convert")
parser.add_argument(
"--not_finetuned", action="store_true", help="Whether the model to convert is a fine-tuned model or not"
)
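    # Illustrative sketch (not part of the original file): a typical invocation
    # might look like the following, with hypothetical paths:
    #
    #   python convert_hubert_original_pytorch_checkpoint_to_pytorch.py \
    #       --checkpoint_path ./hubert_base_ls960.pt \
    #       --pytorch_dump_folder_path ./hubert-base \
    #       --not_finetuned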
args = parser.parse_args()
convert_hubert_checkpoint(
args.checkpoint_path, args.pytorch_dump_folder_path, args.config_path, args.dict_path, not args.not_finetuned
)
| 41.52 | 119 | 0.644894 |
d7dce94107e3e7f8f49c9e3d30df78a9fa5c71f1 | 398 | py | Python | setup.py | gnopuz83/lassonet | d4c765d46d34b97bb87fc54d44970098d05b738b | [
"BSD-3-Clause"
] | null | null | null | setup.py | gnopuz83/lassonet | d4c765d46d34b97bb87fc54d44970098d05b738b | [
"BSD-3-Clause"
] | null | null | null | setup.py | gnopuz83/lassonet | d4c765d46d34b97bb87fc54d44970098d05b738b | [
"BSD-3-Clause"
] | null | null | null | from setuptools import setup, find_packages
setup(
name='lassonetfab',
packages=find_packages(),
url='https://github.com/gnopuz83/lassonet',
description='This is a lassonet implementation',
long_description=open('README.md').read(),
dependency_links = [
"git+git@github.com:gnopuz83/lassonet.git",
],
package = ["lassonetfab"],
include_package_data=True,
) | 28.428571 | 52 | 0.693467 |
6184f299506760aaa770f283797f653b6abb3a89 | 16,670 | py | Python | tests/test_mycroft_skills_manager.py | j1nx/mycroft-skills-manager | 214b4a4ffeba37b95f2f07bde2a85ddb7c0e3c84 | [
"Apache-2.0"
] | null | null | null | tests/test_mycroft_skills_manager.py | j1nx/mycroft-skills-manager | 214b4a4ffeba37b95f2f07bde2a85ddb7c0e3c84 | [
"Apache-2.0"
] | null | null | null | tests/test_mycroft_skills_manager.py | j1nx/mycroft-skills-manager | 214b4a4ffeba37b95f2f07bde2a85ddb7c0e3c84 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2018 Mycroft AI, Inc.
#
# This file is part of Mycroft Skills Manager
# (see https://github.com/MatthewScholefield/mycroft-light).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import os
from os.path import dirname, join
import tempfile
from pathlib import Path
from shutil import copyfile, rmtree
from unittest import TestCase
from unittest.mock import call, Mock, patch
from msm import MycroftSkillsManager, AlreadyInstalled, AlreadyRemoved
from msm.exceptions import MsmException
from msm.skill_state import device_skill_state_hash
class TestMycroftSkillsManager(TestCase):
def setUp(self):
temp_dir = tempfile.mkdtemp()
self.temp_dir = Path(temp_dir)
self.skills_json_path = self.temp_dir.joinpath('skills.json')
self.skills_dir = self.temp_dir.joinpath('skills')
self._build_fake_skills()
self._mock_skills_json_path()
self._mock_skill_entry()
self._mock_skill_repo()
copyfile(join(dirname(__file__), 'skills_test.json'),
str(self.skills_json_path))
self.msm = MycroftSkillsManager(
platform='default',
skills_dir=str(self.temp_dir.joinpath('skills')),
repo=self.skill_repo_mock,
versioned=True
)
def _build_fake_skills(self):
foo_skill_dir = self.skills_dir.joinpath('skill-foo')
foo_skill_dir.mkdir(parents=True)
foo_skill_dir.joinpath('__init__.py').touch()
bar_skill_dir = self.skills_dir.joinpath('skill-bar')
bar_skill_dir.mkdir(parents=True)
bar_skill_dir.joinpath('__init__.py').touch()
def _mock_log(self):
log_patch = patch('msm.mycroft_skills_manager.LOG')
self.addCleanup(log_patch.stop)
self.log_mock = log_patch.start()
def _mock_skills_json_path(self):
savedatapath_patch = patch('msm.skill_state.get_state_path')
self.skills_json_path_mock = savedatapath_patch.start()
self.skills_json_path_mock.return_value = str(
self.temp_dir.joinpath('skills.json')
)
self.addCleanup(savedatapath_patch.stop)
def _mock_skill_entry(self):
skill_entry_patch = patch(
'msm.mycroft_skills_manager.SkillEntry.install',
spec=True
)
self.addCleanup(skill_entry_patch.stop)
self.skill_entry_mock = skill_entry_patch.start()
def _mock_skill_repo(self):
skill_repo_patch = patch(
'msm.mycroft_skills_manager.SkillRepo',
spec=True
)
self.addCleanup(skill_repo_patch.stop)
self.skill_repo_mock = skill_repo_patch.start()
self.skill_repo_mock.skills_meta_info = {
'https://skill_foo_url': None
}
    def tearDown(self):
rmtree(str(self.temp_dir))
def test_device_skill_state(self):
"""Contents of skills.json are loaded into memory"""
state = self.msm.device_skill_state
initial_state = [
dict(
name='skill-foo',
origin='default',
beta=False,
status='active',
installed=12345,
updated=0,
installation='installed',
skill_gid='@|skill-foo'
),
dict(
name='skill-bar',
origin='default',
beta=False,
status='active',
installed=23456,
updated=0,
installation='installed',
skill_gid='@|skill-bar'
)
]
self.assertListEqual(initial_state, state['skills'])
self.assertListEqual([], state['blacklist'])
self.assertEqual(2, state['version'])
new_hash = device_skill_state_hash(self.msm.device_skill_state)
self.assertEqual(new_hash, self.msm.device_skill_state_hash)
def test_build_device_skill_state(self):
"""No skill.json file so build one."""
os.remove(str(self.skills_json_path))
self.msm._device_skill_state = None
self.msm._init_skills_data()
state = self.msm.device_skill_state
initial_state = [
dict(
name='skill-bar',
origin='non-msm',
beta=False,
status='active',
installed=0,
updated=0,
installation='installed',
skill_gid='@|skill-bar'
),
dict(
name='skill-foo',
origin='non-msm',
beta=False,
status='active',
installed=0,
updated=0,
installation='installed',
skill_gid='@|skill-foo'
)
]
self.assertTrue(self.skills_json_path.exists())
with open(str(self.skills_json_path)) as skills_json:
device_skill_state = json.load(skills_json)
        self.assertListEqual(sorted(initial_state, key=lambda x: x['name']),
                             sorted(state['skills'], key=lambda x: x['name']))
self.assertListEqual(sorted(initial_state, key=lambda x: x['name']),
sorted(device_skill_state['skills'], key=lambda x: x['name']))
self.assertListEqual([], state['blacklist'])
self.assertListEqual([], device_skill_state['blacklist'])
self.assertEqual(2, state['version'])
self.assertEqual(2, device_skill_state['version'])
new_hash = device_skill_state_hash(self.msm.device_skill_state)
self.assertEqual(new_hash, self.msm.device_skill_state_hash)
def test_remove_from_device_skill_state(self):
"""Remove a file no longer installed from the device's skill state.
Delete skill-bar from the local skills. This should trigger it being
removed from the device skill state.
"""
del(self.msm.local_skills['skill-bar'])
self.msm._device_skill_state = None
state = self.msm.device_skill_state
initial_state = [
dict(
name='skill-foo',
origin='default',
beta=False,
status='active',
installed=12345,
updated=0,
installation='installed',
skill_gid='@|skill-foo'
)
]
self.assertListEqual(initial_state, state['skills'])
self.assertListEqual([], state['blacklist'])
self.assertEqual(2, state['version'])
def test_skill_list(self):
"""The skill.list() method is called."""
all_skills = self.msm.list()
skill_names = [skill.name for skill in all_skills]
self.assertIn('skill-foo', skill_names)
self.assertIn('skill-bar', skill_names)
self.assertEqual(2, len(all_skills))
self.assertIsNone(self.msm._local_skills)
self.assertIsNone(self.msm._default_skills)
self.assertEqual(all_skills, self.msm._all_skills)
def test_install(self):
"""Install a skill
Test that the install method was called on the skill being installed
and that the new skill was added to the device's skill state.
"""
skill_to_install = self.skill_entry_mock()
skill_to_install.name = 'skill-test'
skill_to_install.skill_gid = 'test-skill|99.99'
skill_to_install.is_beta = False
with patch('msm.mycroft_skills_manager.isinstance') as isinstance_mock:
isinstance_mock.return_value = True
with patch('msm.mycroft_skills_manager.time') as time_mock:
time_mock.time.return_value = 100
self.msm.install(skill_to_install, origin='voice')
with open(str(self.skills_json_path)) as skills_json:
device_skill_state = json.load(skills_json)
skill_test_state = dict(
name='skill-test',
origin='voice',
beta=False,
status='active',
installed=100,
updated=0,
installation='installed',
skill_gid='test-skill|99.99'
)
self.assertIn(skill_test_state, device_skill_state['skills'])
self.assertListEqual(
[call.install(None)],
skill_to_install.method_calls
)
def test_already_installed(self):
"""Attempt install of skill already on the device.
When this happens, an AlreadyInstalled exception is raised and the
device skill state is not modified.
"""
skill_to_install = self.skill_entry_mock()
skill_to_install.name = 'skill-foo'
skill_to_install.skill_gid = 'skill-foo|99.99'
skill_to_install.is_beta = False
skill_to_install.install = Mock(side_effect=AlreadyInstalled())
pre_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
with patch('msm.mycroft_skills_manager.isinstance') as isinstance_mock:
isinstance_mock.return_value = True
with self.assertRaises(AlreadyInstalled):
self.msm.install(skill_to_install)
self.assertIsNotNone(self.msm._local_skills)
self.assertIn('all_skills', self.msm._cache)
post_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
self.assertEqual(pre_install_hash, post_install_hash)
def test_install_failure(self):
"""Install attempt fails for whatever reason
When an install fails, the installation will raise a MsmException. The
skill install will be saved to the device skill state as failed and
the error that caused the exception will be included in the state.
"""
skill_to_install = self.skill_entry_mock()
skill_to_install.name = 'skill-test'
skill_to_install.skill_gid = 'skill-test|99.99'
skill_to_install.is_beta = False
skill_to_install.install = Mock(side_effect=MsmException('RED ALERT!'))
with patch('msm.mycroft_skills_manager.isinstance') as isinstance_mock:
with self.assertRaises(MsmException):
isinstance_mock.return_value = True
self.msm.install(skill_to_install, origin='cli')
with open(str(self.skills_json_path)) as skills_json:
device_skill_state = json.load(skills_json)
skill_test_state = dict(
name='skill-test',
origin='cli',
beta=False,
status='error',
installed=0,
updated=0,
installation='failed',
skill_gid='skill-test|99.99',
failure_message='RED ALERT!'
)
self.assertIn(skill_test_state, self.msm.device_skill_state['skills'])
self.assertIn(skill_test_state, device_skill_state['skills'])
self.assertListEqual(
[call.install(None)],
skill_to_install.method_calls
)
def test_remove(self):
"""Remove a skill
        Test that the remove method was called on the skill being removed
        and that the skill was removed from the device's skill state.
"""
skill_to_remove = self.skill_entry_mock()
skill_to_remove.name = 'skill-foo'
pre_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
with patch('msm.mycroft_skills_manager.isinstance') as isinstance_mock:
isinstance_mock.return_value = True
self.msm.remove(skill_to_remove)
with open(str(self.skills_json_path)) as skills_json:
device_skill_state = json.load(skills_json)
skill_names = [skill['name'] for skill in device_skill_state['skills']]
self.assertNotIn('skill_foo', skill_names)
skill_names = [
skill['name'] for skill in self.msm.device_skill_state['skills']
]
self.assertNotIn('skill_foo', skill_names)
self.assertListEqual([call.remove()], skill_to_remove.method_calls)
self.assertNotIn('all_skills', self.msm._cache)
self.assertIsNone(self.msm._local_skills)
post_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
self.assertNotEqual(pre_install_hash, post_install_hash)
def test_already_removed(self):
"""Attempt removal of skill already removed from the device.
When this happens, an AlreadyRemoved exception is raised and the
device skill state is not modified.
"""
skill_to_remove = self.skill_entry_mock()
skill_to_remove.name = 'skill-foo'
skill_to_remove.remove = Mock(side_effect=AlreadyRemoved())
pre_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
with patch('msm.mycroft_skills_manager.isinstance') as isinstance_mock:
isinstance_mock.return_value = True
with self.assertRaises(AlreadyRemoved):
self.msm.remove(skill_to_remove)
self.assertListEqual([call.remove()], skill_to_remove.method_calls)
self.assertIsNotNone(self.msm._local_skills)
self.assertIn('all_skills', self.msm._cache)
post_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
self.assertEqual(pre_install_hash, post_install_hash)
def test_remove_failure(self):
"""Skill removal attempt fails for whatever reason
        When a removal fails, a MsmException is raised. The removal will not
be saved to the device skill state.
"""
skill_to_remove = self.skill_entry_mock()
skill_to_remove.name = 'skill-test'
skill_to_remove.remove = Mock(side_effect=MsmException('RED ALERT!'))
pre_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
with patch('msm.mycroft_skills_manager.isinstance') as isinstance_mock:
isinstance_mock.return_value = True
with self.assertRaises(MsmException):
self.msm.remove(skill_to_remove)
self.assertListEqual(
[call.remove()],
skill_to_remove.method_calls
)
self.assertIsNotNone(self.msm._local_skills)
self.assertIn('all_skills', self.msm._cache)
post_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
self.assertEqual(pre_install_hash, post_install_hash)
def test_update(self):
"""Remove a skill
Test that the remove method was called on the skill being installed
and that the new skill was removed from the device's skill state.
"""
skill_to_update = self.skill_entry_mock()
skill_to_update.name = 'skill-foo'
skill_to_update.is_beta = False
pre_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
with patch('msm.mycroft_skills_manager.time') as time_mock:
time_mock.time.return_value = 100
self.msm.update(skill_to_update)
with open(str(self.skills_json_path)) as skills_json:
device_skill_state = json.load(skills_json)
skill_names = [skill['name'] for skill in device_skill_state['skills']]
self.assertIn('skill-foo', skill_names)
for skill in self.msm.device_skill_state['skills']:
if skill['name'] == 'skill-foo':
self.assertEqual(100, skill['updated'])
self.assertListEqual([call.update()], skill_to_update.method_calls)
self.assertNotIn('all_skills', self.msm._cache)
self.assertIsNone(self.msm._local_skills)
post_install_hash = device_skill_state_hash(
self.msm.device_skill_state
)
self.assertNotEqual(pre_install_hash, post_install_hash)
| 38.410138 | 79 | 0.636533 |
bfb54aa15c7cd0c22efab84ae1925eb8fe1694ae | 3,041 | py | Python | src/tankoh2/contourcreator.py | sfreund-DLR/tankoh2 | 92ff080f7034a7eb1cdabed5089c79fd01af4d11 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | src/tankoh2/contourcreator.py | sfreund-DLR/tankoh2 | 92ff080f7034a7eb1cdabed5089c79fd01af4d11 | [
"MIT",
"BSD-3-Clause"
] | 27 | 2021-11-03T19:53:00.000Z | 2022-03-28T12:43:30.000Z | src/tankoh2/contourcreator.py | sfreund-DLR/tankoh2 | 92ff080f7034a7eb1cdabed5089c79fd01af4d11 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | """This module creates dome contours"""
import numpy as np
from matplotlib import pyplot as plt
from tankoh2.exception import Tankoh2Error
from tankoh2.service import indent
from tankoh2.service import log
def getCountourConical(rPolarOpening, rSmall, rLarge, lConical, domeType ='circular'):
"""Calculates the countour of a dome and a attached conical structure
ATTENTION:
- This method is not yet finished!
    - It contains some hardcoded values like xOffset, rOffset
- dydxConical must be iteratively identified which changes xOffset, rOffset.
Or a fully analytical solution must be found
- Only tested for dydxConical=1
- extend for other dome types
rPolarOpening
←-→
..-- --..
circle 1 .-~ ~-. rSmall
/ \ ↑
/ \ | lConical
/ \ ↓
circle 2 | | rLarge
| |
| |
    :return: vectors x,r: r starts at cylinder radius decreasing, x is increasing
"""
allowedDomeTypes = ['circular']
if domeType not in allowedDomeTypes:
raise Tankoh2Error(f'Wrong input for domeType "{domeType}". Valid types: {allowedDomeTypes}')
if not all([val >0 for val in [rSmall, rLarge, lConical]]):
raise Tankoh2Error('All input values must be larger than zero')
if rSmall >= rLarge:
raise Tankoh2Error('rSmall >= rLarge')
numPoints = 40
# 1: circle at polar opening
# find location where dr/dx of circular section is same dr/dx of conical section
dydxConical = (rLarge - rSmall) / lConical
# drdx=-x/np.sqrt(rSmall**2-x**2)
x1SameDydx = dydxConical * rSmall
rCirc1 = np.sqrt(x1SameDydx**2 + rSmall**2)
alphaSmallR = np.arccos(x1SameDydx /rCirc1)
alphaPolarOpening = np.arcsin(rPolarOpening /rCirc1)
angles = np.linspace(alphaPolarOpening, alphaSmallR, numPoints)
x1 = np.cos(angles) * rCirc1
x1 = 2 * x1[-1] - x1 # x must be increasing
r1 = np.sin(angles) * rCirc1
# 2: conical section
xOffset, rOffset = 100,100
x2, r2 = np.linspace([x1[-1], r1[-1]], [x1[-1]+xOffset, r1[-1]+rOffset], numPoints, False).T[:,1:]
    # 3: circle at cylinder
angles = np.linspace(alphaSmallR, np.pi/2, numPoints)
x3 = np.cos(angles) * rCirc1
x3 = 2 * x3[0] - x3
r3 = np.sin(angles) * rCirc1
# put it together
x = np.concatenate([x1, x2, x3])
r = np.concatenate([r1, r2, r3])
print(indent(np.array([x,r]).T, delim=' '))
r = r[::-1]
x = x[::-1]
x = x[0] - x
if 0:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.plot(x, r)
plt.show()
log.error('This method is not fully implemented and uses hardcoded values')
return x, r
if __name__ == '__main__':
getCountourConical(20 ,60 ,100 ,40)
| 30.108911 | 102 | 0.573167 |
ddf0490c2e4a1f50d15877c002e1ed1c70730961 | 1,956 | py | Python | setup.py | MeiNogizaka/netmiko | 43859b8ca0689d053966b4def4f56f02dc00c844 | [
"MIT"
] | 1 | 2019-10-16T19:02:30.000Z | 2019-10-16T19:02:30.000Z | setup.py | MeiNogizaka/netmiko | 43859b8ca0689d053966b4def4f56f02dc00c844 | [
"MIT"
] | null | null | null | setup.py | MeiNogizaka/netmiko | 43859b8ca0689d053966b4def4f56f02dc00c844 | [
"MIT"
] | 1 | 2019-10-16T19:02:32.000Z | 2019-10-16T19:02:32.000Z | from setuptools import setup
from setuptools import find_packages
import os
import re
with open("README.md", "r") as fs:
long_description = fs.read()
def find_version(*file_paths):
"""
This pattern was modeled on a method from the Python Packaging User Guide:
https://packaging.python.org/en/latest/single_source_version.html
We read instead of importing so we don't get import errors if our code
imports from dependencies listed in install_requires.
"""
base_module_file = os.path.join(*file_paths)
with open(base_module_file) as f:
base_module_data = f.read()
version_match = re.search(
r"^__version__ = ['\"]([^'\"]*)['\"]", base_module_data, re.M
)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
setup(
name="netmiko",
version=find_version("netmiko", "__init__.py"),
description="Multi-vendor library to simplify Paramiko SSH connections to network devices",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ktbyers/netmiko",
author="Kirk Byers",
author_email="ktbyers@twb-tech.com",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=find_packages(exclude=("test*",)),
install_requires=[
"setuptools>=38.4.0",
"paramiko>=2.4.2",
"scp>=0.10.0",
"pyyaml",
"pyserial",
"textfsm",
'enum34; python_version == "2.7"',
'ipaddress; python_version == "2.7"',
],
extras_require={"test": ["pytest>=3.2.5"]},
)
| 31.548387 | 95 | 0.634458 |
a0a13fbde39f30e42b1d6843afb68dda89198383 | 27,430 | py | Python | tensorflow/python/ops/cond_v2.py | rickyzhang82/tensorflow | 397fdb37bc923f1f56d0224617cfd1b1dfd76d99 | [
"Apache-2.0"
] | 3 | 2021-01-19T20:24:09.000Z | 2021-01-19T21:40:05.000Z | tensorflow/python/ops/cond_v2.py | rickyzhang82/tensorflow | 397fdb37bc923f1f56d0224617cfd1b1dfd76d99 | [
"Apache-2.0"
] | null | null | null | tensorflow/python/ops/cond_v2.py | rickyzhang82/tensorflow | 397fdb37bc923f1f56d0224617cfd1b1dfd76d99 | [
"Apache-2.0"
] | 1 | 2019-01-27T09:44:57.000Z | 2019-01-27T09:44:57.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""cond_v2 and gradient.
This is a version of cond that emits a single If op, as well as the gradient
function for If ops produced by cond_v2. This will eventually replace the
current tf.cond implementation once it reaches feature and performance parity.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import func_graph as func_graph_module
from tensorflow.python.framework import function_def_to_graph
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import control_flow_util_v2 as util
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import gen_functional_ops
from tensorflow.python.ops import gen_resource_variable_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.util import nest
# NOTE(skyewm): TensorFlow uses protected class methods and fields to signify
# that they aren't part of the official public API. These protected members
# often need to be used by implementation code however. Rather than litter the
# code with pylint comments, we ignore protected access violations for
# readability.
# pylint: disable=protected-access
def cond_v2(pred, true_fn, false_fn, name="cond"):
"""Like tf.cond, except emits a single If op."""
if isinstance(pred, bool):
raise TypeError("pred must not be a Python bool", pred)
if not name:
name = "cond"
with ops.name_scope(name) as scope:
true_name = util.unique_fn_name(scope, "true")
false_name = util.unique_fn_name(scope, "false")
# Automatic control dependencies are added in defuns, but not in v1
# graphs. Propagate that behavior here.
add_control_dependencies = ops.get_default_graph()._add_control_dependencies
pred = ops.convert_to_tensor(pred)
true_graph = func_graph_module.func_graph_from_py_func(
true_name,
true_fn, [], {},
func_graph=util.CondBranchFuncGraph(
true_name, collections=ops.get_default_graph()._collections), # pylint: disable=protected-access
add_control_dependencies=add_control_dependencies,
op_return_value=pred)
false_graph = func_graph_module.func_graph_from_py_func(
false_name,
false_fn, [], {},
func_graph=util.CondBranchFuncGraph(
false_name, collections=ops.get_default_graph()._collections), # pylint: disable=protected-access
add_control_dependencies=add_control_dependencies,
op_return_value=pred)
return _build_cond(pred, true_graph, false_graph,
true_graph.external_captures,
false_graph.external_captures,
name=scope)
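# Hedged usage sketch (added for illustration; not part of the original module):
# cond_v2 follows the tf.cond calling convention. This helper is unused by the
# rest of the file and assumes it runs while a graph is being built.
def _example_cond_v2_usage():
  pred = ops.convert_to_tensor(True)
  x = ops.convert_to_tensor(3.0)
  y = ops.convert_to_tensor(4.0)
  # Emits a single If op whose branches compute x + y and x * y respectively.
  return cond_v2(pred, lambda: math_ops.add(x, y), lambda: math_ops.multiply(x, y))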
@ops.RegisterGradient("If")
def _IfGrad(op, *grads): # pylint: disable=invalid-name
"""The gradient of an If op produced by cond_v2."""
# Get the if operator (this logic handles the case where op is a MockOp)
if_op = op.outputs[0].op
true_graph, false_graph = _get_func_graphs(if_op)
# Note: op.graph != ops.get_default_graph() when we are computing the gradient
# of a nested cond.
assert true_graph.outer_graph == if_op.graph
assert false_graph.outer_graph == if_op.graph
# Create grad functions that compute the gradient of the true/false forward
# graphs. These functions will capture tensors from the forward pass
# functions.
true_grad_graph = _create_grad_func(
true_graph, grads, util.unique_grad_fn_name(true_graph.name))
false_grad_graph = _create_grad_func(
false_graph, grads, util.unique_grad_fn_name(false_graph.name))
if (true_grad_graph.if_op_needs_rewrite or
false_grad_graph.if_op_needs_rewrite):
# Modify 'op' to output the intermediates needed by the grad functions. Note
# that all needed intermediates are wrapped in optionals. Each optional
# intermediate output will have a value iff its corresponding branch is
# taken.
# NOTE(skyewm): if there are any active sessions, this modification to `op`
# may make them unrunnable!
if control_flow_util.InXlaContext(ops.get_default_graph()):
# XLA does not yet support optionals, so output intermediates directly and
# make them match via FakeParams, which can be converted to zeros in XLA.
# TODO(skyewm,jpienaar): can XLA support optionals?
true_intermediates = true_grad_graph.xla_intermediates
false_intermediates = false_grad_graph.xla_intermediates
extra_true_outputs, extra_false_outputs = _make_intermediates_match_xla(
true_graph, false_graph, true_intermediates, false_intermediates)
else:
true_intermediates = true_grad_graph.wrapped_intermediates
false_intermediates = false_grad_graph.wrapped_intermediates
# Make outputs match by adding none optionals.
extra_true_outputs, extra_false_outputs = _make_intermediates_match(
true_graph, false_graph, true_intermediates, false_intermediates)
true_graph.outputs.extend(extra_true_outputs)
false_graph.outputs.extend(extra_false_outputs)
# TODO(skyewm): indicate it's an internal bug if this fails.
_check_same_outputs(true_graph, false_graph)
true_graph.name += "_rewritten"
false_graph.name += "_rewritten"
if_op._set_func_attr("then_branch", util.create_new_tf_function(true_graph))
if_op._set_func_attr("else_branch",
util.create_new_tf_function(false_graph))
if_op._set_type_list_attr("Tout", true_graph.output_types)
if_op._set_shape_list_attr("output_shapes", true_graph.output_shapes)
if_op._add_outputs(
[t.dtype for t in extra_true_outputs],
[t.shape for t in extra_true_outputs])
# Resolve references to forward graph tensors in grad graphs and ensure
# they are in-scope, i.e., belong to one of outer graphs of the grad graph.
true_grad_inputs = _resolve_grad_inputs(true_graph, true_grad_graph)
false_grad_inputs = _resolve_grad_inputs(false_graph, false_grad_graph)
# This modifies true_grad_graph and false_grad_graph.
_make_output_composite_tensors_match(true_grad_graph, false_grad_graph)
outputs = _build_cond(if_op.inputs[0], true_grad_graph, false_grad_graph,
true_grad_inputs, false_grad_inputs)
# The predicate has no gradient.
return [None] + outputs
def _build_cond(pred, true_graph, false_graph, true_inputs, false_inputs,
name=None):
"""Creates an If op from the specified predicate, branch functions and inputs.
Note that this modifies true_graph and false_graph to make the inputs match,
and to output all intermediates values so they're available for the gradient
computation.
true_graph and false_graph need not have the same input types, but they must
  have the same output types.
Args:
pred: boolean Tensor
true_graph: FuncGraph
false_graph: FuncGraph
true_inputs: a list of Tensors to be passed to true_graph as input.
false_inputs: a list of Tensors to be passed to false_graph as input.
name: the name for the If op.
Returns:
A list of Tensors which are the outputs of the If op. Does not include added
intermediate outputs.
"""
_check_same_outputs(true_graph, false_graph)
# Add inputs to true_graph and false_graph to make them match. Note that
# this modifies true_graph and false_graph.
cond_inputs = _make_inputs_match(true_graph, false_graph,
true_inputs, false_inputs)
# Create the If op.
tensors = gen_functional_ops._if( # pylint: disable=protected-access
pred,
cond_inputs, [t.dtype for t in true_graph.outputs],
util.create_new_tf_function(true_graph),
util.create_new_tf_function(false_graph),
output_shapes=_get_output_shapes(true_graph.outputs,
false_graph.outputs),
name=name)
# TODO(b/110167197) this approach requires cond_v2 to have at least 1 output
if_op = tensors[0].op
util.maybe_set_lowering_attr(if_op)
# Return identities for each output of the If op, rather than the output of
# the If op directly. This makes pruning work if the output of cond() is
# fetched: the lowering pass converts the If outputs into IdentityN outputs,
# which if fetched will cause all ops in the taken branch to be run (since
# it takes all merge ops as input). After lowering, each output identity op
# will end up with only the appropriate merge op as input.
  # TODO(b/79984175): this doesn't have to be a tuple once we convert to the
# correct output structure
tensors = [array_ops.identity(t) for t in tensors]
# Prevent fetching since the variant outputs can't be fetched directly.
if_op.graph.prevent_fetching(if_op)
return func_graph_module.pack_sequence_as(true_graph.structured_outputs,
tensors)
def _get_func_graphs(if_op):
"""Returns `FuncGraph`s for the input op branches.
Args:
if_op: The _If Operation.
Returns:
A 2-tuple of the `FuncGraph`s of the then_branch and else_branch.
"""
def _get_func_graph_for_branch(branch_name):
"""Generates and returns a FuncGraph for the given branch."""
inputs = if_op.inputs[1:] # First input is pred.
input_shapes = [t.shape for t in inputs]
func_name = if_op.get_attr(branch_name).name
fdef = if_op.graph._get_function(func_name).definition
# `if_op.graph` may not be the same as `ops.get_default_graph()` e.g.
# in the case of nested if ops or when the gradient is being computed
# from inside a Defun. We build the `func_graph` with `if_op.graph` as its
# `outer_graph`. This resembles how the `FuncGraph` was built in the
# forward pass. We need this so that we can resolve references to tensors
# in `func_graph` from its gradient graph in `_resolve_grad_inputs`.
with if_op.graph.as_default():
func_graph = function_def_to_graph.function_def_to_graph(
fdef, input_shapes)
func_graph.captures = collections.OrderedDict(zip(inputs,
func_graph.inputs))
# Set the if op so that the gradient code can use it.
func_graph._if = if_op
return func_graph
return (_get_func_graph_for_branch("then_branch"),
_get_func_graph_for_branch("else_branch"))
def _grad_fn(func_graph, grads):
"""The gradient function for each conditional branch.
This function builds the gradient graph of the corresponding forward-pass
conditional branch in `func_graph`. This is done by differentiating
func_graph's outputs w.r.t. its inputs.
Args:
func_graph: FuncGraph. The corresponding forward-pass function.
grads: The list of input gradient Tensors.
Returns:
The output gradient Tensors.
"""
# Filter out untrainable function outputs.
# NOTE(skyewm): If we don't do this, the untrainable tensors can sometimes
# cause _GradientsHelper to raise an exception (e.g. the implementation
# doesn't expect 'ys' to contain boolean tensors).
assert len(func_graph.outputs) == len(grads)
ys = []
grad_ys = []
for y, grad_y in zip(func_graph.outputs, grads):
if not gradients_impl.IsTrainable(y):
continue
ys.append(y)
grad_ys.append(grad_y)
# Build the gradient graph. Note that this builds the gradient computation of
# func_graph in the current graph, which requires capturing tensors from
# func_graph. The captured func_graph tensors are resolved to external tensors
# in _resolve_grad_inputs.
result = gradients_impl._GradientsHelper(
ys, func_graph.inputs, grad_ys=grad_ys,
src_graph=func_graph)
# Functions can't return None; replace Nones with zero tensors.
# TODO(b/80444525): don't return anything here and make _IfGrad return None if
# both branches have zero gradient.
for i in range(len(result)):
if result[i] is None:
if func_graph.inputs[i].dtype == dtypes.resource:
result[i] = array_ops.zeros(
gen_resource_variable_ops.variable_shape(func_graph.inputs[i]))
else:
result[i] = array_ops.zeros_like(func_graph.inputs[i])
return result
def _create_grad_func(func_graph, grads, name):
"""Returns the FuncGraph representation of _grad_fn."""
return func_graph_module.func_graph_from_py_func(
name,
lambda: _grad_fn(func_graph, grads), [], {},
func_graph=_CondGradFuncGraph(name, func_graph))
def _resolve_grad_inputs(cond_graph, grad_graph):
"""Returns the tensors to pass as inputs to `grad_graph`.
The `grad_graph` may have external references to
1. Its outer graph containing the input gradients. These references are kept
as is.
2. Tensors in the forward pass graph. These tensors may not be "live"
when the gradient is being computed. We replace such references by their
corresponding tensor in `cond_graph.outer_graph`. In the case of nested
control flow or functions, the gradient logic handling
`grad_graph.outer_graph` will make sure the tensor from
`cond_graph.outer_graph` is also correctly captured.
Args:
cond_graph: FuncGraph. The forward-pass function.
grad_graph: FuncGraph. The gradients function.
Returns:
A list of inputs tensors to be passed to grad_graph.
"""
new_inputs = []
for t in grad_graph.external_captures:
# `t` must either be in `grad_graph.outer_graph` or in the forward
# `cond_graph`.
if t.graph != grad_graph.outer_graph:
assert t.graph == cond_graph
# `internal_captures` are not treated as intermediates and hence not added
# to If op outputs. So we get the outer tensor corresponding to those
# from the list of `external_captures`.
try:
t = t.graph._if.outputs[t.graph.outputs.index(t)]
except ValueError:
index = t.graph.internal_captures.index(t)
t = t.graph.external_captures[index]
# Note: We rely on the capturing logic of the gradient If op graph to
# correctly capture the tensors in `cond_graph.outer_graph`. Both cond_v2
# and while_v2 handle this while building their gradient functions.
assert t.graph == cond_graph.outer_graph
new_inputs.append(t)
return new_inputs
def _get_intermediates(func_graph):
"""Returns all tensors in `func_graph` that aren't inputs or outputs."""
intermediates = []
for op in func_graph.get_operations():
for t in op.outputs:
if t in func_graph.inputs: continue
if t in func_graph.outputs: continue
intermediates.append(t)
return intermediates
def _separate_unique_inputs(true_inputs, false_inputs):
"""Separates tensors appearing only in true_inputs or false_inputs, or both.
Args:
true_inputs: list of Tensors
false_inputs: list of Tensors
Returns:
Three lists of Tensors:
1. The tensors that appear in both true_inputs and false_inputs
2. The tensors that only appear in true_inputs
3. The tensors that only appear in false_inputs
"""
true_inputs = set(true_inputs)
false_inputs = set(false_inputs)
shared_inputs = true_inputs.intersection(false_inputs)
true_only_inputs = true_inputs - false_inputs
false_only_inputs = false_inputs - true_inputs
return list(shared_inputs), list(true_only_inputs), list(false_only_inputs)
def _make_intermediates_match(true_graph, false_graph,
true_optionals, false_optionals):
"""Returns new optionals lists that have matching signatures.
This is done by mirroring each list in the other using none optionals.
There is no merging of like optionals.
Args:
true_graph: FuncGraph
false_graph: FuncGraph
true_optionals: a list of optional Tensors from true_graph
false_optionals: a list of optional Tensors from false_graph
Returns:
A new list of Tensors in true_graph and a new list of Tensors in
false_graph. The two lists have the same number of Tensors, all of which
will be optionals of the same shape/type.
"""
new_true_optionals = (true_optionals +
_create_none_optionals(true_graph, false_optionals))
new_false_optionals = (_create_none_optionals(false_graph, true_optionals)
+ false_optionals)
return new_true_optionals, new_false_optionals
def _make_intermediates_match_xla(true_graph, false_graph, true_intermediates,
false_intermediates):
"""Like _make_intermediates_match but for the XLA case."""
new_true_intermediates = (true_intermediates +
_create_fakeparams(true_graph, false_intermediates))
new_false_intermediates = (_create_fakeparams(false_graph, true_intermediates)
+ false_intermediates)
return new_true_intermediates, new_false_intermediates
def _make_inputs_match(true_graph, false_graph, true_inputs, false_inputs):
"""Modifies true_graph and false_graph so they have the same input signature.
This method reorders and/or adds parameters to true_graph and false_graph so
they have the same input signature, and updates the 'inputs' and 'captured'
fields of both graphs accordingly. It uses the input tensors from the outer
graph to avoid duplicating shared arguments.
Args:
true_graph: FuncGraph
false_graph: FuncGraph
true_inputs: a list of Tensors in the outer graph. The inputs for
true_graph.
false_inputs: a list of Tensors in the outer graph. The inputs for
false_graph.
Returns:
A new list of Tensors from the outer graph that are the new inputs for both
true_graph and false_graph. This is a deduped version of true_inputs +
false_inputs.
"""
shared_inputs, true_only_inputs, false_only_inputs = _separate_unique_inputs(
true_inputs, false_inputs)
new_inputs = shared_inputs + true_only_inputs + false_only_inputs
true_input_to_param = dict(zip(true_inputs, true_graph.inputs))
false_input_to_param = dict(zip(false_inputs, false_graph.inputs))
true_graph.inputs = (
[true_input_to_param[t] for t in shared_inputs] +
[true_input_to_param[t] for t in true_only_inputs] +
_create_dummy_inputs(true_graph, false_only_inputs))
false_graph.inputs = (
[false_input_to_param[t] for t in shared_inputs] +
_create_dummy_inputs(false_graph, true_only_inputs) +
[false_input_to_param[t] for t in false_only_inputs])
# Rewrite the FuncGraphs' state to reflect the new inputs.
true_graph.captures = collections.OrderedDict(zip(new_inputs,
true_graph.inputs))
false_graph.captures = collections.OrderedDict(zip(new_inputs,
false_graph.inputs))
return new_inputs
def _make_output_composite_tensors_match(true_graph, false_graph):
"""Rewrites {true,false}_graph's outputs to use the same _TensorLike classes.
Currently the only transformation implemented is turning a Tensor into an
equivalent IndexedSlices if the other branch returns an IndexedSlices.
Updates {true,false}_graph.{outputs,structured_outputs}.
Args:
true_graph: FuncGraph
false_graph: FuncGraph
Raises:
TypeError: if a pair of outputs cannot be rewritten.
"""
# Note: since this is only used for gradient graphs, we do not expect the
# outputs to be structured (e.g. nested lists), and thus do not need to use
# nest.flatten, etc.
true_outputs = list(true_graph.structured_outputs)
false_outputs = list(false_graph.structured_outputs)
assert len(true_outputs) == len(false_outputs)
for idx, (true_out, false_out) in enumerate(zip(true_outputs, false_outputs)):
if type(true_out) == type(false_out): # pylint: disable=unidiomatic-typecheck
continue
if (isinstance(true_out, ops.IndexedSlices) and
isinstance(false_out, ops.Tensor)):
with false_graph.as_default():
false_outputs[idx] = math_ops._as_indexed_slices(false_out)
elif (isinstance(true_out, ops.Tensor) and
isinstance(false_out, ops.IndexedSlices)):
with true_graph.as_default():
true_outputs[idx] = math_ops._as_indexed_slices(true_out)
else:
raise TypeError(
"Cannot reconcile tf.cond %i-th outputs:\n"
" true_fn returned: %s\n"
" false_fn returned: %s" % (idx, true_out, false_out))
true_graph.structured_outputs = true_outputs
true_graph.outputs = func_graph_module.flatten(true_outputs)
false_graph.structured_outputs = false_outputs
false_graph.outputs = func_graph_module.flatten(false_outputs)
def _wrap_intermediates(func_graph, intermediates):
with func_graph.as_default():
return [gen_dataset_ops.optional_from_value([t]) for t in intermediates]
def _create_dummy_inputs(func_graph, template_tensors):
"""Creates tensors in func_graph to represent template_tensors.
Args:
func_graph: FuncGraph.
template_tensors: a list of tensors in the outer graph.
Returns:
A list of tensors in func_graph.
"""
with func_graph.as_default():
return [array_ops.placeholder(t.dtype, shape=t.shape)
for t in template_tensors]
def _create_none_optionals(func_graph, template_tensors):
"""Creates none optionals in func_graph to represent template_tensors.
Args:
func_graph: FuncGraph.
template_tensors: a list of tensors in func_graph.
Returns:
A list of tensors in func_graph.
"""
with func_graph.as_default():
return [gen_dataset_ops.optional_none() for _ in template_tensors]
def _create_fakeparams(func_graph, template_tensors):
"""Create FakeParams for the XLA case."""
with func_graph.as_default():
return [gen_functional_ops.fake_param(dtype=t.dtype, shape=t.shape)
for t in template_tensors]
def _check_same_outputs(true_graph, false_graph):
"""Raises an error if true_graph and false_graph have different outputs."""
def error(error_detail):
raise TypeError(
"true_fn and false_fn arguments to tf.cond must have the same number, "
"type, and overall structure of return values.\n"
"\n"
"true_fn output: %s\n"
"false_fn output: %s\n"
"\n"
"Error details:\n"
"%s" % (true_graph.structured_outputs, false_graph.structured_outputs,
error_detail))
try:
nest.assert_same_structure(true_graph.structured_outputs,
false_graph.structured_outputs,
expand_composites=True)
except (ValueError, TypeError) as e:
error(str(e))
assert len(true_graph.outputs) == len(false_graph.outputs)
for true_out, false_out in zip(true_graph.outputs, false_graph.outputs):
if true_out.dtype != false_out.dtype:
error("%s and %s have different types" % (true_out, false_out))
def _get_output_shapes(true_graph_outputs, false_graph_outputs):
output_shapes = [
t_out.shape.most_specific_compatible_shape(f_out.shape)
for t_out, f_out in zip(true_graph_outputs, false_graph_outputs)
]
return output_shapes
class _CondGradFuncGraph(util.CondBranchFuncGraph):
"""FuncGraph for the gradient function of the branch of an If op.
Handles wrapping and unwrapping intermediate values that are captured by the
gradient computation in optionals.
Attributes:
if_op_needs_rewrite: True if any intermediates were captured, meaning the
      forward If op needs to be rewritten to output the wrapped intermediates.
"""
def __init__(self, name, forward_graph):
super(_CondGradFuncGraph, self).__init__(
name, collections=ops.get_default_graph()._collections) # pylint: disable=protected-access
self.if_op_needs_rewrite = False
self._forward_graph = forward_graph
# Maps from forward intermediate tensor -> the unwrapped captured
# intermediate.
self._indirect_captures = {}
# Maps unwrapped intermediate -> optional-wrapped intermediate in the
# forward graph.
self._wrapped_intermediates = collections.OrderedDict()
# Raw intermediates captured from the forward graph. Populated iff we're in
# an XLA context.
self._xla_intermediates = []
@property
def wrapped_intermediates(self):
"""The optional-wrapped intermediates captured from the forward graph."""
return list(self._wrapped_intermediates.values())
@property
def xla_intermediates(self):
"""Raw intermediates captured from the forward graph if XLA is enabled."""
return self._xla_intermediates
def _capture_helper(self, tensor, name):
if (tensor.graph is not self._forward_graph or
tensor in self._forward_graph.inputs or
tensor in self._forward_graph.outputs):
return super(_CondGradFuncGraph, self)._capture_helper(tensor, name)
if control_flow_util.InXlaContext(ops.get_default_graph()):
# XLA does not yet support optionals, so capture intermediates directly.
# TODO(skyewm,jpienaar): can XLA support optionals?
if tensor not in self.captures:
self.xla_intermediates.append(tensor)
self.if_op_needs_rewrite = True
return super(_CondGradFuncGraph, self)._capture_helper(tensor, name)
captured_tensor = self._indirect_captures.get(tensor)
if captured_tensor is not None:
return captured_tensor
# 'tensor' is an uncaptured intermediate in the forward graph. We wrap it in
# an optional in the forward graph and capture the optional normally. We
# then unwrap the captured optional value in the gradient graph to get the
# raw intermediate value.
if tensor not in self._wrapped_intermediates:
# If the gradient has already been computed for this If op, 'tensor' may
# already be wrapped.
for consumer in tensor.consumers():
if (consumer.type == "OptionalFromValue"
and consumer.outputs[0] in self._forward_graph.outputs):
optional = consumer.outputs[0]
break
else:
# 'tensor' hasn't been wrapped, do it now.
with self._forward_graph.as_default():
optional = gen_dataset_ops.optional_from_value([tensor])
self.if_op_needs_rewrite = True
self._wrapped_intermediates[tensor] = optional
optional = self._wrapped_intermediates[tensor]
captured_optional = super(_CondGradFuncGraph, self)._capture_helper(
optional, name)
captured_tensor = gen_dataset_ops.optional_get_value(
captured_optional, [tensor.dtype], [tensor.shape])[0]
self._indirect_captures[tensor] = captured_tensor
return captured_tensor
| 40.338235 | 110 | 0.728728 |
b3efc3e3bbe4b6748d696ffb9359c8612d436c8a | 2,997 | py | Python | _scrapy/amazon/amazon/spiders/products_spider.py | dbo1001/TFG-Amazon-Scraper | 4b310acda121b7226e4efdd7166bb08a86de0f64 | [
"Apache-2.0"
] | 3 | 2018-10-16T10:39:51.000Z | 2020-11-18T14:15:56.000Z | _scrapy/amazon/amazon/spiders/products_spider.py | dbo1001/TFG-Amazon-Scraper | 4b310acda121b7226e4efdd7166bb08a86de0f64 | [
"Apache-2.0"
] | 43 | 2018-11-05T18:48:29.000Z | 2019-07-09T13:17:01.000Z | _scrapy/amazon/amazon/spiders/products_spider.py | dbo1001/TFG-Amazon-Scraper | 4b310acda121b7226e4efdd7166bb08a86de0f64 | [
"Apache-2.0"
] | 2 | 2019-07-14T17:04:13.000Z | 2021-06-06T07:13:51.000Z | # -*- coding: utf-8 -*-
import scrapy
import json
import re
import js2xml
from urllib.parse import urljoin
from amazon.items import AmazonItem
class ProductsSpider(scrapy.Spider):
name = 'products_spider'
allowed_domains = ['amazon.com']
list_urls = []
try:
with open('./urls_output.json') as json_data:
urls = json.load(json_data)
for i in range(len(urls)):
list_urls = list_urls + urls[i]['page_urls']
start_urls = list_urls
except:
        print('Error with the input file.')
    # Function that extracts the minimum price from a price range using a regular expression
def get_min_price(self, price_range):
min = re.findall(r'\$(.*?)\ ', price_range)
        if len(min) == 0:
return re.findall(r'(?<=\$).+$', price_range)
return min
    # Function that extracts the maximum price from a price range using a regular expression
def get_max_price(self, price_range):
max = re.findall(r'(?<= \$).+$', price_range)
        if len(max) == 0:
return re.findall(r'(?<=\$).+$', price_range)
return max
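    # Illustrative note (not part of the original spider): for a price range such
    # as "$12.99 - $19.99" the helpers above are expected to return ["12.99"] and
    # ["19.99"] respectively, while a single price like "$15.00" makes both fall
    # back to ["15.00"].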
def parse(self, response):
item = AmazonItem()
# ASIN
item['asin'] = re.search(r"/\w{10}/", response.url).group(0).strip('/')
        # Sex
        item['sex'] = 'Female' if response.url[-1] == 'F' else 'Male'
        # Price range
try:
pricerange = response.xpath('//*[@id="priceblock_ourprice"]/text()').extract()[0]
item['min_price'] = self.get_min_price(pricerange)[0]
item['max_price'] = self.get_max_price(pricerange)[0]
except IndexError:
item['min_price'] = 'N/A'
item['max_price'] = 'N/A'
        # Rating
try:
item['rating'] = response.xpath('//*[@id="acrPopover"]/span[1]/a/i[1]/span/text()').extract()[0][:3]
except IndexError:
item['rating'] = 'N/A'
        # Number of reviews
try:
item['reviews'] = int(response.xpath('//*[@id="acrCustomerReviewText"]/text()').extract()[0].split(' ')[0].replace(',',''))
except IndexError:
item['reviews'] = 'N/A'
        # Brand
try:
item['brand'] = response.xpath('//*[@id="bylineInfo"]/text()').extract()[0] # Nombre de la marca sin imagen
except IndexError:
try:
item['brand'] = response.xpath('//*[@id="bylineInfo_feature_div"]/div/a/@href').extract()[0].split('/')[1] # Nombre de la marca con imagen
except:
item['brand'] = 'N/A'
        # Description
try:
item['description'] = response.xpath('//*[@id="productDescription"]/p/text()').extract()[0]
except IndexError:
print('There is no description.')
item['description'] = 'N/A'
        # Images
        # The image links are extracted with the help of a library called js2xml.
        # This library makes it easier to extract elements embedded in JavaScript code using XPath.
js = response.xpath("//script[contains(text(), 'register(\"ImageBlockATF\"')]/text()").extract_first()
xml = js2xml.parse(js)
selector = scrapy.Selector(root=xml)
item['image_urls'] = selector.xpath('//property[@name="colorImages"]//property[@name="hiRes"]/string/text()').extract()
# OUTPUT
        yield item
 | 31.547368 | 142 | 0.65699 |
213a09cceadc55068677412896f9cfc4fdd0ad2f | 1,451 | py | Python | setup.py | shakraw/pyheif | f80fcc5e534350fdfee526eff15d4cc3a27ade4a | [
"Apache-2.0"
] | null | null | null | setup.py | shakraw/pyheif | f80fcc5e534350fdfee526eff15d4cc3a27ade4a | [
"Apache-2.0"
] | null | null | null | setup.py | shakraw/pyheif | f80fcc5e534350fdfee526eff15d4cc3a27ade4a | [
"Apache-2.0"
] | null | null | null | import os
import codecs
from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
with codecs.open(os.path.join(here, rel_path), "r") as fp:
return fp.read()
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith("__version__"):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
version = get_version("pyheif/__init__.py")
setup(
name="pyheif",
version=version,
packages=["pyheif"],
package_data={"pyheif": ["data/*"]},
install_requires=["cffi>=1.0.0"],
setup_requires=["cffi>=1.0.0"],
cffi_modules=["libheif/libheif_build.py:ffibuilder"],
author="David Poirier",
author_email="david-poirier-csn@users.noreply.github.com",
description="Python 3.6+ interface to libheif library",
long_description=long_description,
long_description_content_type="text/markdown",
python_requires=">= 3.6",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
],
keywords="heif heic",
url="https://github.com/david-poirier-csn/pyheif",
)
| 29.612245 | 62 | 0.64714 |
38cc60380200a4c5b4c1ba71be756f4f655919e7 | 473 | py | Python | tests/util/test_reflect.py | wenhaizhu/FBPCS | cf103135acf44e879dab7c9819a5a8f0e22ef702 | [
"MIT"
] | null | null | null | tests/util/test_reflect.py | wenhaizhu/FBPCS | cf103135acf44e879dab7c9819a5a8f0e22ef702 | [
"MIT"
] | null | null | null | tests/util/test_reflect.py | wenhaizhu/FBPCS | cf103135acf44e879dab7c9819a5a8f0e22ef702 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import unittest
from fbpcs.util.reflect import get_class
from fbpcs.util.s3path import S3Path
TEST_CLASS_PATH = "fbpcs.util.s3path.S3Path"
class TestReflect(unittest.TestCase):
def test_get_class(self):
self.assertEqual(S3Path, get_class(TEST_CLASS_PATH))
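    def test_get_class_returns_type(self):
        # Hedged illustrative addition (not part of the original suite):
        # get_class is expected to resolve the dotted path to the class object
        # itself, so the result should be usable as a type.
        cls = get_class(TEST_CLASS_PATH)
        self.assertIs(cls, S3Path)
        self.assertTrue(isinstance(cls, type))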
| 26.277778 | 65 | 0.769556 |
435b76e51b0bb09f6f81181aa78ebbcd60a88b6c | 40,671 | py | Python | pydicomutils/IODs/EnhancedSRTID1500.py | sectra-medical/pydicomutils | aa2bdfa43c7aac11907e72a1cd172996901b9096 | [
"MIT"
] | 8 | 2020-03-16T17:09:23.000Z | 2022-02-02T15:31:40.000Z | pydicomutils/IODs/EnhancedSRTID1500.py | sectra-medical/pydicomutils | aa2bdfa43c7aac11907e72a1cd172996901b9096 | [
"MIT"
] | null | null | null | pydicomutils/IODs/EnhancedSRTID1500.py | sectra-medical/pydicomutils | aa2bdfa43c7aac11907e72a1cd172996901b9096 | [
"MIT"
] | 1 | 2021-06-21T20:08:19.000Z | 2021-06-21T20:08:19.000Z | import os
import random
from datetime import datetime
from pydicom import Dataset, read_file, uid
from .IOD import IOD, IODTypes, SOP_CLASS_UID_MODALITY_DICT
from .modules.specific_sr_modules import SRDocumentSeriesModule, SRDocumentGeneralModule
from .modules.specific_sr_modules import SRDocumentContentModule
from .sequences.Sequences import generate_sequence, generate_CRPES_sequence, update_and_insert_additional_DICOM_attributes_in_ds
from .sequences.Sequences import generate_reference_sop_sequence_json
class EnhancedSRTID1500(IOD):
"""Implementation of the Basic SR Text IOD
"""
def __init__(self):
super().__init__(IODTypes.EnhancedSR)
def create_empty_iod(self):
"""Creates and empty IOD with the required DICOM tags but no values
Parameters
----------
"""
super().create_empty_iod()
self.copy_required_dicom_attributes(Dataset(), include_optional=True)
def copy_required_dicom_attributes(self, dataset_to_copy_from,
include_iod_specific=True,
include_optional=False):
"""Copies required DICOM attributes from provided dataset
Parameters
----------
dataset_to_copy_from : Dataset to copy DICOM attributes from
include_iod_specific : Include IOD specific DICOM attributes in copy (True)
include_optional : Include optional DICOM attributes in copy (False)
"""
super().copy_required_dicom_attributes(dataset_to_copy_from,
include_optional)
if include_iod_specific:
sr_specific_modules = [SRDocumentSeriesModule(),
SRDocumentGeneralModule(),
SRDocumentContentModule()]
for module in sr_specific_modules:
module.copy_required_dicom_attributes(dataset_to_copy_from,
self.dataset)
if include_optional:
module.copy_optional_dicom_attributes(dataset_to_copy_from,
self.dataset)
def initiate(self, referenced_dcm_files=None):
"""Initiate the IOD by setting some dummy values for required attributes
Keyword Arguments:
referenced_dcm_files {[dcm_file1, dcm_file2, ...]} -- List of file paths (default: {None})
"""
super().initiate()
if referenced_dcm_files:
# some attributes to inherit from referenced dcm files
ds = read_file(referenced_dcm_files[0])
self.dataset.PatientID = ds.PatientID
self.dataset.PatientName = ds.PatientName
self.dataset.PatientSex = ds.PatientSex
self.dataset.PatientBirthDate = ds.PatientBirthDate if "PatientBirthDate" in ds else ""
self.dataset.StudyInstanceUID = ds.StudyInstanceUID
self.dataset.StudyID = ds.StudyID
self.dataset.AccessionNumber = ds.AccessionNumber
if "StudyDescription" in ds:
self.dataset.StudyDescription = ds.StudyDescription
self.dataset.StudyDate = ds.StudyDate
self.dataset.StudyTime = ds.StudyTime
# sr document series module
self.dataset.Modality = SOP_CLASS_UID_MODALITY_DICT[self.iod_type]
self.dataset.SeriesInstanceUID = uid.generate_uid()
# sr document general module
self.dataset.InstanceNumber = str(1)
self.dataset.CompletionFlag = "COMPLETE"
self.dataset.VerificationFlag = "UNVERIFIED"
self.dataset.ContentDate = datetime.now().strftime("%Y%m%d")
self.dataset.ContentTime = datetime.now().strftime("%H%M%S")
if referenced_dcm_files:
self.dataset.CurrentRequestedProcedureEvidenceSequence = generate_CRPES_sequence(
referenced_dcm_files)
self.dataset.PreliminaryFlag = "FINAL"
# sr document content module
self.dataset.ValueType = "CONTAINER"
self.dataset.ConceptNameCodeSequence = generate_sequence("ConceptNameCodeSequence", [{
"CodeValue": "126000",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Imaging Measurement Report"}])
self.dataset.ContentTemplateSequence = generate_sequence("ContentTemplateSequence", [{
"MappingResource": "DCMR",
"MappingResourceUID": "1.2.840.10008.8.1.1",
"TemplateIdentifier": "1500"}])
self.dataset.ContinuityOfContent = "SEPARATE"
self.dataset.ContentSequence = generate_sequence("ContentSequence", [
{
"RelationshipType": "HAS CONCEPT MOD",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": "121049",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Language of Content Item and Descendants"}],
"ConceptCodeSequence": [{
"CodeValue": "eng",
"CodingSchemeDesignator": "RFC5646",
"CodeMeaning": "English"}],
"ContentSequence": [
{
"RelationshipType": "HAS CONCEPT MOD",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": "121046",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Country of Language"}],
"ConceptCodeSequence": [{
"CodeValue": "US",
"CodingSchemeDesignator": "ISO3166_1",
"CodeMeaning": "United States"}],
}
]
},
{
"RelationshipType": "HAS CONCEPT MOD",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": "121058",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Procedure Reported"}],
"ConceptCodeSequence": [{
"CodeValue": "363679005",
"CodingSchemeDesignator": "SCT",
"CodeMeaning": "Imaging Procedure"}]
},
{
"RelationshipType": "CONTAINS",
"ValueType": "CONTAINER",
"ConceptNameCodeSequence": [{
"CodeValue": "111028",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Image Library"}],
"ContinuityOfContent": "SEPARATE",
"ContentSequence": [{
"RelationshipType": "CONTAINS",
"ValueType": "CONTAINER",
"ConceptNameCodeSequence": [{
"CodeValue": "126200",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Image Library Group"}],
"ContinuityOfContent": "SEPARATE",
"ContentSequence": [generate_reference_sop_sequence_json(dcm_file) for dcm_file in referenced_dcm_files]
}]
},
{
"RelationshipType": "CONTAINS",
"ValueType": "CONTAINER",
"ConceptNameCodeSequence": [{
"CodeValue": "126010",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Imaging Measurements"}],
"ContinuityOfContent": "SEPARATE",
"ContentSequence": []
}
])
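    # Hedged usage sketch (not part of the original class): a typical flow,
    # assuming `dcm_files` is a list of paths to the referenced image objects,
    # might look like
    #   report = EnhancedSRTID1500()
    #   report.create_empty_iod()
    #   report.initiate(referenced_dcm_files=dcm_files)
    #   group = report.initiate_measurement_group()
    # after which the populated measurement group is appended to the
    # "Imaging Measurements" container in report.dataset.ContentSequence.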
def initiate_measurement_group(self):
"""Initiate a measurement group
Returns:
[type] -- [description]
"""
ds = Dataset()
ds.RelationshipType = "CONTAINS"
ds.ValueType = "CONTAINER"
ds.ConceptNameCodeSequence = generate_sequence("ConceptNameCodeSequence", [{
"CodeValue": "125007",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Measurement Group"}])
ds.ContinuityOfContent = "SEPARATE",
# ds.ContentTemplateSequence = generate_sequence("ContentTemplateSequence", [{
# "MappingResource": "DCMR",
# "MappingResourceUID": "1.2.840.10008.8.1.1",
# "TemplateIdentifier": "1411"}])
return ds
def initiate_content_sequence(self, tracking_id, tracking_uid,
finding, finding_site):
"""Initiate a content sequence
Arguments:
tracking_id {[type]} -- [description]
tracking_uid {[type]} -- [description]
finding {[type]} -- [description]
finding_site {[type]} -- [description]
"""
return generate_sequence("ContentSequence", [
{
"RelationshipType": "HAS OBS CONTEXT",
"ValueType": "TEXT",
"ConceptNameCodeSequence": [{
"CodeValue": "112039",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Tracking Identifier"}],
"TextValue": tracking_id
},
{
"RelationshipType": "HAS OBS CONTEXT",
"ValueType": "UIDREF",
"ConceptNameCodeSequence": [{
"CodeValue": "112040",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Tracking Unique Identifier"}],
"UID": tracking_uid
},
{
"RelationshipType": "CONTAINS",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": "121071",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Finding"}],
"ConceptCodeSequence": [{
"CodeValue": finding[0],
"CodingSchemeDesignator": finding[1],
"CodeMeaning": finding[2]}]
},
{
"RelationshipType": "HAS CONCEPT MOD",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": "363698007",
"CodingSchemeDesignator": "SCT",
"CodeMeaning": "Finding Site"}],
"ConceptCodeSequence": [{
"CodeValue": finding_site[0],
"CodingSchemeDesignator": finding_site[1],
"CodeMeaning": finding_site[2]}]
}])
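    # Hypothetical example of the coded triples expected by
    # initiate_content_sequence (the specific codes are illustrative only,
    # not taken from the original source):
    #   finding = ("108369006", "SCT", "Neoplasm")
    #   finding_site = ("39607008", "SCT", "Lung structure")
    # Each triple is unpacked into CodeValue, CodingSchemeDesignator and
    # CodeMeaning of the corresponding ConceptCodeSequence item.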
def add_qualitative_evaluations(self, ds, qualitative_evaluations):
"""Add a qualitative evaluation
Arguments:
ds {[type]} -- [description]
qualitative_evaluations {[type]} -- [description]
"""
for item in qualitative_evaluations:
if item is None:
continue
if "text_value" in item:
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "TEXT",
"ConceptNameCodeSequence": [{
"CodeValue": "C00034375",
"CodingSchemeDesignator": "UMLS",
"CodeMeaning": "Qualitative Evaluations"
}],
"TextValue": item["text_value"]
}))
else:
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": "C00034375",
"CodingSchemeDesignator": "UMLS",
"CodeMeaning": "Qualitative Evaluations"
}],
"ConceptCodeSequence": [{
"CodeValue": item["code_value"][0],
"CodingSchemeDesignator": item["code_value"][1],
"CodeMeaning": item["code_value"][2]}]
}))
return ds
def add_coded_values(self, ds, coded_values):
"""Add coded values
Arguments:
ds {[type]} -- [description]
qualitative_evaluations {[type]} -- [description]
"""
for item in coded_values:
if item is None:
continue
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "HAS CONCEPT MOD",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": item["ConceptNameCode"][0],
"CodingSchemeDesignator": item["ConceptNameCode"][1],
"CodeMeaning": item["ConceptNameCode"][2]}],
"ConceptCodeSequence": [{
"CodeValue": item["ConceptCode"][0],
"CodingSchemeDesignator": item["ConceptCode"][1],
"CodeMeaning": item["ConceptCode"][2]}]
}))
return ds
def add_text_values(self, ds, text_values):
"""Add text values
Arguments:
ds {[type]} -- [description]
qualitative_evaluations {[type]} -- [description]
"""
for item in text_values:
if item is None:
continue
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "HAS CONCEPT MOD",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": item["ConceptNameCode"][0],
"CodingSchemeDesignator": item["ConceptNameCode"][1],
"CodeMeaning": item["ConceptNameCode"][2]}],
"TextValue": item["TextValue"]
}))
return ds
def add_landmark(self, dcm_file, graphic_data,
finding, finding_site,
tracking_id=None, tracking_uid=None,
qualitative_evaluations=None, coded_values=None, text_values=None):
"""Add landmark value
Arguments:
dcm_file {[type]} -- [description]
graphic_data {[type]} -- [description]
finding {[type]} -- [description]
finding_site {[type]} -- [description]
Keyword Arguments:
tracking_id {[type]} -- [description] (default: {None})
tracking_uid {[type]} -- [description] (default: {None})
"""
if not tracking_id:
tracking_id = ''.join(random.choice('0123456789ABCDEF')
for i in range(16))
if not tracking_uid:
tracking_uid = uid.generate_uid()
ds_ref = read_file(dcm_file)
ds = self.initiate_measurement_group()
ds.ContentSequence = self.initiate_content_sequence(tracking_id, tracking_uid,
finding, finding_site)
if coded_values is not None:
ds = self.add_coded_values(ds, coded_values)
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "CODE",
"ConceptNameCodeSequence": [{
"CodeValue": "758637006",
"CodingSchemeDesignator": "SCT",
"CodeMeaning": "Anatomical locations"}],
"ConceptCodeSequence": [{
"CodeValue": "26216008",
"CodingSchemeDesignator": "SCT",
"CodeMeaning": "Center"}],
"ContentSequence": [{
"RelationshipType": "INFERRED FROM",
"ValueType": "SCOORD",
"ContentSequence": [{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}],
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data,
"GraphicType": "POINT",
}]
}))
if text_values is not None:
ds = self.add_text_values(ds, text_values)
if qualitative_evaluations is not None:
ds = self.add_qualitative_evaluations(ds, qualitative_evaluations)
self.dataset.ContentSequence[3].ContentSequence.append(ds)
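    # Hypothetical usage sketch (assuming `report` is an instance of this
    # report class; the file name and codes are illustrative only, not taken
    # from the original source):
    #   report.add_landmark(
    #       "slice_042.dcm", [128.0, 96.0],
    #       finding=("26216008", "SCT", "Center"),
    #       finding_site=("39607008", "SCT", "Lung structure"))
    # graphic_data gives the POINT coordinates on the referenced image.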
def add_unmeasurable_measurement(self, dcm_file, graphic_data,
finding, finding_site, reason,
tracking_id=None, tracking_uid=None):
"""Add an unmeasurable measurement
Arguments:
dcm_file {[type]} -- [description]
graphic_data {[type]} -- [description]
finding {[type]} -- [description]
finding_site {[type]} -- [description]
Keyword Arguments:
tracking_id {[type]} -- [description] (default: {None})
tracking_uid {[type]} -- [description] (default: {None})
"""
if not tracking_id:
tracking_id = ''.join(random.choice('0123456789ABCDEF')
for i in range(16))
if not tracking_uid:
tracking_uid = uid.generate_uid()
ds_ref = read_file(dcm_file)
ds = self.initiate_measurement_group()
ds.ContentSequence = self.initiate_content_sequence(tracking_id, tracking_uid,
finding, finding_site)
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "TEXT",
"ConceptNameCodeSequence": [{
"CodeValue": "C00034375",
"CodingSchemeDesignator": "UMLS",
"CodeMeaning": "Qualitative Evaluations"
}],
"TextValue": reason,
"ContentSequence": [{
"RelationshipType": "INFERRED FROM",
"ValueType": "SCOORD",
"ContentSequence": [{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}],
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data,
"GraphicType": "CIRCLE",
}]
}))
self.dataset.ContentSequence[3].ContentSequence.append(ds)
def add_linear_measurement_single_axis(self,
dcm_ref, linear_measurement, graphic_data,
measurement_type, finding, finding_site,
tracking_id=None, tracking_uid=None,
qualitative_evaluations=None, coded_values=None, text_values=None):
"""Add linear measurement
Arguments:
dcm_file {[type]} -- [description]
linear_measurement {[type]} -- [description]
graphic_data {[type]} -- [description]
measurement_type {[type]} -- [description]
finding {[type]} -- [description]
finding_site {[type]} -- [description]
"""
if not tracking_id:
tracking_id = ''.join(random.choice('0123456789ABCDEF')
for i in range(16))
if not tracking_uid:
tracking_uid = uid.generate_uid()
referenced_sop_sequence = None
if isinstance(dcm_ref, str):
ds_ref = read_file(dcm_ref)
referenced_sop_sequence = [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}]
else:
referenced_sop_sequence = [{
"ReferencedSOPClassUID": dcm_ref.ReferencedSOPClassUID,
"ReferencedSOPInstanceUID": dcm_ref.ReferencedSOPInstanceUID
}]
if "ReferencedFrameNumber" in dcm_ref:
referenced_sop_sequence[0]["ReferencedFrameNumber"] = dcm_ref.ReferencedFrameNumber
ds = self.initiate_measurement_group()
ds.ContentSequence = self.initiate_content_sequence(tracking_id, tracking_uid,
finding, finding_site)
if coded_values is not None:
ds = self.add_coded_values(ds, coded_values)
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "NUM",
"ConceptNameCodeSequence": [{
"CodeValue": measurement_type[0],
"CodingSchemeDesignator": measurement_type[1],
"CodeMeaning": measurement_type[2]}],
"MeasuredValueSequence": [{
"MeasurementUnitsCodeSequence": [{
"CodeValue": "mm",
"CodingSchemeDesignator": "UCUM",
"CodeMeaning": "millimeter"
}],
"NumericValue": linear_measurement
}],
"ContentSequence": [{
"RelationshipType": "INFERRED FROM",
"ValueType": "SCOORD",
"ContentSequence": [{
"ReferencedSOPSequence": referenced_sop_sequence,
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data,
"GraphicType": "POLYLINE",
}]
}))
if text_values is not None:
ds = self.add_text_values(ds, text_values)
if qualitative_evaluations is not None:
ds = self.add_qualitative_evaluations(ds, qualitative_evaluations)
self.dataset.ContentSequence[3].ContentSequence.append(ds)
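    # Hypothetical usage sketch (assuming `report` is an instance of this
    # report class; the file name and codes are illustrative only, not taken
    # from the original source):
    #   report.add_linear_measurement_single_axis(
    #       "slice_042.dcm", 12.5, [10.0, 20.0, 40.0, 20.0],
    #       measurement_type=("410668003", "SCT", "Length"),
    #       finding=("108369006", "SCT", "Neoplasm"),
    #       finding_site=("39607008", "SCT", "Lung structure"))
    # The numeric value is stored in millimeters (UCUM "mm") and the graphic
    # data defines the measured POLYLINE; dcm_ref may also be an existing
    # image reference item instead of a file path.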
def add_linear_measurement_double_axis(self,
dcm_file,
linear_measurement_axis1, graphic_data_axis1, measurement_type_axis1,
linear_measurement_axis2, graphic_data_axis2, measurement_type_axis2,
finding, finding_site, tracking_id=None, tracking_uid=None,
qualitative_evaluations=None, coded_values=None, text_values=None):
"""Add linear measurement with two axis
Arguments:
dcm_file {[type]} -- [description]
linear_measurement_axis1 {[type]} -- [description]
graphic_data_axis1 {[type]} -- [description]
measurement_type_axis1 {[type]} -- [description]
linear_measurement_axis2 {[type]} -- [description]
graphic_data_axis2 {[type]} -- [description]
measurement_type_axis2 {[type]} -- [description]
finding {[type]} -- [description]
finding_site {[type]} -- [description]
tracking_id {[type]} -- [description]
tracking_uid {[type]} -- [description]
"""
if not tracking_id:
tracking_id = ''.join(random.choice('0123456789ABCDEF')
for i in range(16))
if not tracking_uid:
tracking_uid = uid.generate_uid()
ds_ref = read_file(dcm_file)
ds = self.initiate_measurement_group()
ds.ContentSequence = self.initiate_content_sequence(tracking_id, tracking_uid,
finding, finding_site)
if coded_values is not None:
ds = self.add_coded_values(ds, coded_values)
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "NUM",
"ConceptNameCodeSequence": [{
"CodeValue": measurement_type_axis1[0],
"CodingSchemeDesignator": measurement_type_axis1[1],
"CodeMeaning": measurement_type_axis1[2]}],
"MeasuredValueSequence": [{
"MeasurementUnitsCodeSequence": [{
"CodeValue": "mm",
"CodingSchemeDesignator": "UCUM",
"CodeMeaning": "millimeter"
}],
"NumericValue": linear_measurement_axis1
}],
"ContentSequence": [{
"RelationshipType": "INFERRED FROM",
"ValueType": "SCOORD",
"ContentSequence": [{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}],
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data_axis1,
"GraphicType": "POLYLINE",
}]
}))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "NUM",
"ConceptNameCodeSequence": [{
"CodeValue": measurement_type_axis2[0],
"CodingSchemeDesignator": measurement_type_axis2[1],
"CodeMeaning": measurement_type_axis2[2]}],
"MeasuredValueSequence": [{
"MeasurementUnitsCodeSequence": [{
"CodeValue": "mm",
"CodingSchemeDesignator": "UCUM",
"CodeMeaning": "millimeter"
}],
"NumericValue": linear_measurement_axis2
}],
"ContentSequence": [{
"RelationshipType": "INFERRED FROM",
"ValueType": "SCOORD",
"ContentSequence": [{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}],
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data_axis2,
"GraphicType": "POLYLINE",
}]
}))
if text_values is not None:
ds = self.add_text_values(ds, text_values)
if qualitative_evaluations is not None:
ds = self.add_qualitative_evaluations(ds, qualitative_evaluations)
self.dataset.ContentSequence[3].ContentSequence.append(ds)
def add_volume_measurement(self,
seg_dcm_file, dcm_file, volume_measurement, segment_number, graphic_data,
finding, finding_site, tracking_id=None, tracking_uid=None,
qualitative_evaluations=None, coded_values=None, text_values=None):
"""Add volume measurement
Arguments:
seg_dcm_file {[type]} -- [description]
dcm_file {[type]} -- [description]
volume_measurement {[type]} -- [description]
segment_number {[type]} -- [description]
graphic_data {[type]} -- [description]
finding {[type]} -- [description]
finding_site {[type]} -- [description]
Keyword Arguments:
tracking_id {[type]} -- [description] (default: {None})
tracking_uid {[type]} -- [description] (default: {None})
qualitative_evaluations {[type]} -- [description] (default: {None})
coded_values {[type]} -- [description] (default: {None})
text_values {[type]} -- [description] (default: {None})
"""
if not tracking_id:
tracking_id = ''.join(random.choice('0123456789ABCDEF')
for i in range(16))
if not tracking_uid:
tracking_uid = uid.generate_uid()
ds_ref_seg = read_file(seg_dcm_file)
ds_ref = read_file(dcm_file)
ds = self.initiate_measurement_group()
ds.ContentSequence = self.initiate_content_sequence(tracking_id, tracking_uid,
finding, finding_site)
if coded_values is not None:
ds = self.add_coded_values(ds, coded_values)
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref_seg.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref_seg.SOPInstanceUID,
"ReferencedSegmentNumber": segment_number
}],
"RelationshipType": "CONTAINS",
"ValueType": "IMAGE",
"ConceptNameCodeSequence": [{
"CodeValue": "121191",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Referenced Segment"}],
}
))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "UIDREF",
"ConceptNameCodeSequence": [{
"CodeValue": "121232",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Source series for segmentation"}],
"UID": ds_ref_seg.ReferencedSeriesSequence[0].SeriesInstanceUID
}
))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "NUM",
"ConceptNameCodeSequence": [{
"CodeValue": "118565006",
"CodingSchemeDesignator": "SCT",
"CodeMeaning": "Volume"}],
"MeasuredValueSequence": [{
"MeasurementUnitsCodeSequence": [{
"CodeValue": "mm3",
"CodingSchemeDesignator": "UCUM",
"CodeMeaning": "cubic millimeter"
}],
"NumericValue": volume_measurement
}]
}))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "SCOORD",
"ConceptNameCodeSequence": [{
"CodeValue": "111010",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Center"}],
"ContentSequence": [{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}],
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data,
"GraphicType": "POINT",
}))
if text_values is not None:
ds = self.add_text_values(ds, text_values)
if qualitative_evaluations is not None:
ds = self.add_qualitative_evaluations(ds, qualitative_evaluations)
self.dataset.ContentSequence[3].ContentSequence.append(ds)
def add_volume_and_linear_measurement_single_axis(self,
seg_dcm_file, dcm_file, volume_measurement, segment_number, graphic_data_center,
linear_measurement, graphic_data_linear_measurement, measurement_type,
finding, finding_site, tracking_id=None, tracking_uid=None,
qualitative_evaluations=None, coded_values=None, text_values=None):
"""Add volume measurement with a single axis distance measurement
Arguments:
seg_dcm_file {[type]} -- [description]
dcm_file {[type]} -- [description]
volume_measurement {[type]} -- [description]
segment_number {[type]} -- [description]
graphic_data {[type]} -- [description]
finding {[type]} -- [description]
finding_site {[type]} -- [description]
Keyword Arguments:
tracking_id {[type]} -- [description] (default: {None})
tracking_uid {[type]} -- [description] (default: {None})
qualitative_evaluations {[type]} -- [description] (default: {None})
coded_values {[type]} -- [description] (default: {None})
text_values {[type]} -- [description] (default: {None})
"""
if not tracking_id:
tracking_id = ''.join(random.choice('0123456789ABCDEF')
for i in range(16))
if not tracking_uid:
tracking_uid = uid.generate_uid()
ds_ref_seg = read_file(seg_dcm_file)
ds_ref = read_file(dcm_file)
ds = self.initiate_measurement_group()
ds.ContentSequence = self.initiate_content_sequence(tracking_id, tracking_uid,
finding, finding_site)
if coded_values is not None:
ds = self.add_coded_values(ds, coded_values)
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref_seg.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref_seg.SOPInstanceUID,
"ReferencedSegmentNumber": segment_number
}],
"RelationshipType": "CONTAINS",
"ValueType": "IMAGE",
"ConceptNameCodeSequence": [{
"CodeValue": "121191",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Referenced Segment"}],
}
))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "UIDREF",
"ConceptNameCodeSequence": [{
"CodeValue": "121232",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Source series for segmentation"}],
"UID": ds_ref_seg.ReferencedSeriesSequence[0].SeriesInstanceUID
}
))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "NUM",
"ConceptNameCodeSequence": [{
"CodeValue": "118565006",
"CodingSchemeDesignator": "SCT",
"CodeMeaning": "Volume"}],
"MeasuredValueSequence": [{
"MeasurementUnitsCodeSequence": [{
"CodeValue": "mm3",
"CodingSchemeDesignator": "UCUM",
"CodeMeaning": "cubic millimeter"
}],
"NumericValue": volume_measurement
}]
}))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "SCOORD",
"ConceptNameCodeSequence": [{
"CodeValue": "111010",
"CodingSchemeDesignator": "DCM",
"CodeMeaning": "Center"}],
"ContentSequence": [{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}],
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data_center,
"GraphicType": "POINT",
}))
ds.ContentSequence.append(update_and_insert_additional_DICOM_attributes_in_ds(Dataset(),
{
"RelationshipType": "CONTAINS",
"ValueType": "NUM",
"ConceptNameCodeSequence": [{
"CodeValue": measurement_type[0],
"CodingSchemeDesignator": measurement_type[1],
"CodeMeaning": measurement_type[2]}],
"MeasuredValueSequence": [{
"MeasurementUnitsCodeSequence": [{
"CodeValue": "mm",
"CodingSchemeDesignator": "UCUM",
"CodeMeaning": "millimeter"
}],
"NumericValue": linear_measurement
}],
"ContentSequence": [{
"RelationshipType": "INFERRED FROM",
"ValueType": "SCOORD",
"ContentSequence": [{
"ReferencedSOPSequence": [{
"ReferencedSOPClassUID": ds_ref.SOPClassUID,
"ReferencedSOPInstanceUID": ds_ref.SOPInstanceUID
}],
"RelationshipType": "SELECTED FROM",
"ValueType": "IMAGE" # value type
}],
"GraphicData": graphic_data_linear_measurement,
"GraphicType": "POLYLINE",
}]
}))
if text_values is not None:
ds = self.add_text_values(ds, text_values)
if qualitative_evaluations is not None:
ds = self.add_qualitative_evaluations(ds, qualitative_evaluations)
self.dataset.ContentSequence[3].ContentSequence.append(ds)
| 47.848235 | 135 | 0.502028 |
99bc9bd86dbdad6655f83b90106d5ea51d207e12 | 3,344 | py | Python | examples/temp/GradientTest_2DHorizontal_time.py | zfang-slim/pysit | 8fca42b9749841abc302d1f8195a1437fad7ae4d | [
"BSD-3-Clause"
] | 64 | 2015-09-08T06:23:27.000Z | 2022-03-09T23:35:24.000Z | examples/temp/GradientTest_2DHorizontal_time.py | zfang-slim/pysit | 8fca42b9749841abc302d1f8195a1437fad7ae4d | [
"BSD-3-Clause"
] | 23 | 2015-10-08T01:14:24.000Z | 2021-07-15T11:37:05.000Z | examples/temp/GradientTest_2DHorizontal_time.py | zfang-slim/pysit | 8fca42b9749841abc302d1f8195a1437fad7ae4d | [
"BSD-3-Clause"
] | 48 | 2015-06-25T14:48:22.000Z | 2021-12-06T19:50:25.000Z | # Std import block
import time
import copy
import numpy as np
import matplotlib.pyplot as plt
from pysit import *
from pysit.gallery import horizontal_reflector
from GradientTest import GradientTest
if __name__ == '__main__':
# Setup
# Define Domain
pmlx = PML(0.1, 100)
pmlz = PML(0.1, 100)
x_config = (0.1, 1.0, pmlx, pmlx)
z_config = (0.1, 0.8, pmlz, pmlz)
d = RectangularDomain(x_config, z_config)
m = CartesianMesh(d, 91, 71)
# Generate true wave speed
C, C0, m, d = horizontal_reflector(m)
# Set up shots
zmin = d.z.lbound
zmax = d.z.rbound
zpos = zmin + (1./9.)*zmax
shots = equispaced_acquisition(m,
RickerWavelet(10.0),
sources=1,
source_depth=zpos,
source_kwargs={},
receivers='max',
receiver_depth=zpos,
receiver_kwargs={},
)
# Define and configure the wave solver
trange = (0.0, 3)
solver = ConstantDensityAcousticWave(m,
spatial_accuracy_order=2,
trange=trange,
kernel_implementation='cpp')
# Generate synthetic Seismic data
tt = time.time()
wavefields = []
base_model = solver.ModelParameters(m, {'C': C})
generate_seismic_data(shots, solver, base_model, wavefields=wavefields)
print('Data generation: {0}s'.format(time.time()-tt))
objective = TemporalLeastSquares(solver)
# Define the inversion algorithm
grad_test = GradientTest(objective)
grad_test.base_model = solver.ModelParameters(m, {'C': C0})
grad_test.length_ratio = np.power(5.0, range(-6, -0))
# Set up the perturbation direction
dC_vec = copy.deepcopy(grad_test.base_model)
m_size = m._shapes[(False, True)]
tmp = np.random.normal(0, 1, m_size)
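    # Note (added commentary): the Gaussian random direction drawn above is
    # immediately overwritten by the constant-ones perturbation on the next
    # line, so only the ones vector (with zeroed borders) is actually used.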
tmp = np.ones(m_size)
tmp[0:2, :] = 0.0
tmp[m_size[0]-2:m_size[0], :] = 0.0
tmp[:, 0:2] = 0.0
tmp[:, m_size[1]-2:m_size[1]] = 0.0
tmp = np.reshape(tmp, grad_test.base_model.data.shape)
dC_vec.data = tmp
norm_dC_vec = np.linalg.norm(dC_vec.data)
norm_base_model = np.linalg.norm(grad_test.base_model.data)
dC_vec.data = dC_vec.data * 0.1 * (norm_base_model / norm_dC_vec)
grad_test.model_perturbation = dC_vec
# Execute inversion algorithm
print('Gradient test ...')
tt = time.time()
result = grad_test(shots)
print('...run time: {0}s'.format(time.time()-tt))
print(grad_test.objective_value)
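    # Interpretation of the plots below (added commentary): for a correct
    # gradient, the zero-order difference |f(m + h*dm) - f(m)| should decay
    # like h, while the first-order difference
    # |f(m + h*dm) - f(m) - h*<grad f(m), dm>| should decay like h**2; the
    # reference curves h and h**2 are drawn alongside for comparison.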
plt.figure()
plt.loglog(grad_test.length_ratio, grad_test.zero_order_difference, 'b',
grad_test.length_ratio, grad_test.length_ratio, 'r')
plt.title('Zero order difference')
plt.gca().legend(('df_0', 'h'))
plt.figure()
plt.loglog(grad_test.length_ratio, grad_test.first_order_difference, 'b',
grad_test.length_ratio, np.power(grad_test.length_ratio, 1.0), 'y',
grad_test.length_ratio, np.power(grad_test.length_ratio, 2.0), 'r')
plt.title('First order difference')
plt.gca().legend(('df_1', 'h', 'h^2'))
plt.show()
| 30.678899 | 82 | 0.582237 |
48007a088c3c9ab32d910b78b873e30d5097df66 | 30,719 | py | Python | tests/tests_geomstats/test_hypersphere.py | lrozo/geomstats | accf4f2cc1de3bf9c6853d6618d7d18e44ce4874 | [
"MIT"
] | 1 | 2022-01-10T07:22:11.000Z | 2022-01-10T07:22:11.000Z | tests/tests_geomstats/test_hypersphere.py | lrozo/geomstats | accf4f2cc1de3bf9c6853d6618d7d18e44ce4874 | [
"MIT"
] | null | null | null | tests/tests_geomstats/test_hypersphere.py | lrozo/geomstats | accf4f2cc1de3bf9c6853d6618d7d18e44ce4874 | [
"MIT"
] | null | null | null | """Unit tests for the Hypersphere."""
import scipy.special
import tests.helper as helper
import geomstats.backend as gs
import geomstats.tests
from geomstats.geometry.hypersphere import Hypersphere
from geomstats.geometry.matrices import Matrices
from geomstats.learning.frechet_mean import FrechetMean
MEAN_ESTIMATION_TOL = 5e-3
KAPPA_ESTIMATION_TOL = 3e-2
ONLINE_KMEANS_TOL = 2e-2
class TestHypersphere(geomstats.tests.TestCase):
def setUp(self):
gs.random.seed(1234)
self.dimension = 4
self.space = Hypersphere(dim=self.dimension)
self.metric = self.space.metric
self.n_samples = 10
def test_random_uniform_and_belongs(self):
"""Test random uniform and belongs.
Test that the random uniform method samples
on the hypersphere space.
"""
n_samples = self.n_samples
point = self.space.random_uniform(n_samples)
result = self.space.belongs(point)
expected = gs.array([True] * n_samples)
self.assertAllClose(expected, result)
def test_random_uniform(self):
point = self.space.random_uniform()
self.assertAllClose(gs.shape(point), (self.dimension + 1,))
def test_replace_values(self):
points = gs.ones((3, 5))
new_points = gs.zeros((2, 5))
indcs = [True, False, True]
update = self.space._replace_values(points, new_points, indcs)
self.assertAllClose(update, gs.stack(
[gs.zeros(5), gs.ones(5), gs.zeros(5)]))
def test_projection_and_belongs(self):
point = gs.array([1., 2., 3., 4., 5.])
proj = self.space.projection(point)
result = self.space.belongs(proj)
expected = True
self.assertAllClose(expected, result)
def test_intrinsic_and_extrinsic_coords(self):
"""
Test that the composition of
intrinsic_to_extrinsic_coords and
extrinsic_to_intrinsic_coords
gives the identity.
"""
point_int = gs.array([.1, 0., 0., .1])
point_ext = self.space.intrinsic_to_extrinsic_coords(point_int)
result = self.space.extrinsic_to_intrinsic_coords(point_ext)
expected = point_int
self.assertAllClose(result, expected)
point_ext = (1. / (gs.sqrt(6.))
* gs.array([1., 0., 0., 1., 2.]))
point_int = self.space.extrinsic_to_intrinsic_coords(point_ext)
result = self.space.intrinsic_to_extrinsic_coords(point_int)
expected = point_ext
self.assertAllClose(result, expected)
def test_intrinsic_and_extrinsic_coords_vectorization(self):
"""Test change of coordinates.
Test that the composition of
intrinsic_to_extrinsic_coords and
extrinsic_to_intrinsic_coords
gives the identity.
"""
point_int = gs.array(
[[.1, 0., 0., .1],
[.1, .1, .1, .4],
[.1, .3, 0., .1],
[-0.1, .1, -.4, .1],
[0., 0., .1, .1],
[.1, .1, .1, .1]])
point_ext = self.space.intrinsic_to_extrinsic_coords(point_int)
result = self.space.extrinsic_to_intrinsic_coords(point_ext)
expected = point_int
self.assertAllClose(result, expected)
point_int = self.space.extrinsic_to_intrinsic_coords(point_ext)
result = self.space.intrinsic_to_extrinsic_coords(point_int)
expected = point_ext
self.assertAllClose(result, expected)
def test_log_and_exp_general_case(self):
"""Test Log and Exp.
Test that the Riemannian exponential
and the Riemannian logarithm are inverse.
Expect their composition to give the identity function.
NB: points on the n-dimensional sphere are
(n+1)-D vectors of norm 1.
"""
# Riemannian Log then Riemannian Exp
# General case
base_point = gs.array([1., 2., 3., 4., 6.])
base_point = base_point / gs.linalg.norm(base_point)
point = gs.array([0., 5., 6., 2., -1.])
point = point / gs.linalg.norm(point)
log = self.metric.log(point=point, base_point=base_point)
result = self.metric.exp(tangent_vec=log, base_point=base_point)
expected = point
self.assertAllClose(result, expected)
def test_log_and_exp_edge_case(self):
"""Test Log and Exp.
Test that the Riemannian exponential
and the Riemannian logarithm are inverse.
Expect their composition to give the identity function.
NB: points on the n-dimensional sphere are
(n+1)-D vectors of norm 1.
"""
# Riemannian Log then Riemannian Exp
# Edge case: two very close points, base_point_2 and point_2,
# form an angle < epsilon
base_point = gs.array([1., 2., 3., 4., 6.])
base_point = base_point / gs.linalg.norm(base_point)
point = (base_point
+ 1e-4 * gs.array([-1., -2., 1., 1., .1]))
point = point / gs.linalg.norm(point)
log = self.metric.log(point=point, base_point=base_point)
result = self.metric.exp(tangent_vec=log, base_point=base_point)
expected = point
self.assertAllClose(result, expected)
def test_exp_vectorization_single_samples(self):
dim = self.dimension + 1
one_vec = self.space.random_uniform()
one_base_point = self.space.random_uniform()
one_tangent_vec = self.space.to_tangent(
one_vec, base_point=one_base_point)
result = self.metric.exp(one_tangent_vec, one_base_point)
self.assertAllClose(gs.shape(result), (dim,))
one_base_point = gs.to_ndarray(one_base_point, to_ndim=2)
result = self.metric.exp(one_tangent_vec, one_base_point)
self.assertAllClose(gs.shape(result), (1, dim))
one_tangent_vec = gs.to_ndarray(one_tangent_vec, to_ndim=2)
result = self.metric.exp(one_tangent_vec, one_base_point)
self.assertAllClose(gs.shape(result), (1, dim))
one_base_point = self.space.random_uniform()
result = self.metric.exp(one_tangent_vec, one_base_point)
self.assertAllClose(gs.shape(result), (1, dim))
def test_exp_vectorization_n_samples(self):
n_samples = self.n_samples
dim = self.dimension + 1
one_vec = self.space.random_uniform()
one_base_point = self.space.random_uniform()
n_vecs = self.space.random_uniform(n_samples=n_samples)
n_base_points = self.space.random_uniform(n_samples=n_samples)
n_tangent_vecs = self.space.to_tangent(
n_vecs, base_point=one_base_point)
result = self.metric.exp(n_tangent_vecs, one_base_point)
self.assertAllClose(gs.shape(result), (n_samples, dim))
one_tangent_vec = self.space.to_tangent(
one_vec, base_point=n_base_points)
result = self.metric.exp(one_tangent_vec, n_base_points)
self.assertAllClose(gs.shape(result), (n_samples, dim))
n_tangent_vecs = self.space.to_tangent(
n_vecs, base_point=n_base_points)
result = self.metric.exp(n_tangent_vecs, n_base_points)
self.assertAllClose(gs.shape(result), (n_samples, dim))
def test_log_vectorization_single_samples(self):
dim = self.dimension + 1
one_base_point = self.space.random_uniform()
one_point = self.space.random_uniform()
result = self.metric.log(one_point, one_base_point)
self.assertAllClose(gs.shape(result), (dim,))
one_base_point = gs.to_ndarray(one_base_point, to_ndim=2)
result = self.metric.log(one_point, one_base_point)
self.assertAllClose(gs.shape(result), (1, dim))
        one_point = gs.to_ndarray(one_point, to_ndim=2)
result = self.metric.log(one_point, one_base_point)
self.assertAllClose(gs.shape(result), (1, dim))
one_base_point = self.space.random_uniform()
result = self.metric.log(one_point, one_base_point)
self.assertAllClose(gs.shape(result), (1, dim))
def test_log_vectorization_n_samples(self):
n_samples = self.n_samples
dim = self.dimension + 1
one_base_point = self.space.random_uniform()
one_point = self.space.random_uniform()
n_points = self.space.random_uniform(n_samples=n_samples)
n_base_points = self.space.random_uniform(n_samples=n_samples)
result = self.metric.log(one_point, one_base_point)
self.assertAllClose(gs.shape(result), (dim,))
result = self.metric.log(n_points, one_base_point)
self.assertAllClose(gs.shape(result), (n_samples, dim))
result = self.metric.log(one_point, n_base_points)
self.assertAllClose(gs.shape(result), (n_samples, dim))
result = self.metric.log(n_points, n_base_points)
self.assertAllClose(gs.shape(result), (n_samples, dim))
def test_exp_log_are_inverse(self):
initial_point = self.space.random_uniform(2)
end_point = self.space.random_uniform(2)
vec = self.space.metric.log(point=end_point, base_point=initial_point)
result = self.space.metric.exp(vec, initial_point)
self.assertAllClose(end_point, result)
def test_log_extreme_case(self):
initial_point = self.space.random_uniform(2)
vec = 1e-4 * gs.random.rand(*initial_point.shape)
vec = self.space.to_tangent(vec, initial_point)
point = self.space.metric.exp(vec, base_point=initial_point)
result = self.space.metric.log(point, initial_point)
self.assertAllClose(vec, result)
def test_exp_and_log_and_projection_to_tangent_space_general_case(self):
"""Test Log and Exp.
Test that the Riemannian exponential
and the Riemannian logarithm are inverse.
Expect their composition to give the identity function.
NB: points on the n-dimensional sphere are
(n+1)-D vectors of norm 1.
"""
# Riemannian Exp then Riemannian Log
# General case
# NB: Riemannian log gives a regularized tangent vector,
# so we take the norm modulo 2 * pi.
base_point = gs.array([0., -3., 0., 3., 4.])
base_point = base_point / gs.linalg.norm(base_point)
vector = gs.array([3., 2., 0., 0., -1.])
vector = self.space.to_tangent(
vector=vector, base_point=base_point)
exp = self.metric.exp(tangent_vec=vector, base_point=base_point)
result = self.metric.log(point=exp, base_point=base_point)
expected = vector
norm_expected = gs.linalg.norm(expected)
regularized_norm_expected = gs.mod(norm_expected, 2 * gs.pi)
expected = expected / norm_expected * regularized_norm_expected
# The Log can be the opposite vector on the tangent space,
# whose Exp gives the base_point
are_close = gs.allclose(result, expected)
norm_2pi = gs.isclose(gs.linalg.norm(result - expected), 2 * gs.pi)
self.assertTrue(are_close or norm_2pi)
def test_exp_and_log_and_projection_to_tangent_space_edge_case(self):
"""Test Log and Exp.
Test that the Riemannian exponential
and the Riemannian logarithm are inverse.
Expect their composition to give the identity function.
NB: points on the n-dimensional sphere are
(n+1)-D vectors of norm 1.
"""
# Riemannian Exp then Riemannian Log
# Edge case: tangent vector has norm < epsilon
base_point = gs.array([10., -2., -.5, 34., 3.])
base_point = base_point / gs.linalg.norm(base_point)
vector = 1e-4 * gs.array([.06, -51., 6., 5., 3.])
vector = self.space.to_tangent(
vector=vector, base_point=base_point)
exp = self.metric.exp(tangent_vec=vector, base_point=base_point)
result = self.metric.log(point=exp, base_point=base_point)
self.assertAllClose(result, vector)
def test_squared_norm_and_squared_dist(self):
"""
Test that the squared distance between two points is
the squared norm of their logarithm.
"""
point_a = (1. / gs.sqrt(129.)
* gs.array([10., -2., -5., 0., 0.]))
point_b = (1. / gs.sqrt(435.)
* gs.array([1., -20., -5., 0., 3.]))
log = self.metric.log(point=point_a, base_point=point_b)
result = self.metric.squared_norm(vector=log)
expected = self.metric.squared_dist(point_a, point_b)
self.assertAllClose(result, expected)
def test_squared_dist_vectorization_single_sample(self):
one_point_a = self.space.random_uniform()
one_point_b = self.space.random_uniform()
result = self.metric.squared_dist(one_point_a, one_point_b)
self.assertAllClose(gs.shape(result), ())
one_point_a = gs.to_ndarray(one_point_a, to_ndim=2)
result = self.metric.squared_dist(one_point_a, one_point_b)
self.assertAllClose(gs.shape(result), (1,))
one_point_b = gs.to_ndarray(one_point_b, to_ndim=2)
result = self.metric.squared_dist(one_point_a, one_point_b)
self.assertAllClose(gs.shape(result), (1,))
one_point_a = self.space.random_uniform()
result = self.metric.squared_dist(one_point_a, one_point_b)
self.assertAllClose(gs.shape(result), (1,))
def test_squared_dist_vectorization_n_samples(self):
n_samples = self.n_samples
one_point_a = self.space.random_uniform()
one_point_b = self.space.random_uniform()
n_points_a = self.space.random_uniform(n_samples=n_samples)
n_points_b = self.space.random_uniform(n_samples=n_samples)
result = self.metric.squared_dist(one_point_a, one_point_b)
self.assertAllClose(gs.shape(result), ())
result = self.metric.squared_dist(n_points_a, one_point_b)
self.assertAllClose(gs.shape(result), (n_samples,))
result = self.metric.squared_dist(one_point_a, n_points_b)
self.assertAllClose(gs.shape(result), (n_samples,))
result = self.metric.squared_dist(n_points_a, n_points_b)
self.assertAllClose(gs.shape(result), (n_samples,))
one_point_a = gs.to_ndarray(one_point_a, to_ndim=2)
one_point_b = gs.to_ndarray(one_point_b, to_ndim=2)
result = self.metric.squared_dist(n_points_a, one_point_b)
self.assertAllClose(gs.shape(result), (n_samples,))
result = self.metric.squared_dist(one_point_a, n_points_b)
self.assertAllClose(gs.shape(result), (n_samples,))
result = self.metric.squared_dist(n_points_a, n_points_b)
self.assertAllClose(gs.shape(result), (n_samples,))
def test_norm_and_dist(self):
"""
Test that the distance between two points is
the norm of their logarithm.
"""
point_a = (1. / gs.sqrt(129.)
* gs.array([10., -2., -5., 0., 0.]))
point_b = (1. / gs.sqrt(435.)
* gs.array([1., -20., -5., 0., 3.]))
log = self.metric.log(point=point_a, base_point=point_b)
self.assertAllClose(gs.shape(log), (5,))
result = self.metric.norm(vector=log)
self.assertAllClose(gs.shape(result), ())
expected = self.metric.dist(point_a, point_b)
self.assertAllClose(gs.shape(expected), ())
self.assertAllClose(result, expected)
def test_dist_point_and_itself(self):
# Distance between a point and itself is 0
point_a = (1. / gs.sqrt(129.)
* gs.array([10., -2., -5., 0., 0.]))
point_b = point_a
result = self.metric.dist(point_a, point_b)
expected = 0.
self.assertAllClose(result, expected)
def test_dist_pairwise(self):
point_a = (1. / gs.sqrt(129.)
* gs.array([10., -2., -5., 0., 0.]))
point_b = (1. / gs.sqrt(435.)
* gs.array([1., -20., -5., 0., 3.]))
point = gs.array([point_a, point_b])
result = self.metric.dist_pairwise(point)
expected = gs.array([[0., 1.24864502],
[1.24864502, 0.]])
self.assertAllClose(result, expected, rtol=1e-3)
def test_dist_pairwise_parallel(self):
n_samples = 15
points = self.space.random_uniform(n_samples)
result = self.metric.dist_pairwise(points, n_jobs=2, prefer='threads')
is_sym = Matrices.is_symmetric(result)
belongs = Matrices(n_samples, n_samples).belongs(result)
self.assertTrue(is_sym)
self.assertTrue(belongs)
def test_dist_orthogonal_points(self):
# Distance between two orthogonal points is pi / 2.
point_a = gs.array([10., -2., -.5, 0., 0.])
point_a = point_a / gs.linalg.norm(point_a)
point_b = gs.array([2., 10, 0., 0., 0.])
point_b = point_b / gs.linalg.norm(point_b)
result = gs.dot(point_a, point_b)
expected = 0
self.assertAllClose(result, expected)
result = self.metric.dist(point_a, point_b)
expected = gs.pi / 2
self.assertAllClose(result, expected)
def test_exp_and_dist_and_projection_to_tangent_space(self):
base_point = gs.array([16., -2., -2.5, 84., 3.])
base_point = base_point / gs.linalg.norm(base_point)
vector = gs.array([9., 0., -1., -2., 1.])
tangent_vec = self.space.to_tangent(
vector=vector, base_point=base_point)
exp = self.metric.exp(
tangent_vec=tangent_vec, base_point=base_point)
result = self.metric.dist(base_point, exp)
expected = gs.linalg.norm(tangent_vec) % (2 * gs.pi)
self.assertAllClose(result, expected)
def test_exp_and_dist_and_projection_to_tangent_space_vec(self):
base_point = gs.array([
[16., -2., -2.5, 84., 3.],
[16., -2., -2.5, 84., 3.]])
base_single_point = gs.array([16., -2., -2.5, 84., 3.])
scalar_norm = gs.linalg.norm(base_single_point)
base_point = base_point / scalar_norm
vector = gs.array(
[[9., 0., -1., -2., 1.],
[9., 0., -1., -2., 1]])
tangent_vec = self.space.to_tangent(
vector=vector, base_point=base_point)
exp = self.metric.exp(
tangent_vec=tangent_vec, base_point=base_point)
result = self.metric.dist(base_point, exp)
expected = gs.linalg.norm(tangent_vec, axis=-1) % (2 * gs.pi)
self.assertAllClose(result, expected)
def test_geodesic_and_belongs(self):
n_geodesic_points = 10
initial_point = self.space.random_uniform(2)
vector = gs.array([[2., 0., -1., -2., 1.]] * 2)
initial_tangent_vec = self.space.to_tangent(
vector=vector, base_point=initial_point)
geodesic = self.metric.geodesic(
initial_point=initial_point,
initial_tangent_vec=initial_tangent_vec)
t = gs.linspace(start=0., stop=1., num=n_geodesic_points)
points = geodesic(t)
result = gs.stack([self.space.belongs(pt) for pt in points])
self.assertTrue(gs.all(result))
initial_point = initial_point[0]
initial_tangent_vec = initial_tangent_vec[0]
geodesic = self.metric.geodesic(
initial_point=initial_point,
initial_tangent_vec=initial_tangent_vec)
points = geodesic(t)
result = self.space.belongs(points)
expected = gs.array(n_geodesic_points * [True])
self.assertAllClose(expected, result)
def test_geodesic_end_point(self):
n_geodesic_points = 10
initial_point = self.space.random_uniform(4)
geodesic = self.metric.geodesic(
initial_point=initial_point[:2],
end_point=initial_point[2:])
t = gs.linspace(start=0., stop=1., num=n_geodesic_points)
points = geodesic(t)
result = points[:, -1]
expected = initial_point[2:]
self.assertAllClose(expected, result)
def test_inner_product(self):
tangent_vec_a = gs.array([1., 0., 0., 0., 0.])
tangent_vec_b = gs.array([0., 1., 0., 0., 0.])
base_point = gs.array([0., 0., 0., 0., 1.])
result = self.metric.inner_product(
tangent_vec_a, tangent_vec_b, base_point)
expected = 0.
self.assertAllClose(expected, result)
def test_inner_product_vectorization_single_samples(self):
tangent_vec_a = gs.array([1., 0., 0., 0., 0.])
tangent_vec_b = gs.array([0., 1., 0., 0., 0.])
base_point = gs.array([0., 0., 0., 0., 1.])
result = self.metric.inner_product(
tangent_vec_a, tangent_vec_b, base_point)
expected = 0.
self.assertAllClose(expected, result)
tangent_vec_a = gs.array([[1., 0., 0., 0., 0.]])
tangent_vec_b = gs.array([0., 1., 0., 0., 0.])
base_point = gs.array([0., 0., 0., 0., 1.])
result = self.metric.inner_product(
tangent_vec_a, tangent_vec_b, base_point)
expected = gs.array([0.])
self.assertAllClose(expected, result)
tangent_vec_a = gs.array([1., 0., 0., 0., 0.])
tangent_vec_b = gs.array([[0., 1., 0., 0., 0.]])
base_point = gs.array([0., 0., 0., 0., 1.])
result = self.metric.inner_product(
tangent_vec_a, tangent_vec_b, base_point)
expected = gs.array([0.])
self.assertAllClose(expected, result)
tangent_vec_a = gs.array([[1., 0., 0., 0., 0.]])
tangent_vec_b = gs.array([[0., 1., 0., 0., 0.]])
base_point = gs.array([0., 0., 0., 0., 1.])
result = self.metric.inner_product(
tangent_vec_a, tangent_vec_b, base_point)
expected = gs.array([0.])
self.assertAllClose(expected, result)
tangent_vec_a = gs.array([[1., 0., 0., 0., 0.]])
tangent_vec_b = gs.array([[0., 1., 0., 0., 0.]])
base_point = gs.array([[0., 0., 0., 0., 1.]])
result = self.metric.inner_product(
tangent_vec_a, tangent_vec_b, base_point)
expected = gs.array([0.])
self.assertAllClose(expected, result)
def test_diameter(self):
dim = 2
sphere = Hypersphere(dim)
point_a = gs.array([[0., 0., 1.]])
point_b = gs.array([[1., 0., 0.]])
point_c = gs.array([[0., 0., -1.]])
result = sphere.metric.diameter(gs.vstack((point_a, point_b, point_c)))
expected = gs.pi
self.assertAllClose(expected, result)
def test_closest_neighbor_index(self):
"""Check that the closest neighbor is one of neighbors."""
n_samples = 10
points = self.space.random_uniform(n_samples=n_samples)
point = points[0, :]
neighbors = points[1:, :]
index = self.metric.closest_neighbor_index(point, neighbors)
closest_neighbor = points[index, :]
test = gs.sum(gs.all(points == closest_neighbor, axis=1))
result = test > 0
self.assertTrue(result)
def test_sample_von_mises_fisher_arbitrary_mean(self):
"""
Check that the maximum likelihood estimates of the mean and
concentration parameter are close to the real values. A first
estimation of the concentration parameter is obtained by a
closed-form expression and improved through the Newton method.
"""
for dim in [2, 9]:
n_points = 10000
sphere = Hypersphere(dim)
# check mean value for concentrated distribution for different mean
kappa = 1000.
mean = sphere.random_uniform()
points = sphere.random_von_mises_fisher(
mu=mean, kappa=kappa, n_samples=n_points)
sum_points = gs.sum(points, axis=0)
result = sum_points / gs.linalg.norm(sum_points)
expected = mean
self.assertAllClose(result, expected, atol=MEAN_ESTIMATION_TOL)
def test_random_von_mises_kappa(self):
# check concentration parameter for dispersed distribution
kappa = 1.
n_points = 100000
for dim in [2, 9]:
sphere = Hypersphere(dim)
points = sphere.random_von_mises_fisher(
kappa=kappa, n_samples=n_points)
sum_points = gs.sum(points, axis=0)
mean_norm = gs.linalg.norm(sum_points) / n_points
kappa_estimate = (mean_norm * (dim + 1. - mean_norm**2)
/ (1. - mean_norm**2))
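            # Added commentary: this is the standard closed-form
            # approximation kappa ~ R * (p - R**2) / (1 - R**2), where R is
            # the mean resultant length (norm of the sample mean) and
            # p = dim + 1; the Newton iterations below refine it by solving
            # A_p(kappa) = R with A_p the Bessel ratio I_{p/2} / I_{p/2-1}.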
kappa_estimate = gs.cast(kappa_estimate, gs.float64)
p = dim + 1
n_steps = 100
for _ in range(n_steps):
bessel_func_1 = scipy.special.iv(p / 2., kappa_estimate)
bessel_func_2 = scipy.special.iv(p / 2. - 1., kappa_estimate)
ratio = bessel_func_1 / bessel_func_2
denominator = 1. - ratio**2 - (p - 1.) * ratio / kappa_estimate
mean_norm = gs.cast(mean_norm, gs.float64)
kappa_estimate = (
kappa_estimate - (ratio - mean_norm) / denominator)
result = kappa_estimate
expected = kappa
self.assertAllClose(result, expected, atol=KAPPA_ESTIMATION_TOL)
def test_random_von_mises_general_dim_mean(self):
for dim in [2, 9]:
sphere = Hypersphere(dim)
n_points = 100000
# check mean value for concentrated distribution
kappa = 10
points = sphere.random_von_mises_fisher(
kappa=kappa, n_samples=n_points)
sum_points = gs.sum(points, axis=0)
expected = gs.array([1.] + [0.] * dim)
result = sum_points / gs.linalg.norm(sum_points)
self.assertAllClose(
result, expected, atol=KAPPA_ESTIMATION_TOL)
def test_random_von_mises_one_sample_belongs(self):
for dim in [2, 9]:
sphere = Hypersphere(dim)
point = sphere.random_von_mises_fisher()
self.assertAllClose(point.shape, (dim + 1, ))
result = sphere.belongs(point)
self.assertTrue(result)
def test_spherical_to_extrinsic(self):
"""
Check vectorization of conversion from spherical
to extrinsic coordinates on the 2-sphere.
"""
dim = 2
sphere = Hypersphere(dim)
points_spherical = gs.array([gs.pi / 2, 0])
result = sphere.spherical_to_extrinsic(points_spherical)
expected = gs.array([1., 0., 0.])
self.assertAllClose(result, expected)
def test_spherical_to_extrinsic_vectorization(self):
dim = 2
sphere = Hypersphere(dim)
points_spherical = gs.array([[gs.pi / 2, 0],
[gs.pi / 6, gs.pi / 4]])
result = sphere.spherical_to_extrinsic(points_spherical)
expected = gs.array(
[[1., 0., 0.],
[gs.sqrt(2.) / 4., gs.sqrt(2.) / 4., gs.sqrt(3.) / 2.]])
self.assertAllClose(result, expected)
def test_tangent_spherical_to_extrinsic(self):
"""
Check vectorization of conversion from spherical
to extrinsic coordinates for tangent vectors to the
2-sphere.
"""
dim = 2
sphere = Hypersphere(dim)
base_points_spherical = gs.array([
[gs.pi / 2, 0],
[gs.pi / 2, 0]])
tangent_vecs_spherical = gs.array([
[0.25, 0.5],
[0.3, 0.2]])
result = sphere.tangent_spherical_to_extrinsic(
tangent_vecs_spherical, base_points_spherical)
expected = gs.array([
[0, 0.5, -0.25],
[0, 0.2, -0.3]])
self.assertAllClose(result, expected)
result = sphere.tangent_spherical_to_extrinsic(
tangent_vecs_spherical[0], base_points_spherical[0])
self.assertAllClose(result, expected[0])
def test_christoffels_vectorization(self):
"""
Check vectorization of Christoffel symbols in
spherical coordinates on the 2-sphere.
"""
dim = 2
sphere = Hypersphere(dim)
points_spherical = gs.array([[gs.pi / 2, 0],
[gs.pi / 6, gs.pi / 4]])
christoffel = sphere.metric.christoffels(
points_spherical)
result = christoffel.shape
expected = gs.array([2, dim, dim, dim])
self.assertAllClose(result, expected)
def test_parallel_transport_vectorization(self):
sphere = Hypersphere(2)
metric = sphere.metric
shape = (4, 3)
results = helper.test_parallel_transport(sphere, metric, shape)
for res in results:
self.assertTrue(res)
def test_is_tangent(self):
space = self.space
vec = space.random_uniform()
result = space.is_tangent(vec, vec)
self.assertFalse(result)
base_point = space.random_uniform()
tangent_vec = space.to_tangent(vec, base_point)
result = space.is_tangent(tangent_vec, base_point)
self.assertTrue(result)
base_point = space.random_uniform(2)
vec = space.random_uniform(2)
tangent_vec = space.to_tangent(vec, base_point)
result = space.is_tangent(tangent_vec, base_point)
self.assertAllClose(gs.shape(result), (2, ))
self.assertTrue(gs.all(result))
def test_sectional_curvature(self):
n_samples = 4
sphere = self.space
base_point = sphere.random_uniform(n_samples)
tan_vec_a = sphere.to_tangent(
gs.random.rand(n_samples, sphere.dim + 1), base_point)
tan_vec_b = sphere.to_tangent(
gs.random.rand(n_samples, sphere.dim + 1), base_point)
result = sphere.metric.sectional_curvature(
tan_vec_a, tan_vec_b, base_point)
expected = gs.ones(result.shape)
self.assertAllClose(result, expected)
@geomstats.tests.np_and_pytorch_only
def test_riemannian_normal_and_belongs(self):
mean = self.space.random_uniform()
cov = gs.eye(self.space.dim)
sample = self.space.random_riemannian_normal(mean, cov, 10)
result = self.space.belongs(sample)
self.assertTrue(gs.all(result))
@geomstats.tests.np_and_pytorch_only
def test_riemannian_normal_mean(self):
space = self.space
mean = space.random_uniform()
precision = gs.eye(space.dim) * 10
sample = space.random_riemannian_normal(mean, precision, 10000)
estimator = FrechetMean(space.metric, method='adaptive')
estimator.fit(sample)
estimate = estimator.estimate_
self.assertAllClose(estimate, mean, atol=1e-2)
| 38.112903 | 79 | 0.62043 |
e06687867cd3066d6a6d2151dccf270e24d1fd40 | 1,144 | bzl | Python | src/cdk/config.bzl | forsti0506/components | c03fd6e0737ec8f654fdb8ad3eacb023b323e575 | [
"MIT"
] | null | null | null | src/cdk/config.bzl | forsti0506/components | c03fd6e0737ec8f654fdb8ad3eacb023b323e575 | [
"MIT"
] | null | null | null | src/cdk/config.bzl | forsti0506/components | c03fd6e0737ec8f654fdb8ad3eacb023b323e575 | [
"MIT"
] | null | null | null | # List of all entry-points of the Angular CDK package.
CDK_ENTRYPOINTS = [
"a11y",
"accordion",
"bidi",
"clipboard",
"coercion",
"collections",
"dialog",
"drag-drop",
"keycodes",
"layout",
"observers",
"overlay",
"platform",
"portal",
"scrolling",
"stepper",
"table",
"text-field",
"tree",
"testing",
"testing/testbed",
"testing/selenium-webdriver",
]
# List of all entry-point targets of the Angular Material package.
CDK_TARGETS = ["//src/cdk"] + ["//src/cdk/%s" % ep for ep in CDK_ENTRYPOINTS]
# Within the CDK, only a few targets have sass libraries which need to be
# part of the release package. This list declares all CDK targets with sass
# libraries that need to be included and re-exported at the package root.
# **Note**: When updating the list of CDK entry-points with styles, also update
# the `exports` field in the `cdk/package.json` file.
CDK_ENTRYPOINTS_WITH_STYLES = [
"a11y",
"overlay",
"text-field",
]
CDK_SCSS_LIBS = [
"//src/cdk/%s:%s_scss_lib" % (p, p.replace("-", "_"))
for p in CDK_ENTRYPOINTS_WITH_STYLES
]
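# For reference (added commentary): the comprehension above expands to labels
# such as "//src/cdk/a11y:a11y_scss_lib", "//src/cdk/overlay:overlay_scss_lib"
# and "//src/cdk/text-field:text_field_scss_lib".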
| 25.422222 | 79 | 0.63986 |
604368b75ff0c77436fd9a7633939b5563307c68 | 24,075 | py | Python | qutip/wigner.py | ldabas/qutip | 2bd445c0625b89a8b5bfe636182e2c44c961da5e | [
"BSD-3-Clause"
] | null | null | null | qutip/wigner.py | ldabas/qutip | 2bd445c0625b89a8b5bfe636182e2c44c961da5e | [
"BSD-3-Clause"
] | null | null | null | qutip/wigner.py | ldabas/qutip | 2bd445c0625b89a8b5bfe636182e2c44c961da5e | [
"BSD-3-Clause"
] | null | null | null | # This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
__all__ = ['wigner', 'qfunc', 'spin_q_function',
'spin_wigner', 'wigner_transform']
import numpy as np
from numpy import (
zeros, array, arange, exp, real, conj, pi, copy, sqrt, meshgrid, size,
conjugate, cos, sin, polyval, fliplr,
)
import scipy.sparse as sp
import scipy.fftpack as ft
import scipy.linalg as la
from scipy.special import genlaguerre
from scipy.special import binom
from scipy.special import sph_harm
from qutip.qobj import Qobj, isket, isoper
from qutip.states import ket2dm
from qutip.parallel import parfor
from qutip.utilities import clebsch
from qutip.operators import jmat
from scipy.special import factorial
from qutip.cy.sparse_utils import _csr_get_diag
import qutip as qt
from qutip.sparse import eigh
def wigner_transform(psi, j, fullparity, steps, slicearray):
"""takes the density matrix or state vector of any finite state and
generates the Wigner function for that state on a sphere, generating a spin
Wigner function useful for displaying the quasi-probability for a qubit or
any qudit. For the standard, continuous-variable Wigner function for
position and momentum variables, wigner() should be used.
Parameters
----------
psi : qobj
a state vector or density matrix.
j : int
the total angular momentum of the quantum state.
fullparity : bool
should the parity of the full SU space be used?
steps : int
number of points at which the Wigner transform is calculated.
slicearray : list of str
the angle slice to be used for each particle in case of a
multi-particle quantum state. 'l' yields an equal angle
slice. 'x', 'y' and 'z' angle slices can also be chosen.
Returns
----------
    wigner : ndarray
        the Wigner transform evaluated on a `steps` x `steps` grid of
        theta and phi values.
Raises
------
ComplexWarning
        This can be ignored as it is caused by rounding errors.
Notes
------
See example notebook wigner_visualisation.
References
------
[1] T. Tilma, M. J. Everitt, J. H. Samson, W. J. Munro,
and K. Nemoto, Phys. Rev. Lett. 117, 180401 (2016).
[2] R. P. Rundle, P. W. Mills, T. Tilma, J. H. Samson, and
M. J. Everitt, Phys. Rev. A 96, 022117 (2017).
"""
    if not (psi.type == 'ket' or psi.type == 'oper' or psi.type == 'bra'):
raise TypeError('Input state is not a valid operator.')
if psi.type == 'ket' or psi.type == 'bra':
rho = ket2dm(psi)
else:
rho = psi
sun = 2 # The order of the SU group
# calculate total number of particles in quantum state:
N = np.int32(np.log(np.shape(rho)[0]) / np.log(2 * j + 1))
theta = np.zeros((N, steps))
phi = np.zeros((N, steps))
for i in range(N):
theta[i, :] = np.linspace(0, np.pi, steps)
phi[i, :] = np.linspace(0, 2 * np.pi, steps)
theta, phi = _angle_slice(np.array(slicearray, dtype=str), theta, phi)
wigner = np.zeros((steps, steps))
if fullparity:
pari = _parity(sun**N, j)
else:
pari = _parity(sun, j)
for t in range(steps):
for p in range(steps):
wigner[t, p] = np.real(np.trace(rho.data @ _kernelsu2(
theta[:, t], phi[:, p], N, j, pari, fullparity)))
return wigner
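# Hypothetical usage sketch (values are illustrative, not from the original
# source): for a single qubit in an equal superposition,
#   psi = (qt.basis(2, 0) + qt.basis(2, 1)).unit()
#   W = wigner_transform(psi, 0.5, False, 50, ['l'])
# W is then a (50, 50) array of values over an equal-angle ('l') slice.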
def _parity(N, j):
"""Private function to calculate the parity of the quantum system.
"""
if j == 0.5:
pi = np.identity(N) - np.sqrt((N - 1) * N * (N + 1) / 2) * _lambda_f(N)
return pi / N
elif j > 0.5:
mult = np.int32(2 * j + 1)
matrix = np.zeros((mult, mult))
foo = np.ones(mult)
for n in np.arange(-j, j + 1, 1):
for l in np.arange(0, mult, 1):
foo[l] = (2 * l + 1) * qt.clebsch(j, l, j, n, 0, n)
matrix[np.int32(n + j), np.int32(n + j)] = np.sum(foo)
return matrix / mult
def _lambda_f(N):
"""Private function needed for the calculation of the parity.
"""
matrix = np.sqrt(2 / (N * (N - 1))) * np.identity(N)
matrix[-1, -1] = - np.sqrt(2 * (N - 1) / N)
return matrix
def _kernelsu2(theta, phi, N, j, parity, fullparity):
"""Private function that calculates the kernel for the SU2 unitary group.
"""
U = np.ones(1)
# calculate the total rotation matrix (tensor product for each particle):
for i in range(0, N):
U = np.kron(U, _rotation_matrix(theta[i], phi[i], j))
if not fullparity:
op_parity = parity # The parity for a one particle system
for i in range(1, N):
parity = np.kron(parity, op_parity)
matrix = U @ parity @ U.conj().T
return matrix
def _rotation_matrix(theta, phi, j):
"""Private function to calculate the rotation operator for the SU2 kernel.
"""
return la.expm(1j * phi * jmat(j, 'z').full()) @ \
la.expm(1j * theta * jmat(j, 'y').full())
def _angle_slice(slicearray, theta, phi):
"""Private function to modify theta and phi for angle slicing.
"""
xind = np.where(slicearray == 'x')
theta[xind, :] = np.pi - theta[xind, :]
phi[xind, :] = -phi[xind, :]
yind = np.where(slicearray == 'y')
theta[yind, :] = np.pi - theta[yind, :]
phi[yind, :] = np.pi - phi[yind, :]
zind = np.where(slicearray == 'z')
phi[zind, :] = phi[zind, :] + np.pi
return theta, phi
def wigner(psi, xvec, yvec, method='clenshaw', g=sqrt(2),
sparse=False, parfor=False):
"""Wigner function for a state vector or density matrix at points
`xvec + i * yvec`.
Parameters
----------
    psi : qobj
A state vector or density matrix.
xvec : array_like
x-coordinates at which to calculate the Wigner function.
yvec : array_like
y-coordinates at which to calculate the Wigner function. Does not
apply to the 'fft' method.
g : float
Scaling factor for `a = 0.5 * g * (x + iy)`, default `g = sqrt(2)`.
The value of `g` is related to the value of `hbar` in the commutation
relation `[x, y] = i * hbar` via `hbar=2/g^2` giving the default
value `hbar=1`.
method : string {'clenshaw', 'iterative', 'laguerre', 'fft'}
        Select method 'clenshaw', 'iterative', 'laguerre', or 'fft', where 'clenshaw'
and 'iterative' use an iterative method to evaluate the Wigner functions for density
matrices :math:`|m><n|`, while 'laguerre' uses the Laguerre polynomials
in scipy for the same task. The 'fft' method evaluates the Fourier
        transform of the density matrix. The 'clenshaw' method is the default, and
in general recommended, but the 'laguerre' method is more efficient for
very sparse density matrices (e.g., superpositions of Fock states in a
large Hilbert space). The 'clenshaw' method is the preferred method for
dealing with density matrices that have a large number of excitations
(>~50). 'clenshaw' is a fast and numerically stable method.
sparse : bool {False, True}
Tells the default solver whether or not to keep the input density
matrix in sparse format. As the dimensions of the density matrix
        grow, setting this flag can result in increased performance.
parfor : bool {False, True}
Flag for calculating the Laguerre polynomial based Wigner function
method='laguerre' in parallel using the parfor function.
Returns
-------
W : array
Values representing the Wigner function calculated over the specified
range [xvec,yvec].
    yvec : array
FFT ONLY. Returns the y-coordinate values calculated via the Fourier
transform.
Notes
-----
The 'fft' method accepts only an xvec input for the x-coordinate.
The y-coordinates are calculated internally.
References
----------
Ulf Leonhardt,
Measuring the Quantum State of Light, (Cambridge University Press, 1997)
"""
if not (psi.type == 'ket' or psi.type == 'oper' or psi.type == 'bra'):
raise TypeError('Input state is not a valid operator.')
if method == 'fft':
return _wigner_fourier(psi, xvec, g)
if psi.type == 'ket' or psi.type == 'bra':
rho = ket2dm(psi)
else:
rho = psi
if method == 'iterative':
return _wigner_iterative(rho, xvec, yvec, g)
elif method == 'laguerre':
return _wigner_laguerre(rho, xvec, yvec, g, parfor)
elif method == 'clenshaw':
return _wigner_clenshaw(rho, xvec, yvec, g, sparse=sparse)
else:
raise TypeError(
"method must be either 'iterative', 'laguerre', or 'fft'.")
def _wigner_iterative(rho, xvec, yvec, g=sqrt(2)):
r"""
Using an iterative method to evaluate the wigner functions for the Fock
state :math:`|m><n|`.
The Wigner function is calculated as
:math:`W = \sum_{mn} \rho_{mn} W_{mn}` where :math:`W_{mn}` is the Wigner
function for the density matrix :math:`|m><n|`.
In this implementation, for each row m, Wlist contains the Wigner functions
Wlist = [0, ..., W_mm, ..., W_mN]. As soon as one W_mn Wigner function is
calculated, the corresponding contribution is added to the total Wigner
function, weighted by the corresponding element in the density matrix
:math:`rho_{mn}`.
"""
M = np.prod(rho.shape[0])
X, Y = meshgrid(xvec, yvec)
A = 0.5 * g * (X + 1.0j * Y)
Wlist = array([zeros(np.shape(A), dtype=complex) for k in range(M)])
Wlist[0] = exp(-2.0 * abs(A) ** 2) / pi
W = real(rho[0, 0]) * real(Wlist[0])
for n in range(1, M):
Wlist[n] = (2.0 * A * Wlist[n - 1]) / sqrt(n)
W += 2 * real(rho[0, n] * Wlist[n])
for m in range(1, M):
temp = copy(Wlist[m])
Wlist[m] = (2 * conj(A) * temp - sqrt(m) * Wlist[m - 1]) / sqrt(m)
# Wlist[m] = Wigner function for |m><m|
W += real(rho[m, m] * Wlist[m])
for n in range(m + 1, M):
temp2 = (2 * A * Wlist[n - 1] - sqrt(m) * temp) / sqrt(n)
temp = copy(Wlist[n])
Wlist[n] = temp2
# Wlist[n] = Wigner function for |m><n|
W += 2 * real(rho[m, n] * Wlist[n])
return 0.5 * W * g ** 2
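# Sanity check (illustrative note, not from the original source): for the
# vacuum state rho = |0><0| only the Wlist[0] term above survives, so
#     W(x, y) = 0.5 * g**2 * exp(-2 * |A|**2) / pi
#             = (g**2 / (2 * pi)) * exp(-g**2 * (x**2 + y**2) / 2),
# which integrates to 1 over the phase plane, as a Wigner function should.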
def _wigner_laguerre(rho, xvec, yvec, g, parallel):
r"""
Using Laguerre polynomials from scipy to evaluate the Wigner function for
the density matrices :math:`|m><n|`, :math:`W_{mn}`. The total Wigner
function is calculated as :math:`W = \sum_{mn} \rho_{mn} W_{mn}`.
"""
M = np.prod(rho.shape[0])
X, Y = meshgrid(xvec, yvec)
A = 0.5 * g * (X + 1.0j * Y)
W = zeros(np.shape(A))
# compute wigner functions for density matrices |m><n| and
# weight by all the elements in the density matrix
B = 4 * abs(A) ** 2
if sp.isspmatrix_csr(rho.data):
# for compress sparse row matrices
if parallel:
iterator = (
(m, rho, A, B) for m in range(len(rho.data.indptr) - 1))
W1_out = parfor(_par_wig_eval, iterator)
W += sum(W1_out)
else:
for m in range(len(rho.data.indptr) - 1):
for jj in range(rho.data.indptr[m], rho.data.indptr[m + 1]):
n = rho.data.indices[jj]
if m == n:
W += real(rho[m, m] * (-1) ** m * genlaguerre(m, 0)(B))
elif n > m:
W += 2.0 * real(rho[m, n] * (-1) ** m *
(2 * A) ** (n - m) *
sqrt(factorial(m) / factorial(n)) *
genlaguerre(m, n - m)(B))
else:
# for dense density matrices
B = 4 * abs(A) ** 2
for m in range(M):
if abs(rho[m, m]) > 0.0:
W += real(rho[m, m] * (-1) ** m * genlaguerre(m, 0)(B))
for n in range(m + 1, M):
if abs(rho[m, n]) > 0.0:
W += 2.0 * real(rho[m, n] * (-1) ** m *
(2 * A) ** (n - m) *
sqrt(factorial(m) / factorial(n)) *
genlaguerre(m, n - m)(B))
return 0.5 * W * g ** 2 * np.exp(-B / 2) / pi
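# Worked special case (illustrative note): for a single Fock state
# rho = |n><n| the dense branch above reduces to
#     W = 0.5 * g**2 * (-1)**n * L_n(4 * |A|**2) * exp(-2 * |A|**2) / pi,
# with L_n the Laguerre polynomial -- the familiar ring-shaped Wigner function.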
def _par_wig_eval(args):
"""
Private function for calculating terms of Laguerre Wigner function
using parfor.
"""
m, rho, A, B = args
W1 = zeros(np.shape(A))
for jj in range(rho.data.indptr[m], rho.data.indptr[m + 1]):
n = rho.data.indices[jj]
if m == n:
W1 += real(rho[m, m] * (-1) ** m * genlaguerre(m, 0)(B))
elif n > m:
W1 += 2.0 * real(rho[m, n] * (-1) ** m *
(2 * A) ** (n - m) *
sqrt(factorial(m) / factorial(n)) *
genlaguerre(m, n - m)(B))
return W1
def _wigner_fourier(psi, xvec, g=np.sqrt(2)):
"""
Evaluate the Wigner function via the Fourier transform.
"""
if psi.type == 'bra':
psi = psi.dag()
if psi.type == 'ket':
return _psi_wigner_fft(psi.full(), xvec, g)
elif psi.type == 'oper':
eig_vals, eig_vecs = eigh(psi.full())
W = 0
for ii in range(psi.shape[0]):
W1, yvec = _psi_wigner_fft(
np.reshape(eig_vecs[:, ii], (psi.shape[0], 1)), xvec, g)
W += eig_vals[ii] * W1
return W, yvec
def _psi_wigner_fft(psi, xvec, g=sqrt(2)):
"""
FFT method for a single state vector. Called multiple times when the
input is a density matrix.
"""
n = len(psi)
A = _osc_eigen(n, xvec * g / np.sqrt(2))
xpsi = np.dot(psi.T, A)
W, yvec = _wigner_fft(xpsi, xvec * g / np.sqrt(2))
return (0.5 * g ** 2) * np.real(W.T), yvec * np.sqrt(2) / g
def _wigner_fft(psi, xvec):
"""
Evaluates the Fourier transformation of a given state vector.
Returns the corresponding density matrix and range
"""
n = 2*len(psi.T)
r1 = np.concatenate((np.array([[0]]),
np.fliplr(psi.conj()),
np.zeros((1, n//2 - 1))), axis=1)
r2 = np.concatenate((np.array([[0]]), psi,
np.zeros((1, n//2 - 1))), axis=1)
w = la.toeplitz(np.zeros((n//2, 1)), r1) * \
np.flipud(la.toeplitz(np.zeros((n//2, 1)), r2))
w = np.concatenate((w[:, n//2:n], w[:, 0:n//2]), axis=1)
w = ft.fft(w)
w = np.real(np.concatenate((w[:, 3*n//4:n+1], w[:, 0:n//4]), axis=1))
p = np.arange(-n/4, n/4)*np.pi / (n*(xvec[1] - xvec[0]))
w = w / (p[1] - p[0]) / n
return w, p
def _osc_eigen(N, pnts):
"""
    Vector of N-dim oscillator eigenfunctions evaluated
at the points in pnts.
"""
pnts = np.asarray(pnts)
lpnts = len(pnts)
A = np.zeros((N, lpnts))
A[0, :] = np.exp(-pnts ** 2 / 2.0) / pi ** 0.25
if N == 1:
return A
else:
A[1, :] = np.sqrt(2) * pnts * A[0, :]
for k in range(2, N):
A[k, :] = np.sqrt(2.0 / k) * pnts * A[k - 1, :] - \
np.sqrt((k - 1.0) / k) * A[k - 2, :]
return A
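# Note (added for clarity): the loop above is the standard three-term
# recurrence for harmonic-oscillator eigenfunctions,
#     psi_k(x) = sqrt(2 / k) * x * psi_{k-1}(x) - sqrt((k - 1) / k) * psi_{k-2}(x),
# seeded with psi_0(x) = exp(-x**2 / 2) / pi**0.25 and psi_1(x) = sqrt(2) * x * psi_0(x).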
def _wigner_clenshaw(rho, xvec, yvec, g=sqrt(2), sparse=False):
r"""
Using Clenshaw summation - numerically stable and efficient
iterative algorithm to evaluate polynomial series.
The Wigner function is calculated as
    :math:`W = e^{-0.5 x^2}/\pi \sum_{L} c_L (2x)^L / \sqrt{L!}`, where
    :math:`c_L = \sum_n \rho_{n,L+n} LL_n^L` and
    :math:`LL_n^L = (-1)^n \sqrt{L! n!/(L+n)!} \mathrm{LaguerreL}[n, L, x]`.
"""
M = np.prod(rho.shape[0])
X,Y = np.meshgrid(xvec, yvec)
#A = 0.5 * g * (X + 1.0j * Y)
A2 = g * (X + 1.0j * Y) #this is A2 = 2*A
B = np.abs(A2)
B *= B
w0 = (2*rho.data[0,-1])*np.ones_like(A2)
L = M-1
#calculation of \sum_{L} c_L (2x)^L / \sqrt(L!)
#using Horner's method
if not sparse:
rho = rho.full() * (2*np.ones((M,M)) - np.diag(np.ones(M)))
while L > 0:
L -= 1
#here c_L = _wig_laguerre_val(L, B, np.diag(rho, L))
w0 = _wig_laguerre_val(L, B, np.diag(rho, L)) + w0 * A2 * (L+1)**-0.5
else:
while L > 0:
L -= 1
diag = _csr_get_diag(rho.data.data,rho.data.indices,
rho.data.indptr,L)
if L != 0:
diag *= 2
#here c_L = _wig_laguerre_val(L, B, np.diag(rho, L))
w0 = _wig_laguerre_val(L, B, diag) + w0 * A2 * (L+1)**-0.5
return w0.real * np.exp(-B*0.5) * (g*g*0.5 / pi)
def _wig_laguerre_val(L, x, c):
r"""
    This is an evaluation of a polynomial series inspired by hermval from numpy.
    Returns the polynomial series
    \sum_n b_n LL_n^L,
    where
    LL_n^L = (-1)^n \sqrt{L! n!/(L+n)!} LaguerreL[n, L, x]
The evaluation uses Clenshaw recursion
"""
if len(c) == 1:
y0 = c[0]
y1 = 0
elif len(c) == 2:
y0 = c[0]
y1 = c[1]
else:
k = len(c)
y0 = c[-2]
y1 = c[-1]
for i in range(3, len(c) + 1):
k -= 1
y0, y1 = c[-i] - y1 * (float((k - 1)*(L + k - 1))/((L+k)*k))**0.5, \
y0 - y1 * ((L + 2*k -1) - x) * ((L+k)*k)**-0.5
return y0 - y1 * ((L + 1) - x) * (L + 1)**-0.5
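# Hedged example (illustrative only): with a single coefficient the series
# collapses to that coefficient, which the `len(c) == 1` branch returns:
#
#     _wig_laguerre_val(0, np.zeros((2, 2)), np.array([1.0]))  # -> 2x2 array of 1.0
#
# For longer coefficient vectors the Clenshaw recursion above folds the series
# from the highest order downwards, avoiding explicit Laguerre evaluations.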
# -----------------------------------------------------------------------------
# Q FUNCTION
#
def qfunc(state, xvec, yvec, g=sqrt(2)):
"""Q-function of a given state vector or density matrix
at points `xvec + i * yvec`.
Parameters
----------
state : qobj
A state vector or density matrix.
xvec : array_like
x-coordinates at which to calculate the Husimi-Q function.
yvec : array_like
y-coordinates at which to calculate the Husimi-Q function.
g : float
Scaling factor for `a = 0.5 * g * (x + iy)`, default `g = sqrt(2)`.
The value of `g` is related to the value of `hbar` in the commutation
relation `[x, y] = 1j * hbar` via `hbar=2/g^2` giving the default
value `hbar=1`.
Returns
--------
Q : array
Values representing the Q-function calculated over the specified range
[xvec,yvec].
"""
X, Y = meshgrid(xvec, yvec)
amat = 0.5 * g * (X + Y * 1j)
if not (isoper(state) or isket(state)):
raise TypeError('Invalid state operand to qfunc.')
qmat = zeros(size(amat))
if isket(state):
qmat = _qfunc_pure(state, amat)
elif isoper(state):
d, v = la.eig(state.full())
# d[i] = eigenvalue i
# v[:,i] = eigenvector i
qmat = zeros(np.shape(amat))
for k in arange(0, len(d)):
qmat1 = _qfunc_pure(v[:, k], amat)
qmat += real(d[k] * qmat1)
qmat = 0.25 * qmat * g ** 2
return qmat
#
# Q-function for a pure state: Q = |<alpha|psi>|^2 / pi
#
# |psi> = the state in fock basis
# |alpha> = the coherent state with amplitude alpha
#
def _qfunc_pure(psi, alpha_mat):
"""
Calculate the Q-function for a pure state.
"""
n = np.prod(psi.shape)
if isinstance(psi, Qobj):
psi = psi.full().flatten()
else:
psi = psi.T
qmat = abs(polyval(fliplr([psi / sqrt(factorial(arange(n)))])[0],
conjugate(alpha_mat))) ** 2
return real(qmat) * exp(-abs(alpha_mat) ** 2) / pi
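# Sanity check (illustrative note, not part of the original file): for the
# vacuum state psi = basis(N, 0) the polynomial above is identically 1, so
#     Q(alpha) = exp(-|alpha|**2) / pi,
# the expected Husimi function of |0>.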
# -----------------------------------------------------------------------------
# PSEUDO DISTRIBUTION FUNCTIONS FOR SPINS
#
def spin_q_function(rho, theta, phi):
"""Husimi Q-function for spins.
Parameters
----------
    rho : qobj
A state vector or density matrix for a spin-j quantum system.
theta : array_like
Polar angle at which to calculate the Husimi-Q function.
phi : array_like
Azimuthal angle at which to calculate the Husimi-Q function.
Returns
-------
Q, THETA, PHI : 2d-array
Values representing the spin Husimi Q function at the values specified
by THETA and PHI.
"""
if rho.type == 'bra':
rho = rho.dag()
if rho.type == 'ket':
rho = ket2dm(rho)
J = rho.shape[0]
j = (J - 1) / 2
THETA, PHI = meshgrid(theta, phi)
Q = np.zeros_like(THETA, dtype=complex)
for m1 in arange(-j, j+1):
Q += binom(2*j, j+m1) * cos(THETA/2) ** (2*(j-m1)) * sin(THETA/2) ** (2*(j+m1)) * \
rho.data[int(j-m1), int(j-m1)]
for m2 in arange(m1+1, j+1):
Q += (sqrt(binom(2*j, j+m1)) * sqrt(binom(2*j, j+m2)) *
cos(THETA/2) ** (2*j-m1-m2) * sin(THETA/2) ** (2*j+m1+m2)) * \
(exp(1j * (m2-m1) * PHI) * rho.data[int(j-m1), int(j-m2)] +
exp(1j * (m1-m2) * PHI) * rho.data[int(j-m2), int(j-m1)])
return Q.real/pi, THETA, PHI
def _rho_kq(rho, j, k, q):
v = 0j
for m1 in arange(-j, j+1):
for m2 in arange(-j, j+1):
v += (-1)**(j - m1 - q) * clebsch(j, j, k, m1, -m2,
q) * rho.data[m1 + j, m2 + j]
return v
def spin_wigner(rho, theta, phi):
"""Wigner function for a spin-j system on the 2-sphere of radius j
(for j = 1/2 this is the Bloch sphere).
Parameters
----------
    rho : qobj
A state vector or density matrix for a spin-j quantum system.
theta : array_like
Polar angle at which to calculate the W function.
phi : array_like
Azimuthal angle at which to calculate the W function.
Returns
-------
W, THETA, PHI : 2d-array
Values representing the spin Wigner function at the values specified
by THETA and PHI.
Notes
-----
Experimental.
"""
if rho.type == 'bra':
rho = rho.dag()
if rho.type == 'ket':
rho = ket2dm(rho)
J = rho.shape[0]
j = (J - 1) / 2
THETA, PHI = meshgrid(theta, phi)
W = np.zeros_like(THETA, dtype=complex)
for k in range(int(2 * j)+1):
for q in arange(-k, k+1):
# sph_harm takes azimuthal angle then polar angle as arguments
W += _rho_kq(rho, j, k, q) * sph_harm(q, k, PHI, THETA)
return W, THETA, PHI
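# Hedged usage sketch (added for illustration): evaluating the spin Wigner
# function of a spin-1/2 "up" state on a small angular grid, assuming the
# QuTiP `spin_state` helper is available. Note that W is returned as a
# complex array (the function is marked experimental above).
#
#     import numpy as np
#     from qutip import spin_state
#     theta = np.linspace(0, np.pi, 64)
#     phi = np.linspace(0, 2 * np.pi, 64)
#     W, THETA, PHI = spin_wigner(spin_state(0.5, 0.5), theta, phi)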
| 32.755102 | 92 | 0.554559 |
88527f3b1b963d1b69885003d14e177f6d3d2961 | 1,004 | py | Python | setup.py | dashonigkuchen/MinePI | a7a79561010614eb93abfe8ced3e27d016211eb1 | [
"MIT"
] | 8 | 2021-06-12T17:14:22.000Z | 2022-03-22T13:57:38.000Z | setup.py | dashonigkuchen/MinePI | a7a79561010614eb93abfe8ced3e27d016211eb1 | [
"MIT"
] | 7 | 2021-06-12T17:47:02.000Z | 2022-02-12T18:23:46.000Z | setup.py | dashonigkuchen/MinePI | a7a79561010614eb93abfe8ced3e27d016211eb1 | [
"MIT"
] | 5 | 2021-08-12T17:43:13.000Z | 2022-03-22T13:39:22.000Z | from distutils.core import setup
setup(
name = "MinePI",
packages = ["MinePI"],
version = "0.4.2",
license = "MIT",
description = "Minecraft utility library.",
author = "benno1237, honigkuchen",
author_email = "benno.kollreider@gmail.com",
url = "https://github.com/benno1237/MinePI",
download_url = "https://github.com/benno1237/MinePI/archive/refs/tags/0.4.2.tar.gz",
keywords = ["Minecraft", "Skin", "Render", "Head", "UUID"],
install_requires = [
"aiohttp",
"Pillow",
"sphinx-prompt"
],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
],
)
| 32.387097 | 88 | 0.598606 |
6cbf6ca37635930496c4a99a18a0d2e6ad6ea355 | 2,844 | py | Python | flink-python/pyflink/ml/tests/test_ml_environment.py | mnmhouse/flink | 8b05cbee4425c5ee33d73bed1473e075d7e17387 | [
"Apache-2.0"
] | 41 | 2018-11-14T04:05:42.000Z | 2022-02-09T10:39:23.000Z | flink-python/pyflink/ml/tests/test_ml_environment.py | mnmhouse/flink | 8b05cbee4425c5ee33d73bed1473e075d7e17387 | [
"Apache-2.0"
] | 15 | 2021-06-13T18:06:12.000Z | 2022-02-09T22:40:04.000Z | flink-python/pyflink/ml/tests/test_ml_environment.py | fantasticKe/flink | c42ad0fcbcd5f2666952ee3fc4763490915091f6 | [
"Apache-2.0"
] | 16 | 2019-01-04T09:19:03.000Z | 2022-01-10T14:34:31.000Z | ################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.ml.api.ml_environment_factory import MLEnvironment
from pyflink.dataset import ExecutionEnvironment
from pyflink.datastream import StreamExecutionEnvironment
from pyflink.table import BatchTableEnvironment, StreamTableEnvironment
from pyflink.testing.test_case_utils import PyFlinkTestCase
class MLEnvironmentTest(PyFlinkTestCase):
def test_default_constructor(self):
ml_environment = MLEnvironment()
self.assertIsNotNone(ml_environment.get_execution_environment())
self.assertIsNotNone(ml_environment.get_stream_execution_environment())
self.assertIsNotNone(ml_environment.get_batch_table_environment())
self.assertIsNotNone(ml_environment.get_stream_table_environment())
def test_construct_with_batch_env(self):
execution_environment = ExecutionEnvironment.get_execution_environment()
batch_table_environment = BatchTableEnvironment.create(execution_environment)
ml_environment = MLEnvironment(
exe_env=execution_environment,
batch_tab_env=batch_table_environment)
self.assertEqual(ml_environment.get_execution_environment(), execution_environment)
self.assertEqual(ml_environment.get_batch_table_environment(), batch_table_environment)
def test_construct_with_stream_env(self):
stream_execution_environment = StreamExecutionEnvironment.get_execution_environment()
stream_table_environment = StreamTableEnvironment.create(stream_execution_environment)
ml_environment = MLEnvironment(
stream_exe_env=stream_execution_environment,
stream_tab_env=stream_table_environment)
self.assertEqual(
ml_environment.get_stream_execution_environment(),
stream_execution_environment)
self.assertEqual(ml_environment.get_stream_table_environment(), stream_table_environment)
| 50.785714 | 97 | 0.741913 |
95d96401b96dcda6663fd81641635f82746ff31c | 10,165 | py | Python | arena/main.py | Xewus/Arena | f5aaa8ca3ba8b7fb66e359b1d973dc6a219821f1 | [
"MIT"
] | null | null | null | arena/main.py | Xewus/Arena | f5aaa8ca3ba8b7fb66e359b1d973dc6a219821f1 | [
"MIT"
] | null | null | null | arena/main.py | Xewus/Arena | f5aaa8ca3ba8b7fb66e359b1d973dc6a219821f1 | [
"MIT"
] | null | null | null | import random
from pygame import mixer
from time import sleep
from game import game_settings
from game.game_settings import (
CREATE_USERS_HERO, MAX_HERO_DEFENSE, MAX_HERO_ATTACK,
MAX_HERO_DODGE, MAX_HERO_HEALTH, WITH_THINGS)
from game.heroes import AVAILABLE_HEROES_CLASSES, Hero
from game.things import THINGS
mixer.init()
start_game = mixer.Sound(game_settings.start_game)
burn = mixer.Sound(game_settings.burn_child)
last_breath = mixer.Sound(game_settings.last_breath)
death_half_population = mixer.Sound(game_settings.death_half_population)
win = mixer.Sound(game_settings.win)
COUNT_BOTS = game_settings.COUNT_BOTS
COUNT_THINGS_ON_HERO = game_settings.COUNT_THINGS_ON_HERO
MAX_POPULATION = game_settings.MAX_POPULATION
SURVIVAL = game_settings.SURVIVAL
FERTILITY = game_settings.FERTILITY
def user_settings():
if input('Изменить настройки игры? Y/N: ').lower() != 'y':
return None
global COUNT_BOTS
global COUNT_THINGS_ON_HERO
global MAX_POPULATION
global SURVIVAL
count_bots = False
while count_bots is False:
count_bots = check_input_numeric_value(
atr='количество ботов',
max_value=game_settings.COUNT_BOTS)
COUNT_BOTS = count_bots
count_things_on_hero = False
while count_things_on_hero is False:
count_things_on_hero = check_input_numeric_value(
atr='количество вещей у героя',
max_value=game_settings.COUNT_THINGS_ON_HERO)
COUNT_THINGS_ON_HERO = count_things_on_hero
max_population = False
while max_population is False:
max_population = check_input_numeric_value(
atr='максимально количество героев',
min_value=COUNT_BOTS, max_value=game_settings.MAX_POPULATION)
MAX_POPULATION = max_population
    SURVIVAL = input(
        'Установить режим игры "на выживание"? '
        'HP не будет восстанавливаться после боя. Y/N: ').lower() == 'y'
def check_input_numeric_value(atr, min_value=0, max_value=1):
'''Checking user input for numeric attributes.'''
value = input(
f'Установите {atr} от {min_value} до {max_value}: ')
if not value.isdigit():
print('Введены неверные данные.')
return False
value = int(value)
return (value, max_value)[value > max_value]
def auto_create_hero(names):
'''Creates bots when the program starts.'''
klasses = list(AVAILABLE_HEROES_CLASSES.values())
klass = random.choice(klasses)
name, surname, sex = names.pop(random.randint(0, len(names) - 1))
defense = random.randint(0, MAX_HERO_DEFENSE)
attack = random.randint(1, MAX_HERO_ATTACK)
dodge = random.randint(0, MAX_HERO_DODGE)
helth = random.randint(1, MAX_HERO_HEALTH)
hero = klass(name, surname, sex, defense, attack, dodge, helth)
print(f'Create {type(hero).__name__} "{hero.name}" {hero.sex}\n'
f'def={hero.defense}, attack={hero.attack}, '
f'dodge={hero.dodge}, HP={hero.health}\n\n')
return hero
def create_hero():
'''Creating a custom hero.'''
klass = False
while klass is False:
print('На данный момент в игре доступны следующие классы:')
[print(klass.__name__) for klass in AVAILABLE_HEROES_CLASSES.values()]
klass = input(
'Выберите класс введя первую букву класса: ').lower()
klass = AVAILABLE_HEROES_CLASSES.get(klass, False)
if not klass:
print('Не правильно выбран класс.')
else:
print(f'Выбран {klass.__name__}')
name = False
while name is False:
name = input('Введите имя только из букв: ')
if not name.isalpha():
name = False
print('Не правильное имя.')
continue
name = name.capitalize()
print(f'Выбрано {name}')
surname = 'Userson'
sex = False
while sex is False:
sex = input('Выберите пол персонажа W/M: ').lower()
if sex not in 'mw':
sex = False
print('Неправильно указан пол.')
continue
defense = False
while defense is False:
defense = check_input_numeric_value(
atr='защита', max_value=MAX_HERO_DEFENSE)
attack = False
while attack is False:
attack = check_input_numeric_value(
atr='атака', max_value=MAX_HERO_ATTACK)
dodge = False
while dodge is False:
dodge = check_input_numeric_value(
atr='уклонение', max_value=MAX_HERO_DODGE)
health = False
while health is False:
health = check_input_numeric_value(
atr='здоровье', max_value=MAX_HERO_HEALTH)
hero = klass(name, surname, sex, defense, attack, dodge, health)
return hero
def user_create_hero(heroes):
'''Allows the user to set game settings and create custom heroes.'''
create_heroes = CREATE_USERS_HERO
while create_heroes:
available_count_create = MAX_POPULATION - len(heroes)
if not available_count_create:
break
print(f'Доступно создание {available_count_create} героев')
create = input('Желаете создать нового героя? Y/N: ').lower()
if create != 'y':
break
hero = create_hero()
print(f'Создан {type(hero).__name__} "{hero.name}" {hero.sex}\n'
f'def={hero.defense}, attack={hero.attack}, '
f'dodge={hero.dodge} HP={hero.health}\n')
heroes.append(hero)
def get_things(heroes, things):
'''Distribution of things to heroes.'''
if not WITH_THINGS or COUNT_THINGS_ON_HERO == 0:
return None
for hero in heroes:
limit = random.randint(0, COUNT_THINGS_ON_HERO)
choised_things = random.sample(things, limit)
if choised_things:
hero.set_things(choised_things)
print(f'\n{hero.name} получил предметы:')
for thing in choised_things:
print(f'"{thing.name}"')
else:
print(f'\n"{hero.name}" не повезло, ему не выпало ничего!')
print(
f'def={hero.defense}, attack={hero.attack}, '
f'dodge={hero.dodge}, HP={hero.health}\n\n')
def burn_child(heroes, fighter_1, fighter_2):
'''Creating a new hero if two opposite-sex heroes meet.'''
klasse = type(fighter_1)
name = fighter_1.name
    surname = (fighter_2.name + 'son')[:13]
sex = fighter_2.sex
defense = (fighter_1.defense + fighter_2.defense) // 2
attack = (fighter_1.attack + fighter_2.attack) // 2
dodge = (fighter_1.dodge + fighter_2.dodge) // 2
health = (fighter_1.health + fighter_2.health) // 2
child = klasse(name, surname, sex, defense, attack, dodge, health)
heroes.append(child)
burn.play()
sleep(2)
if len(heroes) > MAX_POPULATION:
global FERTILITY
FERTILITY -= 1
random.shuffle(heroes)
del heroes[:(MAX_POPULATION // 2)]
print('Половина населения погибли от голода!')
death_half_population.play()
sleep(3)
return child
def hit(heroes, fighter_1, fighter_2, freeze_params):
'''The one hit of two heroes.'''
fighter_2.decrease_params(fighter_1.attack)
if fighter_2.health <= 0:
heroes.remove(fighter_2)
if not SURVIVAL:
(fighter_1.defense, fighter_1.attack,
fighter_1.dodge, fighter_1.health) = freeze_params
return fighter_1
return fighter_2
def two_heroes_fight(heroes, fighter_1, fighter_2):
'''The battle of two heroes.
A new bot may appear if two heroes of the opposite sex meet.'''
def tuple_for_freeze(fighter):
return (
fighter.defense, fighter.attack, fighter.dodge, fighter.health)
freeze_params_1 = tuple_for_freeze(fighter_1)
freeze_params_2 = tuple_for_freeze(fighter_2)
if fighter_1.sex != fighter_2.sex and random.randint(0, FERTILITY):
return burn_child(heroes, fighter_1, fighter_2)
while True:
alive = hit(heroes, fighter_1, fighter_2, freeze_params_1)
if alive == fighter_1:
return fighter_1
alive = hit(heroes, fighter_2, fighter_1, freeze_params_2)
if alive == fighter_2:
return fighter_2
def main():
start_game.play()
user_settings()
names = game_settings.NAMES.copy()
heroes = [auto_create_hero(names) for _ in range(COUNT_BOTS)]
count_battle = 0
user_create_hero(heroes)
if not heroes:
return print('Желающих сражаться - нет.')
get_things(heroes, THINGS)
print('\n--------- FIGHT! --------\n')
while Hero.population > 1:
fighter_1, fighter_2 = random.sample(heroes, 2)
count_battle += 1
print(f'Бой №{count_battle} начался! \n'
f'Участники: {type(fighter_1).__name__} {fighter_1.name} и'
f' {type(fighter_2).__name__} {fighter_2.name}.\n')
winner = two_heroes_fight(heroes, fighter_1, fighter_2)
if winner not in (fighter_1, fighter_2):
print('Был рождён:\n'
f'{type(winner).__name__} {winner.name} {winner.surname}!\n'
f'def={winner.defense}, attack={winner.attack}, '
f'dodge={winner.dodge}, HP={winner.health}')
else:
del fighter_1
del fighter_2
last_breath.play()
print(f'В этом бою победил {winner.name}!!!\n')
sleep(2)
winner = heroes[0]
win.play()
print(f' Поздравляем чемпиона {count_battle} боёв:\n'
f' {type(winner).__name__} {winner.name} {winner.surname}!!!\n'
f'def={winner.defense}, attack={winner.attack}, '
f'dodge={winner.dodge}, HP={winner.health}')
print('\n--------- GAME OVER --------\n')
if __name__ == '__main__':
running = True
while running:
main()
running = input('Сыграем ещё? Y/N: \n\n').lower()
running = running == 'y'
| 34.692833 | 79 | 0.623512 |
bcfd2a45760b97a1a6ce96236d39e8d45edc0eac | 2,603 | py | Python | ExcelTestCheck/ExcelTestCheck.py | yanagi-ori/EysenckTest_ExcelCheck | 4b4a0e791efb09a3d42e3317d608809830455336 | [
"MIT"
] | null | null | null | ExcelTestCheck/ExcelTestCheck.py | yanagi-ori/EysenckTest_ExcelCheck | 4b4a0e791efb09a3d42e3317d608809830455336 | [
"MIT"
] | null | null | null | ExcelTestCheck/ExcelTestCheck.py | yanagi-ori/EysenckTest_ExcelCheck | 4b4a0e791efb09a3d42e3317d608809830455336 | [
"MIT"
] | null | null | null | from tkinter import Tk, Button, Label
from tkinter.filedialog import askopenfile
from tkinter.messagebox import showerror
from PIL import ImageTk, Image
import pyexcel
import xlwt
def calculation(test_path, ans_path):
my_array = pyexcel.get_array(file_name=test_path)
ans = pyexcel.get_array(file_name=ans_path)
book = xlwt.Workbook(encoding="utf-8")
results = book.add_sheet("Результаты")
iq = book.add_sheet("IQ")
for row in range(1, len(my_array)):
total = 0
for i in range(1, len(my_array[row])):
if str(my_array[row][i]).lower().replace(" ", '') == str(ans[i][0]):
total += 1
results.write(row - 1, 0, my_array[row][0])
results.write(row - 1, 1, total)
iq.write(row - 1, 0, my_array[row][0])
        if total == 0:
            iq.write(row - 1, 1, "<75")
        else:
            iq.write(row - 1, 1, 75 + 2.5 * total)
book.save("results.xls")
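# Hedged usage sketch (illustrative file names, not part of the original code):
#
#     calculation("student_answers.xlsx", "correct_answers.xlsx")
#     # writes results.xls with two sheets: raw scores and the derived IQ values.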
def initialization():
def open_test():
root.test_path = askopenfile(parent=root)
if root.test_path is not None:
root.checkmark_l1.place(anchor='center', rely=0.4, relx=0.8)
def open_ans():
root.ans_path = askopenfile(parent=root)
if root.ans_path is not None:
root.checkmark_l2.place(anchor='center', rely=0.5, relx=0.8)
def calculate():
if root.test_path is not None and root.ans_path is not None:
try:
calculation(root.test_path.name, root.ans_path.name)
Label(text="Готово. Провертье папку с программой.").place(anchor='center', rely=0.8, relx=0.5)
except Exception:
Label(text="Неверный тип входных данных.").place(anchor='center', rely=0.8, relx=0.5)
else:
showerror(title="Ошибка", message="Не выбраны исходные файлы")
root.button_test = Button(root, text='Файл с тестовыми данными',
command=open_test).place(anchor="center", rely=0.4, relx=0.5)
root.button_ans = Button(root, text='Файл с ответами',
command=open_ans).place(anchor='center', rely=0.5, relx=0.5)
root.button_ans = Button(root, text='Подсчитать', command=calculate).place(anchor='center', rely=0.6, relx=0.5)
root = Tk()
root.title("EysenckTest")
root.geometry("360x360")
root.test_path = None
root.ans_path = None
checkmark_img = ImageTk.PhotoImage(Image.open("./checkmark.png").resize((25, 25), Image.ANTIALIAS))
root.checkmark_l1 = Label(image=checkmark_img)
root.checkmark_l2 = Label(image=checkmark_img)
initialization()
root.mainloop()
| 36.152778 | 115 | 0.631579 |
f52e8971096574484cb0f7f8f76bc4621857d56f | 347 | py | Python | commands/deviot_rebuild_boards.py | tablatronix/Deviot | ac77241301d607304b2b4d07adad3a707cfadca2 | [
"Apache-2.0"
] | null | null | null | commands/deviot_rebuild_boards.py | tablatronix/Deviot | ac77241301d607304b2b4d07adad3a707cfadca2 | [
"Apache-2.0"
] | null | null | null | commands/deviot_rebuild_boards.py | tablatronix/Deviot | ac77241301d607304b2b4d07adad3a707cfadca2 | [
"Apache-2.0"
] | null | null | null | from sublime_plugin import WindowCommand
from ..platformio.pio_bridge import PioBridge
class DeviotRebuildBoardsCommand(WindowCommand):
"""
Rebuild the boards.json file who is used to list the
boards in the quick menu
Extends: sublime_plugin.WindowCommand
"""
def run(self):
PioBridge().save_boards_list_async() | 26.692308 | 57 | 0.740634 |
2dcfb90f3d0716c3e8272f0eb5abd7d4f428d8f4 | 32,013 | py | Python | tests/core/full_node/test_full_node_store.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 6 | 2021-06-30T13:03:47.000Z | 2021-07-10T12:46:37.000Z | tests/core/full_node/test_full_node_store.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 8 | 2021-07-01T15:45:09.000Z | 2021-09-08T04:30:46.000Z | tests/core/full_node/test_full_node_store.py | yuanliuus/thyme-blockchain | 9ea5cddc78f601fcbe77101d74147cf8190e423d | [
"Apache-2.0"
] | 11 | 2021-07-03T17:30:57.000Z | 2022-03-15T08:47:03.000Z | # flake8: noqa: F811, F401
import asyncio
from secrets import token_bytes
from typing import List, Optional
import pytest
from thyme.consensus.blockchain import ReceiveBlockResult
from thyme.consensus.multiprocess_validation import PreValidationResult
from thyme.consensus.pot_iterations import is_overflow_block
from thyme.full_node.full_node_store import FullNodeStore
from thyme.full_node.signage_point import SignagePoint
from thyme.protocols import timelord_protocol
from thyme.protocols.timelord_protocol import NewInfusionPointVDF
from thyme.types.blockchain_format.sized_bytes import bytes32
from thyme.types.unfinished_block import UnfinishedBlock
from thyme.util.block_cache import BlockCache
from thyme.util.block_tools import get_signage_point
from thyme.util.hash import std_hash
from thyme.util.ints import uint8, uint32, uint64, uint128
from tests.core.fixtures import default_1000_blocks, empty_blockchain # noqa: F401
from tests.setup_nodes import bt, test_constants
@pytest.fixture(scope="session")
def event_loop():
loop = asyncio.get_event_loop()
yield loop
class TestFullNodeStore:
@pytest.mark.asyncio
async def test_basic_store(self, empty_blockchain, normalized_to_identity: bool = False):
blockchain = empty_blockchain
blocks = bt.get_consecutive_blocks(
10,
seed=b"1234",
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
store = FullNodeStore(test_constants)
unfinished_blocks = []
for block in blocks:
unfinished_blocks.append(
UnfinishedBlock(
block.finished_sub_slots,
block.reward_chain_block.get_unfinished(),
block.challenge_chain_sp_proof,
block.reward_chain_sp_proof,
block.foliage,
block.foliage_transaction_block,
block.transactions_info,
block.transactions_generator,
[],
)
)
# Add/get candidate block
assert store.get_candidate_block(unfinished_blocks[0].get_hash()) is None
for height, unf_block in enumerate(unfinished_blocks):
store.add_candidate_block(unf_block.get_hash(), uint32(height), unf_block)
candidate = store.get_candidate_block(unfinished_blocks[4].get_hash())
assert candidate is not None
assert candidate[1] == unfinished_blocks[4]
store.clear_candidate_blocks_below(uint32(8))
assert store.get_candidate_block(unfinished_blocks[5].get_hash()) is None
assert store.get_candidate_block(unfinished_blocks[8].get_hash()) is not None
# Test seen unfinished blocks
h_hash_1 = bytes32(token_bytes(32))
assert not store.seen_unfinished_block(h_hash_1)
assert store.seen_unfinished_block(h_hash_1)
store.clear_seen_unfinished_blocks()
assert not store.seen_unfinished_block(h_hash_1)
# Add/get unfinished block
for height, unf_block in enumerate(unfinished_blocks):
assert store.get_unfinished_block(unf_block.partial_hash) is None
store.add_unfinished_block(uint32(height), unf_block, PreValidationResult(None, uint64(123532), None))
assert store.get_unfinished_block(unf_block.partial_hash) == unf_block
store.remove_unfinished_block(unf_block.partial_hash)
assert store.get_unfinished_block(unf_block.partial_hash) is None
blocks = bt.get_consecutive_blocks(
1,
skip_slots=5,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
)
sub_slots = blocks[0].finished_sub_slots
assert len(sub_slots) == 5
assert (
store.get_finished_sub_slots(
BlockCache({}),
None,
sub_slots[0].challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
)
== []
)
# Test adding non-connecting sub-slots genesis
assert store.get_sub_slot(test_constants.GENESIS_CHALLENGE) is None
assert store.get_sub_slot(sub_slots[0].challenge_chain.get_hash()) is None
assert store.get_sub_slot(sub_slots[1].challenge_chain.get_hash()) is None
assert store.new_finished_sub_slot(sub_slots[1], blockchain, None, None) is None
assert store.new_finished_sub_slot(sub_slots[2], blockchain, None, None) is None
# Test adding sub-slots after genesis
assert store.new_finished_sub_slot(sub_slots[0], blockchain, None, None) is not None
sub_slot = store.get_sub_slot(sub_slots[0].challenge_chain.get_hash())
assert sub_slot is not None
assert sub_slot[0] == sub_slots[0]
assert store.get_sub_slot(sub_slots[1].challenge_chain.get_hash()) is None
assert store.new_finished_sub_slot(sub_slots[1], blockchain, None, None) is not None
for i in range(len(sub_slots)):
assert store.new_finished_sub_slot(sub_slots[i], blockchain, None, None) is not None
slot_i = store.get_sub_slot(sub_slots[i].challenge_chain.get_hash())
assert slot_i is not None
assert slot_i[0] == sub_slots[i]
assert store.get_finished_sub_slots(BlockCache({}), None, sub_slots[-1].challenge_chain.get_hash()) == sub_slots
assert store.get_finished_sub_slots(BlockCache({}), None, std_hash(b"not a valid hash")) is None
assert (
store.get_finished_sub_slots(BlockCache({}), None, sub_slots[-2].challenge_chain.get_hash())
== sub_slots[:-1]
)
# Test adding genesis peak
await blockchain.receive_block(blocks[0])
peak = blockchain.get_peak()
peak_full_block = await blockchain.get_full_peak()
if peak.overflow:
store.new_peak(peak, peak_full_block, sub_slots[-2], sub_slots[-1], None, blockchain)
else:
store.new_peak(peak, peak_full_block, None, sub_slots[-1], None, blockchain)
assert store.get_sub_slot(sub_slots[0].challenge_chain.get_hash()) is None
assert store.get_sub_slot(sub_slots[1].challenge_chain.get_hash()) is None
assert store.get_sub_slot(sub_slots[2].challenge_chain.get_hash()) is None
if peak.overflow:
slot_3 = store.get_sub_slot(sub_slots[3].challenge_chain.get_hash())
assert slot_3 is not None
assert slot_3[0] == sub_slots[3]
else:
assert store.get_sub_slot(sub_slots[3].challenge_chain.get_hash()) is None
slot_4 = store.get_sub_slot(sub_slots[4].challenge_chain.get_hash())
assert slot_4 is not None
assert slot_4[0] == sub_slots[4]
assert (
store.get_finished_sub_slots(
blockchain,
peak,
sub_slots[-1].challenge_chain.get_hash(),
)
== []
)
# Test adding non genesis peak directly
blocks = bt.get_consecutive_blocks(
2,
skip_slots=2,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
blocks = bt.get_consecutive_blocks(
3,
block_list_input=blocks,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
for block in blocks:
await blockchain.receive_block(block)
sb = blockchain.block_record(block.header_hash)
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
assert res[0] is None
# Add reorg blocks
blocks_reorg = bt.get_consecutive_blocks(
20,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
for block in blocks_reorg:
res, _, fork_height = await blockchain.receive_block(block)
if res == ReceiveBlockResult.NEW_PEAK:
if fork_height is not None and fork_height != block.height - 1:
fork_block = blockchain.block_record(blockchain.height_to_hash(fork_height))
else:
fork_block = None
sb = blockchain.block_record(block.header_hash)
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, fork_block, blockchain)
assert res[0] is None
# Add slots to the end
blocks_2 = bt.get_consecutive_blocks(
1,
block_list_input=blocks_reorg,
skip_slots=2,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
for slot in blocks_2[-1].finished_sub_slots:
store.new_finished_sub_slot(slot, blockchain, blockchain.get_peak(), await blockchain.get_full_peak())
assert store.get_sub_slot(sub_slots[3].challenge_chain.get_hash()) is None
assert store.get_sub_slot(sub_slots[4].challenge_chain.get_hash()) is None
# Test adding signage point
peak = blockchain.get_peak()
ss_start_iters = peak.ip_sub_slot_total_iters(test_constants)
for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
sp = get_signage_point(
test_constants,
blockchain,
peak,
ss_start_iters,
uint8(i),
[],
peak.sub_slot_iters,
)
assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)
blocks = blocks_reorg
while True:
blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
res, _, fork_height = await blockchain.receive_block(blocks[-1])
if res == ReceiveBlockResult.NEW_PEAK:
if fork_height is not None and fork_height != blocks[-1].height - 1:
fork_block = blockchain.block_record(blockchain.height_to_hash(fork_height))
else:
fork_block = None
sb = blockchain.block_record(blocks[-1].header_hash)
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(blocks[-1].header_hash)
res = store.new_peak(sb, blocks[-1], sp_sub_slot, ip_sub_slot, fork_block, blockchain)
assert res[0] is None
if sb.overflow and sp_sub_slot is not None:
assert sp_sub_slot != ip_sub_slot
break
peak = blockchain.get_peak()
assert peak.overflow
# Overflow peak should result in 2 finished sub slots
assert len(store.finished_sub_slots) == 2
# Add slots to the end, except for the last one, which we will use to test invalid SP
blocks_2 = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
skip_slots=3,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
for slot in blocks_2[-1].finished_sub_slots[:-1]:
store.new_finished_sub_slot(slot, blockchain, blockchain.get_peak(), await blockchain.get_full_peak())
finished_sub_slots = blocks_2[-1].finished_sub_slots
assert len(store.finished_sub_slots) == 4
# Test adding signage points for overflow blocks (sp_sub_slot)
ss_start_iters = peak.sp_sub_slot_total_iters(test_constants)
# for i in range(peak.signage_point_index, test_constants.NUM_SPS_SUB_SLOT):
# if i < peak.signage_point_index:
# continue
# latest = peak
# while latest.total_iters > peak.sp_total_iters(test_constants):
# latest = blockchain.blocks[latest.prev_hash]
# sp = get_signage_point(
# test_constants,
# blockchain.blocks,
# latest,
# ss_start_iters,
# uint8(i),
# [],
# peak.sub_slot_iters,
# )
# assert store.new_signage_point(i, blockchain.blocks, peak, peak.sub_slot_iters, sp)
# Test adding signage points for overflow blocks (ip_sub_slot)
for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
sp = get_signage_point(
test_constants,
blockchain,
peak,
peak.ip_sub_slot_total_iters(test_constants),
uint8(i),
[],
peak.sub_slot_iters,
)
assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)
# Test adding future signage point, a few slots forward (good)
saved_sp_hash = None
for slot_offset in range(1, len(finished_sub_slots)):
for i in range(
1,
test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA,
):
sp = get_signage_point(
test_constants,
blockchain,
peak,
peak.ip_sub_slot_total_iters(test_constants) + slot_offset * peak.sub_slot_iters,
uint8(i),
finished_sub_slots[:slot_offset],
peak.sub_slot_iters,
)
assert sp.cc_vdf is not None
saved_sp_hash = sp.cc_vdf.output.get_hash()
assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)
# Test adding future signage point (bad)
for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
sp = get_signage_point(
test_constants,
blockchain,
peak,
peak.ip_sub_slot_total_iters(test_constants) + len(finished_sub_slots) * peak.sub_slot_iters,
uint8(i),
finished_sub_slots[: len(finished_sub_slots)],
peak.sub_slot_iters,
)
assert not store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)
# Test adding past signage point
sp = SignagePoint(
blocks[1].reward_chain_block.challenge_chain_sp_vdf,
blocks[1].challenge_chain_sp_proof,
blocks[1].reward_chain_block.reward_chain_sp_vdf,
blocks[1].reward_chain_sp_proof,
)
assert not store.new_signage_point(
blocks[1].reward_chain_block.signage_point_index,
blockchain,
peak,
blockchain.block_record(blocks[1].header_hash).sp_sub_slot_total_iters(test_constants),
sp,
)
# Get signage point by index
assert (
store.get_signage_point_by_index(
finished_sub_slots[0].challenge_chain.get_hash(),
uint8(4),
finished_sub_slots[0].reward_chain.get_hash(),
)
is not None
)
assert (
store.get_signage_point_by_index(finished_sub_slots[0].challenge_chain.get_hash(), uint8(4), std_hash(b"1"))
is None
)
# Get signage point by hash
assert store.get_signage_point(saved_sp_hash) is not None
assert store.get_signage_point(std_hash(b"2")) is None
# Test adding signage points before genesis
store.initialize_genesis_sub_slot()
assert len(store.finished_sub_slots) == 1
for i in range(1, test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA):
sp = get_signage_point(
test_constants,
BlockCache({}, {}),
None,
uint128(0),
uint8(i),
[],
peak.sub_slot_iters,
)
assert store.new_signage_point(uint8(i), blockchain, None, peak.sub_slot_iters, sp)
blocks_3 = bt.get_consecutive_blocks(
1,
skip_slots=2,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
for slot in blocks_3[-1].finished_sub_slots:
store.new_finished_sub_slot(slot, blockchain, None, None)
assert len(store.finished_sub_slots) == 3
finished_sub_slots = blocks_3[-1].finished_sub_slots
for slot_offset in range(1, len(finished_sub_slots) + 1):
for i in range(
1,
test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA,
):
sp = get_signage_point(
test_constants,
BlockCache({}, {}),
None,
slot_offset * peak.sub_slot_iters,
uint8(i),
finished_sub_slots[:slot_offset],
peak.sub_slot_iters,
)
assert store.new_signage_point(uint8(i), blockchain, None, peak.sub_slot_iters, sp)
# Test adding signage points after genesis
blocks_4 = bt.get_consecutive_blocks(
1,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
blocks_5 = bt.get_consecutive_blocks(
1,
block_list_input=blocks_4,
skip_slots=1,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
# If this is not the case, fix test to find a block that is
assert (
blocks_4[-1].reward_chain_block.signage_point_index
< test_constants.NUM_SPS_SUB_SLOT - test_constants.NUM_SP_INTERVALS_EXTRA
)
await blockchain.receive_block(blocks_4[-1])
sb = blockchain.block_record(blocks_4[-1].header_hash)
store.new_peak(sb, blocks_4[-1], None, None, None, blockchain)
for i in range(
sb.signage_point_index + test_constants.NUM_SP_INTERVALS_EXTRA,
test_constants.NUM_SPS_SUB_SLOT,
):
if is_overflow_block(test_constants, uint8(i)):
finished_sub_slots = blocks_5[-1].finished_sub_slots
else:
finished_sub_slots = []
sp = get_signage_point(
test_constants,
blockchain,
sb,
uint128(0),
uint8(i),
finished_sub_slots,
peak.sub_slot_iters,
)
assert store.new_signage_point(uint8(i), empty_blockchain, sb, peak.sub_slot_iters, sp)
# Test future EOS cache
store.initialize_genesis_sub_slot()
blocks = bt.get_consecutive_blocks(
1,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
await blockchain.receive_block(blocks[-1])
while True:
blocks = bt.get_consecutive_blocks(
1,
block_list_input=blocks,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
)
await blockchain.receive_block(blocks[-1])
sb = blockchain.block_record(blocks[-1].header_hash)
if sb.first_in_sub_slot:
break
assert len(blocks) >= 2
dependant_sub_slots = blocks[-1].finished_sub_slots
peak = blockchain.get_peak()
peak_full_block = await blockchain.get_full_peak()
for block in blocks[:-2]:
sb = blockchain.block_record(block.header_hash)
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
peak = sb
peak_full_block = block
res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
assert res[0] is None
assert store.new_finished_sub_slot(dependant_sub_slots[0], blockchain, peak, peak_full_block) is None
block = blocks[-2]
sb = blockchain.block_record(block.header_hash)
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
assert res[0] == dependant_sub_slots[0]
assert res[1] == res[2] == []
# Test future IP cache
store.initialize_genesis_sub_slot()
blocks = bt.get_consecutive_blocks(
60,
normalized_to_identity_cc_ip=normalized_to_identity,
normalized_to_identity_cc_sp=normalized_to_identity,
normalized_to_identity_cc_eos=normalized_to_identity,
normalized_to_identity_icc_eos=normalized_to_identity,
)
for block in blocks[:5]:
await blockchain.receive_block(block)
sb = blockchain.block_record(block.header_hash)
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(block.header_hash)
res = store.new_peak(sb, block, sp_sub_slot, ip_sub_slot, None, blockchain)
assert res[0] is None
case_0, case_1 = False, False
for i in range(5, len(blocks) - 1):
prev_block = blocks[i]
block = blocks[i + 1]
new_ip = NewInfusionPointVDF(
block.reward_chain_block.get_unfinished().get_hash(),
block.reward_chain_block.challenge_chain_ip_vdf,
block.challenge_chain_ip_proof,
block.reward_chain_block.reward_chain_ip_vdf,
block.reward_chain_ip_proof,
block.reward_chain_block.infused_challenge_chain_ip_vdf,
block.infused_challenge_chain_ip_proof,
)
store.add_to_future_ip(new_ip)
await blockchain.receive_block(prev_block)
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(prev_block.header_hash)
sb = blockchain.block_record(prev_block.header_hash)
res = store.new_peak(sb, prev_block, sp_sub_slot, ip_sub_slot, None, blockchain)
if len(block.finished_sub_slots) == 0:
case_0 = True
assert res[2] == [new_ip]
else:
case_1 = True
assert res[2] == []
found_ips: List[timelord_protocol.NewInfusionPointVDF] = []
for ss in block.finished_sub_slots:
ipvdf = store.new_finished_sub_slot(ss, blockchain, sb, prev_block)
assert ipvdf is not None
found_ips += ipvdf
assert found_ips == [new_ip]
# If flaky, increase the number of blocks created
assert case_0 and case_1
# Try to get two blocks in the same slot, such that we have
# SP, B2 SP .... SP B1
# i2 ......... i1
# Then do a reorg up to B2, removing all signage points after B2, but not before
for block in blocks:
await blockchain.receive_block(block)
while True:
blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1)
assert (await blockchain.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
peak = blockchain.get_peak()
sub_slots = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash)
store.new_peak(peak, blocks[-1], sub_slots[0], sub_slots[1], None, blockchain)
blocks = bt.get_consecutive_blocks(2, block_list_input=blocks, guarantee_transaction_block=True)
i3 = blocks[-3].reward_chain_block.signage_point_index
i2 = blocks[-2].reward_chain_block.signage_point_index
i1 = blocks[-1].reward_chain_block.signage_point_index
if (
len(blocks[-2].finished_sub_slots) == len(blocks[-1].finished_sub_slots) == 0
and not is_overflow_block(test_constants, signage_point_index=i2)
and not is_overflow_block(test_constants, signage_point_index=i1)
and i2 > i3 + 3
and i1 > (i2 + 3)
):
# We hit all the conditions that we want
all_sps: List[Optional[SignagePoint]] = [None] * test_constants.NUM_SPS_SUB_SLOT
def assert_sp_none(sp_index: int, is_none: bool):
sp_to_check: Optional[SignagePoint] = all_sps[sp_index]
assert sp_to_check is not None
assert sp_to_check.cc_vdf is not None
fetched = store.get_signage_point(sp_to_check.cc_vdf.output.get_hash())
assert (fetched is None) == is_none
if fetched is not None:
assert fetched == sp_to_check
for i in range(i3 + 1, test_constants.NUM_SPS_SUB_SLOT - 3):
finished_sub_slots = []
sp = get_signage_point(
test_constants,
blockchain,
peak,
uint128(peak.ip_sub_slot_total_iters(bt.constants)),
uint8(i),
finished_sub_slots,
peak.sub_slot_iters,
)
all_sps[i] = sp
assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)
# Adding a new peak clears all SPs after that peak
assert (await blockchain.receive_block(blocks[-2]))[0] == ReceiveBlockResult.NEW_PEAK
peak = blockchain.get_peak()
sub_slots = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash)
store.new_peak(peak, blocks[-2], sub_slots[0], sub_slots[1], None, blockchain)
assert_sp_none(i2, False)
assert_sp_none(i2 + 1, False)
assert_sp_none(i1, True)
assert_sp_none(i1 + 1, True)
assert_sp_none(i1 + 4, True)
for i in range(i2, test_constants.NUM_SPS_SUB_SLOT):
if is_overflow_block(test_constants, uint8(i)):
blocks_alt = bt.get_consecutive_blocks(1, block_list_input=blocks[:-1], skip_slots=1)
finished_sub_slots = blocks_alt[-1].finished_sub_slots
else:
finished_sub_slots = []
sp = get_signage_point(
test_constants,
blockchain,
peak,
uint128(peak.ip_sub_slot_total_iters(bt.constants)),
uint8(i),
finished_sub_slots,
peak.sub_slot_iters,
)
all_sps[i] = sp
assert store.new_signage_point(uint8(i), blockchain, peak, peak.sub_slot_iters, sp)
assert_sp_none(i2, False)
assert_sp_none(i2 + 1, False)
assert_sp_none(i1, False)
assert_sp_none(i1 + 1, False)
assert_sp_none(i1 + 4, False)
assert (await blockchain.receive_block(blocks[-1]))[0] == ReceiveBlockResult.NEW_PEAK
peak = blockchain.get_peak()
sub_slots = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash)
# Do a reorg, which should remove everything after B2
store.new_peak(
peak,
blocks[-1],
sub_slots[0],
sub_slots[1],
(await blockchain.get_block_records_at([blocks[-2].height]))[0],
blockchain,
)
assert_sp_none(i2, False)
assert_sp_none(i2 + 1, False)
assert_sp_none(i1, True)
assert_sp_none(i1 + 1, True)
assert_sp_none(i1 + 4, True)
break
else:
for block in blocks[-2:]:
assert (await blockchain.receive_block(block))[0] == ReceiveBlockResult.NEW_PEAK
@pytest.mark.asyncio
async def test_basic_store_compact_blockchain(self, empty_blockchain):
await self.test_basic_store(empty_blockchain, True)
@pytest.mark.asyncio
async def test_long_chain_slots(self, empty_blockchain, default_1000_blocks):
blockchain = empty_blockchain
store = FullNodeStore(test_constants)
blocks = default_1000_blocks
peak = None
peak_full_block = None
for block in blocks:
for sub_slot in block.finished_sub_slots:
assert store.new_finished_sub_slot(sub_slot, blockchain, peak, peak_full_block) is not None
res, _, _ = await blockchain.receive_block(block)
assert res == ReceiveBlockResult.NEW_PEAK
peak = blockchain.get_peak()
peak_full_block = await blockchain.get_full_peak()
sp_sub_slot, ip_sub_slot = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash)
store.new_peak(peak, peak_full_block, sp_sub_slot, ip_sub_slot, None, blockchain)
| 45.280057 | 120 | 0.620779 |
27984b1494d4e3a214d335ab59932e282a361570 | 2,925 | py | Python | src/tfi/driver/pytorch/load.py | ajbouh/tfi | 6e89e8c8f1ca3b285c788cc6b802fc44f9001290 | [
"MIT"
] | 160 | 2017-09-13T00:32:05.000Z | 2018-05-21T18:17:32.000Z | src/tfi/driver/pytorch/load.py | tesserai/tfi | 6e89e8c8f1ca3b285c788cc6b802fc44f9001290 | [
"MIT"
] | 6 | 2017-09-14T17:54:21.000Z | 2018-01-27T19:31:18.000Z | src/tfi/driver/pytorch/load.py | ajbouh/tfi | 6e89e8c8f1ca3b285c788cc6b802fc44f9001290 | [
"MIT"
] | 11 | 2017-09-13T00:37:08.000Z | 2018-03-05T08:03:34.000Z | import inspect
import torch
deserialized_objects = {}
restore_location = torch.serialization.default_restore_location
def _check_container_source(container_type, source_file, original_source):
current_source = inspect.getsource(container_type)
if original_source != current_source:
if container_type.dump_patches:
file_name = container_type.__name__ + '.patch'
diff = difflib.unified_diff(current_source.split('\n'),
original_source.split('\n'),
source_file,
source_file, lineterm="")
lines = '\n'.join(diff)
try:
with open(file_name, 'a+') as f:
file_size = f.seek(0, 2)
f.seek(0)
if file_size == 0:
f.write(lines)
elif file_size != len(lines) or f.read() != lines:
raise IOError
msg = ("Saved a reverse patch to " + file_name + ". "
"Run `patch -p0 < " + file_name + "` to revert your "
"changes.")
except IOError:
msg = ("Tried to save a patch, but couldn't create a "
"writable file " + file_name + ". Make sure it "
"doesn't exist and your working directory is "
"writable.")
else:
msg = ("you can retrieve the original source code by "
"accessing the object's source attribute or set "
"`torch.nn.Module.dump_patches = True` and use the "
"patch tool to revert the changes.")
msg = ("source code of class '{}' has changed. {}"
.format(torch.typename(container_type), msg))
warnings.warn(msg, SourceChangeWarning)
def persistent_load(saved_id):
assert isinstance(saved_id, tuple)
typename = saved_id[0]
data = saved_id[1:]
if typename == 'module':
# Ignore containers that don't have any sources saved
if all(data[1:]):
_check_container_source(*data)
return data[0]
elif typename == 'storage':
data_type, root_key, location, size, view_metadata = data
if root_key not in deserialized_objects:
deserialized_objects[root_key] = restore_location(
data_type(size), location)
storage = deserialized_objects[root_key]
if view_metadata is not None:
view_key, offset, view_size = view_metadata
if view_key not in deserialized_objects:
deserialized_objects[view_key] = storage[offset:offset + view_size]
return deserialized_objects[view_key]
else:
return storage
else:
raise RuntimeError("Unknown saved id type: %s" % saved_id[0])
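# Illustrative sketch, not part of the original module: persistent_load() is meant to
# be plugged into a pickle Unpickler when reading the legacy torch checkpoint format,
# so that storage references get resolved through restore_location above. The helper
# names below are hypothetical.
import pickle
class _LegacyUnpickler(pickle.Unpickler):
    def persistent_load(self, saved_id):
        # Delegates to the module-level persistent_load() defined above.
        return persistent_load(saved_id)
def _load_legacy_checkpoint(fileobj):
    # Convenience wrapper: deserialize one object graph from an open binary file.
    return _LegacyUnpickler(fileobj).load()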
| 44.318182 | 83 | 0.552137 |
cd7134e5f3a880ec6370166598d7a6d5642ef270 | 121 | py | Python | models/model_canteen/admin.py | SanjarbekSaminjonov/musofirlar.backend | 23b09e90cc4e3d153063ad1768b5ae1c18ff866d | [
"Apache-2.0"
] | 1 | 2021-12-23T12:43:17.000Z | 2021-12-23T12:43:17.000Z | models/model_canteen/admin.py | SanjarbekSaminjonov/musofirlar.backend | 23b09e90cc4e3d153063ad1768b5ae1c18ff866d | [
"Apache-2.0"
] | null | null | null | models/model_canteen/admin.py | SanjarbekSaminjonov/musofirlar.backend | 23b09e90cc4e3d153063ad1768b5ae1c18ff866d | [
"Apache-2.0"
] | null | null | null | from django.contrib import admin
from .models import Canteen
# Register your models here.
admin.site.register(Canteen)
| 17.285714 | 32 | 0.801653 |
ac7f4bb9cfc99116ee032e34681cd2dbede941af | 5,947 | py | Python | CIM14/IEC61970/Generation/Production/HydroPump.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | 58 | 2015-04-22T10:41:03.000Z | 2022-03-29T16:04:34.000Z | CIM14/IEC61970/Generation/Production/HydroPump.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | 12 | 2015-08-26T03:57:23.000Z | 2020-12-11T20:14:42.000Z | CIM14/IEC61970/Generation/Production/HydroPump.py | MaximeBaudette/PyCIM | d68ee5ccfc1d32d44c5cd09fb173142fb5ff4f14 | [
"MIT"
] | 35 | 2015-01-10T12:21:03.000Z | 2020-09-09T08:18:16.000Z | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.Core.PowerSystemResource import PowerSystemResource
class HydroPump(PowerSystemResource):
"""A synchronous motor-driven pump, typically associated with a pumped storage plant
"""
def __init__(self, pumpPowerAtMaxHead=0.0, pumpPowerAtMinHead=0.0, pumpDischAtMinHead=0.0, pumpDischAtMaxHead=0.0, SynchronousMachine=None, HydroPumpOpSchedule=None, HydroPowerPlant=None, *args, **kw_args):
"""Initialises a new 'HydroPump' instance.
@param pumpPowerAtMaxHead: The pumping power under maximum head conditions, usually at full gate
@param pumpPowerAtMinHead: The pumping power under minimum head conditions, usually at full gate.
@param pumpDischAtMinHead: The pumping discharge (m3/sec) under minimum head conditions, usually at full gate
@param pumpDischAtMaxHead: The pumping discharge (m3/sec) under maximum head conditions, usually at full gate
@param SynchronousMachine: The synchronous machine drives the turbine which moves the water from a low elevation to a higher elevation. The direction of machine rotation for pumping may or may not be the same as for generating.
@param HydroPumpOpSchedule: The hydro pump has a pumping schedule over time, indicating when pumping is to occur.
@param HydroPowerPlant: The hydro pump may be a member of a pumped storage plant or a pump for distributing water
"""
#: The pumping power under maximum head conditions, usually at full gate
self.pumpPowerAtMaxHead = pumpPowerAtMaxHead
#: The pumping power under minimum head conditions, usually at full gate.
self.pumpPowerAtMinHead = pumpPowerAtMinHead
#: The pumping discharge (m3/sec) under minimum head conditions, usually at full gate
self.pumpDischAtMinHead = pumpDischAtMinHead
#: The pumping discharge (m3/sec) under maximum head conditions, usually at full gate
self.pumpDischAtMaxHead = pumpDischAtMaxHead
self._SynchronousMachine = None
self.SynchronousMachine = SynchronousMachine
self._HydroPumpOpSchedule = None
self.HydroPumpOpSchedule = HydroPumpOpSchedule
self._HydroPowerPlant = None
self.HydroPowerPlant = HydroPowerPlant
super(HydroPump, self).__init__(*args, **kw_args)
_attrs = ["pumpPowerAtMaxHead", "pumpPowerAtMinHead", "pumpDischAtMinHead", "pumpDischAtMaxHead"]
_attr_types = {"pumpPowerAtMaxHead": float, "pumpPowerAtMinHead": float, "pumpDischAtMinHead": float, "pumpDischAtMaxHead": float}
_defaults = {"pumpPowerAtMaxHead": 0.0, "pumpPowerAtMinHead": 0.0, "pumpDischAtMinHead": 0.0, "pumpDischAtMaxHead": 0.0}
_enums = {}
_refs = ["SynchronousMachine", "HydroPumpOpSchedule", "HydroPowerPlant"]
_many_refs = []
def getSynchronousMachine(self):
"""The synchronous machine drives the turbine which moves the water from a low elevation to a higher elevation. The direction of machine rotation for pumping may or may not be the same as for generating.
"""
return self._SynchronousMachine
def setSynchronousMachine(self, value):
if self._SynchronousMachine is not None:
self._SynchronousMachine._HydroPump = None
self._SynchronousMachine = value
if self._SynchronousMachine is not None:
self._SynchronousMachine.HydroPump = None
self._SynchronousMachine._HydroPump = self
SynchronousMachine = property(getSynchronousMachine, setSynchronousMachine)
def getHydroPumpOpSchedule(self):
"""The hydro pump has a pumping schedule over time, indicating when pumping is to occur.
"""
return self._HydroPumpOpSchedule
def setHydroPumpOpSchedule(self, value):
if self._HydroPumpOpSchedule is not None:
self._HydroPumpOpSchedule._HydroPump = None
self._HydroPumpOpSchedule = value
if self._HydroPumpOpSchedule is not None:
self._HydroPumpOpSchedule.HydroPump = None
self._HydroPumpOpSchedule._HydroPump = self
HydroPumpOpSchedule = property(getHydroPumpOpSchedule, setHydroPumpOpSchedule)
def getHydroPowerPlant(self):
"""The hydro pump may be a member of a pumped storage plant or a pump for distributing water
"""
return self._HydroPowerPlant
def setHydroPowerPlant(self, value):
if self._HydroPowerPlant is not None:
filtered = [x for x in self.HydroPowerPlant.HydroPumps if x != self]
self._HydroPowerPlant._HydroPumps = filtered
self._HydroPowerPlant = value
if self._HydroPowerPlant is not None:
if self not in self._HydroPowerPlant._HydroPumps:
self._HydroPowerPlant._HydroPumps.append(self)
HydroPowerPlant = property(getHydroPowerPlant, setHydroPowerPlant)
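# Illustrative sketch, not part of the original PyCIM file: the property setters above
# keep both ends of each association in sync, so attaching a pump to a plant (a
# hypothetical HydroPowerPlant instance) also registers it in the plant's pump list:
#     pump = HydroPump(pumpPowerAtMaxHead=5.0)
#     pump.HydroPowerPlant = plant
#     assert pump in plant.HydroPumps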
| 50.82906 | 235 | 0.734992 |
71e82a5ff1312881ce6c164eca614cd214f54719 | 1,363 | py | Python | src/tests/videoframe/test_stereo.py | mhubii/GIFT-Grab | c9678e25edbd0403b25366d60a3e2e2e5c32320f | [
"BSD-3-Clause"
] | 41 | 2017-06-24T21:58:17.000Z | 2022-03-29T04:44:05.000Z | src/tests/videoframe/test_stereo.py | mhubii/GIFT-Grab | c9678e25edbd0403b25366d60a3e2e2e5c32320f | [
"BSD-3-Clause"
] | 91 | 2017-06-14T09:57:40.000Z | 2020-12-02T11:36:19.000Z | src/tests/videoframe/test_stereo.py | mhubii/GIFT-Grab | c9678e25edbd0403b25366d60a3e2e2e5c32320f | [
"BSD-3-Clause"
] | 15 | 2017-05-21T14:04:50.000Z | 2021-11-22T06:03:26.000Z | from pytest import (mark, yield_fixture, raises)
from pygiftgrab import (VideoFrame, ColourSpace)
stereo_frame, stereo_count = None, None
data_length = 0
@yield_fixture(autouse=True)
def peri_test(colour_space):
global stereo_frame, stereo_count, data_length
stereo_count = 2
cols, rows = 1920, 1080
stereo_frame = VideoFrame(colour_space, cols, rows, stereo_count)
data_length = VideoFrame.required_data_length(colour_space, cols, rows)
@mark.stereo_frames
def test_default_index_is_0():
assert data_length == stereo_frame.data_length()
assert stereo_frame.data_length() == stereo_frame.data_length(0)
@mark.stereo_frames
def test_valid_index_returns_data_length():
for stereo_index in range(stereo_count):
assert stereo_frame.data_length(stereo_index) == data_length
@mark.stereo_frames
def test_invalid_index_raises():
for stereo_index in range(stereo_count, 2 * stereo_count):
with raises(IndexError):
stereo_frame.data_length(stereo_index)
@mark.stereo_frames
def test_stereo_frame_constructor(colour_space):
cols, rows = 1920, 1080
frame = VideoFrame(colour_space, cols, rows)
assert frame.stereo_count() == 1
for _stereo_count in range(2, 5):
frame = VideoFrame(colour_space, cols, rows, _stereo_count)
assert frame.stereo_count() == _stereo_count
| 30.288889 | 75 | 0.753485 |
d69dd4bd11734216510a6899361482e02f766747 | 128 | py | Python | pyroomacoustics/tests/test_issue_235.py | Womac/pyroomacoustics | af452ea42686eb12df34dd7ffdb0c833b64b27f9 | [
"MIT"
] | 915 | 2016-02-08T08:10:37.000Z | 2022-03-31T17:33:21.000Z | pyroomacoustics/tests/test_issue_235.py | zha80052/pyroomacoustics | 15a86425b68969b2109860ca3614f0cbf92b1bd0 | [
"MIT"
] | 212 | 2017-02-06T13:06:30.000Z | 2022-03-28T14:32:15.000Z | pyroomacoustics/tests/test_issue_235.py | zha80052/pyroomacoustics | 15a86425b68969b2109860ca3614f0cbf92b1bd0 | [
"MIT"
] | 513 | 2016-11-15T05:41:49.000Z | 2022-03-29T15:41:09.000Z | import pyroomacoustics as pra
def test_set_rt_no_directivity():
room = pra.ShoeBox([5, 4, 3])
room.set_ray_tracing()
| 16 | 33 | 0.710938 |
49757f4ca00287542582ce2f97ee7486a20b855e | 4,227 | py | Python | examples/pipelines/experiments/demo_count_rows_and_log.py | Anbang-Hu/shrike | 78189984c85696a9a9feaadb72aa471cf2409796 | [
"MIT"
] | 27 | 2021-05-27T00:01:24.000Z | 2022-01-30T19:55:24.000Z | examples/pipelines/experiments/demo_count_rows_and_log.py | Anbang-Hu/shrike | 78189984c85696a9a9feaadb72aa471cf2409796 | [
"MIT"
] | 284 | 2021-05-12T22:26:41.000Z | 2022-02-23T21:18:34.000Z | examples/pipelines/experiments/demo_count_rows_and_log.py | Anbang-Hu/shrike | 78189984c85696a9a9feaadb72aa471cf2409796 | [
"MIT"
] | 5 | 2021-06-02T04:51:47.000Z | 2021-12-20T17:07:41.000Z | """
The Azure ML pipeline for running a basic experiment
that consumes a dataset and does some logging.
to execute:
> python pipelines/experiments/demo_count_rows_and_log.py
--config-dir pipelines/config
--config-name experiments/demo_count_rows_and_log
run.submit=True
"""
# pylint: disable=no-member
# NOTE: because it raises 'dict' has no 'outputs' member in dsl.pipeline construction
import os
import sys
from azure.ml.component import dsl
from shrike.pipeline import AMLPipelineHelper
# NOTE: if you need to import from pipelines.*
ACCELERATOR_ROOT_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "..")
)
if ACCELERATOR_ROOT_PATH not in sys.path:
print(f"Adding to path: {ACCELERATOR_ROOT_PATH}")
sys.path.append(str(ACCELERATOR_ROOT_PATH))
class CountRowsDemo(AMLPipelineHelper):
"""Runnable/reusable pipeline helper class
This class inherits from AMLPipelineHelper which provides
helper functions to create reusable production pipelines.
"""
def build(self, config):
"""Builds a pipeline function for this pipeline using AzureML SDK (dsl.pipeline).
This method returns a constructed pipeline function
(decorated with @dsl.pipeline).
Args:
config (DictConfig): configuration object
Returns:
dsl.pipeline: the function to create your pipeline
"""
# helper functions below load the subgraph/component from
# registered or local version depending on your config.run.use_local
count_rows_component = self.component_load("CountRowsAndLog")
# Here you should create an instance of a pipeline function
# (using your custom config dataclass)
@dsl.pipeline(
name="demo-component-count-rows-and-log",
description="Demo of a component that reads a dataset and counts the rows.",
default_datastore=config.compute.compliant_datastore,
)
def demo_pipeline_function(demo_dataset):
"""Pipeline function for this graph.
Args:
demo_dataset: input dataset
Returns:
dict[str->PipelineOutputData]: a dictionary of your pipeline outputs
for instance to be consumed by other graphs
"""
# general syntax:
# component_instance = component_class(input=data, param=value)
# or
# subgraph_instance = subgraph_function(input=data, param=value)
demo_component_step = count_rows_component(input_data=demo_dataset)
self.apply_recommended_runsettings(
"CountRowsAndLog", demo_component_step, gpu=False
)
# finally return the function itself to be built by helper code
return demo_pipeline_function
def pipeline_instance(self, pipeline_function, config):
"""Given a pipeline function, creates a runnable instance based on provided config.
This is used only when calling this as a runnable pipeline
using .main() function (see below).
The goal of this function is to map the config to the
pipeline_function inputs and params.
Args:
pipeline_function (function):
the pipeline function obtained from self.build()
config (DictConfig):
configuration object
Returns:
azureml.core.Pipeline: the instance constructed
with its inputs and params.
"""
# NOTE: self.dataset_load() helps to load the dataset
# based on its name and version
pipeline_input_dataset = self.dataset_load(
name=config.democomponent.input_data,
version=config.democomponent.input_data_version,
)
# we simply call the pipeline function
demo_pipeline = pipeline_function(demo_dataset=pipeline_input_dataset)
# and we return that function so that helper can run it.
return demo_pipeline
# NOTE: main block is necessary only if script is intended to be run from command line
if __name__ == "__main__":
# calling the helper .main() function
CountRowsDemo.main()
| 35.521008 | 91 | 0.670925 |
7046cfd73611c75ecc92b2b381bbc1439fe44a92 | 21,514 | py | Python | lithops/invokers.py | otrack/lithops | 81ffe3aa16f4483881e172e8805966735cc6e850 | [
"Apache-2.0"
] | null | null | null | lithops/invokers.py | otrack/lithops | 81ffe3aa16f4483881e172e8805966735cc6e850 | [
"Apache-2.0"
] | null | null | null | lithops/invokers.py | otrack/lithops | 81ffe3aa16f4483881e172e8805966735cc6e850 | [
"Apache-2.0"
] | null | null | null | #
# (C) Copyright IBM Corp. 2020
# (C) Copyright Cloudlab URV 2020
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import json
import pika
import time
import random
import queue
import logging
import multiprocessing as mp
from threading import Thread
from types import SimpleNamespace
from concurrent.futures import ThreadPoolExecutor
from lithops.version import __version__
from lithops.future import ResponseFuture
from lithops.config import extract_storage_config
from lithops.utils import version_str, is_lithops_worker, is_unix_system
from lithops.storage.utils import create_job_key
from lithops.constants import LOGGER_LEVEL
from lithops.util.metrics import PrometheusExporter
logger = logging.getLogger(__name__)
class Invoker:
"""
Abstract invoker class
"""
def __init__(self, config, executor_id, internal_storage, compute_handler):
log_level = logger.getEffectiveLevel()
self.log_active = log_level != logging.WARNING
self.log_level = LOGGER_LEVEL if not self.log_active else log_level
self.config = config
self.executor_id = executor_id
self.storage_config = extract_storage_config(self.config)
self.internal_storage = internal_storage
self.compute_handler = compute_handler
self.is_lithops_worker = is_lithops_worker()
self.workers = self.config['lithops'].get('workers')
logger.debug('ExecutorID {} - Total available workers: {}'
.format(self.executor_id, self.workers))
prom_enabled = self.config['lithops'].get('monitoring', False)
prom_config = self.config.get('prometheus', {})
self.prometheus = PrometheusExporter(prom_enabled, prom_config)
mode = self.config['lithops']['mode']
self.runtime_name = self.config[mode]['runtime']
def select_runtime(self, job_id, runtime_memory):
"""
Create a runtime and return metadata
"""
raise NotImplementedError
def run(self, job):
"""
Run a job
"""
raise NotImplementedError
def stop(self):
"""
Stop invoker-related processes
"""
pass
class StandaloneInvoker(Invoker):
"""
Module responsible to perform the invocations against the Standalone backend
"""
def __init__(self, config, executor_id, internal_storage, compute_handler):
super().__init__(config, executor_id, internal_storage, compute_handler)
def select_runtime(self, job_id, runtime_memory):
"""
Return the runtime metadata
"""
log_msg = ('ExecutorID {} | JobID {} - Selected Runtime: {} '
.format(self.executor_id, job_id, self.runtime_name))
logger.info(log_msg)
if not self.log_active:
print(log_msg, end='')
runtime_key = self.compute_handler.get_runtime_key(self.runtime_name)
runtime_meta = self.internal_storage.get_runtime_meta(runtime_key)
if not runtime_meta:
logger.debug('Runtime {} is not yet installed'.format(self.runtime_name))
if not self.log_active:
print('(Installing...)')
runtime_meta = self.compute_handler.create_runtime(self.runtime_name)
self.internal_storage.put_runtime_meta(runtime_key, runtime_meta)
else:
if not self.log_active:
print()
py_local_version = version_str(sys.version_info)
py_remote_version = runtime_meta['python_ver']
if py_local_version != py_remote_version:
raise Exception(("The indicated runtime '{}' is running Python {} and it "
"is not compatible with the local Python version {}")
.format(self.runtime_name, py_remote_version, py_local_version))
return runtime_meta
def run(self, job):
"""
Run a job
"""
job.runtime_name = self.runtime_name
self.prometheus.send_metric(name='job_total_calls',
value=job.total_calls,
labels=(
('job_id', job.job_id),
('function_name', job.function_name)
))
payload = {'config': self.config,
'log_level': self.log_level,
'executor_id': job.executor_id,
'job_id': job.job_id,
'job_description': job.__dict__,
'lithops_version': __version__}
self.compute_handler.run_job(payload)
log_msg = ('ExecutorID {} | JobID {} - {}() Invocation done - Total: {} activations'
.format(job.executor_id, job.job_id, job.function_name, job.total_calls))
logger.info(log_msg)
if not self.log_active:
print(log_msg)
futures = []
for i in range(job.total_calls):
call_id = "{:05d}".format(i)
fut = ResponseFuture(call_id, job,
job.metadata.copy(),
self.storage_config)
fut._set_state(ResponseFuture.State.Invoked)
futures.append(fut)
return futures
class ServerlessInvoker(Invoker):
"""
Module responsible to perform the invocations against the serverless backend
"""
REMOTE_INVOKER_MEMORY = 2048
INVOKER_PROCESSES = 2
def __init__(self, config, executor_id, internal_storage, compute_handler):
super().__init__(config, executor_id, internal_storage, compute_handler)
self.remote_invoker = self.config['serverless'].get('remote_invoker', False)
self.use_threads = (self.is_lithops_worker
or not is_unix_system()
or mp.get_start_method() != 'fork')
self.invokers = []
self.ongoing_activations = 0
if self.use_threads:
self.token_bucket_q = queue.Queue()
self.pending_calls_q = queue.Queue()
self.running_flag = SimpleNamespace(value=0)
self.INVOKER = Thread
else:
self.token_bucket_q = mp.Queue()
self.pending_calls_q = mp.Queue()
self.running_flag = mp.Value('i', 0)
self.INVOKER = mp.Process
self.job_monitor = JobMonitor(self.config, self.internal_storage, self.token_bucket_q)
logger.debug('ExecutorID {} - Serverless invoker created'.format(self.executor_id))
def select_runtime(self, job_id, runtime_memory):
"""
Return the runtime metadata
"""
if not runtime_memory:
runtime_memory = self.config['serverless']['runtime_memory']
timeout = self.config['serverless']['runtime_timeout']
log_msg = ('ExecutorID {} | JobID {} - Selected Runtime: {} - {}MB '
.format(self.executor_id, job_id, self.runtime_name, runtime_memory))
logger.info(log_msg)
if not self.log_active:
print(log_msg, end='')
runtime_key = self.compute_handler.get_runtime_key(self.runtime_name, runtime_memory)
runtime_meta = self.internal_storage.get_runtime_meta(runtime_key)
if not runtime_meta:
logger.debug('Runtime {} with {}MB is not yet installed'.format(self.runtime_name, runtime_memory))
if not self.log_active:
print('(Installing...)')
runtime_meta = self.compute_handler.create_runtime(self.runtime_name, runtime_memory, timeout)
self.internal_storage.put_runtime_meta(runtime_key, runtime_meta)
else:
if not self.log_active:
print()
py_local_version = version_str(sys.version_info)
py_remote_version = runtime_meta['python_ver']
if py_local_version != py_remote_version:
raise Exception(("The indicated runtime '{}' is running Python {} and it "
"is not compatible with the local Python version {}")
.format(self.runtime_name, py_remote_version, py_local_version))
return runtime_meta
def _start_invoker_process(self):
"""Starts the invoker process responsible to spawn pending calls
in background.
"""
for inv_id in range(self.INVOKER_PROCESSES):
p = self.INVOKER(target=self._run_invoker_process, args=(inv_id, ))
self.invokers.append(p)
p.daemon = True
p.start()
def _run_invoker_process(self, inv_id):
"""Run process that implements token bucket scheduling approach"""
logger.debug('ExecutorID {} - Invoker process {} started'
.format(self.executor_id, inv_id))
with ThreadPoolExecutor(max_workers=250) as executor:
while True:
try:
self.token_bucket_q.get()
job, call_id = self.pending_calls_q.get()
except KeyboardInterrupt:
break
if self.running_flag.value:
executor.submit(self._invoke, job, call_id)
else:
break
logger.debug('ExecutorID {} - Invoker process {} finished'
.format(self.executor_id, inv_id))
def _invoke(self, job, call_id):
"""Method used to perform the actual invocation against the
compute backend.
"""
payload = {'config': self.config,
'log_level': self.log_level,
'func_key': job.func_key,
'data_key': job.data_key,
'extra_env': job.extra_env,
'execution_timeout': job.execution_timeout,
'data_byte_range': job.data_ranges[int(call_id)],
'executor_id': job.executor_id,
'job_id': job.job_id,
'call_id': call_id,
'host_submit_tstamp': time.time(),
'lithops_version': __version__,
'runtime_name': job.runtime_name,
'runtime_memory': job.runtime_memory}
# do the invocation
start = time.time()
activation_id = self.compute_handler.invoke(job.runtime_name, job.runtime_memory, payload)
roundtrip = time.time() - start
resp_time = format(round(roundtrip, 3), '.3f')
if not activation_id:
# reached quota limit
time.sleep(random.randint(0, 5))
self.pending_calls_q.put((job, call_id))
self.token_bucket_q.put('#')
return
logger.info('ExecutorID {} | JobID {} - Function call {} done! ({}s) - Activation'
' ID: {}'.format(job.executor_id, job.job_id, call_id, resp_time, activation_id))
def _invoke_remote(self, job):
"""Method used to send a job_description to the remote invoker."""
start = time.time()
payload = {'config': self.config,
'log_level': self.log_level,
'executor_id': job.executor_id,
'job_id': job.job_id,
'job_description': job.__dict__,
'remote_invoker': True,
'invokers': 4,
'lithops_version': __version__}
activation_id = self.compute_handler.invoke(job.runtime_name, self.REMOTE_INVOKER_MEMORY, payload)
roundtrip = time.time() - start
resp_time = format(round(roundtrip, 3), '.3f')
if activation_id:
logger.info('ExecutorID {} | JobID {} - Remote invoker call done! ({}s) - Activation'
' ID: {}'.format(job.executor_id, job.job_id, resp_time, activation_id))
else:
raise Exception('Unable to spawn remote invoker')
def run(self, job):
"""
Run a job described in job_description
"""
job.runtime_name = self.runtime_name
try:
while True:
self.token_bucket_q.get_nowait()
self.ongoing_activations -= 1
except Exception:
pass
self.prometheus.send_metric(name='job_total_calls',
value=job.total_calls,
labels=(
('job_id', job.job_id),
('function_name', job.function_name)
))
if self.remote_invoker:
"""
Remote Invocation
Use a single cloud function to perform all the function invocations
"""
old_stdout = sys.stdout
sys.stdout = open(os.devnull, 'w')
self.select_runtime(job.job_id, self.REMOTE_INVOKER_MEMORY)
sys.stdout = old_stdout
log_msg = ('ExecutorID {} | JobID {} - Starting remote function '
'invocation: {}() - Total: {} activations'
.format(job.executor_id, job.job_id,
job.function_name, job.total_calls))
logger.info(log_msg)
if not self.log_active:
print(log_msg)
th = Thread(target=self._invoke_remote, args=(job,), daemon=True)
th.start()
time.sleep(0.1)
else:
"""
Normal Invocation
Use local threads to perform all the function invocations
"""
try:
if self.running_flag.value == 0:
self.ongoing_activations = 0
self.running_flag.value = 1
self._start_invoker_process()
log_msg = ('ExecutorID {} | JobID {} - Starting function '
'invocation: {}() - Total: {} activations'
.format(job.executor_id, job.job_id,
job.function_name, job.total_calls))
logger.info(log_msg)
if not self.log_active:
print(log_msg)
if self.ongoing_activations < self.workers:
callids = range(job.total_calls)
total_direct = self.workers-self.ongoing_activations
callids_to_invoke_direct = callids[:total_direct]
callids_to_invoke_nondirect = callids[total_direct:]
self.ongoing_activations += len(callids_to_invoke_direct)
logger.debug('ExecutorID {} | JobID {} - Free workers: '
'{} - Going to invoke {} function activations'
.format(job.executor_id, job.job_id, total_direct,
len(callids_to_invoke_direct)))
def _callback(future):
future.result()
executor = ThreadPoolExecutor(job.invoke_pool_threads)
for i in callids_to_invoke_direct:
call_id = "{:05d}".format(i)
future = executor.submit(self._invoke, job, call_id)
future.add_done_callback(_callback)
time.sleep(0.1)
# Put into the queue the rest of the callids to invoke within the process
if callids_to_invoke_nondirect:
logger.debug('ExecutorID {} | JobID {} - Putting remaining '
'{} function invocations into pending queue'
.format(job.executor_id, job.job_id,
len(callids_to_invoke_nondirect)))
for i in callids_to_invoke_nondirect:
call_id = "{:05d}".format(i)
self.pending_calls_q.put((job, call_id))
else:
logger.debug('ExecutorID {} | JobID {} - Ongoing activations '
'reached {} workers, queuing {} function invocations'
.format(job.executor_id, job.job_id, self.workers,
job.total_calls))
for i in range(job.total_calls):
call_id = "{:05d}".format(i)
self.pending_calls_q.put((job, call_id))
self.job_monitor.start_job_monitoring(job)
except (KeyboardInterrupt, Exception) as e:
self.stop()
raise e
# Create all futures
futures = []
for i in range(job.total_calls):
call_id = "{:05d}".format(i)
fut = ResponseFuture(call_id, job,
job.metadata.copy(),
self.storage_config)
fut._set_state(ResponseFuture.State.Invoked)
futures.append(fut)
return futures
def stop(self):
"""
Stop the invoker process and JobMonitor
"""
self.job_monitor.stop()
if self.invokers:
logger.debug('ExecutorID {} - Stopping invoker'
.format(self.executor_id))
self.running_flag.value = 0
for invoker in self.invokers:
self.token_bucket_q.put('#')
self.pending_calls_q.put((None, None))
while not self.pending_calls_q.empty():
try:
self.pending_calls_q.get(False)
except Exception:
pass
self.invokers = []
class JobMonitor:
def __init__(self, lithops_config, internal_storage, token_bucket_q):
self.config = lithops_config
self.internal_storage = internal_storage
self.token_bucket_q = token_bucket_q
self.is_lithops_worker = is_lithops_worker()
self.monitors = {}
self.rabbitmq_monitor = self.config['lithops'].get('rabbitmq_monitor', False)
if self.rabbitmq_monitor:
self.rabbit_amqp_url = self.config['rabbitmq'].get('amqp_url')
def stop(self):
for job_key in self.monitors:
self.monitors[job_key]['should_run'] = False
def get_active_jobs(self):
active_jobs = 0
for job_key in self.monitors:
if self.monitors[job_key]['thread'].is_alive():
active_jobs += 1
return active_jobs
def start_job_monitoring(self, job):
logger.debug('ExecutorID {} | JobID {} - Starting job monitoring'
.format(job.executor_id, job.job_id))
if self.rabbitmq_monitor:
th = Thread(target=self._job_monitoring_rabbitmq, args=(job,))
else:
th = Thread(target=self._job_monitoring_os, args=(job,))
if not self.is_lithops_worker:
th.daemon = True
job_key = create_job_key(job.executor_id, job.job_id)
self.monitors[job_key] = {'thread': th, 'should_run': True}
th.start()
def _job_monitoring_os(self, job):
total_callids_done = 0
job_key = create_job_key(job.executor_id, job.job_id)
while self.monitors[job_key]['should_run'] and total_callids_done < job.total_calls:
time.sleep(1)
callids_running, callids_done = self.internal_storage.get_job_status(job.executor_id, job.job_id)
total_new_tokens = len(callids_done) - total_callids_done
total_callids_done = total_callids_done + total_new_tokens
for i in range(total_new_tokens):
if self.monitors[job_key]['should_run']:
self.token_bucket_q.put('#')
else:
break
logger.debug('ExecutorID {} | JobID {} - Job monitoring finished'
.format(job.executor_id, job.job_id))
def _job_monitoring_rabbitmq(self, job):
total_callids_done = 0
job_key = create_job_key(job.executor_id, job.job_id)
exchange = 'lithops-{}'.format(job_key)
queue_1 = '{}-1'.format(exchange)
params = pika.URLParameters(self.rabbit_amqp_url)
connection = pika.BlockingConnection(params)
channel = connection.channel()
def callback(ch, method, properties, body):
nonlocal total_callids_done
call_status = json.loads(body.decode("utf-8"))
if call_status['type'] == '__end__':
if self.monitors[job_key]['should_run']:
self.token_bucket_q.put('#')
total_callids_done += 1
if total_callids_done == job.total_calls or \
not self.monitors[job_key]['should_run']:
ch.stop_consuming()
channel.basic_consume(callback, queue=queue_1, no_ack=True)
channel.start_consuming()
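# Illustrative sketch, not part of the original module: a typical lifecycle for the
# invoker classes above, assuming config, storage and compute objects built elsewhere.
#     invoker = ServerlessInvoker(config, executor_id, internal_storage, compute_handler)
#     invoker.select_runtime(job.job_id, runtime_memory)  # ensure runtime metadata exists
#     futures = invoker.run(job)                          # token-bucket scheduled invocations
#     invoker.stop()                                      # stop invoker processes and JobMonitor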
| 39.259124 | 111 | 0.572604 |
a9ff1bb7ede2600d39d033356a68a5344ce6ecbe | 468 | py | Python | src/main/migrations/0003_url_archivo.py | fernandogl/sunwise | d9a2e901bbdd456f7b78a1c83ee3650218a1d8f1 | [
"MIT"
] | null | null | null | src/main/migrations/0003_url_archivo.py | fernandogl/sunwise | d9a2e901bbdd456f7b78a1c83ee3650218a1d8f1 | [
"MIT"
] | null | null | null | src/main/migrations/0003_url_archivo.py | fernandogl/sunwise | d9a2e901bbdd456f7b78a1c83ee3650218a1d8f1 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.5 on 2019-01-07 07:49
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('main', '0002_archivourl'),
]
operations = [
migrations.AddField(
model_name='url',
name='archivo',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='main.ArchivoUrl'),
),
]
| 23.4 | 114 | 0.630342 |
1f49a1b6863bb8919046a7c96c63bee4948bcc34 | 434 | py | Python | setup.py | mypetyak/django-citadel | 84b86b66b83aad8c02abb83b92bea906f76d8c68 | [
"MIT"
] | 4 | 2015-05-01T17:38:02.000Z | 2019-10-27T05:59:21.000Z | setup.py | mypetyak/django-citadel | 84b86b66b83aad8c02abb83b92bea906f76d8c68 | [
"MIT"
] | 3 | 2020-02-11T23:19:42.000Z | 2021-06-10T18:35:33.000Z | setup.py | mypetyak/django-citadel | 84b86b66b83aad8c02abb83b92bea906f76d8c68 | [
"MIT"
] | 2 | 2016-10-29T00:13:21.000Z | 2019-10-27T05:59:32.000Z | #!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
setup(
name='django-citadel',
version='1.0.1',
description='A Django app that provides an encrypted Model and ModelField',
author='Christopher Bunn',
url='https://github.com/mypetyak/django-citadel',
packages=['citadel'],
install_requires = [
'pycrypto',
],
test_suite="tests.runtests.run_all"
)
| 24.111111 | 79 | 0.686636 |
71d36f1c6aec8077f38d2402e7963ad1be4ec3b2 | 17,500 | py | Python | src/vistir/path.py | yong422/vistir | 94301fa5fe4d9c51e50834688866940af455e927 | [
"ISC"
] | null | null | null | src/vistir/path.py | yong422/vistir | 94301fa5fe4d9c51e50834688866940af455e927 | [
"ISC"
] | null | null | null | src/vistir/path.py | yong422/vistir | 94301fa5fe4d9c51e50834688866940af455e927 | [
"ISC"
] | null | null | null | # -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import atexit
import errno
import functools
import os
import posixpath
import shutil
import stat
import time
import warnings
import six
from six.moves import urllib_parse
from six.moves.urllib import request as urllib_request
from .backports.tempfile import _TemporaryFileWrapper
from .compat import (
Path,
ResourceWarning,
TemporaryDirectory,
FileNotFoundError,
PermissionError,
_fs_encoding,
_NamedTemporaryFile,
finalize,
fs_decode,
fs_encode,
IS_TYPE_CHECKING,
)
if IS_TYPE_CHECKING:
from typing import Optional, Callable, Text, ByteString, AnyStr
__all__ = [
"check_for_unc_path",
"get_converted_relative_path",
"handle_remove_readonly",
"normalize_path",
"is_in_path",
"is_file_url",
"is_readonly_path",
"is_valid_url",
"mkdir_p",
"ensure_mkdir_p",
"create_tracked_tempdir",
"create_tracked_tempfile",
"path_to_url",
"rmtree",
"safe_expandvars",
"set_write_bit",
"url_to_path",
"walk_up",
]
if os.name == "nt":
warnings.filterwarnings(
"ignore",
category=DeprecationWarning,
message="The Windows bytes API has been deprecated.*",
)
def unicode_path(path):
# Paths are supposed to be represented as unicode here
if six.PY2 and not isinstance(path, six.text_type):
return path.decode(_fs_encoding)
return path
def native_path(path):
if six.PY2 and not isinstance(path, bytes):
return path.encode(_fs_encoding)
return path
# once again thank you django...
# https://github.com/django/django/blob/fc6b90b/django/utils/_os.py
if six.PY3 or os.name == "nt":
abspathu = os.path.abspath
else:
def abspathu(path):
"""
Version of os.path.abspath that uses the unicode representation
of the current working directory, thus avoiding a UnicodeDecodeError
in join when the cwd has non-ASCII characters.
"""
if not os.path.isabs(path):
path = os.path.join(os.getcwdu(), path)
return os.path.normpath(path)
def normalize_path(path):
# type: (AnyStr) -> AnyStr
"""
Return a case-normalized absolute variable-expanded path.
:param str path: The non-normalized path
:return: A normalized, expanded, case-normalized path
:rtype: str
"""
return os.path.normpath(
os.path.normcase(
os.path.abspath(os.path.expandvars(os.path.expanduser(str(path))))
)
)
def is_in_path(path, parent):
# type: (AnyStr, AnyStr) -> bool
"""
Determine if the provided full path is in the given parent root.
:param str path: The full path to check the location of.
:param str parent: The parent path to check for membership in
:return: Whether the full path is a member of the provided parent.
:rtype: bool
"""
return normalize_path(str(path)).startswith(normalize_path(str(parent)))
def normalize_drive(path):
# type: (str) -> Text
"""Normalize drive in path so they stay consistent.
This currently only affects local drives on Windows, which can be
identified with either upper or lower cased drive names. The case is
always converted to uppercase because it seems to be preferred.
"""
from .misc import to_text
if os.name != "nt" or not isinstance(path, six.string_types):
return path
drive, tail = os.path.splitdrive(path)
# Only match (lower cased) local drives (e.g. 'c:'), not UNC mounts.
if drive.islower() and len(drive) == 2 and drive[1] == ":":
return "{}{}".format(drive.upper(), tail)
return to_text(path, encoding="utf-8")
def path_to_url(path):
# type: (str) -> Text
"""Convert the supplied local path to a file uri.
:param str path: A string pointing to or representing a local path
:return: A `file://` uri for the same location
:rtype: str
>>> path_to_url("/home/user/code/myrepo/myfile.zip")
'file:///home/user/code/myrepo/myfile.zip'
"""
from .misc import to_text, to_bytes
if not path:
return path
path = to_bytes(path, encoding="utf-8")
normalized_path = to_text(normalize_drive(os.path.abspath(path)), encoding="utf-8")
return to_text(Path(normalized_path).as_uri(), encoding="utf-8")
def url_to_path(url):
# type: (str) -> ByteString
"""
Convert a valid file url to a local filesystem path
Follows logic taken from pip's equivalent function
"""
from .misc import to_bytes
assert is_file_url(url), "Only file: urls can be converted to local paths"
_, netloc, path, _, _ = urllib_parse.urlsplit(url)
# Netlocs are UNC paths
if netloc:
netloc = "\\\\" + netloc
path = urllib_request.url2pathname(netloc + path)
return to_bytes(path, encoding="utf-8")
def is_valid_url(url):
"""Checks if a given string is an url"""
from .misc import to_text
if not url:
return url
pieces = urllib_parse.urlparse(to_text(url))
return all([pieces.scheme, pieces.netloc])
def is_file_url(url):
"""Returns true if the given url is a file url"""
from .misc import to_text
if not url:
return False
if not isinstance(url, six.string_types):
try:
url = getattr(url, "url")
except AttributeError:
raise ValueError("Cannot parse url from unknown type: {0!r}".format(url))
url = to_text(url, encoding="utf-8")
return urllib_parse.urlparse(url.lower()).scheme == "file"
def is_readonly_path(fn):
"""Check if a provided path exists and is readonly.
Permissions check is `bool(path.stat & stat.S_IREAD)` or `not os.access(path, os.W_OK)`
"""
fn = fs_encode(fn)
if os.path.exists(fn):
file_stat = os.stat(fn).st_mode
return not bool(file_stat & stat.S_IWRITE) or not os.access(fn, os.W_OK)
return False
def mkdir_p(newdir, mode=0o777):
"""Recursively creates the target directory and all of its parents if they do not
already exist. Fails silently if they do.
:param str newdir: The directory path to ensure
:raises: OSError if a file is encountered along the way
"""
# http://code.activestate.com/recipes/82465-a-friendly-mkdir/
newdir = fs_encode(newdir)
if os.path.exists(newdir):
if not os.path.isdir(newdir):
raise OSError(
"a file with the same name as the desired dir, '{0}', already exists.".format(
fs_decode(newdir)
)
)
else:
head, tail = os.path.split(newdir)
        # Make sure the tail doesn't point to the same place as the head
curdir = fs_encode(".")
tail_and_head_match = (
os.path.relpath(tail, start=os.path.basename(head)) == curdir
)
if tail and not tail_and_head_match and not os.path.isdir(newdir):
target = os.path.join(head, tail)
if os.path.exists(target) and os.path.isfile(target):
raise OSError(
"A file with the same name as the desired dir, '{0}', already exists.".format(
fs_decode(newdir)
)
)
os.makedirs(os.path.join(head, tail), mode)
def ensure_mkdir_p(mode=0o777):
"""Decorator to ensure `mkdir_p` is called to the function's return value.
"""
def decorator(f):
@functools.wraps(f)
def decorated(*args, **kwargs):
path = f(*args, **kwargs)
mkdir_p(path, mode=mode)
return path
return decorated
return decorator
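# Hypothetical usage sketch (not in the original source): decorate a function that
# returns a directory path so the directory is guaranteed to exist afterwards.
#     @ensure_mkdir_p(mode=0o775)
#     def cache_dir():
#         return os.path.join(os.path.expanduser("~"), ".cache", "myapp")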
TRACKED_TEMPORARY_DIRECTORIES = []
def create_tracked_tempdir(*args, **kwargs):
"""Create a tracked temporary directory.
This uses `TemporaryDirectory`, but does not remove the directory when
the return value goes out of scope, instead registers a handler to cleanup
on program exit.
The return value is the path to the created directory.
"""
tempdir = TemporaryDirectory(*args, **kwargs)
TRACKED_TEMPORARY_DIRECTORIES.append(tempdir)
atexit.register(tempdir.cleanup)
warnings.simplefilter("ignore", ResourceWarning)
return tempdir.name
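# Hypothetical usage sketch (not in the original source): the directory persists until
# interpreter exit, so its path can be handed around without a context manager.
#     build_dir = create_tracked_tempdir(prefix="vistir-build-")
#     artifact = os.path.join(build_dir, "output.txt")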
def create_tracked_tempfile(*args, **kwargs):
"""Create a tracked temporary file.
This uses the `NamedTemporaryFile` construct, but does not remove the file
until the interpreter exits.
The return value is the file object.
"""
kwargs["wrapper_class_override"] = _TrackedTempfileWrapper
return _NamedTemporaryFile(*args, **kwargs)
def set_write_bit(fn):
# type: (str) -> None
"""
Set read-write permissions for the current user on the target path. Fail silently
if the path doesn't exist.
:param str fn: The target filename or path
:return: None
"""
fn = fs_encode(fn)
if not os.path.exists(fn):
return
file_stat = os.stat(fn).st_mode
os.chmod(fn, file_stat | stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
if not os.path.isdir(fn):
for path in [fn, os.path.dirname(fn)]:
try:
os.chflags(path, 0)
except AttributeError:
pass
return None
for root, dirs, files in os.walk(fn, topdown=False):
for dir_ in [os.path.join(root, d) for d in dirs]:
set_write_bit(dir_)
for file_ in [os.path.join(root, f) for f in files]:
set_write_bit(file_)
def rmtree(directory, ignore_errors=False, onerror=None):
# type: (str, bool, Optional[Callable]) -> None
"""
Stand-in for :func:`~shutil.rmtree` with additional error-handling.
This version of `rmtree` handles read-only paths, especially in the case of index
files written by certain source control systems.
:param str directory: The target directory to remove
:param bool ignore_errors: Whether to ignore errors, defaults to False
:param func onerror: An error handling function, defaults to :func:`handle_remove_readonly`
.. note::
Setting `ignore_errors=True` may cause this to silently fail to delete the path
"""
directory = fs_encode(directory)
if onerror is None:
onerror = handle_remove_readonly
try:
shutil.rmtree(directory, ignore_errors=ignore_errors, onerror=onerror)
except (IOError, OSError, FileNotFoundError, PermissionError) as exc:
# Ignore removal failures where the file doesn't exist
if exc.errno != errno.ENOENT:
raise
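# Hypothetical usage sketch (not in the original source): removing a source checkout on
# Windows, where read-only index files would make a plain shutil.rmtree call fail.
#     rmtree("build/checkout", onerror=handle_remove_readonly)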
def _wait_for_files(path):
"""
Retry with backoff up to 1 second to delete files from a directory.
:param str path: The path to crawl to delete files from
:return: A list of remaining paths or None
:rtype: Optional[List[str]]
"""
timeout = 0.001
remaining = []
while timeout < 1.0:
remaining = []
if os.path.isdir(path):
L = os.listdir(path)
for target in L:
_remaining = _wait_for_files(target)
if _remaining:
remaining.extend(_remaining)
continue
try:
os.unlink(path)
except FileNotFoundError as e:
if e.errno == errno.ENOENT:
return
except (OSError, IOError, PermissionError):
time.sleep(timeout)
timeout *= 2
remaining.append(path)
else:
return
return remaining
def handle_remove_readonly(func, path, exc):
"""Error handler for shutil.rmtree.
Windows source repo folders are read-only by default, so this error handler
attempts to set them as writeable and then proceed with deletion.
:param function func: The caller function
:param str path: The target path for removal
:param Exception exc: The raised exception
This function will call check :func:`is_readonly_path` before attempting to call
:func:`set_write_bit` on the target path and try again.
"""
# Check for read-only attribute
from .compat import ResourceWarning, FileNotFoundError, PermissionError
PERM_ERRORS = (errno.EACCES, errno.EPERM, errno.ENOENT)
default_warning_message = "Unable to remove file due to permissions restriction: {!r}"
# split the initial exception out into its type, exception, and traceback
exc_type, exc_exception, exc_tb = exc
if is_readonly_path(path):
# Apply write permission and call original function
set_write_bit(path)
try:
func(path)
except (OSError, IOError, FileNotFoundError, PermissionError) as e:
if e.errno in PERM_ERRORS:
if e.errno == errno.ENOENT:
return
remaining = None
if os.path.isdir(path):
remaining = _wait_for_files(path)
if remaining:
warnings.warn(default_warning_message.format(path), ResourceWarning)
else:
func(path, ignore_errors=True)
return
if exc_exception.errno in PERM_ERRORS:
set_write_bit(path)
remaining = _wait_for_files(path)
try:
func(path)
except (OSError, IOError, FileNotFoundError, PermissionError) as e:
if e.errno in PERM_ERRORS:
if e.errno != errno.ENOENT: # File still exists
warnings.warn(default_warning_message.format(path), ResourceWarning)
return
else:
raise exc_exception
def walk_up(bottom):
"""Mimic os.walk, but walk 'up' instead of down the directory tree.
From: https://gist.github.com/zdavkeos/1098474
"""
bottom = os.path.realpath(bottom)
# Get files in current dir.
try:
names = os.listdir(bottom)
except Exception:
return
dirs, nondirs = [], []
for name in names:
if os.path.isdir(os.path.join(bottom, name)):
dirs.append(name)
else:
nondirs.append(name)
yield bottom, dirs, nondirs
new_path = os.path.realpath(os.path.join(bottom, ".."))
# See if we are at the top.
if new_path == bottom:
return
for x in walk_up(new_path):
yield x
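# Hypothetical usage sketch (not in the original source): walk upwards from the current
# directory to locate a project marker file.
#     for directory, _, files in walk_up(os.getcwd()):
#         if "setup.py" in files:
#             project_root = directory
#             break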
def check_for_unc_path(path):
""" Checks to see if a pathlib `Path` object is a unc path or not"""
if (
os.name == "nt"
and len(path.drive) > 2
and not path.drive[0].isalpha()
and path.drive[1] != ":"
):
return True
else:
return False
def get_converted_relative_path(path, relative_to=None):
"""Convert `path` to be relative.
Given a vague relative path, return the path relative to the given
location.
:param str path: The location of a target path
:param str relative_to: The starting path to build against, optional
:returns: A relative posix-style path with a leading `./`
This performs additional conversion to ensure the result is of POSIX form,
and starts with `./`, or is precisely `.`.
>>> os.chdir('/home/user/code/myrepo/myfolder')
>>> vistir.path.get_converted_relative_path('/home/user/code/file.zip')
'./../../file.zip'
>>> vistir.path.get_converted_relative_path('/home/user/code/myrepo/myfolder/mysubfolder')
'./mysubfolder'
>>> vistir.path.get_converted_relative_path('/home/user/code/myrepo/myfolder')
'.'
"""
from .misc import to_text, to_bytes # noqa
if not relative_to:
relative_to = os.getcwdu() if six.PY2 else os.getcwd()
if six.PY2:
path = to_bytes(path, encoding="utf-8")
else:
path = to_text(path, encoding="utf-8")
relative_to = to_text(relative_to, encoding="utf-8")
start_path = Path(relative_to)
try:
start = start_path.resolve()
except OSError:
start = start_path.absolute()
# check if there is a drive letter or mount point
# if it is a mountpoint use the original absolute path
# instead of the unc path
if check_for_unc_path(start):
start = start_path.absolute()
path = start.joinpath(path).relative_to(start)
# check and see if the path that was passed into the function is a UNC path
# and raise value error if it is not.
if check_for_unc_path(path):
raise ValueError("The path argument does not currently accept UNC paths")
relpath_s = to_text(posixpath.normpath(path.as_posix()))
if not (relpath_s == "." or relpath_s.startswith("./")):
relpath_s = posixpath.join(".", relpath_s)
return relpath_s
def safe_expandvars(value):
"""Call os.path.expandvars if value is a string, otherwise do nothing.
"""
if isinstance(value, six.string_types):
return os.path.expandvars(value)
return value
class _TrackedTempfileWrapper(_TemporaryFileWrapper, object):
def __init__(self, *args, **kwargs):
super(_TrackedTempfileWrapper, self).__init__(*args, **kwargs)
self._finalizer = finalize(self, self.cleanup)
@classmethod
def _cleanup(cls, fileobj):
try:
fileobj.close()
finally:
os.unlink(fileobj.name)
def cleanup(self):
if self._finalizer.detach():
try:
self.close()
finally:
os.unlink(self.name)
else:
try:
self.close()
except OSError:
pass
| 30.276817 | 98 | 0.639257 |
1c69aa2d2a726eaec728bde7db86f00b75eef5c2 | 10,427 | py | Python | setup.py | Lechatelia/own_mmdet | eac5db1d1bee8eafe0ed46fa4bb61ca8605b502f | [
"Apache-2.0"
] | null | null | null | setup.py | Lechatelia/own_mmdet | eac5db1d1bee8eafe0ed46fa4bb61ca8605b502f | [
"Apache-2.0"
] | null | null | null | setup.py | Lechatelia/own_mmdet | eac5db1d1bee8eafe0ed46fa4bb61ca8605b502f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import os
import subprocess
import time
from setuptools import find_packages, setup
import torch
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
def readme():
with open('README.md', encoding='utf-8') as f:
content = f.read()
return content
MAJOR = 1
MINOR = 1
PATCH = 0
SUFFIX = ''
if PATCH != '':
SHORT_VERSION = '{}.{}.{}{}'.format(MAJOR, MINOR, PATCH, SUFFIX)
else:
SHORT_VERSION = '{}.{}{}'.format(MAJOR, MINOR, SUFFIX)
version_file = 'mmdet/version.py'
def get_git_hash():
def _minimal_ext_cmd(cmd):
# construct minimal environment
env = {}
for k in ['SYSTEMROOT', 'PATH', 'HOME']:
v = os.environ.get(k)
if v is not None:
env[k] = v
# LANGUAGE is used on win32
env['LANGUAGE'] = 'C'
env['LANG'] = 'C'
env['LC_ALL'] = 'C'
out = subprocess.Popen(
cmd, stdout=subprocess.PIPE, env=env).communicate()[0]
return out
try:
out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
sha = out.strip().decode('ascii')
except OSError:
sha = 'unknown'
return sha
def get_hash():
if os.path.exists('.git'):
sha = get_git_hash()[:7]
elif os.path.exists(version_file):
try:
from mmdet.version import __version__
sha = __version__.split('+')[-1]
except ImportError:
raise ImportError('Unable to get git version')
else:
sha = 'unknown'
return sha
def write_version_py():
content = """# GENERATED VERSION FILE
# TIME: {}
__version__ = '{}'
short_version = '{}'
"""
sha = get_hash()
VERSION = SHORT_VERSION + '+' + sha
with open(version_file, 'w') as f:
f.write(content.format(time.asctime(), VERSION, SHORT_VERSION))
def get_version():
with open(version_file, 'r') as f:
exec(compile(f.read(), version_file, 'exec'))
return locals()['__version__']
def make_cuda_ext(name, module, sources):
define_macros = []
if torch.cuda.is_available() or os.getenv('FORCE_CUDA', '0') == '1':
define_macros += [('WITH_CUDA', None)]
else:
raise EnvironmentError('CUDA is required to compile MMDetection!')
return CUDAExtension(
name='{}.{}'.format(module, name),
sources=[os.path.join(*module.split('.'), p) for p in sources],
define_macros=define_macros,
extra_compile_args={
'cxx': [],
'nvcc': [
'-D__CUDA_NO_HALF_OPERATORS__',
'-D__CUDA_NO_HALF_CONVERSIONS__',
'-D__CUDA_NO_HALF2_OPERATORS__',
]
})
def parse_requirements(fname='requirements.txt', with_version=True):
"""
Parse the package dependencies listed in a requirements file but strips
specific versioning information.
Args:
fname (str): path to requirements file
with_version (bool, default=False): if True include version specs
Returns:
List[str]: list of requirements items
CommandLine:
python -c "import setup; print(setup.parse_requirements())"
"""
import sys
from os.path import exists
import re
require_fpath = fname
def parse_line(line):
"""
Parse information from a line in a requirements text file
"""
if line.startswith('-r '):
# Allow specifying requirements in other files
target = line.split(' ')[1]
for info in parse_require_file(target):
yield info
else:
info = {'line': line}
if line.startswith('-e '):
info['package'] = line.split('#egg=')[1]
else:
# Remove versioning from the package
pat = '(' + '|'.join(['>=', '==', '>']) + ')'
parts = re.split(pat, line, maxsplit=1)
parts = [p.strip() for p in parts]
info['package'] = parts[0]
if len(parts) > 1:
op, rest = parts[1:]
if ';' in rest:
# Handle platform specific dependencies
# http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies
version, platform_deps = map(str.strip,
rest.split(';'))
info['platform_deps'] = platform_deps
else:
version = rest # NOQA
info['version'] = (op, version)
yield info
def parse_require_file(fpath):
with open(fpath, 'r') as f:
for line in f.readlines():
line = line.strip()
if line and not line.startswith('#'):
for info in parse_line(line):
yield info
def gen_packages_items():
if exists(require_fpath):
for info in parse_require_file(require_fpath):
parts = [info['package']]
if with_version and 'version' in info:
parts.extend(info['version'])
if not sys.version.startswith('3.4'):
# apparently package_deps are broken in 3.4
platform_deps = info.get('platform_deps')
if platform_deps is not None:
parts.append(';' + platform_deps)
item = ''.join(parts)
yield item
packages = list(gen_packages_items())
return packages
if __name__ == '__main__':
write_version_py()
setup(
name='mmdet',
version=get_version(),
description='Open MMLab Detection Toolbox and Benchmark',
long_description=readme(),
author='OpenMMLab',
author_email='chenkaidev@gmail.com',
keywords='computer vision, object detection',
url='https://github.com/open-mmlab/mmdetection',
packages=find_packages(exclude=('configs', 'tools', 'demo')),
package_data={'mmdet.ops': ['*/*.so']},
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='Apache License 2.0',
setup_requires=parse_requirements('requirements/build.txt'),
tests_require=parse_requirements('requirements/tests.txt'),
install_requires=parse_requirements('requirements/runtime.txt'),
extras_require={
'all': parse_requirements('requirements.txt'),
'tests': parse_requirements('requirements/tests.txt'),
'build': parse_requirements('requirements/build.txt'),
'optional': parse_requirements('requirements/optional.txt'),
},
ext_modules=[
make_cuda_ext(
name='compiling_info',
module='mmdet.ops.utils',
sources=['src/compiling_info.cpp']),
make_cuda_ext(
name='nms_cpu',
module='mmdet.ops.nms',
sources=['src/nms_cpu.cpp']),
make_cuda_ext(
name='nms_cuda',
module='mmdet.ops.nms',
sources=['src/nms_cuda.cpp', 'src/nms_kernel.cu']),
make_cuda_ext(
name='roi_align_cuda',
module='mmdet.ops.roi_align',
sources=[
'src/roi_align_cuda.cpp',
'src/roi_align_kernel.cu',
'src/roi_align_kernel_v2.cu',
]),
make_cuda_ext(
name='roi_pool_cuda',
module='mmdet.ops.roi_pool',
sources=['src/roi_pool_cuda.cpp', 'src/roi_pool_kernel.cu']),
make_cuda_ext(
name='deform_conv_cuda',
module='mmdet.ops.dcn',
sources=[
'src/deform_conv_cuda.cpp',
'src/deform_conv_cuda_kernel.cu'
]),
make_cuda_ext(
name='deform_pool_cuda',
module='mmdet.ops.dcn',
sources=[
'src/deform_pool_cuda.cpp',
'src/deform_pool_cuda_kernel.cu'
]),
make_cuda_ext(
name='sigmoid_focal_loss_cuda',
module='mmdet.ops.sigmoid_focal_loss',
sources=[
'src/sigmoid_focal_loss.cpp',
'src/sigmoid_focal_loss_cuda.cu'
]),
make_cuda_ext(
name='masked_conv2d_cuda',
module='mmdet.ops.masked_conv',
sources=[
'src/masked_conv2d_cuda.cpp', 'src/masked_conv2d_kernel.cu'
]),
make_cuda_ext(
name='affine_grid_cuda',
module='mmdet.ops.affine_grid',
sources=['src/affine_grid_cuda.cpp']),
make_cuda_ext(
name='grid_sampler_cuda',
module='mmdet.ops.grid_sampler',
sources=[
'src/cpu/grid_sampler_cpu.cpp',
'src/cuda/grid_sampler_cuda.cu', 'src/grid_sampler.cpp'
]),
make_cuda_ext(
name='carafe_cuda',
module='mmdet.ops.carafe',
sources=['src/carafe_cuda.cpp', 'src/carafe_cuda_kernel.cu']),
make_cuda_ext(
name='carafe_naive_cuda',
module='mmdet.ops.carafe',
sources=[
'src/carafe_naive_cuda.cpp',
'src/carafe_naive_cuda_kernel.cu'
])
],
cmdclass={'build_ext': BuildExtension},
zip_safe=False)
| 34.641196 | 126 | 0.511557 |
5d9b06ebcd0acf2ac16a2d94d9e50f5b601b8e6b | 1,868 | py | Python | BAD IDEAS/LAND_OF_BAD_IDEAS.py | michaelgailling/obac | 483c532ec09e4249dc6bd4e34d8d1d0299da1c52 | [
"MIT"
] | 3 | 2018-10-03T12:02:29.000Z | 2018-10-05T02:33:39.000Z | BAD IDEAS/LAND_OF_BAD_IDEAS.py | michaelgailling/obac | 483c532ec09e4249dc6bd4e34d8d1d0299da1c52 | [
"MIT"
] | null | null | null | BAD IDEAS/LAND_OF_BAD_IDEAS.py | michaelgailling/obac | 483c532ec09e4249dc6bd4e34d8d1d0299da1c52 | [
"MIT"
] | null | null | null | import pickle
# Use a string as a way of creating global variable names
# Worst of the bad ideas so far
globals()["herp"] = 0
print(herp)
# The more forgiving abuse of the above is demonstrated below encapsulated in a class where it belongs
class BadIdeaClass1():
def __init__(self):
setattr(self, 'herp', 'derp')
print(self.herp)
def derp(self):
print("derp")
BIC1 = BadIdeaClass1()
print(BIC1.herp)
# More uses of strings as a form of aliasing functions with dictionaries
# Probably the best of the worst ideas so far
def Derp():#derp
print("adur?")#adur
def Herp():#derp
print("adur?")#adur
# dictionary -> function
hurdur = {"derp": Derp,
          "herp": Herp}
hurdur["herp"]()
hurdur["derp"]()
class BadIdeaClass2():
def __init__(self):
setattr(self, "BIC", BadIdeaClass1)
BIC2 = BadIdeaClass2
picklestring = pickle.dumps(BIC1)
print(picklestring)
BIC1 = None
print(pickle.loads(picklestring).herp)
def Handle_File_Type(fname):
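    # Naive extension sniffing: once the first '.' is seen, keep appending every remaining character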
ext = ""
for i in fname:
if i == "." or ext:
ext += i
print(ext)
Handle_File_Type("derp.txt")
try:
print("BUGGY CODE GOES HERE")
except Exception as err:
print(err)
import timeit
#A testing function that takes a while
def costly_func():
return map(lambda x: x^2, range(10))
#If you want to test a function with parameters you need to wrap it first... for some reason
def WrapperFUN(func, *args, **kwargs):
return func(*args, **kwargs)
#Pass the function as a parameter -> Receive average time of function based on the number of runs. Defaults to a single run.
def TimeProfileFUN(func, runs=1, type="average"):
dts = 0.0
for i in range(0, runs):
dts += timeit.timeit(func)
return dts/runs
#Wrap it
#wrapped = WrapperFUN(costly_func())
#Profile it
print(TimeProfileFUN(costly_func, 10)) | 21.471264 | 124 | 0.673983 |
6c75244e786ea995804bfab4ae782fb849f7d81e | 4,116 | py | Python | examples/python/01-list-properties.py | copterspace/tiscamera | 2ce3ee845e7c9ca9d479c8e67f5b27e805eff219 | [
"Apache-2.0"
] | null | null | null | examples/python/01-list-properties.py | copterspace/tiscamera | 2ce3ee845e7c9ca9d479c8e67f5b27e805eff219 | [
"Apache-2.0"
] | null | null | null | examples/python/01-list-properties.py | copterspace/tiscamera | 2ce3ee845e7c9ca9d479c8e67f5b27e805eff219 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2017 The Imaging Source Europe GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example will show you how to list available properties
#
import sys
import gi
gi.require_version("Tcam", "0.1")
gi.require_version("Gst", "1.0")
from gi.repository import Tcam, Gst
def main():
Gst.init(sys.argv) # init gstreamer
# set this to a specific camera serial if you
# do not want to use the default camera
serial = None
# we create a source element to retrieve a property list through it
camera = Gst.ElementFactory.make("tcambin")
# serial is defined, thus make the source open that device
if serial is not None:
camera.set_property("serial", serial)
property_names = camera.get_tcam_property_names()
for name in property_names:
(ret, value,
min_value, max_value,
default_value, step_size,
value_type, flags,
category, group) = camera.get_tcam_property(name)
if not ret:
print("could not receive value {}".format(name))
continue
if value_type == "integer" or value_type == "double":
print("{}({}) value: {} default: {} min: {} max: {} grouping: {} - {}".format(name,
value_type,
value, default_value,
min_value, max_value,
category, group))
elif value_type == "string":
print("{}(string) value: {} default: {} grouping: {} - {}".format(name,
value,
default_value,
category,
group))
elif value_type == "button":
print("{}(button) grouping is {} - {}".format(name,
category,
group))
elif value_type == "boolean":
print("{}(boolean) value: {} default: {} grouping: {} - {}".format(name,
value,
default_value,
category,
group))
elif value_type == "enum":
enum_entries = camera.get_tcam_menu_entries(name)
print("{}(enum) value: {} default: {} grouping {} - {}".format(name,
value,
default_value,
category,
group))
print("Entries: ")
for entry in enum_entries:
print("\t {}".format(entry))
else:
print("This should not happen.")
if __name__ == "__main__":
main()
| 42.43299 | 111 | 0.422983 |
07cf1af458bed5034f3ead9da8e3fd5cfe27cf13 | 683 | py | Python | src/infra/factories/in_memory_repository_factory.py | marcelinoavelar/github-monitor | e0f850d05d1eaa7561eb3c838ae3cc35047ab033 | [
"MIT"
] | null | null | null | src/infra/factories/in_memory_repository_factory.py | marcelinoavelar/github-monitor | e0f850d05d1eaa7561eb3c838ae3cc35047ab033 | [
"MIT"
] | 1 | 2022-03-30T20:31:18.000Z | 2022-03-30T20:31:18.000Z | src/infra/factories/in_memory_repository_factory.py | marcelinoavelar/github-monitor | e0f850d05d1eaa7561eb3c838ae3cc35047ab033 | [
"MIT"
] | null | null | null | from abc import ABC
from src.domanin.factories.repository_factory import RepositoryFactory
from src.domanin.repositories.schedule_repository import ScheduleRepository
from src.infra.repositories.in_memory.github_data_in_memory_repository import GithubDataInMemoryRepository
from src.infra.repositories.in_memory.schedule_in_memory_repository import ScheduleInMemoryRepository
class InMemoryRepositoryFactory(RepositoryFactory, ABC):
@property
def github_data_repository(self) -> GithubDataInMemoryRepository:
return GithubDataInMemoryRepository()
@property
def schedule_repository(self) -> ScheduleRepository:
return ScheduleInMemoryRepository()
| 37.944444 | 106 | 0.841874 |
63325f7e114e9b3a743fd114a7786dfd7d985715 | 1,385 | py | Python | scripts/python/LaidlawFX/studio_pref.py | LaidlawFX/LaidlawFX | ee8133dbf452b2b14a2e915878cc15c0517b4154 | [
"MIT"
] | 23 | 2019-02-21T05:20:46.000Z | 2021-06-15T06:45:22.000Z | scripts/python/LaidlawFX/studio_pref.py | LaidlawFX/LaidlawFX | ee8133dbf452b2b14a2e915878cc15c0517b4154 | [
"MIT"
] | 1 | 2019-03-16T14:25:15.000Z | 2019-03-25T03:13:30.000Z | scripts/python/LaidlawFX/studio_pref.py | LaidlawFX/LaidlawFX | ee8133dbf452b2b14a2e915878cc15c0517b4154 | [
"MIT"
] | 2 | 2019-08-04T03:46:40.000Z | 2019-08-22T05:46:59.000Z | # =============================================================================
# IMPORTS
# =============================================================================
import hou
# =============================================================================
# FUNCTIONS
# =============================================================================
# -----------------------------------------------------------------------------
# Name: oncreate(kwargs, node_type=None)
# Raises: N/A
# Returns: None
# Desc: Engine setting.
# -----------------------------------------------------------------------------
def oncreate(kwargs, node_type=None):
node = kwargs["node"]
# -----------------------------------------------------------------------------
# Desc: Set studio debug color.
# -----------------------------------------------------------------------------
node.setColor(hou.Color( (0.0, 0.6, 1.0) ) )
# -----------------------------------------------------------------------------
#    Desc: Set the frame ranges to be non-variable based.
# -----------------------------------------------------------------------------
if node.parm('f1') and node.parm('f2') :
timeRange = hou.playbar.timelineRange()
node.parm('f1').deleteAllKeyframes()
node.parm('f1').set(timeRange[0])
node.parm('f2').deleteAllKeyframes()
node.parm('f2').set(timeRange[1]) | 37.432432 | 80 | 0.2787 |
182f8fdd560845e3af04944593d97a5667286060 | 38,871 | py | Python | eloquent/query/builder.py | KarthickNamakkalKrishnan/eloquent | 0638b688d5fd0c1a46b7471dd465eeb4c2f84666 | [
"MIT"
] | 47 | 2015-03-19T02:11:36.000Z | 2022-03-29T07:16:42.000Z | eloquent/query/builder.py | KarthickNamakkalKrishnan/eloquent | 0638b688d5fd0c1a46b7471dd465eeb4c2f84666 | [
"MIT"
] | 20 | 2015-03-16T02:56:51.000Z | 2015-05-24T17:51:29.000Z | eloquent/query/builder.py | sdispater/eloquent | 0638b688d5fd0c1a46b7471dd465eeb4c2f84666 | [
"MIT"
] | 4 | 2018-08-29T13:42:50.000Z | 2021-03-14T11:28:31.000Z | # -*- coding: utf-8 -*-
import re
from itertools import chain
from collections import OrderedDict
from .expression import QueryExpression
from .join_clause import JoinClause
from ..support.collection import Collection
from ..utils import basestring, Null
from ..exceptions import ArgumentError
class QueryBuilder(object):
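    """
    Fluent SQL query builder: accumulates clauses and bindings, compiles them to SQL
    through the grammar and executes them against the connection.
    """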
_operators = [
'=', '<', '>', '<=', '>=', '<>', '!=',
'like', 'like binary', 'not like', 'between', 'ilike',
'&', '|', '^', '<<', '>>',
'rlike', 'regexp', 'not regexp',
'~', '~*', '!~', '!~*', 'similar to',
'not similar to',
]
def __init__(self, connection, grammar, processor):
"""
Constructor
:param connection: A Connection instance
:type connection: Connection
:param grammar: A QueryGrammar instance
:type grammar: QueryGrammar
:param processor: A QueryProcessor instance
:type processor: QueryProcessor
"""
self._grammar = grammar
self._processor = processor
self._connection = connection
self._bindings = OrderedDict()
for type in ['select', 'join', 'where', 'having', 'order']:
self._bindings[type] = []
self.aggregate_ = None
self.columns = []
self.distinct_ = False
self.from__ = ''
self.joins = []
self.wheres = []
self.groups = []
self.havings = []
self.orders = []
self.limit_ = None
self.offset_ = None
self.unions = []
self.union_limit = None
self.union_offset = None
self.union_orders = []
self.lock_ = None
self._backups = []
self._use_write_connection = False
def select(self, *columns):
"""
Set the columns to be selected
:param columns: The columns to be selected
:type columns: tuple
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if not columns:
columns = ['*']
self.columns = columns
return self
def select_raw(self, expression, bindings=None):
"""
Add a new raw select expression to the query
:param expression: The raw expression
:type expression: str
:param bindings: The expression bindings
:type bindings: list
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
self.add_select(QueryExpression(expression))
if bindings:
self.add_binding(bindings, 'select')
return self
def select_sub(self, query, as_):
"""
Add a subselect expression to the query
:param query: A QueryBuilder instance
:type query: QueryBuilder
:param as_: The subselect alias
:type as_: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if isinstance(query, QueryBuilder):
bindings = query.get_bindings()
query = query.to_sql()
elif isinstance(query, basestring):
bindings = []
else:
raise ArgumentError('Invalid subselect')
return self.select_raw('(%s) AS %s' % (query, self._grammar.wrap(as_)), bindings)
def add_select(self, *column):
"""
Add a new select column to query
:param column: The column to add
:type column: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if not column:
column = []
self.columns += column
return self
def distinct(self):
"""
Force the query to return only distinct result
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
self.distinct_ = True
return self
def from_(self, table):
"""
Set the query target table
:param table: The table to target
:type table: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
self.from__ = table
return self
def join(self, table, one=None,
operator=None, two=None, type='inner', where=False):
"""
Add a join clause to the query
:param table: The table to join with, can also be a JoinClause instance
:type table: str or JoinClause
:param one: The first column of the join condition
:type one: str
:param operator: The operator of the join condition
:type operator: str
:param two: The second column of the join condition
:type two: str
:param type: The join type
:type type: str
        :param where: Whether to use a "where" rather than an "on"
:type where: bool
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if isinstance(table, JoinClause):
self.joins.append(table)
else:
if one is None:
raise ArgumentError('Missing "one" argument')
join = JoinClause(table, type)
self.joins.append(join.on(
one, operator, two, 'and', where
))
return self
def join_where(self, table, one, operator, two, type='inner'):
"""
Add a "join where" clause to the query
:param table: The table to join with, can also be a JoinClause instance
:type table: str or JoinClause
:param one: The first column of the join condition
:type one: str
:param operator: The operator of the join condition
:type operator: str
:param two: The second column of the join condition
:type two: str
:param type: The join type
:type type: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.join(table, one, operator, two, type, True)
def left_join(self, table, one=None, operator=None, two=None):
"""
Add a left join to the query
:param table: The table to join with, can also be a JoinClause instance
:type table: str or JoinClause
:param one: The first column of the join condition
:type one: str
:param operator: The operator of the join condition
:type operator: str
:param two: The second column of the join condition
:type two: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if isinstance(table, JoinClause):
table.type = 'left'
return self.join(table, one, operator, two, 'left')
def left_join_where(self, table, one, operator, two):
"""
Add a "left join where" clause to the query
:param table: The table to join with, can also be a JoinClause instance
:type table: str or JoinClause
:param one: The first column of the join condition
:type one: str
:param operator: The operator of the join condition
:type operator: str
:param two: The second column of the join condition
:type two: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.join_where(table, one, operator, two, 'left')
def right_join(self, table, one=None, operator=None, two=None):
"""
Add a right join to the query
:param table: The table to join with, can also be a JoinClause instance
:type table: str or JoinClause
:param one: The first column of the join condition
:type one: str
:param operator: The operator of the join condition
:type operator: str
:param two: The second column of the join condition
:type two: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if isinstance(table, JoinClause):
table.type = 'right'
return self.join(table, one, operator, two, 'right')
def right_join_where(self, table, one, operator, two):
"""
Add a "right join where" clause to the query
:param table: The table to join with, can also be a JoinClause instance
:type table: str or JoinClause
:param one: The first column of the join condition
:type one: str
:param operator: The operator of the join condition
:type operator: str
:param two: The second column of the join condition
:type two: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.join_where(table, one, operator, two, 'right')
def where(self, column, operator=Null(), value=None, boolean='and'):
"""
Add a where clause to the query
:param column: The column of the where clause, can also be a QueryBuilder instance for sub where
:type column: str or QueryBuilder
:param operator: The operator of the where clause
:type operator: str
:param value: The value of the where clause
:type value: mixed
:param boolean: The boolean of the where clause
:type boolean: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
# If the column is an array, we will assume it is an array of key-value pairs
# and can add them each as a where clause. We will maintain the boolean we
# received when the method was called and pass it into the nested where.
if isinstance(column, dict):
nested = self.new_query()
for key, value in column.items():
nested.where(key, '=', value)
return self.where_nested(nested, boolean)
if isinstance(column, QueryBuilder):
return self.where_nested(column, boolean)
if value is None:
if not isinstance(operator, Null):
value = operator
operator = '='
else:
raise ArgumentError('Value must be provided')
if operator not in self._operators:
value = operator
operator = '='
if isinstance(value, QueryBuilder):
return self._where_sub(column, operator, value, boolean)
if value is None:
return self.where_null(column, boolean, operator != '=')
type = 'basic'
self.wheres.append({
'type': type,
'column': column,
'operator': operator,
'value': value,
'boolean': boolean
})
if not isinstance(value, QueryExpression):
self.add_binding(value, 'where')
return self
def or_where(self, column, operator=None, value=None):
return self.where(column, operator, value, 'or')
def _invalid_operator_and_value(self, operator, value):
is_operator = operator in self._operators
return is_operator and operator != '=' and value is None
def where_raw(self, sql, bindings=None, boolean='and'):
type = 'raw'
self.wheres.append({
'type': type,
'sql': sql,
'boolean': boolean
})
self.add_binding(bindings, 'where')
return self
def or_where_raw(self, sql, bindings=None):
return self.where_raw(sql, bindings, 'or')
def where_between(self, column, values, boolean='and', negate=False):
type = 'between'
self.wheres.append({
'column': column,
'type': type,
'boolean': boolean,
'not': negate
})
self.add_binding(values, 'where')
return self
def or_where_between(self, column, values):
return self.where_between(column, values, 'or')
def where_not_between(self, column, values, boolean='and'):
return self.where_between(column, values, boolean, True)
def or_where_not_between(self, column, values):
return self.where_not_between(column, values, 'or')
def where_nested(self, query, boolean='and'):
query.from_(self.from__)
return self.add_nested_where_query(query, boolean)
def add_nested_where_query(self, query, boolean='and'):
if len(query.wheres):
type = 'nested'
self.wheres.append({
'type': type,
'query': query,
'boolean': boolean
})
self.merge_bindings(query)
return self
def _where_sub(self, column, operator, query, boolean):
type = 'sub'
self.wheres.append({
'type': type,
'column': column,
'operator': operator,
'query': query,
'boolean': boolean
})
self.merge_bindings(query)
return self
def where_exists(self, query, boolean='and', negate=False):
if negate:
type = 'not_exists'
else:
type = 'exists'
self.wheres.append({
'type': type,
'query': query,
'boolean': boolean
})
self.merge_bindings(query)
return self
def or_where_exists(self, callback, negate=False):
return self.where_exists(callback, 'or', negate)
def where_not_exists(self, callback, boolean='and'):
return self.where_exists(callback, boolean, True)
def or_where_not_exists(self, callback):
self.or_where_exists(callback, True)
def where_in(self, column, values, boolean='and', negate=False):
if negate:
type = 'not_in'
else:
type = 'in'
if isinstance(values, QueryBuilder):
return self._where_in_sub(column, values, boolean, negate)
self.wheres.append({
'type': type,
'column': column,
'values': values,
'boolean': boolean
})
self.add_binding(values, 'where')
return self
def or_where_in(self, column, values):
return self.where_in(column, values, 'or')
def where_not_in(self, column, values, boolean='and'):
return self.where_in(column, values, boolean, True)
def or_where_not_in(self, column, values):
return self.where_not_in(column, values, 'or')
def _where_in_sub(self, column, query, boolean, negate=False):
"""
Add a where in with a sub select to the query
:param column: The column
:type column: str
:param query: A QueryBuilder instance
:type query: QueryBuilder
:param boolean: The boolean operator
:type boolean: str
:param negate: Whether it is a not where in
        :type negate: bool
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if negate:
type = 'not_in_sub'
else:
type = 'in_sub'
self.wheres.append({
'type': type,
'column': column,
'query': query,
'boolean': boolean
})
self.merge_bindings(query)
return self
def where_null(self, column, boolean='and', negate=False):
if negate:
type = 'not_null'
else:
type = 'null'
self.wheres.append({
'type': type,
'column': column,
'boolean': boolean
})
return self
def or_where_null(self, column):
return self.where_null(column, 'or')
def where_not_null(self, column, boolean='and'):
return self.where_null(column, boolean, True)
def or_where_not_null(self, column):
return self.where_not_null(column, 'or')
def where_date(self, column, operator, value, boolean='and'):
return self._add_date_based_where('date', column, operator, value, boolean)
def where_day(self, column, operator, value, boolean='and'):
return self._add_date_based_where('day', column, operator, value, boolean)
def where_month(self, column, operator, value, boolean='and'):
return self._add_date_based_where('month', column, operator, value, boolean)
def where_year(self, column, operator, value, boolean='and'):
return self._add_date_based_where('year', column, operator, value, boolean)
def _add_date_based_where(self, type, column, operator, value, boolean='and'):
self.wheres.append({
'type': type,
'column': column,
'boolean': boolean,
'operator': operator,
'value': value
})
        self.add_binding(value, 'where')
        return self
def dynamic_where(self, method):
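        # Resolve dynamic "where_<column>_and_<column>..." method names (see __getattr__) into where clauses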
finder = method[6:]
def dynamic_where(*parameters):
segments = re.split('_(and|or)_(?=[a-z])', finder, 0, re.I)
connector = 'and'
index = 0
for segment in segments:
if segment.lower() != 'and' and segment.lower() != 'or':
self._add_dynamic(segment, connector, parameters, index)
index += 1
else:
connector = segment
return self
return dynamic_where
def _add_dynamic(self, segment, connector, parameters, index):
self.where(segment, '=', parameters[index], connector)
def group_by(self, *columns):
"""
Add a "group by" clause to the query
:param columns: The columns to group by
:type columns: tuple
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
for column in columns:
self.groups.append(column)
return self
def having(self, column, operator=None, value=None, boolean='and'):
"""
Add a "having" clause to the query
:param column: The column
:type column: str
:param operator: The having clause operator
:type operator: str
:param value: The having clause value
:type value: mixed
:param boolean: Boolean joiner type
:type boolean: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
type = 'basic'
self.havings.append({
'type': type,
'column': column,
'operator': operator,
'value': value,
'boolean': boolean
})
if not isinstance(value, QueryExpression):
self.add_binding(value, 'having')
return self
def or_having(self, column, operator=None, value=None):
"""
Add a "having" clause to the query
:param column: The column
:type column: str
:param operator: The having clause operator
:type operator: str
:param value: The having clause value
:type value: mixed
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.having(column, operator, value, 'or')
def having_raw(self, sql, bindings=None, boolean='and'):
"""
Add a raw having clause to the query
:param sql: The raw query
:type sql: str
:param bindings: The query bindings
:type bindings: list
:param boolean: Boolean joiner type
:type boolean: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
type = 'raw'
self.havings.append({
'type': type,
'sql': sql,
'boolean': boolean
})
self.add_binding(bindings, 'having')
return self
def or_having_raw(self, sql, bindings=None):
"""
Add a raw having clause to the query
:param sql: The raw query
:type sql: str
:param bindings: The query bindings
:type bindings: list
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.having_raw(sql, bindings, 'or')
def order_by(self, column, direction='asc'):
"""
Add a "order by" clause to the query
:param column: The order by column
:type column: str
:param direction: The direction of the order
:type direction: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if self.unions:
prop = 'union_orders'
else:
prop = 'orders'
if direction.lower() == 'asc':
direction = 'asc'
else:
direction = 'desc'
getattr(self, prop).append({
'column': column,
'direction': direction
})
return self
def latest(self, column='created_at'):
"""
Add an "order by" clause for a timestamp to the query
in descending order
:param column: The order by column
:type column: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.order_by(column, 'desc')
def oldest(self, column='created_at'):
"""
Add an "order by" clause for a timestamp to the query
in ascending order
:param column: The order by column
:type column: str
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.order_by(column, 'asc')
def order_by_raw(self, sql, bindings=None):
"""
Add a raw "order by" clause to the query
:param sql: The raw clause
:type sql: str
        :param bindings: The bindings
        :type bindings: list
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
if bindings is None:
bindings = []
type = 'raw'
self.orders.append({
'type': type,
'sql': sql
})
self.add_binding(bindings, 'order')
return self
def offset(self, value):
if self.unions:
prop = 'union_offset'
else:
prop = 'offset_'
setattr(self, prop, max(0, value))
return self
def skip(self, value):
return self.offset(value)
def limit(self, value):
if self.unions:
prop = 'union_limit'
else:
prop = 'limit_'
if value is None or value > 0:
setattr(self, prop, value)
return self
def take(self, value):
return self.limit(value)
def for_page(self, page, per_page=15):
return self.skip((page - 1) * per_page).take(per_page)
def union(self, query, all=False):
"""
Add a union statement to the query
:param query: A QueryBuilder instance
:type query: QueryBuilder
:param all: Whether it is a "union all" statement
:type all: bool
:return: The query
:rtype: QueryBuilder
"""
self.unions.append({
'query': query,
'all': all
})
return self.merge_bindings(query)
def union_all(self, query):
"""
Add a union all statement to the query
:param query: A QueryBuilder instance
:type query: QueryBuilder
:return: The query
:rtype: QueryBuilder
"""
return self.union(query, True)
def lock(self, value=True):
"""
Lock the selected rows in the table
:param value: Whether it is a lock for update or a shared lock
:type value: bool
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
self.lock_ = value
return self
def lock_for_update(self):
"""
Lock the selected rows in the table for updating.
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.lock(True)
def shared_lock(self):
"""
Share lock the selected rows in the table.
:return: The current QueryBuilder instance
:rtype: QueryBuilder
"""
return self.lock(False)
def to_sql(self):
"""
Get the SQL representation of the query
:return: The SQL representation of the query
:rtype: str
"""
return self._grammar.compile_select(self)
def find(self, id, columns=None):
"""
Execute a query for a single record by id
:param id: The id of the record to retrieve
:type id: mixed
        :param columns: The columns of the record to retrieve
:type columns: list
:return: mixed
:rtype: mixed
"""
if not columns:
columns = ['*']
return self.where('id', '=', id).first(1, columns)
def pluck(self, column):
"""
Pluck a single column's value from the first results of a query
:param column: The column to pluck the value from
:type column: str
:return: The value of column
:rtype: mixed
"""
result = self.first(1, [column])
if result:
return result[column]
return
def first(self, limit=1, columns=None):
"""
Execute the query and get the first results
:param limit: The number of results to get
:type limit: int
:param columns: The columns to get
:type columns: list
:return: The result
:rtype: mixed
"""
if not columns:
columns = ['*']
results = self.take(limit).get(columns)
if len(results) > 0:
return results[0]
return
def get(self, columns=None):
"""
Execute the query as a "select" statement
:param columns: The columns to get
:type columns: list
:return: The result
:rtype: list
"""
if not columns:
columns = ['*']
return self.get_fresh(columns)
def get_fresh(self, columns=None):
"""
Execute the query as a fresh "select" statement
:param columns: The columns to get
:type columns: list
:return: The result
:rtype: list
"""
if not columns:
columns = ['*']
if not self.columns:
self.columns = columns
return self._processor.process_select(self, self._run_select())
def _run_select(self):
"""
Run the query as a "select" statement against the connection.
:return: The result
:rtype: list
"""
return self._connection.select(
self.to_sql(),
self.get_bindings(),
not self._use_write_connection
)
def chunk(self, count):
"""
Chunk the results of the query
:param count: The chunk size
:type count: int
:return: The current chunk
:rtype: list
"""
page = 1
results = self.for_page(page, count).get()
while len(results) > 0:
yield results
page += 1
results = self.for_page(page, count).get()
def lists(self, column, key=None):
"""
Get a list with the values of a given column
:param column: The column to get the values for
:type column: str
:param key: The key
:type key: str
:return: The list of values
:rtype: list or dict
"""
columns = self._get_list_select(column, key)
if key is not None:
results = {}
for result in self.get(columns):
results[result[key]] = result[column]
else:
results = list(map(lambda x: x[column], self.get(columns)))
return results
def _get_list_select(self, column, key=None):
"""
Get the columns that should be used in a list
:param column: The column to get the values for
:type column: str
:param key: The key
:type key: str
        :return: The columns that should be selected
:rtype: list
"""
if key is None:
elements = [column]
else:
elements = [column, key]
select = []
for elem in elements:
dot = elem.find('.')
if dot >= 0:
                select.append(elem[dot + 1:])
else:
select.append(elem)
return select
def implode(self, column, glue=''):
"""
Concatenate values of a given column as a string.
:param column: The column to glue the values for
:type column: str
:param glue: The glue string
:type glue: str
:return: The glued value
:rtype: str
"""
return glue.join(self.lists(column))
def exists(self):
"""
Determine if any rows exist for the current query.
:return: Whether the rows exist or not
:rtype: bool
"""
limit = self.limit_
result = self.limit(1).count() > 0
self.limit(limit)
return result
def count(self, *columns):
"""
Retrieve the "count" result of the query
:param columns: The columns to get
:type columns: tuple
:return: The count
:rtype: int
"""
if not columns:
columns = ['*']
return int(self.aggregate('count', *columns))
def min(self, column):
"""
Retrieve the "min" result of the query
        :param column: The column to get the minimum for
:type column: tuple
:return: The min
:rtype: int
"""
return self.aggregate('min', *[column])
def max(self, column):
"""
Retrieve the "max" result of the query
:param column: The column to get the maximum for
:type column: tuple
:return: The max
:rtype: int
"""
return self.aggregate('max', *[column])
def sum(self, column):
"""
Retrieve the "sum" result of the query
:param column: The column to get the sum for
:type column: tuple
:return: The sum
:rtype: int
"""
return self.aggregate('sum', *[column])
def avg(self, column):
"""
Retrieve the "avg" result of the query
:param column: The column to get the average for
:type column: tuple
        :return: The average
:rtype: int
"""
return self.aggregate('avg', *[column])
def aggregate(self, func, *columns):
"""
Execute an aggregate function against the database
:param func: The aggregate function
:type func: str
        :param columns: The columns to execute the function for
:type columns: tuple
:return: The aggregate result
:rtype: mixed
"""
if not columns:
columns = ['*']
self.aggregate_ = {
'function': func,
'columns': columns
}
previous_columns = self.columns
results = self.get(*columns)
self.aggregate_ = None
self.columns = previous_columns
if len(results) > 0:
return dict((k.lower(), v) for k, v in results[0].items())['aggregate']
def insert(self, _values=None, **values):
"""
Insert a new record into the database
:param _values: The new record values
:type _values: dict or list
:param values: The new record values as keyword arguments
:type values: dict
:return: The result
:rtype: bool
"""
if not values and not _values:
return True
if not isinstance(_values, list):
if _values is not None:
values.update(_values)
values = [values]
else:
values = _values
for i, value in enumerate(values):
values[i] = OrderedDict(sorted(value.items()))
bindings = []
for record in values:
for value in record.values():
bindings.append(value)
sql = self._grammar.compile_insert(self, values)
bindings = self._clean_bindings(bindings)
return self._connection.insert(sql, bindings)
def insert_get_id(self, values, sequence=None):
"""
Insert a new record and get the value of the primary key
:param values: The new record values
:type values: dict
:param sequence: The name of the primary key
:type sequence: str
:return: The value of the primary key
:rtype: int
"""
values = OrderedDict(sorted(values.items()))
sql = self._grammar.compile_insert_get_id(self, values, sequence)
values = self._clean_bindings(values.values())
return self._processor.process_insert_get_id(self, sql, values, sequence)
def update(self, _values=None, **values):
"""
Update a record in the database
:param values: The values of the update
:type values: dict
:return: The number of records affected
:rtype: int
"""
if _values is not None:
values.update(_values)
values = OrderedDict(sorted(values.items()))
bindings = list(values.values()) + self.get_bindings()
sql = self._grammar.compile_update(self, values)
return self._connection.update(sql, self._clean_bindings(bindings))
def increment(self, column, amount=1, extras=None):
"""
Increment a column's value by a given amount
:param column: The column to increment
:type column: str
:param amount: The amount by which to increment
:type amount: int
:param extras: Extra columns
:type extras: dict
:return: The number of rows affected
:rtype: int
"""
wrapped = self._grammar.wrap(column)
if extras is None:
extras = {}
columns = {
column: self.raw('%s + %s' % (wrapped, amount))
}
columns.update(extras)
return self.update(**columns)
def decrement(self, column, amount=1, extras=None):
"""
Decrement a column's value by a given amount
        :param column: The column to decrement
        :type column: str
        :param amount: The amount by which to decrement
:type amount: int
:param extras: Extra columns
:type extras: dict
:return: The number of rows affected
:rtype: int
"""
wrapped = self._grammar.wrap(column)
if extras is None:
extras = {}
columns = {
column: self.raw('%s - %s' % (wrapped, amount))
}
columns.update(extras)
return self.update(**columns)
def delete(self, id=None):
"""
Delete a record from the database
:param id: The id of the row to delete
:type id: mixed
:return: The number of rows deleted
:rtype: int
"""
if id is not None:
self.where('id', '=', id)
sql = self._grammar.compile_delete(self)
return self._connection.delete(sql, self.get_bindings())
def truncate(self):
"""
Run a truncate statement on the table
:rtype: None
"""
for sql, bindings in self._grammar.compile_truncate(self).items():
self._connection.statement(sql, bindings)
def new_query(self):
"""
Get a new instance of the query builder
:return: A new QueryBuilder instance
:rtype: QueryBuilder
"""
return QueryBuilder(self._connection, self._grammar, self._processor)
def merge_wheres(self, wheres, bindings):
"""
Merge a list of where clauses and bindings
:param wheres: A list of where clauses
:type wheres: list
:param bindings: A list of bindings
:type bindings: list
:rtype: None
"""
self.wheres = self.wheres + wheres
self._bindings['where'] = self._bindings['where'] + bindings
def _clean_bindings(self, bindings):
"""
Remove all of the expressions from bindings
:param bindings: The bindings to clean
:type bindings: list
:return: The cleaned bindings
:rtype: list
"""
return list(filter(lambda b: not isinstance(b, QueryExpression), bindings))
def raw(self, value):
"""
Create a raw database expression
:param value: The value of the raw expression
:type value: mixed
:return: A QueryExpression instance
:rtype: QueryExpression
"""
return self._connection.raw(value)
def get_bindings(self):
return list(chain(*self._bindings.values()))
def get_raw_bindings(self):
return self._bindings
def set_bindings(self, bindings, type='where'):
if type not in self._bindings:
raise ArgumentError('Invalid binding type: %s' % type)
self._bindings[type] = bindings
return self
def add_binding(self, value, type='where'):
if value is None:
return self
if type not in self._bindings:
raise ArgumentError('Invalid binding type: %s' % type)
if isinstance(value, (list, tuple)):
self._bindings[type] += value
else:
self._bindings[type].append(value)
return self
def merge_bindings(self, query):
for type in self._bindings:
self._bindings[type] += query.get_raw_bindings()[type]
return self
def get_connection(self):
"""
Get the query connection
:return: The current connection instance
:rtype: eloquent.connections.connection.Connection
"""
return self._connection
def get_processor(self):
"""
Get the builder processor
:return: The builder processor
:rtype: QueryProcessor
"""
return self._processor
def get_grammar(self):
"""
Get the builder query grammar
:return: The builder query grammar
:rtype: QueryGrammar
"""
return self._grammar
def use_write_connection(self):
self._use_write_connection = True
return self
def __getattr__(self, item):
if item.startswith('where_'):
return self.dynamic_where(item)
object.__getattribute__(self, item)
| 25.708333 | 104 | 0.563119 |
87bae26bde05507bd77650186bbcc417ec202bc1 | 749 | py | Python | st2common/st2common/exceptions/resultstracker.py | avezraj/st2 | 519c7f6819e52fb289c440bb7d1df7b558bb9ed7 | [
"Apache-2.0"
] | null | null | null | st2common/st2common/exceptions/resultstracker.py | avezraj/st2 | 519c7f6819e52fb289c440bb7d1df7b558bb9ed7 | [
"Apache-2.0"
] | null | null | null | st2common/st2common/exceptions/resultstracker.py | avezraj/st2 | 519c7f6819e52fb289c440bb7d1df7b558bb9ed7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from st2common.exceptions import coditationBaseException
class ReferenceNotFoundError(coditationBaseException):
pass
| 35.666667 | 74 | 0.785047 |
a4d074a27c5903b41c6d6b3a1546a0736402b63d | 3,279 | py | Python | mppsolar/devices/jk24s.py | Northbadge/mpp-solar | fa1e552691669a66632ecd3fb5d18b1cdd2cebfe | [
"MIT"
] | null | null | null | mppsolar/devices/jk24s.py | Northbadge/mpp-solar | fa1e552691669a66632ecd3fb5d18b1cdd2cebfe | [
"MIT"
] | null | null | null | mppsolar/devices/jk24s.py | Northbadge/mpp-solar | fa1e552691669a66632ecd3fb5d18b1cdd2cebfe | [
"MIT"
] | null | null | null | import logging
from .device import AbstractDevice
from ..io.jkbleio import JkBleIO
from ..io.testio import TestIO
log = logging.getLogger("MPP-Solar")
class jk24s(AbstractDevice):
def __init__(self, *args, **kwargs) -> None:
super().__init__()
self._name = kwargs["name"]
self.set_port(**kwargs)
self.set_protocol(**kwargs)
log.debug(f"jk24s __init__ name {self._name}, port {self._port}, protocol {self._protocol}")
log.debug(f"jk24s __init__ args {args}")
log.debug(f"jk24s __init__ kwargs {kwargs}")
def __str__(self):
"""
Build a printable representation of this class
"""
return f"jk24s device - name: {self._name}, port: {self._port}, protocol: {self._protocol}"
def run_command(self, command, show_raw=False) -> dict:
"""
jk24s specific method of running a 'raw' command
"""
log.info(f"Running command {command}")
        # TODO: implement protocol self-determination??
if self._protocol is None:
log.error("Attempted to run command with no protocol defined")
return {"ERROR": ["Attempted to run command with no protocol defined", ""]}
if self._port is None:
log.error(f"No communications port defined - unable to run command {command}")
return {
"ERROR": [
f"No communications port defined - unable to run command {command}",
"",
]
}
if isinstance(self._port, JkBleIO): # JkBleIO is very different from the others
decoded_response = self._port.send_and_receive(command, show_raw, self._protocol)
else: # Copy-pasted from default protocol
full_command = self._protocol.get_full_command(command)
log.info(f"full command {full_command} for command {command}")
# Band-aid solution, can't really segregate TestIO from protocols w/o major rework of TestIO
if isinstance(self._port, TestIO):
raw_response = self._port.send_and_receive(full_command,
self._protocol.get_command_defn(command))
else:
raw_response = self._port.send_and_receive(full_command)
log.debug(f"Send and Receive Response {raw_response}")
# Handle errors; dict is returned on exception
if isinstance(raw_response, dict):
return raw_response
decoded_response = self._protocol.decode(raw_response, show_raw, command)
log.debug(f"Send and Receive Response {decoded_response}")
return decoded_response
def get_status(self, show_raw) -> dict:
# Run all the commands that are defined as status from the protocol definition
data = {}
for command in self._protocol.STATUS_COMMANDS:
data.update(self.run_command(command))
return data
def get_settings(self, show_raw) -> dict:
# Run all the commands that are defined as settings from the protocol definition
data = {}
for command in self._protocol.SETTINGS_COMMANDS:
data.update(self.run_command(command))
return data
| 41.506329 | 104 | 0.620006 |
de4a4132a1c16c37a0367e5d6af5cc08778ca625 | 6,613 | py | Python | Controller/DisplayEmulator/DisplayEmulator.py | colindomoney-hardware/ZeroOne | 3e02f7b7ba1957ab4c35abeba228e40a8a06e810 | [
"MIT"
] | null | null | null | Controller/DisplayEmulator/DisplayEmulator.py | colindomoney-hardware/ZeroOne | 3e02f7b7ba1957ab4c35abeba228e40a8a06e810 | [
"MIT"
] | 4 | 2021-06-08T19:58:45.000Z | 2022-03-08T21:09:20.000Z | Controller/DisplayEmulator/DisplayEmulator.py | colindomoney/ZeroOne | 3e02f7b7ba1957ab4c35abeba228e40a8a06e810 | [
"MIT"
] | null | null | null | import fnmatch, os, socket
import pickle, timer_cm, dill
import ZO
from threading import *
from tkinter import *
from PIL import ImageTk, Image
PIXEL_MULTIPLIER = 10
IMAGE_PATH = '/Users/colind/Documents/Projects/ZeroOne/ZeroOne/Graphics/Images'
def get_images():
return fnmatch.filter(os.listdir(IMAGE_PATH), '*.png')
class DisplayEmulatorApplication(Thread):
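    """
    Tkinter display emulator: a background thread listens on a TCP socket for
    dill-pickled command objects and renders the resulting frames on the canvas.
    """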
POLL_INTERVAL = 20
def __init__(self, root):
self._port = 6999
self._host = socket.gethostname()
self._client = None
self._socket = None
self._canvas = None
self._closing = False
self._root = root
self._render_image = None
self._canvasX = PIXEL_MULTIPLIER * ZO.zero_one.ZO_X_SIZE
self._canvasY = PIXEL_MULTIPLIER * ZO.zero_one.ZO_Y_SIZE
Thread.__init__(self)
# Add the top frame for the buttons
button_frame = Frame(root, bg='white', width=self._canvasX + 10, height=40)
button_frame.pack(fill='x')
exit_button = Button(button_frame, text='Exit', command=self._exit_button_click)
exit_button.pack(side='left', padx=10)
clear_button = Button(button_frame, text='Clear', command=self._clear_button_click)
clear_button.pack(side='left')
# Add the canvas with a black border and a bit of padding
self._canvas = Canvas(root, width=self._canvasX, height=self._canvasY, bg='black')
self._canvas.pack(pady=(10, 10), padx=(10, 10))
# Add the top frame for the buttons
self._status_frame = Frame(root, bg='red', width=self._canvasX + 10, height=20)
self._status_frame.pack(fill='x')
self._root.after(self.POLL_INTERVAL, self._process_messages)
# Exit button handler
def _exit_button_click(self):
print('exit_button_click()')
self._root.destroy()
# Clear button handler
def _clear_button_click(self):
print('clear_button_click()')
self._canvas.delete('all')
self._canvas.update_idletasks()
def _process_messages(self):
if self._render_image != None:
pil_img = self._render_image.resize((self._canvasX, self._canvasY), Image.BILINEAR)
self._img = ImageTk.PhotoImage(pil_img)
self._canvas.create_image(3, 3, anchor=NW, image=self._img)
self._canvas.update_idletasks()
self._render_image = None
self._root.after(self.POLL_INTERVAL, self._process_messages)
def set_connected_state(self, is_connected = False):
if is_connected == True:
self._status_frame.config(bg='green')
else:
self._status_frame.config(bg='red')
def close(self):
# This is called when the main loop wants to shut down
print('close()')
self._closing = True
# Kill the mofo - not sure if we should be using the server or the client ...
if self._client != None:
# TODO : this is still flakey
# self._client.shutdown(socket.SHUT_WR)
self._client.close()
self._server.close()
# print('done close()')
def run(self):
# This is the main listening thread
print('run()')
# Open the socket
self._server = socket.socket(
socket.AF_INET, socket.SOCK_STREAM)
# self._server.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Bind and listen
self._server.bind((self._host, self._port))
self._server.listen(5)
# Loop until the app is shutdown
while self._closing == False:
# Await a connection if the socket is closed or None
if self._client == None or self._client._closed == True:
try:
print('accept()')
self._client, info = self._server.accept()
print('CONNECTED !')
self.set_connected_state(True)
except Exception as ex:
print("\n>>> EXCEPTION : {} <<<\n".format(ex))
# Now try get some data
try:
# print('recv()')
data_string = self._client.recv(8192)
# print('after recv()')
if not data_string:
# If we hit this point the socket is broken and we can close it and await a new connection
print('... incomplete ... closing ...')
self._client.close()
self.set_connected_state(False)
else:
# print('len = ', len(data_string))
# print(data_string)
try:
# This is prone to shitting itself so guard it with kid gloves
emulator_command = dill.loads(data_string)
print(emulator_command.command)
self._handle_command(emulator_command)
except Exception as ex:
print("\n>>> EXCEPTION : {} <<<\n".format(ex))
except Exception as ex:
print("\n>>> EXCEPTION : {} <<<\n".format(ex))
print('exit run()')
# Quit TKinter
self._root.destroy()
def _handle_command(self, emulator_command):
print('_handle_command', emulator_command.command)
if emulator_command.command == 'DisplayAll':
self._render_image = Image.frombytes('RGB', (ZO.zero_one.ZO_X_SIZE, ZO.zero_one.ZO_Y_SIZE), emulator_command.data,
'raw')
elif emulator_command.command == 'DisplayZeroOne':
zo_image = ZO.ZO_Image()
self._render_image = zo_image.get_image_from_zero_one_data(emulator_command.data)
elif emulator_command.command == 'ClearDisplay':
self._canvas.delete('all')
self._canvas.update_idletasks()
else:
print('Unknown command')
# Handle the window close event
def close_window():
global app
print('CLOSE()')
app.close()
try:
# Create the root Tk object
root = Tk()
root.title("ZeroOne DisplayEmulator")
# Hook the close window event
root.protocol("WM_DELETE_WINDOW", close_window)
# Now build the app and start the thread in daemon mode
app = DisplayEmulatorApplication(root)
app.daemon = True
app.start()
    # Now run the main Tkinter loop
# print('before mainloop()')
root.mainloop()
# print('after mainloop()')
except KeyboardInterrupt as ex:
print('Forcing a quit')
pass
finally:
print('Done!')
| 32.737624 | 126 | 0.594738 |
1be716b0ff07c707a51ee95d1ddd3b2c80234984 | 16,857 | py | Python | openfold/config.py | ericmjl/openfold | 3ea45f90dbd314095d56bcf033e515555092dd15 | [
"Apache-2.0"
] | null | null | null | openfold/config.py | ericmjl/openfold | 3ea45f90dbd314095d56bcf033e515555092dd15 | [
"Apache-2.0"
] | null | null | null | openfold/config.py | ericmjl/openfold | 3ea45f90dbd314095d56bcf033e515555092dd15 | [
"Apache-2.0"
] | null | null | null | import copy
import ml_collections as mlc
def set_inf(c, inf):
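    # Recursively overwrite every "inf" entry in the (possibly nested) ConfigDict with the given value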
for k, v in c.items():
if isinstance(v, mlc.ConfigDict):
set_inf(v, inf)
elif k == "inf":
c[k] = inf
def model_config(name, train=False, low_prec=False):
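    # Build a preset-specific copy of the base config: models 3-5 disable templates,
    # the "*_ptm" presets enable the pTM head and its loss, and train/low_prec adjust globals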
c = copy.deepcopy(config)
if name == "model_1":
pass
elif name == "model_2":
pass
elif name == "model_3":
c.model.template.enabled = False
elif name == "model_4":
c.model.template.enabled = False
elif name == "model_5":
c.model.template.enabled = False
elif name == "model_1_ptm":
c.model.heads.tm.enabled = True
c.loss.tm.weight = 0.1
elif name == "model_2_ptm":
c.model.heads.tm.enabled = True
c.loss.tm.weight = 0.1
elif name == "model_3_ptm":
c.model.template.enabled = False
c.model.heads.tm.enabled = True
c.loss.tm.weight = 0.1
elif name == "model_4_ptm":
c.model.template.enabled = False
c.model.heads.tm.enabled = True
c.loss.tm.weight = 0.1
elif name == "model_5_ptm":
c.model.template.enabled = False
c.model.heads.tm.enabled = True
c.loss.tm.weight = 0.1
else:
raise ValueError("Invalid model name")
if train:
c.globals.blocks_per_ckpt = 1
c.globals.chunk_size = None
if low_prec:
c.globals.eps = 1e-4
# If we want exact numerical parity with the original, inf can't be
# a global constant
set_inf(c, 1e4)
return c
c_z = mlc.FieldReference(128, field_type=int)
c_m = mlc.FieldReference(256, field_type=int)
c_t = mlc.FieldReference(64, field_type=int)
c_e = mlc.FieldReference(64, field_type=int)
c_s = mlc.FieldReference(384, field_type=int)
blocks_per_ckpt = mlc.FieldReference(None, field_type=int)
chunk_size = mlc.FieldReference(4, field_type=int)
aux_distogram_bins = mlc.FieldReference(64, field_type=int)
eps = mlc.FieldReference(1e-8, field_type=float)
templates_enabled = mlc.FieldReference(True, field_type=bool)
embed_template_torsion_angles = mlc.FieldReference(True, field_type=bool)
NUM_RES = "num residues placeholder"
NUM_MSA_SEQ = "msa placeholder"
NUM_EXTRA_SEQ = "extra msa placeholder"
NUM_TEMPLATES = "num templates placeholder"
config = mlc.ConfigDict(
{
"data": {
"common": {
"feat": {
"aatype": [NUM_RES],
"all_atom_mask": [NUM_RES, None],
"all_atom_positions": [NUM_RES, None, None],
"alt_chi_angles": [NUM_RES, None],
"atom14_alt_gt_exists": [NUM_RES, None],
"atom14_alt_gt_positions": [NUM_RES, None, None],
"atom14_atom_exists": [NUM_RES, None],
"atom14_atom_is_ambiguous": [NUM_RES, None],
"atom14_gt_exists": [NUM_RES, None],
"atom14_gt_positions": [NUM_RES, None, None],
"atom37_atom_exists": [NUM_RES, None],
"backbone_rigid_mask": [NUM_RES],
"backbone_rigid_tensor": [NUM_RES, None, None],
"bert_mask": [NUM_MSA_SEQ, NUM_RES],
"chi_angles_sin_cos": [NUM_RES, None, None],
"chi_mask": [NUM_RES, None],
"extra_deletion_value": [NUM_EXTRA_SEQ, NUM_RES],
"extra_has_deletion": [NUM_EXTRA_SEQ, NUM_RES],
"extra_msa": [NUM_EXTRA_SEQ, NUM_RES],
"extra_msa_mask": [NUM_EXTRA_SEQ, NUM_RES],
"extra_msa_row_mask": [NUM_EXTRA_SEQ],
"is_distillation": [],
"msa_feat": [NUM_MSA_SEQ, NUM_RES, None],
"msa_mask": [NUM_MSA_SEQ, NUM_RES],
"msa_row_mask": [NUM_MSA_SEQ],
"no_recycling_iters": [],
"pseudo_beta": [NUM_RES, None],
"pseudo_beta_mask": [NUM_RES],
"residue_index": [NUM_RES],
"residx_atom14_to_atom37": [NUM_RES, None],
"residx_atom37_to_atom14": [NUM_RES, None],
"resolution": [],
"rigidgroups_alt_gt_frames": [NUM_RES, None, None, None],
"rigidgroups_group_exists": [NUM_RES, None],
"rigidgroups_group_is_ambiguous": [NUM_RES, None],
"rigidgroups_gt_exists": [NUM_RES, None],
"rigidgroups_gt_frames": [NUM_RES, None, None, None],
"seq_length": [],
"seq_mask": [NUM_RES],
"target_feat": [NUM_RES, None],
"template_aatype": [NUM_TEMPLATES, NUM_RES],
"template_all_atom_mask": [NUM_TEMPLATES, NUM_RES, None],
"template_all_atom_positions": [
NUM_TEMPLATES, NUM_RES, None, None,
],
"template_alt_torsion_angles_sin_cos": [
NUM_TEMPLATES, NUM_RES, None, None,
],
"template_backbone_rigid_mask": [NUM_TEMPLATES, NUM_RES],
"template_backbone_rigid_tensor": [
NUM_TEMPLATES, NUM_RES, None, None,
],
"template_mask": [NUM_TEMPLATES],
"template_pseudo_beta": [NUM_TEMPLATES, NUM_RES, None],
"template_pseudo_beta_mask": [NUM_TEMPLATES, NUM_RES],
"template_sum_probs": [NUM_TEMPLATES, None],
"template_torsion_angles_mask": [
NUM_TEMPLATES, NUM_RES, None,
],
"template_torsion_angles_sin_cos": [
NUM_TEMPLATES, NUM_RES, None, None,
],
"true_msa": [NUM_MSA_SEQ, NUM_RES],
"use_clamped_fape": [],
},
"masked_msa": {
"profile_prob": 0.1,
"same_prob": 0.1,
"uniform_prob": 0.1,
},
"max_extra_msa": 1024,
"max_recycling_iters": 3,
"msa_cluster_features": True,
"reduce_msa_clusters_by_max_templates": False,
"resample_msa_in_recycling": True,
"template_features": [
"template_all_atom_positions",
"template_sum_probs",
"template_aatype",
"template_all_atom_mask",
],
"unsupervised_features": [
"aatype",
"residue_index",
"msa",
"num_alignments",
"seq_length",
"between_segment_residues",
"deletion_matrix",
"no_recycling_iters",
],
"use_templates": templates_enabled,
"use_template_torsion_angles": embed_template_torsion_angles,
},
"supervised": {
"clamp_prob": 0.9,
"supervised_features": [
"all_atom_mask",
"all_atom_positions",
"resolution",
"use_clamped_fape",
"is_distillation",
],
},
"predict": {
"fixed_size": True,
"subsample_templates": False, # We want top templates.
"masked_msa_replace_fraction": 0.15,
"max_msa_clusters": 128,
"max_template_hits": 4,
"max_templates": 4,
"crop": False,
"crop_size": None,
"supervised": False,
"subsample_recycling": False,
"uniform_recycling": False,
},
"eval": {
"fixed_size": True,
"subsample_templates": False, # We want top templates.
"masked_msa_replace_fraction": 0.15,
"max_msa_clusters": 128,
"max_template_hits": 4,
"max_templates": 4,
"crop": False,
"crop_size": None,
"supervised": True,
"subsample_recycling": False,
"uniform_recycling": False,
},
"train": {
"fixed_size": True,
"subsample_templates": True,
"masked_msa_replace_fraction": 0.15,
"max_msa_clusters": 128,
"max_template_hits": 4,
"max_templates": 4,
"shuffle_top_k_prefiltered": 20,
"crop": True,
"crop_size": 256,
"supervised": True,
"clamp_prob": 0.9,
"subsample_recycling": True,
"max_distillation_msa_clusters": 1000,
"uniform_recycling": True,
},
"data_module": {
"use_small_bfd": False,
"data_loaders": {
"batch_size": 1,
"num_workers": 8,
},
},
},
# Recurring FieldReferences that can be changed globally here
"globals": {
"blocks_per_ckpt": blocks_per_ckpt,
"chunk_size": chunk_size,
"c_z": c_z,
"c_m": c_m,
"c_t": c_t,
"c_e": c_e,
"c_s": c_s,
"eps": eps,
},
"model": {
"_mask_trans": False,
"input_embedder": {
"tf_dim": 22,
"msa_dim": 49,
"c_z": c_z,
"c_m": c_m,
"relpos_k": 32,
},
"recycling_embedder": {
"c_z": c_z,
"c_m": c_m,
"min_bin": 3.25,
"max_bin": 20.75,
"no_bins": 15,
"inf": 1e8,
},
"template": {
"distogram": {
"min_bin": 3.25,
"max_bin": 50.75,
"no_bins": 39,
},
"template_angle_embedder": {
# DISCREPANCY: c_in is supposed to be 51.
"c_in": 57,
"c_out": c_m,
},
"template_pair_embedder": {
"c_in": 88,
"c_out": c_t,
},
"template_pair_stack": {
"c_t": c_t,
# DISCREPANCY: c_hidden_tri_att here is given in the supplement
# as 64. In the code, it's 16.
"c_hidden_tri_att": 16,
"c_hidden_tri_mul": 64,
"no_blocks": 2,
"no_heads": 4,
"pair_transition_n": 2,
"dropout_rate": 0.25,
"blocks_per_ckpt": blocks_per_ckpt,
"inf": 1e9,
},
"template_pointwise_attention": {
"c_t": c_t,
"c_z": c_z,
# DISCREPANCY: c_hidden here is given in the supplement as 64.
# It's actually 16.
"c_hidden": 16,
"no_heads": 4,
"inf": 1e5, # 1e9,
},
"inf": 1e5, # 1e9,
"eps": eps, # 1e-6,
"enabled": templates_enabled,
"embed_angles": embed_template_torsion_angles,
},
"extra_msa": {
"extra_msa_embedder": {
"c_in": 25,
"c_out": c_e,
},
"extra_msa_stack": {
"c_m": c_e,
"c_z": c_z,
"c_hidden_msa_att": 8,
"c_hidden_opm": 32,
"c_hidden_mul": 128,
"c_hidden_pair_att": 32,
"no_heads_msa": 8,
"no_heads_pair": 4,
"no_blocks": 4,
"transition_n": 4,
"msa_dropout": 0.15,
"pair_dropout": 0.25,
"blocks_per_ckpt": blocks_per_ckpt,
"clear_cache_between_blocks": True,
"inf": 1e9,
"eps": eps, # 1e-10,
},
"enabled": True,
},
"evoformer_stack": {
"c_m": c_m,
"c_z": c_z,
"c_hidden_msa_att": 32,
"c_hidden_opm": 32,
"c_hidden_mul": 128,
"c_hidden_pair_att": 32,
"c_s": c_s,
"no_heads_msa": 8,
"no_heads_pair": 4,
"no_blocks": 48,
"transition_n": 4,
"msa_dropout": 0.15,
"pair_dropout": 0.25,
"blocks_per_ckpt": blocks_per_ckpt,
"clear_cache_between_blocks": False,
"inf": 1e9,
"eps": eps, # 1e-10,
},
"structure_module": {
"c_s": c_s,
"c_z": c_z,
"c_ipa": 16,
"c_resnet": 128,
"no_heads_ipa": 12,
"no_qk_points": 4,
"no_v_points": 8,
"dropout_rate": 0.1,
"no_blocks": 8,
"no_transition_layers": 1,
"no_resnet_blocks": 2,
"no_angles": 7,
"trans_scale_factor": 10,
"epsilon": eps, # 1e-12,
"inf": 1e5,
},
"heads": {
"lddt": {
"no_bins": 50,
"c_in": c_s,
"c_hidden": 128,
},
"distogram": {
"c_z": c_z,
"no_bins": aux_distogram_bins,
},
"tm": {
"c_z": c_z,
"no_bins": aux_distogram_bins,
"enabled": False,
},
"masked_msa": {
"c_m": c_m,
"c_out": 23,
},
"experimentally_resolved": {
"c_s": c_s,
"c_out": 37,
},
},
},
"relax": {
"max_iterations": 0, # no max
"tolerance": 2.39,
"stiffness": 10.0,
"max_outer_iterations": 20,
"exclude_residues": [],
},
"loss": {
"distogram": {
"min_bin": 2.3125,
"max_bin": 21.6875,
"no_bins": 64,
"eps": eps, # 1e-6,
"weight": 0.3,
},
"experimentally_resolved": {
"eps": eps, # 1e-8,
"min_resolution": 0.1,
"max_resolution": 3.0,
"weight": 0.0,
},
"fape": {
"backbone": {
"clamp_distance": 10.0,
"loss_unit_distance": 10.0,
"weight": 0.5,
},
"sidechain": {
"clamp_distance": 10.0,
"length_scale": 10.0,
"weight": 0.5,
},
"eps": 1e-4,
"weight": 1.0,
},
"lddt": {
"min_resolution": 0.1,
"max_resolution": 3.0,
"cutoff": 15.0,
"no_bins": 50,
"eps": eps, # 1e-10,
"weight": 0.01,
},
"masked_msa": {
"eps": eps, # 1e-8,
"weight": 2.0,
},
"supervised_chi": {
"chi_weight": 0.5,
"angle_norm_weight": 0.01,
"eps": eps, # 1e-6,
"weight": 1.0,
},
"violation": {
"violation_tolerance_factor": 12.0,
"clash_overlap_tolerance": 1.5,
"eps": eps, # 1e-6,
"weight": 0.0,
},
"tm": {
"max_bin": 31,
"no_bins": 64,
"min_resolution": 0.1,
"max_resolution": 3.0,
"eps": eps, # 1e-8,
"weight": 0.0,
},
"eps": eps,
},
"ema": {"decay": 0.999},
}
)
| 36.408207 | 83 | 0.422436 |
7537b597059cd49759121a5a0a524a0a9e6ef57b | 727 | py | Python | Get Retroativo/2019_03_2.py | paulowiz/AiesecBot | ac77cc5426ed6382772603afa8015208020c0fba | [
"MIT"
] | 6 | 2019-10-18T17:47:30.000Z | 2021-03-18T06:04:06.000Z | Get Retroativo/2019_03_2.py | paulowiz/AiesecBot | ac77cc5426ed6382772603afa8015208020c0fba | [
"MIT"
] | 1 | 2020-09-24T08:17:29.000Z | 2020-09-28T08:16:39.000Z | Get Retroativo/2019_03_2.py | paulowiz/AiesecBot | ac77cc5426ed6382772603afa8015208020c0fba | [
"MIT"
] | 3 | 2019-10-20T18:40:20.000Z | 2021-04-15T01:27:59.000Z | import psycopg2.extras
from controller import RobotRotine as rr
from api import graphqlconsume, querygraphql
import time
import datetime
import numpy as np
"""
current = np.datetime64(datetime.datetime.now())
currentab = np.datetime64(current) + np.timedelta64(5, 'h')
lastdate = np.datetime64(currentab) - np.timedelta64(15, 'm')
print(lastdate)
print(currentab)
print('-')
"""
robo5 = rr.RobotRotine()
i = 0
dtinit = '2019-03-15T00:00:00'
while i < 18:
print(dtinit)
dtfim = np.datetime64(dtinit) + np.timedelta64(24, 'h')
robo5.ExecutaRotina('created_at', dtinit,
dtfim, 1)
i = i+1
dtinit = np.datetime64(dtinit) + np.timedelta64(24, 'h')
print('Period executed successfully')
| 25.964286 | 61 | 0.69326 |
c7896cdf7d7239b9719ca7d56ec0fa1d9f532e5b | 2,345 | py | Python | kokon/utils/db.py | KoalicjaOtwartyKrakow/backend | 636eb0216ef8c236daec1dfc68f3118cf494d992 | [
"MIT"
] | null | null | null | kokon/utils/db.py | KoalicjaOtwartyKrakow/backend | 636eb0216ef8c236daec1dfc68f3118cf494d992 | [
"MIT"
] | null | null | null | kokon/utils/db.py | KoalicjaOtwartyKrakow/backend | 636eb0216ef8c236daec1dfc68f3118cf494d992 | [
"MIT"
] | null | null | null | """Module containing database connection."""
from contextlib import contextmanager
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from kokon import settings
class DB:
_db_connection_pool = None
@classmethod
def _get_db_connection_pool(cls):
"""Get database engine."""
if cls._db_connection_pool is None:
cls._db_connection_pool = sqlalchemy.create_engine(
# See:
# https://cloud.google.com/sql/docs/postgres/connect-functions#connect_to
sqlalchemy.engine.url.URL.create(
drivername="postgresql+pg8000",
username=settings.DB_USER, # e.g. "my-database-user"
password=settings.DB_PASS, # e.g. "my-database-password"
database=settings.DB_NAME, # e.g. "my-database-name"
query=settings.DB_QUERY,
),
# Cloud SQL imposes a maximum limit on concurrent connections, and these
# limits may vary depending on the database engine chosen (see Cloud SQL
# Quotas and Limits). It's recommended to use a connection with Cloud
# Functions, but it is important to set the maximum number of
# connections to 1.
#
# Note: Cloud Functions limits concurrent executions to one per
# instance. You never have a situation where a single function instance
# is processing two requests at the same time. In most situations, only
# a single database connection is needed.
#
# https://cloud.google.com/sql/docs/mysql/connect-functions#connection-limits
pool_size=1,
max_overflow=0,
)
print(f"Connecting to query={settings.DB_QUERY}, name={settings.DB_NAME}")
return cls._db_connection_pool
@contextmanager
def acquire(self):
"""Usage:
with DB().acquire() as session:
# use session here
"""
session = sessionmaker(bind=self._get_db_connection_pool())()
try:
yield session
session.commit()
except Exception:
session.rollback()
raise
finally:
session.close()
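# A minimal usage sketch of DB.acquire(); the SELECT 1 statement below is only a
# placeholder, not taken from this repository.
def _example_usage():
    with DB().acquire() as session:
        # commit happens automatically on clean exit, rollback on any exception
        return session.execute(sqlalchemy.text("SELECT 1")).scalar()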
| 37.822581 | 93 | 0.583369 |
23d49e5363da7fb23ebfa4c602173c1479523e4f | 7,021 | py | Python | DQM/SiPixelMonitorClient/test/pixel_dqm_sourceclient-file_cfg.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 6 | 2017-09-08T14:12:56.000Z | 2022-03-09T23:57:01.000Z | DQM/SiPixelMonitorClient/test/pixel_dqm_sourceclient-file_cfg.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 545 | 2017-09-19T17:10:19.000Z | 2022-03-07T16:55:27.000Z | DQM/SiPixelMonitorClient/test/pixel_dqm_sourceclient-file_cfg.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 14 | 2017-10-04T09:47:21.000Z | 2019-10-23T18:04:45.000Z | import FWCore.ParameterSet.Config as cms
process = cms.Process("SIPIXELDQM")
##----## Geometry and other global parameters:
process.load("Configuration.StandardSequences.GeometryRecoDB_cff")
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
###process.load("DQM.Integration.test.FrontierCondition_GT_cfi")
##----## Reco:
process.load("Configuration.StandardSequences.Reconstruction_cff")
#process.load("Configuration.StandardSequences.RawToDigi_cff")
process.load("EventFilter.SiPixelRawToDigi.SiPixelRawToDigi_cfi")
process.siPixelDigis.InputLabel = 'source'
process.siPixelDigis.IncludeErrors = True
process.load("RecoLocalTracker.SiPixelClusterizer.SiPixelClusterizer_cfi")
#process.load("EventFilter.SiStripRawToDigi.SiStripDigis_cfi")
process.load("EventFilter.SiStripRawToDigi.SiStripRawToDigis_standard_cff")
process.siStripDigis.ProductLabel = 'source'
process.load("RecoVertex.BeamSpotProducer.BeamSpot_cff")
##----## Central DQM:
process.load("DQMServices.Core.DQM_cfg")
process.load("DQMServices.Components.DQMEnvironment_cfi")
process.DQM.collectorHost = ''
process.dqmSaver.convention = 'Online'
process.dqmSaver.producer = 'DQM'
process.dqmEnv.subSystemFolder = 'Pixel'
process.dqmSaver.dirName = '/tmp/merkelp/'
process.dqmSaver.saveByLumiSection = 1
process.dqmSaver.saveByRun = 1
process.dqmSaver.saveAtJobEnd = True
from DQMServices.Core.DQMQualityTester import DQMQualityTester
process.qTester = DQMQualityTester(
qtList = cms.untracked.FileInPath('DQM/SiPixelMonitorClient/test/sipixel_qualitytest_config.xml'),
prescaleFactor = cms.untracked.int32(1),
getQualityTestsFromFile = cms.untracked.bool(True),
testInEventloop = cms.untracked.bool(False),
qtestOnEndRun = cms.untracked.bool(True),
qtestOnEndJob = cms.untracked.bool(True),
qtestOnEndLumi = cms.untracked.bool(True),
verboseQT = cms.untracked.bool(True)
)
##----## Pixel DQM P5/OFFLINE:
#process.load("DQM.SiPixelCommon.SiPixelOfflineDQM_source_cff")
#process.load("DQM.SiPixelCommon.SiPixelOfflineDQM_client_cff")
process.load("DQM.SiPixelCommon.SiPixelP5DQM_source_cff")
process.load("DQM.SiPixelCommon.SiPixelP5DQM_client_cff")
## the following sequences are declared therein:
## siPixelOfflineDQM_source, siPixelOfflineDQM_cosmics_source, siPixelOfflineDQM_source_woTrack
## PixelOfflineDQMClient, PixelOfflineDQMClientWithDataCertification
## siPixelP5DQM_source, siPixelP5DQM_cosmics_source, siPixelP5DQM_source_woTrack
## PixelP5DQMClient, PixelP5DQMClientWithDataCertification
##----## Other stuff:
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring('siPixelDigis',
'SiPixelRawDataErrorSource',
'SiPixelDigiSource',
'SiPixelClusterSource',
'SiPixelRecHitSource',
'sipixelEDAClient'),
cout = cms.untracked.PSet(
threshold = cms.untracked.string('ERROR')
),
destinations = cms.untracked.vstring('cout')
)
process.AdaptorConfig = cms.Service("AdaptorConfig")
process.ModuleWebRegistry = cms.Service("ModuleWebRegistry")
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
##----## Global tag and input data:
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
#process.GlobalTag.connect ="sqlite_file:/afs/cern.ch/user/m/malgeri/public/globtag/CRZT210_V1.db"
#process.GlobalTag.connect = "frontier://FrontierProd/CMS_COND_21X_GLOBALTAG"
###process.GlobalTag.globaltag = "GR09_R_V4::All"
###process.GlobalTag.globaltag = "CRAFT09_R_V9::All"
process.GlobalTag.globaltag = "GR_R_38X_V8::All"
process.es_prefer_GlobalTag = cms.ESPrefer('PoolDBESSource','GlobalTag')
process.source = cms.Source("PoolSource",
#debugFlag = cms.untracked.bool(True),
    #debugVerbosity = cms.untracked.uint32(1),
#fileNames = cms.untracked.vstring('rfio:/castor/cern.ch/user/c/chiochia/cmssw/Muon_FullValidation_150pre3.root')
#fileNames = cms.untracked.vstring('rfio:/castor/cern.ch/cms/store/relval/2008/6/6/RelVal-RelValTTbar-1212531852-IDEAL_V1-2nd-02/0000/081018D5-EC33-DD11-A623-000423D6CA42.root')
fileNames = cms.untracked.vstring(
#'/store/express/BeamCommissioning09/StreamExpress/ALCARECO/v2/000/124/275/ECC2F9D5-F7E9-DE11-BF99-001D09F282F5.root',
#'/store/express/BeamCommissioning09/StreamExpress/ALCARECO/v2/000/124/275/DE9A0B4E-F4E9-DE11-BF54-001D09F251CC.root',
#'/store/express/BeamCommissioning09/StreamExpress/ALCARECO/v2/000/124/275/7AD672E3-F2E9-DE11-8173-001D09F24498.root',
#'/store/express/BeamCommissioning09/StreamExpress/ALCARECO/v2/000/124/275/42B6AB0A-F5E9-DE11-8A92-001D09F2546F.root',
#'/store/express/BeamCommissioning09/StreamExpress/ALCARECO/v2/000/124/275/228BA375-F6E9-DE11-8D89-000423D6A6F4.root',
#'/store/express/BeamCommissioning09/StreamExpress/ALCARECO/v2/000/124/275/026B6140-F9E9-DE11-A392-001D09F28755.root'
#'/store/data/BeamCommissioning09/MinimumBias/RAW/v1/000/124/275/FA12DE16-FCE9-DE11-8FFE-001D09F24DA8.root',
#'/store/data/BeamCommissioning09/MinimumBias/RAW/v1/000/124/275/DE3F1AC9-F7E9-DE11-85C9-001D09F24303.root',
#'/store/data/BeamCommissioning09/MinimumBias/RAW/v1/000/124/275/C83B2F2C-FEE9-DE11-8F2E-001D09F24934.root',
#'/store/data/BeamCommissioning09/MinimumBias/RAW/v1/000/124/275/A8E551ED-F9E9-DE11-A59E-001D09F29849.root'
#'/store/data/Commissioning10/Cosmics/RAW/v4/000/133/874/F40E13A3-6B4F-DF11-A156-000423D987FC.root',
#'/store/data/Commissioning10/Cosmics/RAW/v4/000/133/874/F0F602C6-794F-DF11-B259-001D09F23A84.root'
'/store/data/Commissioning10/MinimumBias/RAW/v4/000/133/877/FAC1761E-A64F-DF11-BD37-003048D2BDD8.root',
'/store/data/Commissioning10/MinimumBias/RAW/v4/000/133/877/FADF1B51-BF4F-DF11-9CE2-001D09F24353.root',
'/store/data/Commissioning10/MinimumBias/RAW/v4/000/135/575/F03B0CDF-8261-DF11-8354-001D09F2960F.root'
#'/store/data/Commissioning10/MinimumBias/RAW/v4/000/135/575/F058E355-7C61-DF11-ACEB-0030487D0D3A.root'
#'/store/data/Run2010A/Cosmics/RAW/v1/000/136/902/3A8627D6-B56E-DF11-A09E-003048D3750A.root'
)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
##----## Sequences and Paths:
process.LocalReco = cms.Sequence(process.siPixelDigis*process.siPixelClusters*process.siPixelRecHits)
process.TrackReco = cms.Sequence(process.siStripDigis*process.offlineBeamSpot*process.trackerlocalreco*process.recopixelvertexing*process.ckftracks)
process.DQMmodules = cms.Sequence(process.qTester*process.dqmEnv*process.dqmSaver)
#put process.dump in the path where you want to print all event content
#process.dump=cms.EDAnalyzer('EventContentAnalyzer')
process.p = cms.Path(
process.LocalReco*
#process.TrackReco*
process.DQMmodules*
process.siPixelP5DQM_source_woTrack*
#process.siPixelP5DQM_source*
process.PixelP5DQMClientWithDataCertification
#process.siPixelOfflineDQM_source_woTrack*
#process.PixelOfflineDQMClientWithDataCertification
)
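# Illustrative alternative (not part of the original path): the offline DQM variant
# can be built from the sequences listed above, e.g.
#   process.p = cms.Path(
#       process.LocalReco*
#       process.DQMmodules*
#       process.siPixelOfflineDQM_source_woTrack*
#       process.PixelOfflineDQMClientWithDataCertification
#   )
# The configuration is then executed with `cmsRun pixel_dqm_sourceclient-file_cfg.py`
# inside a CMSSW environment.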
| 51.248175 | 181 | 0.788634 |
a62c6a92c2904a8ad835a9dd15e4cf25ba3dd805 | 1,444 | py | Python | data_aug/gaussian_blur.py | avihu111/SimCLR_OCC | f8a68881f5f02c2e34807ff2b86f68f2412ffc29 | [
"MIT"
] | 2 | 2021-09-10T04:19:44.000Z | 2022-02-09T11:45:50.000Z | data_aug/gaussian_blur.py | avihu111/SimCLR_OCC | f8a68881f5f02c2e34807ff2b86f68f2412ffc29 | [
"MIT"
] | null | null | null | data_aug/gaussian_blur.py | avihu111/SimCLR_OCC | f8a68881f5f02c2e34807ff2b86f68f2412ffc29 | [
"MIT"
] | null | null | null | import numpy as np
import torch
from torch import nn
from torchvision.transforms import transforms
class GaussianBlur(object):
"""blur a single image on CPU"""
def __init__(self, kernel_size):
radias = kernel_size // 2
kernel_size = radias * 2 + 1
self.blur_h = nn.Conv2d(3, 3, kernel_size=(kernel_size, 1),
stride=1, padding=0, bias=False, groups=3)
self.blur_v = nn.Conv2d(3, 3, kernel_size=(1, kernel_size),
stride=1, padding=0, bias=False, groups=3)
self.k = kernel_size
self.r = radias
self.blur = nn.Sequential(
nn.ReflectionPad2d(radias),
self.blur_h,
self.blur_v
)
self.pil_to_tensor = transforms.ToTensor()
self.tensor_to_pil = transforms.ToPILImage()
def __call__(self, img):
img = self.pil_to_tensor(img).unsqueeze(0)
sigma = np.random.uniform(0.1, 2.0)
x = np.arange(-self.r, self.r + 1)
x = np.exp(-np.power(x, 2) / (2 * sigma * sigma))
x = x / x.sum()
x = torch.from_numpy(x).view(1, -1).repeat(3, 1)
self.blur_h.weight.data.copy_(x.view(3, 1, self.k, 1))
self.blur_v.weight.data.copy_(x.view(3, 1, 1, self.k))
with torch.no_grad():
img = self.blur(img)
img = img.squeeze()
img = self.tensor_to_pil(img)
return img
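# A minimal usage sketch; the crop size (96) and kernel_size (9, roughly 10% of the
# image side) are illustrative values, not taken from this repository.
example_transform = transforms.Compose([
    transforms.RandomResizedCrop(96),
    GaussianBlur(kernel_size=9),  # takes and returns a PIL image, so ToTensor can follow
    transforms.ToTensor(),
])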
| 30.083333 | 74 | 0.560942 |
151a96bd796511d165b70ec731a39543306c73ae | 6,013 | py | Python | nssrc/com/citrix/netscaler/nitro/resource/config/ns/nsdiameter.py | mahabs/nitro | be74e1e177f5c205c16126bc9b023f2348788409 | [
"Apache-2.0"
] | null | null | null | nssrc/com/citrix/netscaler/nitro/resource/config/ns/nsdiameter.py | mahabs/nitro | be74e1e177f5c205c16126bc9b023f2348788409 | [
"Apache-2.0"
] | null | null | null | nssrc/com/citrix/netscaler/nitro/resource/config/ns/nsdiameter.py | mahabs/nitro | be74e1e177f5c205c16126bc9b023f2348788409 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class nsdiameter(base_resource) :
""" Configuration for Diameter Parameters resource. """
def __init__(self) :
self._identity = ""
self._realm = ""
self._serverclosepropagation = ""
@property
def identity(self) :
"""DiameterIdentity to be used by NS. DiameterIdentity is used to identify a Diameter node uniquely. Before setting up diameter configuration, Netscaler (as a Diameter node) MUST be assigned a unique DiameterIdentity.
example =>
set ns diameter -identity netscaler.com
Now whenever Netscaler system needs to use identity in diameter messages. It will use 'netscaler.com' as Origin-Host AVP as defined in RFC3588
.<br/>Minimum length = 1.
"""
try :
return self._identity
except Exception as e:
raise e
@identity.setter
def identity(self, identity) :
"""DiameterIdentity to be used by NS. DiameterIdentity is used to identify a Diameter node uniquely. Before setting up diameter configuration, Netscaler (as a Diameter node) MUST be assigned a unique DiameterIdentity.
example =>
set ns diameter -identity netscaler.com
Now whenever Netscaler system needs to use identity in diameter messages. It will use 'netscaler.com' as Origin-Host AVP as defined in RFC3588
.<br/>Minimum length = 1
"""
try :
self._identity = identity
except Exception as e:
raise e
@property
def realm(self) :
"""Diameter Realm to be used by NS.
example =>
set ns diameter -realm com
Now whenever Netscaler system needs to use realm in diameter messages. It will use 'com' as Origin-Realm AVP as defined in RFC3588
.<br/>Minimum length = 1.
"""
try :
return self._realm
except Exception as e:
raise e
@realm.setter
def realm(self, realm) :
"""Diameter Realm to be used by NS.
example =>
set ns diameter -realm com
Now whenever Netscaler system needs to use realm in diameter messages. It will use 'com' as Origin-Realm AVP as defined in RFC3588
.<br/>Minimum length = 1
"""
try :
self._realm = realm
except Exception as e:
raise e
@property
def serverclosepropagation(self) :
"""when a Server connection goes down, whether to close the corresponding client connection if there were requests pending on the server.<br/>Default value: NO<br/>Possible values = YES, NO.
"""
try :
return self._serverclosepropagation
except Exception as e:
raise e
@serverclosepropagation.setter
def serverclosepropagation(self, serverclosepropagation) :
"""when a Server connection goes down, whether to close the corresponding client connection if there were requests pending on the server.<br/>Default value: NO<br/>Possible values = YES, NO
"""
try :
self._serverclosepropagation = serverclosepropagation
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(nsdiameter_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.nsdiameter
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
return None
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
""" Use this API to update nsdiameter.
"""
try :
if type(resource) is not list :
updateresource = nsdiameter()
updateresource.identity = resource.identity
updateresource.realm = resource.realm
updateresource.serverclosepropagation = resource.serverclosepropagation
return updateresource.update_resource(client)
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
""" Use this API to unset the properties of nsdiameter resource.
Properties that need to be unset are specified in args array.
"""
try :
if type(resource) is not list :
unsetresource = nsdiameter()
return unsetresource.unset_resource(client, args)
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
""" Use this API to fetch all the nsdiameter resources that are configured on netscaler.
"""
try :
if not name :
obj = nsdiameter()
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
class Serverclosepropagation:
YES = "YES"
NO = "NO"
class nsdiameter_response(base_response) :
def __init__(self, length=1) :
self.nsdiameter = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.nsdiameter = [nsdiameter() for _ in range(length)]
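# A minimal usage sketch; `client` is assumed to be an already logged-in
# nitro_service session from this SDK, and the identity/realm values are placeholders.
def _example_configure_diameter(client):
    conf = nsdiameter()
    conf.identity = "netscaler.com"
    conf.realm = "com"
    conf.serverclosepropagation = nsdiameter.Serverclosepropagation.NO
    nsdiameter.update(client, conf)   # pushes the Origin-Host / Origin-Realm settings
    return nsdiameter.get(client)     # reads back the configured diameter parameters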
| 33.405556 | 219 | 0.731582 |
a604eebd4b7da47e102903ed7f40bbeb32ede8ca | 514 | py | Python | examples/sparql_update_example.py | mwatts15/rdflib | 047e3e9781a28966ff9b6fd46ec20e459a5a0f11 | [
"BSD-3-Clause"
] | 1 | 2021-08-09T16:32:00.000Z | 2021-08-09T16:32:00.000Z | examples/sparql_update_example.py | mwatts15/rdflib | 047e3e9781a28966ff9b6fd46ec20e459a5a0f11 | [
"BSD-3-Clause"
] | null | null | null | examples/sparql_update_example.py | mwatts15/rdflib | 047e3e9781a28966ff9b6fd46ec20e459a5a0f11 | [
"BSD-3-Clause"
] | null | null | null |
"""
SPARQL Update statements can be applied with :meth:`rdflib.graph.Graph.update`
"""
import rdflib
if __name__ == '__main__':
g = rdflib.Graph()
g.load("foaf.rdf")
g.update('''
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX dbpedia: <http://dbpedia.org/resource/>
INSERT
{ ?s a dbpedia:Human . }
WHERE
{ ?s a foaf:Person . }
''')
for x in g.subjects(
rdflib.RDF.type, rdflib.URIRef('http://dbpedia.org/resource/Human')):
print(x)
| 19.037037 | 81 | 0.579767 |
b92bd6d986a62b16ef685747cfee462141c41cf4 | 7,018 | py | Python | library/ucs_chassis_template.py | sisudhir/ucsm-ansible | aae0adb1ba347c301d0d2943e5461b241553f07c | [
"NTP"
] | 6 | 2019-10-03T15:14:22.000Z | 2021-01-28T07:23:48.000Z | library/ucs_chassis_template.py | sisudhir/ucsm-ansible | aae0adb1ba347c301d0d2943e5461b241553f07c | [
"NTP"
] | 1 | 2021-06-01T23:48:07.000Z | 2021-06-01T23:48:07.000Z | library/ucs_chassis_template.py | sisudhir/ucsm-ansible | aae0adb1ba347c301d0d2943e5461b241553f07c | [
"NTP"
] | 6 | 2019-09-17T19:04:53.000Z | 2021-07-05T06:34:42.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: ucs_chassis_profile_template
short_description: Configures Chassis Profile Templates on Cisco UCS Manager
description:
- Configures Chassis Profile Templates on Cisco UCS Manager.
- Examples can be used with the UCS Platform Emulator U(https://communities.cisco.com/ucspe).
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify Chassis Profile Templates are present and will create if needed.
- If C(absent), will verify Chassis Profile Templates are absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the chassis profile template.
- This name can be between 2 and 32 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- This name must be unique across all chassis profiles and chassis profile templates within the same organization.
required: yes
template_type:
description:
- "The template type field which can be one of the following:"
- "initial-template — Any chassis profiles created from this template are not updated if the template changes."
- "updating-template — Any chassis profiles created from this template are updated if the template changes."
choices: [initial-template, updating-template]
default: initial-template
description:
description:
- A user-defined description of the chassis profile template.
- Enter up to 256 characters.
- "You can use any characters or spaces except the following:"
- "` (accent mark), \ (backslash), ^ (carat), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
aliases: [ descr ]
maintenance_policy:
description:
- The name of the chassis maintenance policy you want to associate with chassis profiles created from this template.
default: no maintenance policy
firmware_package:
description:
- The name of the firmware package version you want to use with chassis profiles created from this template.
default: no firmware package
compute_connection_policy:
description:
- The name of the compute connection policy you want to associate with chassis profiles created from this template.
default: no compute connection policy
sas_expander_policy:
description:
- The name of the sas expander configuration policy you want to associate with chassis profiles created from this template.
default: no sas expander configuration policy
    disk_zoning_policy:
description:
- The name of the disk zoning policy you want to associate with chassis profiles created from this template.
default: no disk zoning policy
requirements:
- ucsmsdk
author:
- Olli Walsdorf (@owalsdor)
- CiscoUcs (@CiscoUcs)
version_added: '2.5'
'''
EXAMPLES = r'''
- name: Configure Chassis Profile Template
ucs_chassis_profile_template:
hostname: 172.16.143.150
username: admin
password: password
name: S3260_Template
template_type: updating-template
maintenance_policy: S3260_Maint
firmware_package: S3260_FW
compute_connection_policy: S3260_Conn
sas_expander_policy: S3260_SAS
disk_zoning_policy: S3260_Disk
- name: Remove Chassis Profile Template
  ucs_chassis_profile_template:
hostname: 172.16.143.150
username: admin
password: password
name: S3260_Template
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
argument_spec = ucs_argument_spec
argument_spec.update(
org_dn=dict(type='str', default='org-root'),
name=dict(type='str', required=True),
description=dict(type='str', default=''),
template_type=dict(type='str', default='initial-template', choices=['initial-template', 'updating-template']),
maintenance_policy=dict(type='str', default=''),
firmware_package=dict(type='str', default=''),
compute_connection_policy=dict(type='str', default=''),
#sas_expander_policy=dict(type='str', default=''),
disk_zoning_policy=dict(type='str', default=''),
state=dict(type='str', default='present', choices=['present', 'absent']),
)
module = AnsibleModule(
argument_spec,
supports_check_mode=True,
)
ucs = UCSModule(module)
err = False
# UCSModule creation above verifies ucsmsdk is present and exits on failure. Additional imports are done below.
from ucsmsdk.mometa.equipment.EquipmentChassisProfile import EquipmentChassisProfile
changed = False
try:
mo_exists = False
props_match = False
dn_base = 'org-root'
dn = dn_base + '/cp-' + module.params['name']
mo = ucs.login_handle.query_dn(dn)
if mo:
mo_exists = True
if module.params['state'] == 'absent':
# mo must exist but all properties do not have to match
if mo_exists:
if not module.check_mode:
ucs.login_handle.remove_mo(mo)
ucs.login_handle.commit()
changed = True
else:
if mo_exists:
# check top-level mo props
kwargs = dict(name=module.params['name'])
kwargs['descr'] = module.params['description']
kwargs['chassis_fw_policy_name'] = module.params['firmware_package']
kwargs['compute_conn_policy_name'] = module.params['compute_connection_policy']
kwargs['disk_zoning_policy_name'] = module.params['disk_zoning_policy']
if (mo.check_prop_match(**kwargs)):
props_match = True
if not props_match:
if not module.check_mode:
# create if mo does not already exist
mo = EquipmentChassisProfile(
parent_mo_or_dn=dn_base,
name=module.params['name'],
descr=module.params['description'],
type=module.params['template_type'],
maint_policy_name=module.params['maintenance_policy'],
chassis_fw_policy_name=module.params['firmware_package'],
compute_conn_policy_name=module.params['compute_connection_policy'],
#sas_expander_config_policy_name=module.params['sas_expander_policy'],
disk_zoning_policy_name=module.params['disk_zoning_policy']
)
ucs.login_handle.add_mo(mo, True)
ucs.login_handle.commit()
changed = True
except Exception as e:
err = True
ucs.result['msg'] = "setup error: %s " % str(e)
ucs.result['changed'] = changed
if err:
module.fail_json(**ucs.result)
module.exit_json(**ucs.result)
if __name__ == '__main__':
main() | 36.362694 | 140 | 0.708179 |
754bc7416226a34ea92559599aa70617d01c5bb8 | 3,670 | py | Python | tia/util/windows.py | lsternlicht/tia | fe74d1876260a946e52bd733bc32da0698749f2c | [
"BSD-3-Clause"
] | 23 | 2017-11-13T01:05:49.000Z | 2022-03-30T01:38:00.000Z | tia/util/windows.py | lsternlicht/tia | fe74d1876260a946e52bd733bc32da0698749f2c | [
"BSD-3-Clause"
] | 1 | 2018-09-19T21:59:04.000Z | 2018-09-19T21:59:04.000Z | tia/util/windows.py | lsternlicht/tia | fe74d1876260a946e52bd733bc32da0698749f2c | [
"BSD-3-Clause"
] | 13 | 2018-11-26T21:53:36.000Z | 2022-01-09T00:10:27.000Z | """
collection of utilities for use on windows systems
"""
import os
def send_outlook_email(to, subject, body, attachments=None, cc=None, bcc=None, is_html=0):
""" Send an email using your local outlook client """
import win32com.client
asarr = lambda v: None if not v else isinstance(v, str) and [v] or v
def update_recipients(robj, users, type):
        users = asarr(users)
if users:
for u in users:
r = robj.Add(u)
r.Type = type
outlook = win32com.client.gencache.EnsureDispatch("Outlook.Application")
mapi = outlook.GetNamespace("MAPI")
constants = win32com.client.constants
msg = outlook.CreateItem(0)
# setup the recipients
recipients = msg.Recipients
to and update_recipients(recipients, to, constants.olTo)
cc and update_recipients(recipients, cc, constants.olCC)
bcc and update_recipients(recipients, bcc, constants.olBCC)
recipients.ResolveAll()
msg.Subject = subject
if is_html:
msg.BodyFormat = constants.olFormatHTML
msg.HTMLBody = body
else:
msg.Body = body
list(map(lambda fpath: msg.Attachments.Add(fpath), attachments or []))
msg.Send()
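# A minimal usage sketch; the addresses and attachment path are placeholders.
def _example_send_report():
    send_outlook_email(
        to=['[email protected]'],
        cc=['[email protected]'],
        subject='Daily report',
        body='<b>All jobs finished.</b>',
        attachments=['c:\\temp\\report.xlsx'],
        is_html=1,
    )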
class WinSCPBatch(object):
""" Provide a utility class which invokes the Winscp processes via the command line.
Example
-------
batch = WinSCPBatch('your_session_name', logfile='c:\\temp\\winscp\\mylog.log')
batch.add_download('remotefile.txt', 'c:\\temp\\winscp\\localfile.txt')
batch.execute()
"""
def __init__(self, session, logfile=None):
self.session = session
self.logfile = logfile
self.cmds = []
self.double_quote = lambda s: s and '""' + s + '""' or ''
def add_download(self, remote, local):
cmd = 'get %s %s' % (self.double_quote(remote), self.double_quote(local))
self.cmds.append(cmd)
def add_downloads(self, filemap):
"""Add the dict of downloads. (Note the Winscp command line accepts wildcards)
Parameters
----------
filemap: dict, (remote_filename -> local_filename)
"""
[self.add_download(k, v) for k, v in filemap.items()]
def add_upload(self, remote, local):
cmd = 'put %s %s' % (self.double_quote(local), self.double_quote(remote))
self.cmds.append(cmd)
def add_uploads(self, filemap):
"""Add the dict of uploads
Parameters
----------
filemap: dict, (remote_filename -> local_filename)
"""
[self.add_upload(k, v) for k, v in filemap.items()]
def add_cd(self, remote_dir):
cmd = 'cd %s' % remote_dir
self.cmds.append(cmd)
def execute(self):
env = os.environ['PATH']
if 'WinSCP' not in env:
if os.path.exists('C:\Program Files (x86)\WinSCP'):
os.environ['PATH'] = env + ';C:\Program Files (x86)\WinSCP'
elif os.path.exists('C:\Program Files\WinSCP'):
os.environ['PATH'] = env + ';C:\Program Files\WinSCP'
cmd = 'winscp.exe'
if self.logfile:
cmd += ' /log="%s"' % self.logfile
cmd += ' /command'
cmd += ' "option batch abort"'
cmd += ' "option confirm off"'
cmd += ' "open %s"' % self.session
for c in self.cmds:
cmd += ' "%s"' % c.strip()
cmd += ' "exit"'
# not able to detect failures - but can raise failures when looking for expected files
os.system(cmd)
#import subprocess as sub
#p = sub.Popen(cmd, stdout=sub.PIPE, stderr=sub.PIPE)
#output, errors = p.communicate()
#print output
| 32.192982 | 94 | 0.596458 |