repo_name stringlengths 7-94 | repo_path stringlengths 4-237 | repo_head_hexsha stringlengths 40-40 | content stringlengths 10-680k | apis stringlengths 2-680k
---|---|---|---|---|
goubertbrent/oca-backend | src/add_2_zip_imports.py | b9f59cc02568aecb55d4b54aec05245790ea25fd | # -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
from google.appengine.api import users as gusers
from mcfw.cache import CachedModelMixIn
from mcfw.consts import MISSING
from mcfw.restapi import register_postcall_hook, INJECTED_FUNCTIONS
from mcfw.rpc import serialize_value, get_type_details
from rogerthat.rpc import users
from rogerthat.utils import OFFLOAD_TYPE_WEB, offload
from rogerthat.utils.transactions import on_trans_committed
dummy = lambda: None
def log_restapi_call_result(function, success, kwargs, result_or_error):
if function.meta['silent']:
request_data = "****"
else:
kwarg_types = function.meta[u"kwarg_types"]
request_data = dict()
for arg, value in kwargs.iteritems():
if arg == 'accept_missing':
continue
if value == MISSING:
continue
request_data[arg] = serialize_value(value, *get_type_details(kwarg_types[arg], value), skip_missing=True)
if function.meta['silent_result']:
result = "****"
elif isinstance(result_or_error, Exception):
result = unicode(result_or_error)
else:
result = result_or_error
offload(users.get_current_user() or gusers.get_current_user(), OFFLOAD_TYPE_WEB, request_data,
result, function.meta['uri'], success)
register_postcall_hook(log_restapi_call_result)
INJECTED_FUNCTIONS.get_current_session = users.get_current_session
del log_restapi_call_result
CachedModelMixIn.on_trans_committed = lambda self, f, *args, **kwargs: on_trans_committed(f, *args, **kwargs)
| [((1919, 1966), 'mcfw.restapi.register_postcall_hook', 'register_postcall_hook', (['log_restapi_call_result'], {}), '(log_restapi_call_result)\n', (1941, 1966), False, 'from mcfw.restapi import register_postcall_hook, INJECTED_FUNCTIONS\n'), ((2135, 2173), 'rogerthat.utils.transactions.on_trans_committed', 'on_trans_committed', (['f', '*args'], {}), '(f, *args, **kwargs)\n', (2153, 2173), False, 'from rogerthat.utils.transactions import on_trans_committed\n'), ((1779, 1803), 'rogerthat.rpc.users.get_current_user', 'users.get_current_user', ([], {}), '()\n', (1801, 1803), False, 'from rogerthat.rpc import users\n'), ((1807, 1832), 'google.appengine.api.users.get_current_user', 'gusers.get_current_user', ([], {}), '()\n', (1830, 1832), True, 'from google.appengine.api import users as gusers\n'), ((1507, 1548), 'mcfw.rpc.get_type_details', 'get_type_details', (['kwarg_types[arg]', 'value'], {}), '(kwarg_types[arg], value)\n', (1523, 1548), False, 'from mcfw.rpc import serialize_value, get_type_details\n')] |
Galaxyinternship/Galaxy | lib/galaxy/model/migrate/versions/0026_cloud_tables.py | 204be086a8c16d6684584cefa9053ed7c86a1784 | """
This script adds tables needed for Galaxy cloud functionality.
"""
from __future__ import print_function
import datetime
import logging
from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT
now = datetime.datetime.utcnow
log = logging.getLogger( __name__ )
metadata = MetaData()
CloudImage_table = Table( "cloud_image", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "provider_type", TEXT ),
Column( "image_id", TEXT, nullable=False ),
Column( "manifest", TEXT ),
Column( "state", TEXT ),
Column( "architecture", TEXT ),
Column( "deleted", Boolean, default=False ) )
""" UserConfiguredInstance (UCI) table """
UCI_table = Table( "cloud_uci", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "credentials_id", Integer, ForeignKey( "cloud_user_credentials.id" ), index=True ),
Column( "key_pair_name", TEXT ),
Column( "key_pair_material", TEXT ),
Column( "name", TEXT ),
Column( "state", TEXT ),
Column( "error", TEXT ),
Column( "total_size", Integer ),
Column( "launch_time", DateTime ),
Column( "deleted", Boolean, default=False ) )
CloudInstance_table = Table( "cloud_instance", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "launch_time", DateTime ),
Column( "stop_time", DateTime ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True ),
Column( "type", TEXT ),
Column( "reservation_id", TEXT ),
Column( "instance_id", TEXT ),
Column( "mi_id", Integer, ForeignKey( "cloud_image.id" ), index=True ),
Column( "state", TEXT ),
Column( "error", TEXT ),
Column( "public_dns", TEXT ),
Column( "private_dns", TEXT ),
Column( "security_group", TEXT ),
Column( "availability_zone", TEXT ) )
CloudStore_table = Table( "cloud_store", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "attach_time", DateTime ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True, nullable=False ),
Column( "volume_id", TEXT ),
Column( "size", Integer, nullable=False ),
Column( "availability_zone", TEXT ),
Column( "inst_id", Integer, ForeignKey( "cloud_instance.id" ) ),
Column( "status", TEXT ),
Column( "device", TEXT ),
Column( "space_consumed", Integer ),
Column( "error", TEXT ),
Column( "deleted", Boolean, default=False ) )
CloudSnapshot_table = Table( "cloud_snapshot", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "uci_id", Integer, ForeignKey( "cloud_uci.id" ), index=True ),
Column( "store_id", Integer, ForeignKey( "cloud_store.id" ), index=True, nullable=False ),
Column( "snapshot_id", TEXT ),
Column( "status", TEXT ),
Column( "description", TEXT ),
Column( "error", TEXT ),
Column( "deleted", Boolean, default=False ) )
CloudUserCredentials_table = Table( "cloud_user_credentials", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "provider_id", Integer, ForeignKey( "cloud_provider.id" ), index=True, nullable=False ),
Column( "name", TEXT ),
Column( "access_key", TEXT ),
Column( "secret_key", TEXT ),
Column( "deleted", Boolean, default=False ) )
CloudProvider_table = Table( "cloud_provider", metadata,
Column( "id", Integer, primary_key=True ),
Column( "create_time", DateTime, default=now ),
Column( "update_time", DateTime, default=now, onupdate=now ),
Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True, nullable=False ),
Column( "type", TEXT, nullable=False ),
Column( "name", TEXT ),
Column( "region_connection", TEXT ),
Column( "region_name", TEXT ),
Column( "region_endpoint", TEXT ),
Column( "is_secure", Boolean ),
Column( "host", TEXT ),
Column( "port", Integer ),
Column( "proxy", TEXT ),
Column( "proxy_port", TEXT ),
Column( "proxy_user", TEXT ),
Column( "proxy_pass", TEXT ),
Column( "debug", Integer ),
Column( "https_connection_factory", TEXT ),
Column( "path", TEXT ),
Column( "deleted", Boolean, default=False ) )
def upgrade(migrate_engine):
metadata.bind = migrate_engine
print(__doc__)
# Load existing tables
metadata.reflect()
try:
CloudProvider_table.create()
CloudUserCredentials_table.create()
CloudImage_table.create()
UCI_table.create()
CloudInstance_table.create()
CloudStore_table.create()
CloudSnapshot_table.create()
except Exception:
log.exception("Creating cloud tables failed.")
def downgrade(migrate_engine):
metadata.bind = migrate_engine
metadata.reflect()
try:
CloudSnapshot_table.drop()
CloudStore_table.drop()
CloudInstance_table.drop()
UCI_table.drop()
CloudImage_table.drop()
CloudUserCredentials_table.drop()
CloudProvider_table.drop()
except Exception:
log.exception("Dropping cloud tables failed.")
| [((273, 300), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (290, 300), False, 'import logging\n'), ((314, 324), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (322, 324), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((403, 442), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (409, 442), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((472, 516), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (478, 516), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((546, 604), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (552, 604), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((634, 663), 'sqlalchemy.Column', 'Column', (['"""provider_type"""', 'TEXT'], {}), "('provider_type', TEXT)\n", (640, 663), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((693, 733), 'sqlalchemy.Column', 'Column', (['"""image_id"""', 'TEXT'], {'nullable': '(False)'}), "('image_id', TEXT, nullable=False)\n", (699, 733), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((763, 787), 'sqlalchemy.Column', 'Column', (['"""manifest"""', 'TEXT'], {}), "('manifest', TEXT)\n", (769, 787), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((817, 838), 'sqlalchemy.Column', 'Column', (['"""state"""', 'TEXT'], {}), "('state', TEXT)\n", (823, 838), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((868, 896), 'sqlalchemy.Column', 'Column', (['"""architecture"""', 'TEXT'], {}), "('architecture', TEXT)\n", (874, 896), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((926, 967), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (932, 967), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1077, 1116), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (1083, 1116), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1139, 1183), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (1145, 1183), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1206, 1264), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (1212, 1264), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1507, 1536), 'sqlalchemy.Column', 'Column', (['"""key_pair_name"""', 'TEXT'], {}), "('key_pair_name', TEXT)\n", (1513, 1536), False, 'from sqlalchemy import Boolean, Column, DateTime, 
ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1559, 1592), 'sqlalchemy.Column', 'Column', (['"""key_pair_material"""', 'TEXT'], {}), "('key_pair_material', TEXT)\n", (1565, 1592), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1615, 1635), 'sqlalchemy.Column', 'Column', (['"""name"""', 'TEXT'], {}), "('name', TEXT)\n", (1621, 1635), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1658, 1679), 'sqlalchemy.Column', 'Column', (['"""state"""', 'TEXT'], {}), "('state', TEXT)\n", (1664, 1679), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1702, 1723), 'sqlalchemy.Column', 'Column', (['"""error"""', 'TEXT'], {}), "('error', TEXT)\n", (1708, 1723), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1746, 1775), 'sqlalchemy.Column', 'Column', (['"""total_size"""', 'Integer'], {}), "('total_size', Integer)\n", (1752, 1775), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1798, 1829), 'sqlalchemy.Column', 'Column', (['"""launch_time"""', 'DateTime'], {}), "('launch_time', DateTime)\n", (1804, 1829), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1852, 1893), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (1858, 1893), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1985, 2024), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (1991, 2024), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2057, 2101), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (2063, 2101), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2134, 2192), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (2140, 2192), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2225, 2256), 'sqlalchemy.Column', 'Column', (['"""launch_time"""', 'DateTime'], {}), "('launch_time', DateTime)\n", (2231, 2256), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2289, 2318), 'sqlalchemy.Column', 'Column', (['"""stop_time"""', 'DateTime'], {}), "('stop_time', DateTime)\n", (2295, 2318), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2570, 2590), 'sqlalchemy.Column', 'Column', (['"""type"""', 'TEXT'], {}), "('type', TEXT)\n", (2576, 2590), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2623, 2653), 'sqlalchemy.Column', 'Column', (['"""reservation_id"""', 'TEXT'], {}), "('reservation_id', TEXT)\n", (2629, 2653), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2686, 2713), 'sqlalchemy.Column', 'Column', (['"""instance_id"""', 'TEXT'], {}), "('instance_id', TEXT)\n", (2692, 2713), 
False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2847, 2868), 'sqlalchemy.Column', 'Column', (['"""state"""', 'TEXT'], {}), "('state', TEXT)\n", (2853, 2868), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2901, 2922), 'sqlalchemy.Column', 'Column', (['"""error"""', 'TEXT'], {}), "('error', TEXT)\n", (2907, 2922), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2955, 2981), 'sqlalchemy.Column', 'Column', (['"""public_dns"""', 'TEXT'], {}), "('public_dns', TEXT)\n", (2961, 2981), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3014, 3041), 'sqlalchemy.Column', 'Column', (['"""private_dns"""', 'TEXT'], {}), "('private_dns', TEXT)\n", (3020, 3041), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3074, 3104), 'sqlalchemy.Column', 'Column', (['"""security_group"""', 'TEXT'], {}), "('security_group', TEXT)\n", (3080, 3104), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3137, 3170), 'sqlalchemy.Column', 'Column', (['"""availability_zone"""', 'TEXT'], {}), "('availability_zone', TEXT)\n", (3143, 3170), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3253, 3292), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (3259, 3292), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3322, 3366), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (3328, 3366), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3396, 3454), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (3402, 3454), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3484, 3515), 'sqlalchemy.Column', 'Column', (['"""attach_time"""', 'DateTime'], {}), "('attach_time', DateTime)\n", (3490, 3515), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3774, 3799), 'sqlalchemy.Column', 'Column', (['"""volume_id"""', 'TEXT'], {}), "('volume_id', TEXT)\n", (3780, 3799), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3829, 3868), 'sqlalchemy.Column', 'Column', (['"""size"""', 'Integer'], {'nullable': '(False)'}), "('size', Integer, nullable=False)\n", (3835, 3868), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3898, 3931), 'sqlalchemy.Column', 'Column', (['"""availability_zone"""', 'TEXT'], {}), "('availability_zone', TEXT)\n", (3904, 3931), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4052, 4074), 'sqlalchemy.Column', 'Column', (['"""status"""', 'TEXT'], {}), "('status', TEXT)\n", (4058, 4074), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4104, 4126), 'sqlalchemy.Column', 'Column', (['"""device"""', 
'TEXT'], {}), "('device', TEXT)\n", (4110, 4126), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4156, 4189), 'sqlalchemy.Column', 'Column', (['"""space_consumed"""', 'Integer'], {}), "('space_consumed', Integer)\n", (4162, 4189), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4219, 4240), 'sqlalchemy.Column', 'Column', (['"""error"""', 'TEXT'], {}), "('error', TEXT)\n", (4225, 4240), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4270, 4311), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (4276, 4311), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4403, 4442), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (4409, 4442), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4475, 4519), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (4481, 4519), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4552, 4610), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (4558, 4610), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4982, 5009), 'sqlalchemy.Column', 'Column', (['"""snapshot_id"""', 'TEXT'], {}), "('snapshot_id', TEXT)\n", (4988, 5009), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5042, 5064), 'sqlalchemy.Column', 'Column', (['"""status"""', 'TEXT'], {}), "('status', TEXT)\n", (5048, 5064), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5097, 5124), 'sqlalchemy.Column', 'Column', (['"""description"""', 'TEXT'], {}), "('description', TEXT)\n", (5103, 5124), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5157, 5178), 'sqlalchemy.Column', 'Column', (['"""error"""', 'TEXT'], {}), "('error', TEXT)\n", (5163, 5178), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5211, 5252), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (5217, 5252), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5366, 5405), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (5372, 5405), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5445, 5489), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (5451, 5489), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5529, 5587), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, 
onupdate=now)\n", (5535, 5587), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5886, 5906), 'sqlalchemy.Column', 'Column', (['"""name"""', 'TEXT'], {}), "('name', TEXT)\n", (5892, 5906), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5946, 5972), 'sqlalchemy.Column', 'Column', (['"""access_key"""', 'TEXT'], {}), "('access_key', TEXT)\n", (5952, 5972), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6012, 6038), 'sqlalchemy.Column', 'Column', (['"""secret_key"""', 'TEXT'], {}), "('secret_key', TEXT)\n", (6018, 6038), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6078, 6119), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (6084, 6119), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6211, 6250), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (6217, 6250), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6283, 6327), 'sqlalchemy.Column', 'Column', (['"""create_time"""', 'DateTime'], {'default': 'now'}), "('create_time', DateTime, default=now)\n", (6289, 6327), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6360, 6418), 'sqlalchemy.Column', 'Column', (['"""update_time"""', 'DateTime'], {'default': 'now', 'onupdate': 'now'}), "('update_time', DateTime, default=now, onupdate=now)\n", (6366, 6418), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6570, 6606), 'sqlalchemy.Column', 'Column', (['"""type"""', 'TEXT'], {'nullable': '(False)'}), "('type', TEXT, nullable=False)\n", (6576, 6606), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6639, 6659), 'sqlalchemy.Column', 'Column', (['"""name"""', 'TEXT'], {}), "('name', TEXT)\n", (6645, 6659), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6692, 6725), 'sqlalchemy.Column', 'Column', (['"""region_connection"""', 'TEXT'], {}), "('region_connection', TEXT)\n", (6698, 6725), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6758, 6785), 'sqlalchemy.Column', 'Column', (['"""region_name"""', 'TEXT'], {}), "('region_name', TEXT)\n", (6764, 6785), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6818, 6849), 'sqlalchemy.Column', 'Column', (['"""region_endpoint"""', 'TEXT'], {}), "('region_endpoint', TEXT)\n", (6824, 6849), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6882, 6910), 'sqlalchemy.Column', 'Column', (['"""is_secure"""', 'Boolean'], {}), "('is_secure', Boolean)\n", (6888, 6910), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6943, 6963), 'sqlalchemy.Column', 'Column', (['"""host"""', 'TEXT'], {}), "('host', TEXT)\n", (6949, 6963), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6996, 7019), 
'sqlalchemy.Column', 'Column', (['"""port"""', 'Integer'], {}), "('port', Integer)\n", (7002, 7019), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7052, 7073), 'sqlalchemy.Column', 'Column', (['"""proxy"""', 'TEXT'], {}), "('proxy', TEXT)\n", (7058, 7073), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7106, 7132), 'sqlalchemy.Column', 'Column', (['"""proxy_port"""', 'TEXT'], {}), "('proxy_port', TEXT)\n", (7112, 7132), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7165, 7191), 'sqlalchemy.Column', 'Column', (['"""proxy_user"""', 'TEXT'], {}), "('proxy_user', TEXT)\n", (7171, 7191), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7224, 7250), 'sqlalchemy.Column', 'Column', (['"""proxy_pass"""', 'TEXT'], {}), "('proxy_pass', TEXT)\n", (7230, 7250), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7283, 7307), 'sqlalchemy.Column', 'Column', (['"""debug"""', 'Integer'], {}), "('debug', Integer)\n", (7289, 7307), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7340, 7380), 'sqlalchemy.Column', 'Column', (['"""https_connection_factory"""', 'TEXT'], {}), "('https_connection_factory', TEXT)\n", (7346, 7380), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7413, 7433), 'sqlalchemy.Column', 'Column', (['"""path"""', 'TEXT'], {}), "('path', TEXT)\n", (7419, 7433), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((7466, 7507), 'sqlalchemy.Column', 'Column', (['"""deleted"""', 'Boolean'], {'default': '(False)'}), "('deleted', Boolean, default=False)\n", (7472, 7507), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1315, 1343), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (1325, 1343), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((1431, 1470), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_user_credentials.id"""'], {}), "('cloud_user_credentials.id')\n", (1441, 1470), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2379, 2407), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (2389, 2407), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2497, 2523), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_uci.id"""'], {}), "('cloud_uci.id')\n", (2507, 2523), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((2772, 2800), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_image.id"""'], {}), "('cloud_image.id')\n", (2782, 2800), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3573, 3601), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (3583, 3601), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3688, 3714), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_uci.id"""'], {}), 
"('cloud_uci.id')\n", (3698, 3714), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((3989, 4020), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_instance.id"""'], {}), "('cloud_instance.id')\n", (3999, 4020), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4671, 4699), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (4681, 4699), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4789, 4815), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_uci.id"""'], {}), "('cloud_uci.id')\n", (4799, 4815), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((4891, 4919), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_store.id"""'], {}), "('cloud_store.id')\n", (4901, 4919), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5655, 5683), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (5665, 5683), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((5785, 5816), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""cloud_provider.id"""'], {}), "('cloud_provider.id')\n", (5795, 5816), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n'), ((6479, 6507), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""galaxy_user.id"""'], {}), "('galaxy_user.id')\n", (6489, 6507), False, 'from sqlalchemy import Boolean, Column, DateTime, ForeignKey, Integer, MetaData, Table, TEXT\n')] |
mrf-foundation/ckios_v1 | apps/user/urls.py | 3556a99ba5e01f00e137fd124903ace77d2cba28 | # -*- encoding: utf-8 -*-
"""
Copyright (c) 2021 ronyman.com
"""
from django.contrib import admin
from django.contrib.auth import views as auth_views
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from apps.user import views as user_views
from .views import EditProfilePage
urlpatterns = [
#User
path('admin/', admin.site.urls),
path('register/', user_views.register, name='register'),
path('login/', auth_views.LoginView.as_view(template_name='registration/login.html'), name='login'),
path('profile/', user_views.profile, name='profile'),
path('edit_profile/', user_views.edit_profile, name='edit_profile'),
path("myprofile/", user_views.myprofile, name="Myprofile"),
path('logout/', auth_views.LogoutView.as_view(template_name='users/logout.html'), name='logout'),
#path('tinymce/', include('tinymce.urls')),
path('edit_profile_page/', user_views.EditProfilePage.as_view(template_name='registration/edit_profile_page.html'), name='edit_profile_page'),
# For PasswordPresset
path('admin/password_reset/',auth_views.PasswordResetView.as_view(),name='admin_password_reset',),
path('admin/password_reset/done/',auth_views.PasswordResetDoneView.as_view(),name='password_reset_done',),
path('reset/<uidb64>/<token>/',auth_views.PasswordResetConfirmView.as_view(),name='password_reset_confirm',),
path('reset/done/',auth_views.PasswordResetCompleteView.as_view(),name='password_reset_complete',),
] | [((373, 404), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (377, 404), False, 'from django.urls import path, include\n'), ((410, 465), 'django.urls.path', 'path', (['"""register/"""', 'user_views.register'], {'name': '"""register"""'}), "('register/', user_views.register, name='register')\n", (414, 465), False, 'from django.urls import path, include\n'), ((576, 628), 'django.urls.path', 'path', (['"""profile/"""', 'user_views.profile'], {'name': '"""profile"""'}), "('profile/', user_views.profile, name='profile')\n", (580, 628), False, 'from django.urls import path, include\n'), ((634, 701), 'django.urls.path', 'path', (['"""edit_profile/"""', 'user_views.edit_profile'], {'name': '"""edit_profile"""'}), "('edit_profile/', user_views.edit_profile, name='edit_profile')\n", (638, 701), False, 'from django.urls import path, include\n'), ((707, 765), 'django.urls.path', 'path', (['"""myprofile/"""', 'user_views.myprofile'], {'name': '"""Myprofile"""'}), "('myprofile/', user_views.myprofile, name='Myprofile')\n", (711, 765), False, 'from django.urls import path, include\n'), ((486, 555), 'django.contrib.auth.views.LoginView.as_view', 'auth_views.LoginView.as_view', ([], {'template_name': '"""registration/login.html"""'}), "(template_name='registration/login.html')\n", (514, 555), True, 'from django.contrib.auth import views as auth_views\n'), ((787, 851), 'django.contrib.auth.views.LogoutView.as_view', 'auth_views.LogoutView.as_view', ([], {'template_name': '"""users/logout.html"""'}), "(template_name='users/logout.html')\n", (816, 851), True, 'from django.contrib.auth import views as auth_views\n'), ((948, 1040), 'apps.user.views.EditProfilePage.as_view', 'user_views.EditProfilePage.as_view', ([], {'template_name': '"""registration/edit_profile_page.html"""'}), "(template_name=\n 'registration/edit_profile_page.html')\n", (982, 1040), True, 'from apps.user import views as user_views\n'), ((1124, 1162), 'django.contrib.auth.views.PasswordResetView.as_view', 'auth_views.PasswordResetView.as_view', ([], {}), '()\n', (1160, 1162), True, 'from django.contrib.auth import views as auth_views\n'), ((1232, 1274), 'django.contrib.auth.views.PasswordResetDoneView.as_view', 'auth_views.PasswordResetDoneView.as_view', ([], {}), '()\n', (1272, 1274), True, 'from django.contrib.auth import views as auth_views\n'), ((1340, 1385), 'django.contrib.auth.views.PasswordResetConfirmView.as_view', 'auth_views.PasswordResetConfirmView.as_view', ([], {}), '()\n', (1383, 1385), True, 'from django.contrib.auth import views as auth_views\n'), ((1442, 1488), 'django.contrib.auth.views.PasswordResetCompleteView.as_view', 'auth_views.PasswordResetCompleteView.as_view', ([], {}), '()\n', (1486, 1488), True, 'from django.contrib.auth import views as auth_views\n')] |
tflati/ncbi-search | sra_django_api/user/migrations/0003_auto_20180914_1242.py | 2f31c57ffb95c2c874b65c03c58edd96eb822dfb | # Generated by Django 2.0.3 on 2018-09-14 12:42
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('user', '0002_project'),
]
operations = [
migrations.RenameField(
model_name='project',
old_name='file_path',
new_name='base_path',
),
migrations.AlterField(
model_name='project',
name='creation_date',
field=models.DateTimeField(auto_now_add=True),
),
]
| [((221, 314), 'django.db.migrations.RenameField', 'migrations.RenameField', ([], {'model_name': '"""project"""', 'old_name': '"""file_path"""', 'new_name': '"""base_path"""'}), "(model_name='project', old_name='file_path', new_name\n ='base_path')\n", (243, 314), False, 'from django.db import migrations, models\n'), ((475, 514), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (495, 514), False, 'from django.db import migrations, models\n')] |
frankgh/deep-visualization-toolbox | image_misc.py | c9bb26eacae0b4d1a25d3844538c2830026add76 | #! /usr/bin/env python
import cv2
import matplotlib.pyplot as plt
import skimage
import skimage.io
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.pyplot import cm
from mpl_toolkits.axes_grid1 import make_axes_locatable
from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, \
fromstring, ceil, dtype, float32, sqrt, dot, zeros
from misc import WithTimer
def norm01(arr):
arr = arr.copy()
arr -= arr.min()
arr /= arr.max() + 1e-10
return arr
def norm01c(arr, center):
'''Maps the input range to [0,1] such that the center value maps to .5'''
arr = arr.copy()
arr -= center
arr /= max(2 * arr.max(), -2 * arr.min()) + 1e-10
arr += .5
assert arr.min() >= 0
assert arr.max() <= 1
return arr
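# Usage sketch for the normalisation helpers above (hypothetical values, not part
# of the original module): norm01c keeps `center` at 0.5, which is useful for
# symmetric colormaps centred on zero.
#   >>> norm01c(array([-2.0, 0.0, 6.0]), 0)
#   array([ 0.33333333,  0.5       ,  1.        ])   # approximately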
def norm0255(arr):
'''Maps the input range to [0,255] as dtype uint8'''
arr = arr.copy()
arr -= arr.min()
arr *= 255.0 / (arr.max() + 1e-10)
arr = array(arr, 'uint8')
return arr
def cv2_read_cap_rgb(cap, saveto=None):
rval, frame = cap.read()
if saveto:
cv2.imwrite(saveto, frame)
if len(frame.shape) == 2:
# Upconvert single channel grayscale to color
frame = frame[:, :, newaxis]
if frame.shape[2] == 1:
frame = tile(frame, (1, 1, 3))
if frame.shape[2] > 3:
# Chop off transparency
frame = frame[:, :, :3]
frame = frame[:, :, ::-1] # Convert native OpenCV BGR -> RGB
return frame
def plt_plot_signal(data, labels, zoom_level=-1, offset=0, markers=None, title=None):
fig = Figure(figsize=(5, 5))
canvas = FigureCanvas(fig)
ax = None
if len(data.shape) == 1:
data = expand_dims(data, axis=1)
if zoom_level == -1:
zoom_level = data.shape[0]
color = iter(cm.rainbow(linspace(0, 1, data.shape[1])))
s = offset
e = s + zoom_level
x = arange(s, e)
for i in range(data.shape[1]):
c = next(color)
label = labels[i] if labels is not None else 'Signal {}'.format(i + 1)
ax = fig.add_subplot(data.shape[1], 1, (i + 1), sharex=ax)
ax.plot(x, data[s:e, i], lw=1, label=label, c=c)
# # ax.set_adjustable('box-forced')
# ax.set_xlim(left=0, right=zoom_level)
# ax.get_xaxis().set_visible(i == data.shape[1] - 1)
# ax.xaxis.set_ticks(arange(s, e + 1, (e - s) / 10.0))
# ax.xaxis.set_major_formatter(ticker.FormatStrFormatter('%0.1f'))
ax.legend(loc='lower right')
if markers is not None and i in markers:
for val in markers[i]:
if val >= s and val < e:
ax.axvline(x=val)
if title is not None:
fig.suptitle(title)
fig.tight_layout()
fig.subplots_adjust(hspace=0)
canvas.draw() # draw the canvas, cache the renderer
l, b, w, h = fig.bbox.bounds
w, h = int(w), int(h)
im = fromstring(canvas.tostring_rgb(), dtype='uint8')
im.shape = h, w, 3
return im
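# Example call (hypothetical data, illustrative only): render two channels of a
# 500-sample signal into a single RGB image buffer.
#   sig = numpy.random.standard_normal((500, 2))    # assumed input, shape (samples, channels)
#   img = plt_plot_signal(sig, labels=['ch0', 'ch1'], zoom_level=250, offset=0)
#   img.shape   # (height, width, 3) uint8 array, ready for cv2/OpenGL display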
def plt_plot_heatmap(data,
shape,
rows,
cols,
title=None,
x_axis_label=None,
y_axis_label=None,
x_axis_values=None,
y_axis_values=None,
hide_axis=True,
vmin=None,
vmax=None):
res = []
shape = (max(2, ceil(shape[1] / 80 / cols)), max(2, ceil(shape[0] / 80 / rows)))
fig, ax = plt.subplots(1, 1, figsize=shape)
canvas = FigureCanvas(fig)
# for i in xrange(y.shape[0]):
# sns.heatmap(y[i], ax=ax, vmin=minn, vmax=maxx)
# canvas.draw() # draw the canvas, cache the renderer
#
# l, b, w, h = fig.bbox.bounds
# w, h = int(w), int(h)
# im = fromstring(canvas.tostring_rgb(), dtype='uint8')
# im.shape = h, w, 3
# res.append(im)
img = ax.imshow(
zeros((data.shape[1], data.shape[2])),
cmap='viridis',
vmin=vmin if vmin is not None else data.min(),
vmax=vmax if vmax is not None else data.max(),
interpolation='none',
aspect='auto'
)
# get rid of spines and fix range of axes, rotate x-axis labels
ax.spines['left'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
if hide_axis:
ax.set_xticks([])
ax.set_yticks([])
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
fig.subplots_adjust(left=0.05, bottom=0.05, right=0.95, top=0.95, hspace=0, wspace=0)
else:
if title is not None:
plt.title(title)
if x_axis_label is not None:
ax.set_xlabel(x_axis_label)
if y_axis_label is not None:
ax.set_ylabel(y_axis_label)
if x_axis_values is not None:
a = arange(0, x_axis_values.shape[0], 3) + 0.5
b = arange(x_axis_values.min(), x_axis_values.max() + 1.5, 1.5)
ax.set_xticks(a)
ax.set_xticklabels(b, rotation=90)
if y_axis_values is not None:
a = arange(0, y_axis_values.shape[0], 3) + 0.5
# c = roundup((y_axis_values.max() - y_axis_values.min()) / 11)
# b = arange(y_axis_values.min(), y_axis_values.max(), c)
b = linspace(y_axis_values.min(), y_axis_values.max(), num=10, dtype=int)
ax.set_yticks(a)
ax.set_yticklabels(b)
# for tick in ax.get_xticklabels():
# tick.set_rotation(90)
if not hide_axis:
divider = make_axes_locatable(ax)
# colorbar on the right of ax. Colorbar width in % of ax and space between them is defined by pad in inches
cax = divider.append_axes('right', size='5%', pad=0.07)
cb = fig.colorbar(img, cax=cax)
# remove colorbar frame/spines
cb.outline.set_visible(False)
# don't stop after each subfigure change
plt.show(block=False)
if not hide_axis:
fig.tight_layout()
canvas.draw() # draw the canvas, cache the renderer
# keep bg in memory
background = fig.canvas.copy_from_bbox(ax.bbox)
# start = time.time()
for i in xrange(data.shape[0]):
img.set_array(data[i])
# restore background
fig.canvas.restore_region(background)
ax.draw_artist(img)
# fill in the axes rectangle
fig.canvas.blit(ax.bbox)
# loop through array
# for i in xrange(data.shape[0]):
# time.sleep(0.005)
# img.set_array(data[i])
# canvas.draw()
l, b, w, h = fig.bbox.bounds
w, h = int(w), int(h)
im = fromstring(canvas.tostring_rgb(), dtype='uint8')
im.shape = h, w, 3
res.append(im)
fig.clf()
plt.clf()
plt.close()
return array(res)
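# Example call (hypothetical data): render five 32x64 heatmap frames that share
# one colour scale; the result is one RGB buffer per input slice.
#   frames = plt_plot_heatmap(numpy.random.uniform(0, 1, (5, 32, 64)),
#                             shape=(480, 640), rows=1, cols=1, hide_axis=True)
#   frames.shape   # (5, H, W, 3)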
def plt_plot_filter(x, y, title, x_axis_label, y_axis_label, log_scale):
fig, ax = plt.subplots(1, 1, figsize=(4, 4))
canvas = FigureCanvas(fig)
x = arange(0, y.shape[0]) if x is None else x
if log_scale == 1:
ax.semilogy(x, y, lw=2)
else:
ax.plot(x, y, lw=2)
ax.set(xlabel=x_axis_label, ylabel=y_axis_label, title=title)
fig.tight_layout()
canvas.draw() # draw the canvas, cache the renderer
l, b, w, h = fig.bbox.bounds
w, h = int(w), int(h)
im = fromstring(canvas.tostring_rgb(), dtype='uint8')
im.shape = h, w, 3
fig.clf()
plt.clf()
plt.close()
return im
def plt_plot_filters_blit(y, x, shape, rows, cols,
title=None,
x_axis_label=None,
y_axis_label=None,
log_scale=0,
hide_axis=False):
res = []
x = arange(0, y.shape[1]) if x is None else x
# if log_scale == 1:
# y = log(y)
# elif log_scale == 2:
# x = log(x)
# elif log_scale == 3:
# x = log(x)
# y = log(y)
shape = (max(2, ceil(shape[1] / 80 / cols)), max(2, ceil(shape[0] / 80 / rows)))
fig, ax = plt.subplots(1, 1, figsize=shape)
canvas = FigureCanvas(fig)
ax.set_xlim(min(x), max(x))
ax.set_ylim(y.min(), y.max())
if hide_axis:
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
fig.subplots_adjust(left=0.05, bottom=0.05, right=0.95, top=0.95, hspace=0, wspace=0)
else:
if x_axis_label is not None:
ax.set_xlabel(x_axis_label)
if y_axis_label is not None:
ax.set_ylabel(y_axis_label)
if title is not None:
plt.title(title)
line, = ax.plot([], [], lw=2)
if not hide_axis:
fig.tight_layout()
canvas.draw() # draw the canvas, cache the renderer
# keep bg in memory
background = fig.canvas.copy_from_bbox(ax.bbox)
for i in xrange(y.shape[0]):
line.set_data(x, y[i])
# line.set_color()
# restore background
fig.canvas.restore_region(background)
# redraw just the points
ax.draw_artist(line)
# fill in the axes rectangle
fig.canvas.blit(ax.bbox)
l, b, w, h = fig.bbox.bounds
w, h = int(w), int(h)
im = fromstring(canvas.tostring_rgb(), dtype='uint8')
im.shape = h, w, 3
res.append(im)
fig.clf()
plt.clf()
plt.close()
return array(res)
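# Note on the function above: it uses matplotlib "blitting" for speed. The static
# parts of the figure (axes, ticks, labels) are drawn once and cached with
# copy_from_bbox(); each frame then restores that cached background, redraws only
# the Line2D artist with draw_artist(), and blits the axes bounding box, avoiding
# a full canvas.draw() per curve.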
def plt_plot_filters_fast(y, x, shape, rows, cols,
title=None,
x_axis_label=None,
y_axis_label=None,
share_axes=True,
log_scale=0):
res = []
shape = (ceil(shape[1] / 80 / cols), ceil(shape[0] / 80 / rows))
fig, ax = plt.subplots(1, 1, figsize=shape)
canvas = FigureCanvas(fig)
# ax.set_aspect('equal')
if share_axes:
if x is not None:
min_x, max_x = min(x), max(x)
else:
min_x, max_x = 0, y.shape[1]
min_y, max_y = y.min(), y.max()
ax.set_xlim(min_x, max_x)
ax.set_ylim(min_y, max_y)
# ax.hold(True)
plt.subplots_adjust(left=0.185, bottom=0.125, right=0.98, top=0.98)
# plt.show(False)
# plt.draw()
# background = fig.canvas.copy_from_bbox(ax.bbox)
# points = ax.plot(x[0], linewidth=1)[0]
for i in xrange(y.shape[0]):
if x is not None:
if log_scale == 1:
ax.semilogy(x, y[i], linewidth=1)
else:
ax.plot(x, y[i], linewidth=1)
else:
if log_scale == 1:
ax.semilogy(y[i], linewidth=1)
else:
ax.plot(y[i], linewidth=1)
if x_axis_label is not None:
ax.set_xlabel(x_axis_label)
if y_axis_label is not None:
ax.set_ylabel(y_axis_label)
if title is not None:
plt.title(title)
# plt.autoscale(enable=True, axis='y', tight=True)
# plt.tight_layout()
# Turn off axes and set axes limits
# ax.axis('off')
canvas.draw() # draw the canvas, cache the renderer
l, b, w, h = fig.bbox.bounds
w, h = int(w), int(h)
im = fromstring(canvas.tostring_rgb(), dtype='uint8')
im.shape = h, w, 3
res.append(im)
# ax.cla()
fig.clf()
return array(res)
def plt_plot_filters(x, y, shape, rows, cols,
selected_unit=None,
selected_unit_color=None,
title=None,
x_axis_label=None,
y_axis_label=None,
share_axes=True,
log_scale=0):
shape = (ceil(shape[1] / 80), ceil(shape[0] / 80))
fig = Figure(figsize=shape)
canvas = FigureCanvas(fig)
ax, highlighted_ax, right_ax, bottom_ax, curr, right, bottom = None, None, None, None, None, None, None
if selected_unit is not None:
row = selected_unit / cols
col = selected_unit % cols
curr = selected_unit
bottom = (selected_unit + cols) if row < rows - 1 else None
right = (selected_unit + 1) if col < cols - 1 else None
for i in xrange(x.shape[0]):
if share_axes:
ax = fig.add_subplot(rows, cols, (i + 1), axisbelow=False, sharex=ax, sharey=ax)
else:
ax = fig.add_subplot(rows, cols, (i + 1), axisbelow=False)
if y is not None:
if log_scale == 1:
ax.semilogy(y, x[i], linewidth=1)
else:
ax.plot(y, x[i], linewidth=1)
else:
if log_scale == 1:
ax.semilogy(x[i], linewidth=1)
else:
ax.plot(x[i], linewidth=1)
ax.set_xlim(left=0, right=x.shape[1] - 1)
ax.get_xaxis().set_visible(i >= ((rows - 1) * cols))
ax.get_yaxis().set_visible(i % cols == 0)
if i == curr:
highlighted_ax = ax
if i == bottom:
bottom_ax = ax
if i == right:
right_ax = ax
if x_axis_label is not None:
ax.set_xlabel(x_axis_label)
if y_axis_label is not None:
ax.set_ylabel(y_axis_label)
if highlighted_ax is not None:
for axis in ['top', 'bottom', 'left', 'right']:
highlighted_ax.spines[axis].set_linewidth(2.5)
highlighted_ax.spines[axis].set_color(selected_unit_color)
if bottom_ax is not None:
bottom_ax.spines['top'].set_linewidth(2)
bottom_ax.spines['top'].set_color(selected_unit_color)
if right_ax is not None:
right_ax.spines['left'].set_linewidth(2)
right_ax.spines['left'].set_color(selected_unit_color)
if title is not None:
fig.suptitle(title)
fig.tight_layout()
fig.subplots_adjust(hspace=0, wspace=0)
canvas.draw() # draw the canvas, cache the renderer
l, b, w, h = fig.bbox.bounds
w, h = int(w), int(h)
im = fromstring(canvas.tostring_rgb(), dtype='uint8')
im.shape = h, w, 3
return im
def cv2_read_file_rgb(filename):
'''Reads an image from file. Always returns (x,y,3)'''
im = cv2.imread(filename)
if len(im.shape) == 2:
# Upconvert single channel grayscale to color
im = im[:, :, newaxis]
if im.shape[2] == 1:
im = tile(im, (1, 1, 3))
if im.shape[2] > 3:
# Chop off transparency
im = im[:, :, :3]
return cv2.cvtColor(im, cv2.COLOR_BGR2RGB) # Convert native OpenCV BGR -> RGB
def crop_to_square(frame):
i_size, j_size = frame.shape[0], frame.shape[1]
if j_size > i_size:
# landscape
offset = (j_size - i_size) / 2
return frame[:, offset:offset + i_size, :]
else:
# portrait
offset = (i_size - j_size) / 2
return frame[offset:offset + j_size, :, :]
def cv2_imshow_rgb(window_name, img):
# Convert native OpenCV BGR -> RGB before displaying
cv2.imshow(window_name, cv2.cvtColor(img, cv2.COLOR_BGR2RGB))
def caffe_load_image(filename, color=True, as_uint=False):
'''
Copied from Caffe to simplify potential import problems.
Load an image converting from grayscale or alpha as needed.
Take
filename: string
color: flag for color format. True (default) loads as RGB while False
loads as intensity (if image is already grayscale).
Give
image: an image with type float32 in range [0, 1]
of size (H x W x 3) in RGB or
of size (H x W x 1) in grayscale.
'''
with WithTimer('imread', quiet=True):
if as_uint:
img = skimage.io.imread(filename)
else:
img = skimage.img_as_float(skimage.io.imread(filename)).astype(float32)
if img.ndim == 2:
img = img[:, :, newaxis]
if color:
img = tile(img, (1, 1, 3))
elif img.shape[2] == 4:
img = img[:, :, :3]
return img
def get_tiles_height_width(n_tiles, desired_width=None):
'''Get a height x width size that will fit n_tiles tiles.'''
if desired_width == None:
# square
width = int(ceil(sqrt(n_tiles)))
height = width
else:
assert isinstance(desired_width, int)
width = desired_width
height = int(ceil(float(n_tiles) / width))
return height, width
def get_tiles_height_width_ratio(n_tiles, width_ratio=1.0):
'''Get a height x width size that will fit n_tiles tiles.'''
width = int(ceil(sqrt(n_tiles * width_ratio)))
return get_tiles_height_width(n_tiles, desired_width=width)
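# Layout arithmetic example (values chosen for illustration): for 10 tiles the
# default square layout is ceil(sqrt(10)) = 4, i.e. a 4x4 grid; fixing the width
# gives ceil(10 / width) rows instead.
#   >>> get_tiles_height_width(10)
#   (4, 4)
#   >>> get_tiles_height_width(10, desired_width=5)
#   (2, 5)
#   >>> get_tiles_height_width_ratio(10, width_ratio=2.0)   # width = ceil(sqrt(20)) = 5
#   (2, 5)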
def tile_images_normalize(data, c01=False, boost_indiv=0.0, boost_gamma=1.0, single_tile=False, scale_range=1.0,
neg_pos_colors=None):
data = data.copy()
if single_tile:
# promote 2D image -> 3D batch (01 -> b01) or 3D image -> 4D batch (01c -> b01c OR c01 -> bc01)
data = data[newaxis]
if c01:
# Convert bc01 -> b01c
assert len(data.shape) == 4, 'expected bc01 data'
data = data.transpose(0, 2, 3, 1)
if neg_pos_colors:
neg_clr, pos_clr = neg_pos_colors
neg_clr = array(neg_clr).reshape((1, 3))
pos_clr = array(pos_clr).reshape((1, 3))
# Keep 0 at 0
data /= max(data.max(), -data.min()) + 1e-10 # Map data to [-1, 1]
# data += .5 * scale_range # now in [0, scale_range]
# assert data.min() >= 0
# assert data.max() <= scale_range
if len(data.shape) == 3:
data = data.reshape(data.shape + (1,))
assert data.shape[3] == 1, 'neg_pos_color only makes sense if color data is not provided (channels should be 1)'
data = dot((data > 0) * data, pos_clr) + dot((data < 0) * -data, neg_clr)
data -= data.min()
data *= scale_range / (data.max() + 1e-10)
# sqrt-scale (0->0, .1->.3, 1->1)
assert boost_indiv >= 0 and boost_indiv <= 1, 'boost_indiv out of range'
# print 'using boost_indiv:', boost_indiv
if boost_indiv > 0:
if len(data.shape) == 4:
mm = (data.max(-1).max(-1).max(-1) + 1e-10) ** -boost_indiv
else:
mm = (data.max(-1).max(-1) + 1e-10) ** -boost_indiv
data = (data.T * mm).T
if boost_gamma != 1.0:
data = data ** boost_gamma
# Promote single-channel data to 3 channel color
if len(data.shape) == 3:
# b01 -> b01c
data = tile(data[:, :, :, newaxis], 3)
return data
def tile_images_make_tiles(data, padsize=1, padval=0, hw=None, highlights=None):
if hw:
height, width = hw
else:
height, width = get_tiles_height_width(data.shape[0])
assert height * width >= data.shape[0], '{} rows x {} columns cannot fit {} tiles'.format(height, width,
data.shape[0])
# First iteration: one-way padding, no highlights
# padding = ((0, width*height - data.shape[0]), (0, padsize), (0, padsize)) + ((0, 0),) * (data.ndim - 3)
# data = pad(data, padding, mode='constant', constant_values=(padval, padval))
# Second iteration: padding with highlights
# padding = ((0, width*height - data.shape[0]), (padsize, padsize), (padsize, padsize)) + ((0, 0),) * (data.ndim - 3)
# print 'tile_images: data min,max =', data.min(), data.max()
# padder = SmartPadder()
##data = pad(data, padding, mode=jy_pad_fn)
# data = pad(data, padding, mode=padder.pad_function)
# print 'padder.calls =', padder.calls
# Third iteration: two-way padding with highlights
if highlights is not None:
assert len(highlights) == data.shape[0]
padding = ((0, width * height - data.shape[0]), (padsize, padsize), (padsize, padsize)) + ((0, 0),) * (
data.ndim - 3)
# First pad with constant vals
try:
len(padval)
except:
padval = tuple((padval,))
assert len(padval) in (1, 3), 'padval should be grayscale (len 1) or color (len 3)'
if len(padval) == 1:
data = pad(data, padding, mode='constant', constant_values=(padval, padval))
else:
data = pad(data, padding, mode='constant', constant_values=(0, 0))
for cc in (0, 1, 2):
# Replace 0s with proper color in each channel
data[:padding[0][0], :, :, cc] = padval[cc]
if padding[0][1] > 0:
data[-padding[0][1]:, :, :, cc] = padval[cc]
data[:, :padding[1][0], :, cc] = padval[cc]
if padding[1][1] > 0:
data[:, -padding[1][1]:, :, cc] = padval[cc]
data[:, :, :padding[2][0], cc] = padval[cc]
if padding[2][1] > 0:
data[:, :, -padding[2][1]:, cc] = padval[cc]
if highlights is not None:
# Then highlight if necessary
for ii, highlight in enumerate(highlights):
if highlight is not None:
data[ii, :padding[1][0], :, :] = highlight
if padding[1][1] > 0:
data[ii, -padding[1][1]:, :, :] = highlight
data[ii, :, :padding[2][0], :] = highlight
if padding[2][1] > 0:
data[ii, :, -padding[2][1]:, :] = highlight
# tile the filters into an image
data = data.reshape((height, width) + data.shape[1:]).transpose((0, 2, 1, 3) + tuple(range(4, data.ndim + 1)))
data = data.reshape((height * data.shape[1], width * data.shape[3]) + data.shape[4:])
data = data[0:-padsize, 0:-padsize] # remove excess padding
return (height, width), data
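# Example call (hypothetical data): tile 12 colour filters of size 8x8 into one
# padded mosaic; the grid size comes from get_tiles_height_width (here 4x4).
#   filters = numpy.random.uniform(0, 1, (12, 8, 8, 3))
#   (rows, cols), mosaic = tile_images_make_tiles(filters, padsize=1, padval=0)
#   # rows == cols == 4; mosaic is a single (H, W, 3) image with 1-px separators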
def to_255(vals_01):
'''Convert vals in [0,1] to [0,255]'''
try:
ret = [v * 255 for v in vals_01]
if type(vals_01) is tuple:
return tuple(ret)
else:
return ret
except TypeError:
# Not iterable (single int or float)
return vals_01 * 255
def ensure_uint255_and_resize_to_fit(img, out_max_shape,
shrink_interpolation=cv2.INTER_LINEAR,
grow_interpolation=cv2.INTER_NEAREST):
as_uint255 = ensure_uint255(img)
return resize_to_fit(as_uint255, out_max_shape,
dtype_out='uint8',
shrink_interpolation=shrink_interpolation,
grow_interpolation=grow_interpolation)
def ensure_uint255(arr):
'''If data is float, multiply by 255 and convert to uint8. Else leave as uint8.'''
if arr.dtype == 'uint8':
return arr
elif arr.dtype in ('float32', 'float64'):
# print 'extra check...'
# assert arr.max() <= 1.1
return array(arr * 255, dtype='uint8')
else:
raise Exception('ensure_uint255 expects uint8 or float input but got %s with range [%g,%g,].' % (
arr.dtype, arr.min(), arr.max()))
def ensure_float01(arr, dtype_preference='float32'):
'''If data is uint, convert to float and divide by 255. Else leave at float.'''
if arr.dtype == 'uint8':
# print 'extra check...'
# assert arr.max() <= 256
return array(arr, dtype=dtype_preference) / 255
elif arr.dtype in ('float32', 'float64'):
return arr
else:
raise Exception('ensure_float01 expects uint8 or float input but got %s with range [%g,%g,].' % (
arr.dtype, arr.min(), arr.max()))
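# Round-trip sketch for the two dtype helpers above (hypothetical values):
#   u8  = array([[0, 128, 255]], dtype='uint8')
#   f01 = ensure_float01(u8)        # float32 in [0, 1] (divided by 255)
#   ensure_uint255(f01)             # back to uint8 in [0, 255]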
def resize_to_fit(img, out_max_shape,
dtype_out=None,
shrink_interpolation=cv2.INTER_LINEAR,
grow_interpolation=cv2.INTER_NEAREST):
'''Resizes to fit within out_max_shape. If ratio is different,
returns an image that fits but is smaller along one of the two
dimensions.
If one of the out_max_shape dimensions is None, then use only the other dimension to perform resizing.
Timing info on MBP Retina with OpenBlas:
- conclusion: uint8 is always tied or faster. float64 is slower.
Scaling down:
In [79]: timeit.Timer('resize_to_fit(aa, (200,200))', setup='from kerasvis.app import resize_to_fit; import numpy as np; aa = array(np.random.uniform(0,255,(1000,1000,3)), dtype="uint8")').timeit(100)
Out[79]: 0.04950380325317383
In [77]: timeit.Timer('resize_to_fit(aa, (200,200))', setup='from kerasvis.app import resize_to_fit; import numpy as np; aa = array(np.random.uniform(0,255,(1000,1000,3)), dtype="float32")').timeit(100)
Out[77]: 0.049156904220581055
In [76]: timeit.Timer('resize_to_fit(aa, (200,200))', setup='from kerasvis.app import resize_to_fit; import numpy as np; aa = array(np.random.uniform(0,255,(1000,1000,3)), dtype="float64")').timeit(100)
Out[76]: 0.11808204650878906
Scaling up:
In [68]: timeit.Timer('resize_to_fit(aa, (2000,2000))', setup='from kerasvis.app import resize_to_fit; import numpy as np; aa = array(np.random.uniform(0,255,(1000,1000,3)), dtype="uint8")').timeit(100)
Out[68]: 0.4357950687408447
In [70]: timeit.Timer('resize_to_fit(aa, (2000,2000))', setup='from kerasvis.app import resize_to_fit; import numpy as np; aa = array(np.random.uniform(0,255,(1000,1000,3)), dtype="float32")').timeit(100)
Out[70]: 1.3411099910736084
In [73]: timeit.Timer('resize_to_fit(aa, (2000,2000))', setup='from kerasvis.app import resize_to_fit; import numpy as np; aa = array(np.random.uniform(0,255,(1000,1000,3)), dtype="float64")').timeit(100)
Out[73]: 2.6078310012817383
'''
if dtype_out is not None and img.dtype != dtype_out:
dtype_in_size = img.dtype.itemsize
dtype_out_size = dtype(dtype_out).itemsize
convert_early = (dtype_out_size < dtype_in_size)
convert_late = not convert_early
else:
convert_early = False
convert_late = False
if img.shape[0] == 0 and img.shape[1] == 0:
scale = 1
elif out_max_shape[0] is None or img.shape[0] == 0:
scale = float(out_max_shape[1]) / img.shape[1]
elif out_max_shape[1] is None or img.shape[1] == 0:
scale = float(out_max_shape[0]) / img.shape[0]
else:
scale = min(float(out_max_shape[0]) / img.shape[0],
float(out_max_shape[1]) / img.shape[1])
if convert_early:
img = array(img, dtype=dtype_out)
out = cv2.resize(img,
(int(img.shape[1] * scale), int(img.shape[0] * scale)), # in (c,r) order
interpolation=grow_interpolation if scale > 1 else shrink_interpolation)
if convert_late:
out = array(out, dtype=dtype_out)
return out
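# Usage sketch (hypothetical HxWx3 uint8 image `frame`): shrink it so it fits within
# 200x200 while preserving the aspect ratio, as in the timings in the docstring above:
#   small = resize_to_fit(frame, (200, 200), dtype_out='uint8')
#   pane  = resize_to_fit(frame, (None, 300))   # constrain only the width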
class FormattedString(object):
def __init__(self, string, defaults, face=None, fsize=None, clr=None, thick=None, align=None, width=None):
self.string = string
self.face = face if face else defaults['face']
self.fsize = fsize if fsize else defaults['fsize']
self.clr = clr if clr else defaults['clr']
self.thick = thick if thick else defaults['thick']
self.width = width # if None: calculate width automatically
self.align = align if align else defaults.get('align', 'left')
def cv2_typeset_text(data, lines, loc, between=' ', string_spacing=0, line_spacing=0, wrap=False):
    '''Typesets multiple strings on multiple lines of text, where each string may have its own formatting.
Given:
data: as in cv2.putText
loc: as in cv2.putText
lines: list of lists of FormattedString objects, may be modified by this function!
between: what to insert between each string on each line, ala str.join
string_spacing: extra spacing to insert between strings on a line
line_spacing: extra spacing to insert between lines
wrap: if true, wraps words to next line
Returns:
locy: new y location = loc[1] + y-offset resulting from lines of text
'''
data_width = data.shape[1]
# lines_modified = False
# lines = lines_in # will be deepcopied if modification is needed later
if isinstance(lines, FormattedString):
lines = [lines]
assert isinstance(lines,
list), 'lines must be a list of lines or list of FormattedString objects or a single FormattedString object'
if len(lines) == 0:
return loc[1]
if not isinstance(lines[0], list):
# If a single line of text is given as a list of strings, convert to multiline format
lines = [lines]
locy = loc[1]
line_num = 0
while line_num < len(lines):
line = lines[line_num]
maxy = 0
locx = loc[0]
for ii, fs in enumerate(line):
last_on_line = (ii == len(line) - 1)
if not last_on_line:
fs.string += between
boxsize, _ = cv2.getTextSize(fs.string, fs.face, fs.fsize, fs.thick)
if fs.width is not None:
if fs.align == 'right':
locx += fs.width - boxsize[0]
elif fs.align == 'center':
locx += (fs.width - boxsize[0]) / 2
# print 'right boundary is', locx + boxsize[0], '(%s)' % fs.string
# print 'HERE'
right_edge = locx + boxsize[0]
if wrap and ii > 0 and right_edge > data_width:
# Wrap rest of line to the next line
# if not lines_modified:
# lines = deepcopy(lines_in)
# lines_modified = True
new_this_line = line[:ii]
new_next_line = line[ii:]
lines[line_num] = new_this_line
lines.insert(line_num + 1, new_next_line)
break
###line_num += 1
###continue
cv2.putText(data, fs.string, (locx, locy), fs.face, fs.fsize, fs.clr, fs.thick)
maxy = max(maxy, boxsize[1])
if fs.width is not None:
if fs.align == 'right':
locx += boxsize[0]
elif fs.align == 'left':
locx += fs.width
elif fs.align == 'center':
locx += fs.width - (fs.width - boxsize[0]) / 2
else:
locx += boxsize[0]
locx += string_spacing
line_num += 1
locy += maxy + line_spacing
return locy
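# Usage sketch (hypothetical frame and values): typeset two differently formatted
# strings on one line, wrapping to the next line if they run past the frame width:
#   defaults = dict(face=cv2.FONT_HERSHEY_SIMPLEX, fsize=0.5, clr=(255, 255, 255), thick=1)
#   lines = [[FormattedString('loss:', defaults),
#             FormattedString('0.123', defaults, clr=(0, 255, 0))]]
#   next_y = cv2_typeset_text(frame, lines, (10, 20), wrap=True)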
def saveimage(filename, im):
'''Saves an image with pixel values in [0,1]'''
# matplotlib.image.imsave(filename, im)
if len(im.shape) == 3:
# Reverse RGB to OpenCV BGR order for color images
cv2.imwrite(filename, 255 * im[:, :, ::-1])
else:
cv2.imwrite(filename, 255 * im)
def saveimagesc(filename, im):
saveimage(filename, norm01(im))
def saveimagescc(filename, im, center):
saveimage(filename, norm01c(im, center))
| [((1017, 1036), 'numpy.array', 'array', (['arr', '"""uint8"""'], {}), "(arr, 'uint8')\n", (1022, 1036), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((1633, 1655), 'matplotlib.figure.Figure', 'Figure', ([], {'figsize': '(5, 5)'}), '(figsize=(5, 5))\n', (1639, 1655), False, 'from matplotlib.figure import Figure\n'), ((1669, 1686), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (1681, 1686), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((1941, 1953), 'numpy.arange', 'arange', (['s', 'e'], {}), '(s, e)\n', (1947, 1953), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((3556, 3589), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': 'shape'}), '(1, 1, figsize=shape)\n', (3568, 3589), True, 'import matplotlib.pyplot as plt\n'), ((3603, 3620), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (3615, 3620), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((6141, 6162), 'matplotlib.pyplot.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (6149, 6162), True, 'import matplotlib.pyplot as plt\n'), ((6985, 6994), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (6992, 6994), True, 'import matplotlib.pyplot as plt\n'), ((6999, 7010), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (7008, 7010), True, 'import matplotlib.pyplot as plt\n'), ((7022, 7032), 'numpy.array', 'array', (['res'], {}), '(res)\n', (7027, 7032), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((7122, 7156), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(4, 4)'}), '(1, 1, figsize=(4, 4))\n', (7134, 7156), True, 'import matplotlib.pyplot as plt\n'), ((7170, 7187), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (7182, 7187), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((7637, 7646), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (7644, 7646), True, 'import matplotlib.pyplot as plt\n'), ((7651, 7662), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (7660, 7662), True, 'import matplotlib.pyplot as plt\n'), ((8267, 8300), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': 'shape'}), '(1, 1, figsize=shape)\n', (8279, 8300), True, 'import matplotlib.pyplot as plt\n'), ((8314, 8331), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (8326, 8331), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((9520, 9529), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (9527, 9529), True, 'import matplotlib.pyplot as plt\n'), ((9534, 9545), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (9543, 9545), True, 'import matplotlib.pyplot as plt\n'), ((9557, 9567), 'numpy.array', 'array', (['res'], {}), '(res)\n', (9562, 9567), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((9928, 9961), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': 'shape'}), '(1, 1, figsize=shape)\n', (9940, 9961), 
True, 'import matplotlib.pyplot as plt\n'), ((9975, 9992), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (9987, 9992), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((10298, 10365), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'left': '(0.185)', 'bottom': '(0.125)', 'right': '(0.98)', 'top': '(0.98)'}), '(left=0.185, bottom=0.125, right=0.98, top=0.98)\n', (10317, 10365), True, 'import matplotlib.pyplot as plt\n'), ((11525, 11535), 'numpy.array', 'array', (['res'], {}), '(res)\n', (11530, 11535), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((11923, 11944), 'matplotlib.figure.Figure', 'Figure', ([], {'figsize': 'shape'}), '(figsize=shape)\n', (11929, 11944), False, 'from matplotlib.figure import Figure\n'), ((11958, 11975), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (11970, 11975), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((14352, 14372), 'cv2.imread', 'cv2.imread', (['filename'], {}), '(filename)\n', (14362, 14372), False, 'import cv2\n'), ((14636, 14671), 'cv2.cvtColor', 'cv2.cvtColor', (['im', 'cv2.COLOR_BGR2RGB'], {}), '(im, cv2.COLOR_BGR2RGB)\n', (14648, 14671), False, 'import cv2\n'), ((1146, 1172), 'cv2.imwrite', 'cv2.imwrite', (['saveto', 'frame'], {}), '(saveto, frame)\n', (1157, 1172), False, 'import cv2\n'), ((1338, 1360), 'numpy.tile', 'tile', (['frame', '(1, 1, 3)'], {}), '(frame, (1, 1, 3))\n', (1342, 1360), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((1746, 1771), 'numpy.expand_dims', 'expand_dims', (['data'], {'axis': '(1)'}), '(data, axis=1)\n', (1757, 1771), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((4002, 4039), 'numpy.zeros', 'zeros', (['(data.shape[1], data.shape[2])'], {}), '((data.shape[1], data.shape[2]))\n', (4007, 4039), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((5770, 5793), 'mpl_toolkits.axes_grid1.make_axes_locatable', 'make_axes_locatable', (['ax'], {}), '(ax)\n', (5789, 5793), False, 'from mpl_toolkits.axes_grid1 import make_axes_locatable\n'), ((7196, 7217), 'numpy.arange', 'arange', (['(0)', 'y.shape[0]'], {}), '(0, y.shape[0])\n', (7202, 7217), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((7962, 7983), 'numpy.arange', 'arange', (['(0)', 'y.shape[1]'], {}), '(0, y.shape[1])\n', (7968, 7983), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((8783, 8799), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (8792, 8799), True, 'import matplotlib.pyplot as plt\n'), ((9858, 9884), 'numpy.ceil', 'ceil', (['(shape[1] / 80 / cols)'], {}), '(shape[1] / 80 / cols)\n', (9862, 9884), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((9886, 9912), 'numpy.ceil', 'ceil', (['(shape[0] / 80 / rows)'], {}), '(shape[0] / 80 / rows)\n', (9890, 9912), False, 'from numpy import arange, array, newaxis, 
tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((11871, 11890), 'numpy.ceil', 'ceil', (['(shape[1] / 80)'], {}), '(shape[1] / 80)\n', (11875, 11890), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((11892, 11911), 'numpy.ceil', 'ceil', (['(shape[0] / 80)'], {}), '(shape[0] / 80)\n', (11896, 11911), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((14523, 14542), 'numpy.tile', 'tile', (['im', '(1, 1, 3)'], {}), '(im, (1, 1, 3))\n', (14527, 14542), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((15167, 15203), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2RGB'], {}), '(img, cv2.COLOR_BGR2RGB)\n', (15179, 15203), False, 'import cv2\n'), ((15726, 15757), 'misc.WithTimer', 'WithTimer', (['"""imread"""'], {'quiet': '(True)'}), "('imread', quiet=True)\n", (15735, 15757), False, 'from misc import WithTimer\n'), ((18568, 18601), 'numpy.tile', 'tile', (['data[:, :, :, (newaxis)]', '(3)'], {}), '(data[:, :, :, (newaxis)], 3)\n', (18572, 18601), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((20200, 20269), 'numpy.pad', 'pad', (['data', 'padding'], {'mode': '"""constant"""', 'constant_values': '(padval, padval)'}), "(data, padding, mode='constant', constant_values=(padval, padval))\n", (20203, 20269), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((20295, 20354), 'numpy.pad', 'pad', (['data', 'padding'], {'mode': '"""constant"""', 'constant_values': '(0, 0)'}), "(data, padding, mode='constant', constant_values=(0, 0))\n", (20298, 20354), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((26336, 26363), 'numpy.array', 'array', (['img'], {'dtype': 'dtype_out'}), '(img, dtype=dtype_out)\n', (26341, 26363), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((26615, 26642), 'numpy.array', 'array', (['out'], {'dtype': 'dtype_out'}), '(out, dtype=dtype_out)\n', (26620, 26642), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((30594, 30637), 'cv2.imwrite', 'cv2.imwrite', (['filename', '(255 * im[:, :, ::-1])'], {}), '(filename, 255 * im[:, :, ::-1])\n', (30605, 30637), False, 'import cv2\n'), ((30656, 30687), 'cv2.imwrite', 'cv2.imwrite', (['filename', '(255 * im)'], {}), '(filename, 255 * im)\n', (30667, 30687), False, 'import cv2\n'), ((1862, 1891), 'numpy.linspace', 'linspace', (['(0)', '(1)', 'data.shape[1]'], {}), '(0, 1, data.shape[1])\n', (1870, 1891), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((3477, 3503), 'numpy.ceil', 'ceil', (['(shape[1] / 80 / cols)'], {}), '(shape[1] / 80 / cols)\n', (3481, 3503), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((3513, 3539), 'numpy.ceil', 'ceil', (['(shape[0] / 80 / rows)'], {}), '(shape[0] / 80 / 
rows)\n', (3517, 3539), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((4840, 4856), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (4849, 4856), True, 'import matplotlib.pyplot as plt\n'), ((8188, 8214), 'numpy.ceil', 'ceil', (['(shape[1] / 80 / cols)'], {}), '(shape[1] / 80 / cols)\n', (8192, 8214), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((8224, 8250), 'numpy.ceil', 'ceil', (['(shape[0] / 80 / rows)'], {}), '(shape[0] / 80 / rows)\n', (8228, 8250), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((11062, 11078), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (11071, 11078), True, 'import matplotlib.pyplot as plt\n'), ((15797, 15824), 'skimage.io.imread', 'skimage.io.imread', (['filename'], {}), '(filename)\n', (15814, 15824), False, 'import skimage\n'), ((16014, 16034), 'numpy.tile', 'tile', (['img', '(1, 1, 3)'], {}), '(img, (1, 1, 3))\n', (16018, 16034), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((16651, 16678), 'numpy.sqrt', 'sqrt', (['(n_tiles * width_ratio)'], {}), '(n_tiles * width_ratio)\n', (16655, 16678), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((17848, 17879), 'numpy.dot', 'dot', (['((data > 0) * data)', 'pos_clr'], {}), '((data > 0) * data, pos_clr)\n', (17851, 17879), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((17882, 17914), 'numpy.dot', 'dot', (['((data < 0) * -data)', 'neg_clr'], {}), '((data < 0) * -data, neg_clr)\n', (17885, 17914), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((22799, 22830), 'numpy.array', 'array', (['(arr * 255)'], {'dtype': '"""uint8"""'}), "(arr * 255, dtype='uint8')\n", (22804, 22830), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((23243, 23277), 'numpy.array', 'array', (['arr'], {'dtype': 'dtype_preference'}), '(arr, dtype=dtype_preference)\n', (23248, 23277), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((25687, 25703), 'numpy.dtype', 'dtype', (['dtype_out'], {}), '(dtype_out)\n', (25692, 25703), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((28793, 28848), 'cv2.getTextSize', 'cv2.getTextSize', (['fs.string', 'fs.face', 'fs.fsize', 'fs.thick'], {}), '(fs.string, fs.face, fs.fsize, fs.thick)\n', (28808, 28848), False, 'import cv2\n'), ((29785, 29864), 'cv2.putText', 'cv2.putText', (['data', 'fs.string', '(locx, locy)', 'fs.face', 'fs.fsize', 'fs.clr', 'fs.thick'], {}), '(data, fs.string, (locx, locy), fs.face, fs.fsize, fs.clr, fs.thick)\n', (29796, 29864), False, 'import cv2\n'), ((5068, 5104), 'numpy.arange', 'arange', (['(0)', 'x_axis_values.shape[0]', '(3)'], {}), '(0, x_axis_values.shape[0], 3)\n', (5074, 5104), False, 'from numpy import arange, array, 
newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((5318, 5354), 'numpy.arange', 'arange', (['(0)', 'y_axis_values.shape[0]', '(3)'], {}), '(0, y_axis_values.shape[0], 3)\n', (5324, 5354), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((16302, 16315), 'numpy.sqrt', 'sqrt', (['n_tiles'], {}), '(n_tiles)\n', (16306, 16315), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((17311, 17325), 'numpy.array', 'array', (['neg_clr'], {}), '(neg_clr)\n', (17316, 17325), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((17360, 17374), 'numpy.array', 'array', (['pos_clr'], {}), '(pos_clr)\n', (17365, 17374), False, 'from numpy import arange, array, newaxis, tile, linspace, pad, expand_dims, fromstring, ceil, dtype, float32, sqrt, dot, zeros\n'), ((15878, 15905), 'skimage.io.imread', 'skimage.io.imread', (['filename'], {}), '(filename)\n', (15895, 15905), False, 'import skimage\n')] |
Kedyn/PingPong | text.py | 47e39a9d30e1a3a7b828c5b5e85b0666d67b0d7b | import pygame.font
import copy
class Text:
"""Draws a text to the screen."""
def __init__(self, rect, size, color, screen, text):
self.screen = screen
self.rect = copy.deepcopy(rect)
self.text = text
self.color = color
self.font = pygame.font.SysFont(None, size)
self.text_image = None
self.text_image_rect = None
self.prep_img()
def prep_img(self):
"""Turn msg into a rendered image, and center text on the button."""
self.text_image = self.font.render(self.text, True,
self.color)
self.text_image_rect = self.text_image.get_rect()
self.text_image_rect.center = self.rect.center
def render(self):
self.screen.blit(self.text_image, self.text_image_rect)
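# Usage sketch (hypothetical surface and rect): draw a centered caption each frame:
#   caption = Text(pygame.Rect(0, 0, 200, 50), 48, (255, 255, 255), screen, "Ping Pong")
#   caption.render()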
| [((190, 209), 'copy.deepcopy', 'copy.deepcopy', (['rect'], {}), '(rect)\n', (203, 209), False, 'import copy\n')] |
ey-advisory-technology-testing/selenium | py/test/selenium/webdriver/common/window_tests.py | 7e342d3b8eb913a9626475a158c4bc6ae5d68315 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pytest
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.support.wait import WebDriverWait
# @pytest.mark.xfail_ie
# @pytest.mark.xfail_chromiumedge(reason="Fails on Travis")
# @pytest.mark.xfail_firefox(reason="Fails on Travis")
# @pytest.mark.xfail_remote(reason="Fails on Travis")
# def testShouldMaximizeTheWindow(driver):
# resize_timeout = 5
# wait = WebDriverWait(driver, resize_timeout)
# old_size = driver.get_window_size()
# driver.set_window_size(200, 200)
# wait.until(
# lambda dr: dr.get_window_size() != old_size if old_size["width"] != 200 and old_size["height"] != 200 else True)
# size = driver.get_window_size()
# driver.maximize_window()
# wait.until(lambda dr: dr.get_window_size() != size)
# new_size = driver.get_window_size()
# assert new_size["width"] > size["width"]
# assert new_size["height"] > size["height"]
def test_should_get_the_size_of_the_current_window(driver):
size = driver.get_window_size()
assert size.get('width') > 0
assert size.get('height') > 0
def test_should_set_the_size_of_the_current_window(driver):
size = driver.get_window_size()
target_width = size.get('width') - 20
target_height = size.get('height') - 20
driver.set_window_size(width=target_width, height=target_height)
new_size = driver.get_window_size()
assert new_size.get('width') == target_width
assert new_size.get('height') == target_height
def test_should_get_the_position_of_the_current_window(driver):
position = driver.get_window_position()
assert position.get('x') >= 0
assert position.get('y') >= 0
def test_should_set_the_position_of_the_current_window(driver):
position = driver.get_window_position()
target_x = position.get('x') + 10
target_y = position.get('y') + 10
driver.set_window_position(x=target_x, y=target_y)
WebDriverWait(driver, 2)\
.until(lambda d: d.get_window_position()['x'] != position['x'] and d.get_window_position()['y'] != position['y'])
new_position = driver.get_window_position()
assert new_position.get('x') == target_x
assert new_position.get('y') == target_y
@pytest.mark.xfail_safari(raises=WebDriverException,
reason='Get Window Rect command not implemented')
def test_should_get_the_rect_of_the_current_window(driver):
rect = driver.get_window_rect()
assert rect.get('x') >= 0
assert rect.get('y') >= 0
assert rect.get('width') >= 0
assert rect.get('height') >= 0
@pytest.mark.xfail_safari(raises=WebDriverException,
reason='Get Window Rect command not implemented')
def test_should_set_the_rect_of_the_current_window(driver):
rect = driver.get_window_rect()
target_x = rect.get('x') + 10
target_y = rect.get('y') + 10
target_width = rect.get('width') + 10
target_height = rect.get('height') + 10
driver.set_window_rect(x=target_x, y=target_y, width=target_width, height=target_height)
WebDriverWait(driver, 2)\
.until(lambda d: d.get_window_position()['x'] != rect['x'] and d.get_window_position()['y'] != rect['y'])
new_rect = driver.get_window_rect()
assert new_rect.get('x') == target_x
assert new_rect.get('y') == target_y
assert new_rect.get('width') == target_width
assert new_rect.get('height') == target_height
# @pytest.mark.xfail_safari(raises=WebDriverException,
# reason='Fullscreen command not implemented')
# @pytest.mark.skipif(os.environ.get('TRAVIS') == 'true',
# reason='Fullscreen command causes Travis to hang')
# @pytest.mark.no_driver_after_test
# def test_should_fullscreen_the_current_window(driver):
# start_width = driver.execute_script('return window.innerWidth;')
# start_height = driver.execute_script('return window.innerHeight;')
# driver.fullscreen_window()
# WebDriverWait(driver, 2)\
# .until(lambda d: driver.execute_script('return window.innerWidth;') > start_width)
# end_width = driver.execute_script('return window.innerWidth;')
# end_height = driver.execute_script('return window.innerHeight;')
# driver.quit() # Kill driver so we aren't running fullscreen after
# assert end_width > start_width
# assert end_height > start_height
# @pytest.mark.xfail_safari(raises=WebDriverException,
# reason='Minimize command not implemented')
# @pytest.mark.skipif(os.environ.get('TRAVIS') == 'true',
# reason='Minimize command causes Travis to hang')
# @pytest.mark.no_driver_after_test
# def test_should_minimize_the_current_window(driver):
# driver.minimize_window()
# minimized = driver.execute_script('return document.hidden;')
# driver.quit() # Kill driver so we aren't running minimized after
# assert minimized is True
| [((3004, 3110), 'pytest.mark.xfail_safari', 'pytest.mark.xfail_safari', ([], {'raises': 'WebDriverException', 'reason': '"""Get Window Rect command not implemented"""'}), "(raises=WebDriverException, reason=\n 'Get Window Rect command not implemented')\n", (3028, 3110), False, 'import pytest\n'), ((3360, 3466), 'pytest.mark.xfail_safari', 'pytest.mark.xfail_safari', ([], {'raises': 'WebDriverException', 'reason': '"""Get Window Rect command not implemented"""'}), "(raises=WebDriverException, reason=\n 'Get Window Rect command not implemented')\n", (3384, 3466), False, 'import pytest\n'), ((2714, 2738), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['driver', '(2)'], {}), '(driver, 2)\n', (2727, 2738), False, 'from selenium.webdriver.support.wait import WebDriverWait\n'), ((3838, 3862), 'selenium.webdriver.support.wait.WebDriverWait', 'WebDriverWait', (['driver', '(2)'], {}), '(driver, 2)\n', (3851, 3862), False, 'from selenium.webdriver.support.wait import WebDriverWait\n')] |
mayuri-dhote/psydac | psydac/cad/geometry.py | 01ddbe2d049a599684c45060912d01c2658160a3 | # coding: utf-8
#
# A Geometry class contains the list of patches and additional information about
# the topology, i.e. connectivity and boundaries.
# For the moment, it is used as a container that can be loaded from a file
# (HDF5).
from itertools import product
from collections import abc
import numpy as np
import string
import random
import h5py
import yaml
import os
from mpi4py import MPI
from psydac.fem.splines import SplineSpace
from psydac.fem.tensor import TensorFemSpace
from psydac.mapping.discrete import SplineMapping, NurbsMapping
from sympde.topology import Domain, Line, Square, Cube, NCubeInterior
from sympde.topology.basic import Union
#==============================================================================
class Geometry( object ):
_ldim = None
_pdim = None
_patches = []
_topology = None
#--------------------------------------------------------------------------
# Option [1]: from a (domain, mappings) or a file
#--------------------------------------------------------------------------
def __init__( self, domain=None, mappings=None,
filename=None, comm=MPI.COMM_WORLD ):
# ... read the geometry if the filename is given
if not( filename is None ):
self.read(filename, comm=comm)
elif not( domain is None ):
assert( isinstance( domain, Domain ) )
assert( not( mappings is None ))
assert isinstance( mappings, dict)
# ... check sanity
interior_names = sorted(domain.interior_names)
mappings_keys = sorted(list(mappings.keys()))
assert( interior_names == mappings_keys )
# ...
self._domain = domain
self._ldim = domain.dim
self._pdim = domain.dim # TODO must be given => only dim is defined for a Domain
self._mappings = mappings
else:
raise ValueError('Wrong input')
# ...
self._comm = comm
#--------------------------------------------------------------------------
# Option [2]: from a discrete mapping
#--------------------------------------------------------------------------
@classmethod
def from_discrete_mapping( cls, mapping, comm=None ):
"""Create a geometry from one discrete mapping."""
if mapping.ldim in [1]:
raise NotImplementedError('')
if mapping.ldim == 2:
domain = Square(name='Omega')
mappings = {'Omega': mapping}
return Geometry(domain=domain, mappings=mappings, comm=comm)
elif mapping.ldim == 3:
domain = Cube(name='Omega')
mappings = {'Omega': mapping}
return Geometry(domain=domain, mappings=mappings, comm=comm)
#--------------------------------------------------------------------------
# Option [3]: discrete topological line/square/cube
#--------------------------------------------------------------------------
@classmethod
def from_topological_domain(cls, domain, ncells, comm=None):
interior = domain.interior
if not isinstance(interior, Union):
interior = [interior]
for itr in interior:
if not isinstance(itr, NCubeInterior):
msg = "Topological domain must be an NCube;"\
" got {} instead.".format(type(itr))
raise TypeError(msg)
mappings = {itr.name: None for itr in interior}
geo = Geometry(domain=domain, mappings=mappings, comm=comm)
geo.ncells = ncells
return geo
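    # Usage sketch: build a mapping-free geometry from a topological unit square,
    # discretized with an 8x8 grid of cells:
    #   domain = Square('Omega')
    #   geo    = Geometry.from_topological_domain(domain, ncells=[8, 8])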
#--------------------------------------------------------------------------
@property
def ldim(self):
return self._ldim
@property
def pdim(self):
return self._pdim
@property
def comm(self):
return self._comm
@property
def domain(self):
return self._domain
@property
def mappings(self):
return self._mappings
def __len__(self):
return len(self.domain)
def read( self, filename, comm=MPI.COMM_WORLD ):
# ... check extension of the file
basename, ext = os.path.splitext(filename)
if not(ext == '.h5'):
raise ValueError('> Only h5 files are supported')
# ...
# read the topological domain
domain = Domain.from_file(filename)
if not(comm is None):
kwargs = dict( driver='mpio', comm=comm ) if comm.size > 1 else {}
else:
kwargs = {}
h5 = h5py.File( filename, mode='r', **kwargs )
yml = yaml.load( h5['geometry.yml'][()], Loader=yaml.SafeLoader )
ldim = yml['ldim']
pdim = yml['pdim']
n_patches = len( yml['patches'] )
# ...
if n_patches == 0:
h5.close()
raise ValueError( "Input file contains no patches." )
# ...
# ... read patchs
mappings = {}
for i_patch in range( n_patches ):
item = yml['patches'][i_patch]
patch_name = item['name']
mapping_id = item['mapping_id']
dtype = item['type']
patch = h5[mapping_id]
if dtype in ['SplineMapping', 'NurbsMapping']:
degree = [int (p) for p in patch.attrs['degree' ]]
periodic = [bool(b) for b in patch.attrs['periodic']]
knots = [patch['knots_{}'.format(d)][:] for d in range( ldim )]
spaces = [SplineSpace( degree=p, knots=k, periodic=b )
for p,k,b in zip( degree, knots, periodic )]
tensor_space = TensorFemSpace( *spaces, comm=comm )
if dtype == 'SplineMapping':
mapping = SplineMapping.from_control_points( tensor_space,
patch['points'][..., :pdim] )
elif dtype == 'NurbsMapping':
mapping = NurbsMapping.from_control_points_weights( tensor_space,
patch['points'][..., :pdim],
patch['weights'] )
mapping.set_name( item['name'] )
mappings[patch_name] = mapping
# ...
# ... close the h5 file
h5.close()
# ...
# ...
self._ldim = ldim
self._pdim = pdim
self._mappings = mappings
self._domain = domain
# ...
def export( self, filename ):
"""
Parameters
----------
filename : str
Name of HDF5 output file.
"""
# ...
comm = self.comm
# ...
# Create dictionary with geometry metadata
yml = {}
yml['ldim'] = self.ldim
yml['pdim'] = self.pdim
# ... information about the patches
if not( self.mappings ):
raise ValueError('No mappings were found')
patches_info = []
i_mapping = 0
for patch_name, mapping in self.mappings.items():
name = '{}'.format( patch_name )
mapping_id = 'mapping_{}'.format( i_mapping )
dtype = '{}'.format( type( mapping ).__name__ )
patches_info += [{'name': name,
'mapping_id': mapping_id,
'type': dtype}]
i_mapping += 1
yml['patches'] = patches_info
# ...
# ... topology
topo_yml = self.domain.todict()
# ...
# Create HDF5 file (in parallel mode if MPI communicator size > 1)
if not(comm is None) and comm.size > 1:
kwargs = dict( driver='mpio', comm=comm )
else:
kwargs = {}
h5 = h5py.File( filename, mode='w', **kwargs )
# ...
# Dump geometry metadata to string in YAML file format
geo = yaml.dump( data = yml, sort_keys=False)
# Write geometry metadata as fixed-length array of ASCII characters
h5['geometry.yml'] = np.array( geo, dtype='S' )
# ...
# ...
# Dump geometry metadata to string in YAML file format
geo = yaml.dump( data = topo_yml, sort_keys=False)
# Write topology metadata as fixed-length array of ASCII characters
h5['topology.yml'] = np.array( geo, dtype='S' )
# ...
i_mapping = 0
for patch_name, mapping in self.mappings.items():
space = mapping.space
# Create group for patch 0
group = h5.create_group( yml['patches'][i_mapping]['mapping_id'] )
group.attrs['shape' ] = space.vector_space.npts
group.attrs['degree' ] = space.degree
group.attrs['rational' ] = False # TODO remove
group.attrs['periodic' ] = space.periodic
for d in range( self.ldim ):
group['knots_{}'.format( d )] = space.spaces[d].knots
# Collective: create dataset for control points
shape = [n for n in space.vector_space.npts] + [self.pdim]
dtype = space.vector_space.dtype
dset = group.create_dataset( 'points', shape=shape, dtype=dtype )
# Independent: write control points to dataset
starts = space.vector_space.starts
ends = space.vector_space.ends
index = [slice(s, e+1) for s, e in zip(starts, ends)] + [slice(None)]
index = tuple( index )
dset[index] = mapping.control_points[index]
# case of NURBS
if isinstance(mapping, NurbsMapping):
# Collective: create dataset for weights
shape = [n for n in space.vector_space.npts]
dtype = space.vector_space.dtype
dset = group.create_dataset( 'weights', shape=shape, dtype=dtype )
# Independent: write weights to dataset
starts = space.vector_space.starts
ends = space.vector_space.ends
index = [slice(s, e+1) for s, e in zip(starts, ends)]
index = tuple( index )
dset[index] = mapping.weights[index]
i_mapping += 1
# Close HDF5 file
h5.close()
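    # Usage sketch: round-trip a geometry through the HDF5 file format,
    # e.g. geo.export('geo.h5') followed by Geometry(filename='geo.h5').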
#==============================================================================
def export_nurbs_to_hdf5(filename, nurbs, periodic=None, comm=None ):
"""
Export a single-patch igakit NURBS object to a Psydac geometry file in HDF5 format
Parameters
----------
filename : <str>
Name of output geometry file, e.g. 'geo.h5'
nurbs : <igakit.nurbs.NURBS>
        igakit geometry nurbs object
    periodic : <list>
        True/False flags marking the periodic directions (defaults to all False)
    comm : <MPI.COMM>
mpi communicator
"""
import os.path
import igakit
assert isinstance(nurbs, igakit.nurbs.NURBS)
extension = os.path.splitext(filename)[-1]
if not extension == '.h5':
raise ValueError('> Only h5 extension is allowed for filename')
yml = {}
yml['ldim'] = nurbs.dim
yml['pdim'] = nurbs.dim
patches_info = []
i_mapping = 0
i = 0
rational = not abs(nurbs.weights-1).sum()<1e-15
patch_name = 'patch_{}'.format(i)
name = '{}'.format( patch_name )
mapping_id = 'mapping_{}'.format( i_mapping )
dtype = 'NurbsMapping' if rational else 'SplineMapping'
patches_info += [{'name': name , 'mapping_id':mapping_id, 'type':dtype}]
yml['patches'] = patches_info
# ...
# Create HDF5 file (in parallel mode if MPI communicator size > 1)
if not(comm is None) and comm.size > 1:
kwargs = dict( driver='mpio', comm=comm )
else:
kwargs = {}
h5 = h5py.File( filename, mode='w', **kwargs )
# ...
# Dump geometry metadata to string in YAML file format
geom = yaml.dump( data = yml, sort_keys=False)
# Write geometry metadata as fixed-length array of ASCII characters
h5['geometry.yml'] = np.array( geom, dtype='S' )
# ...
# ... topology
if nurbs.dim == 1:
bounds1 = (float(nurbs.breaks(0)[0]), float(nurbs.breaks(0)[-1]))
domain = Line(patch_name, bounds1=bounds1)
elif nurbs.dim == 2:
bounds1 = (float(nurbs.breaks(0)[0]), float(nurbs.breaks(0)[-1]))
bounds2 = (float(nurbs.breaks(1)[0]), float(nurbs.breaks(1)[-1]))
domain = Square(patch_name, bounds1=bounds1, bounds2=bounds2)
elif nurbs.dim == 3:
bounds1 = (float(nurbs.breaks(0)[0]), float(nurbs.breaks(0)[-1]))
bounds2 = (float(nurbs.breaks(1)[0]), float(nurbs.breaks(1)[-1]))
bounds3 = (float(nurbs.breaks(2)[0]), float(nurbs.breaks(2)[-1]))
domain = Cube(patch_name, bounds1=bounds1, bounds2=bounds2, bounds3=bounds3)
topo_yml = domain.todict()
# Dump geometry metadata to string in YAML file format
geom = yaml.dump( data = topo_yml, sort_keys=False)
# Write topology metadata as fixed-length array of ASCII characters
h5['topology.yml'] = np.array( geom, dtype='S' )
group = h5.create_group( yml['patches'][i]['mapping_id'] )
group.attrs['degree' ] = nurbs.degree
group.attrs['rational' ] = rational
group.attrs['periodic' ] = tuple( False for d in range( nurbs.dim ) ) if periodic is None else periodic
for d in range( nurbs.dim ):
group['knots_{}'.format( d )] = nurbs.knots[d]
group['points'] = nurbs.points[...,:nurbs.dim]
if rational:
group['weights'] = nurbs.weights
h5.close()
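# Usage sketch (assumes igakit is installed; `nrb` is any igakit NURBS patch):
#   export_nurbs_to_hdf5('patch.h5', nrb)
#   geo = Geometry(filename='patch.h5')   # re-load it as a Psydac geometry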
#==============================================================================
def refine_nurbs(nrb, ncells=None, degree=None, multiplicity=None, tol=1e-9):
"""
This function refines the nurbs object.
It contructs a new grid based on the new number of cells, and it adds the new break points to the nrb grid,
such that the total number of cells is equal to the new number of cells.
We use knot insertion to construct the new knot sequence , so the geometry is identical to the previous one.
It also elevates the degree of the nrb object based on the new degree.
Parameters
----------
nrb : <igakit.nurbs.NURBS>
geometry nurbs object
ncells : <list>
total number of cells in each direction
degree : <list>
degree in each direction
multiplicity : <list>
multiplicity of each knot in the knot sequence in each direction
tol : <float>
Minimum distance between two break points.
Returns
-------
nrb : <igakit.nurbs.NURBS>
the refined geometry nurbs object
"""
if multiplicity is None:
multiplicity = [1]*nrb.dim
nrb = nrb.clone()
if ncells is not None:
for axis in range(0,nrb.dim):
ub = nrb.breaks(axis)[0]
ue = nrb.breaks(axis)[-1]
knots = np.linspace(ub,ue,ncells[axis]+1)
index = nrb.knots[axis].searchsorted(knots)
nrb_knots = nrb.knots[axis][index]
for m,(nrb_k, k) in enumerate(zip(nrb_knots, knots)):
if abs(k-nrb_k)<tol:
knots[m] = np.nan
knots = knots[~np.isnan(knots)]
indices = np.round(np.linspace(0, len(knots) - 1, ncells[axis]+1-len(nrb.breaks(axis)))).astype(int)
knots = knots[indices]
if len(knots)>0:
nrb.refine(axis, knots)
if degree is not None:
for axis in range(0,nrb.dim):
d = degree[axis] - nrb.degree[axis]
if d<0:
raise ValueError('The degree {} must be >= {}'.format(degree, nrb.degree))
nrb.elevate(axis, times=d)
for axis in range(nrb.dim):
decimals = abs(np.floor(np.log10(np.abs(tol))).astype(int))
knots, counts = np.unique(nrb.knots[axis].round(decimals=decimals), return_counts=True)
counts = multiplicity[axis] - counts
counts[counts<0] = 0
knots = np.repeat(knots, counts)
nrb = nrb.refine(axis, knots)
return nrb
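# Usage sketch: refine an igakit NURBS surface `nrb` to a 16x16 cell grid with
# bi-cubic degree, keeping the geometry unchanged:
#   nrb_fine = refine_nurbs(nrb, ncells=[16, 16], degree=[3, 3])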
def refine_knots(knots, ncells, degree, multiplicity=None, tol=1e-9):
"""
This function refines the knot sequence.
    It constructs a new grid based on the new number of cells, and adds the new break points to the nrb grid,
    such that the total number of cells is equal to the new number of cells.
    We use knot insertion to construct the new knot sequence, so the geometry is identical to the previous one.
It also elevates the degree of the nrb object based on the new degree.
Parameters
----------
knots : <list>
list of knot sequences in each direction
ncells : <list>
total number of cells in each direction
degree : <list>
degree in each direction
multiplicity : <list>
multiplicity of each knot in the knot sequence in each direction
tol : <float>
Minimum distance between two break points.
Returns
-------
knots : <list>
the refined knot sequences in each direction
"""
from igakit.nurbs import NURBS
dim = len(ncells)
if multiplicity is None:
multiplicity = [1]*dim
assert len(knots) == dim
nrb = NURBS(knots)
for axis in range(dim):
ub = nrb.breaks(axis)[0]
ue = nrb.breaks(axis)[-1]
knots = np.linspace(ub,ue,ncells[axis]+1)
index = nrb.knots[axis].searchsorted(knots)
nrb_knots = nrb.knots[axis][index]
for m,(nrb_k, k) in enumerate(zip(nrb_knots, knots)):
if abs(k-nrb_k)<tol:
knots[m] = np.nan
knots = knots[~np.isnan(knots)]
indices = np.round(np.linspace(0, len(knots) - 1, ncells[axis]+1-len(nrb.breaks(axis)))).astype(int)
knots = knots[indices]
if len(knots)>0:
nrb.refine(axis, knots)
for axis in range(dim):
d = degree[axis] - nrb.degree[axis]
if d<0:
raise ValueError('The degree {} must be >= {}'.format(degree, nrb.degree))
nrb.elevate(axis, times=d)
for axis in range(dim):
decimals = abs(np.floor(np.log10(np.abs(tol))).astype(int))
knots, counts = np.unique(nrb.knots[axis].round(decimals=decimals), return_counts=True)
counts = multiplicity[axis] - counts
counts[counts<0] = 0
knots = np.repeat(knots, counts)
nrb = nrb.refine(axis, knots)
return nrb.knots
#==============================================================================
def import_geopdes_to_nurbs(filename):
"""
    This function reads a geoPDEs geometry file and converts it to an igakit NURBS object
Parameters
----------
filename : <str>
the filename of the geometry file
Returns
-------
nrb : <igakit.nurbs.NURBS>
the geometry nurbs object
"""
extension = os.path.splitext(filename)[-1]
if not extension == '.txt':
raise ValueError('> Expected .txt extension')
f = open(filename)
lines = f.readlines()
f.close()
lines = [line for line in lines if line[0].strip() != "#"]
data = _read_header(lines[0])
n_dim = data[0]
r_dim = data[1]
n_patchs = data[2]
n_lines_per_patch = 3*n_dim + 1
list_begin_line = _get_begin_line(lines, n_patchs)
nrb = _read_patch(lines, 1, n_lines_per_patch, list_begin_line)
return nrb
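# Usage sketch (hypothetical file name): convert a geoPDEs .txt geometry into a
# Psydac HDF5 geometry file:
#   nrb = import_geopdes_to_nurbs('annulus.txt')
#   export_nurbs_to_hdf5('annulus.h5', nrb)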
def _read_header(line):
chars = line.split(" ")
data = []
for c in chars:
try:
data.append(int(c))
        except ValueError:
pass
return data
def _extract_patch_line(lines, i_patch):
text = "PATCH " + str(i_patch)
for i_line,line in enumerate(lines):
r = line.find(text)
if r != -1:
return i_line
return None
def _get_begin_line(lines, n_patchs):
list_begin_line = []
for i_patch in range(0, n_patchs):
r = _extract_patch_line(lines, i_patch+1)
if r is not None:
list_begin_line.append(r)
else:
raise ValueError(" could not parse the input file")
return list_begin_line
def _read_line(line):
chars = line.split(" ")
data = []
for c in chars:
try:
data.append(int(c))
        except ValueError:
try:
data.append(float(c))
            except ValueError:
pass
return data
def _read_patch(lines, i_patch, n_lines_per_patch, list_begin_line):
from igakit.nurbs import NURBS
i_begin_line = list_begin_line[i_patch-1]
data_patch = []
for i in range(i_begin_line+1, i_begin_line + n_lines_per_patch+1):
data_patch.append(_read_line(lines[i]))
degree = data_patch[0]
shape = data_patch[1]
xl = [np.array(i) for i in data_patch[2:2+len(degree)] ]
xp = [np.array(i) for i in data_patch[2+len(degree):2+2*len(degree)] ]
w = np.array(data_patch[2+2*len(degree)])
X = [i.reshape(shape, order='F') for i in xp]
W = w.reshape(shape, order='F')
points = np.zeros((*shape, 3))
for i in range(len(shape)):
points[..., i] = X[i]
knots = xl
nrb = NURBS(knots, control=points, weights=W)
return nrb
| [((11899, 11938), 'h5py.File', 'h5py.File', (['filename'], {'mode': '"""w"""'}), "(filename, mode='w', **kwargs)\n", (11908, 11938), False, 'import h5py\n'), ((12022, 12058), 'yaml.dump', 'yaml.dump', ([], {'data': 'yml', 'sort_keys': '(False)'}), '(data=yml, sort_keys=False)\n', (12031, 12058), False, 'import yaml\n'), ((12161, 12186), 'numpy.array', 'np.array', (['geom'], {'dtype': '"""S"""'}), "(geom, dtype='S')\n", (12169, 12186), True, 'import numpy as np\n'), ((13050, 13091), 'yaml.dump', 'yaml.dump', ([], {'data': 'topo_yml', 'sort_keys': '(False)'}), '(data=topo_yml, sort_keys=False)\n', (13059, 13091), False, 'import yaml\n'), ((13194, 13219), 'numpy.array', 'np.array', (['geom'], {'dtype': '"""S"""'}), "(geom, dtype='S')\n", (13202, 13219), True, 'import numpy as np\n'), ((17363, 17375), 'igakit.nurbs.NURBS', 'NURBS', (['knots'], {}), '(knots)\n', (17368, 17375), False, 'from igakit.nurbs import NURBS\n'), ((21140, 21161), 'numpy.zeros', 'np.zeros', (['(*shape, 3)'], {}), '((*shape, 3))\n', (21148, 21161), True, 'import numpy as np\n'), ((21251, 21290), 'igakit.nurbs.NURBS', 'NURBS', (['knots'], {'control': 'points', 'weights': 'W'}), '(knots, control=points, weights=W)\n', (21256, 21290), False, 'from igakit.nurbs import NURBS\n'), ((4253, 4279), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (4269, 4279), False, 'import os\n'), ((4442, 4468), 'sympde.topology.Domain.from_file', 'Domain.from_file', (['filename'], {}), '(filename)\n', (4458, 4468), False, 'from sympde.topology import Domain, Line, Square, Cube, NCubeInterior\n'), ((4633, 4672), 'h5py.File', 'h5py.File', (['filename'], {'mode': '"""r"""'}), "(filename, mode='r', **kwargs)\n", (4642, 4672), False, 'import h5py\n'), ((4689, 4746), 'yaml.load', 'yaml.load', (["h5['geometry.yml'][()]"], {'Loader': 'yaml.SafeLoader'}), "(h5['geometry.yml'][()], Loader=yaml.SafeLoader)\n", (4698, 4746), False, 'import yaml\n'), ((7965, 8004), 'h5py.File', 'h5py.File', (['filename'], {'mode': '"""w"""'}), "(filename, mode='w', **kwargs)\n", (7974, 8004), False, 'import h5py\n'), ((8099, 8135), 'yaml.dump', 'yaml.dump', ([], {'data': 'yml', 'sort_keys': '(False)'}), '(data=yml, sort_keys=False)\n', (8108, 8135), False, 'import yaml\n'), ((8247, 8271), 'numpy.array', 'np.array', (['geo'], {'dtype': '"""S"""'}), "(geo, dtype='S')\n", (8255, 8271), True, 'import numpy as np\n'), ((8380, 8421), 'yaml.dump', 'yaml.dump', ([], {'data': 'topo_yml', 'sort_keys': '(False)'}), '(data=topo_yml, sort_keys=False)\n', (8389, 8421), False, 'import yaml\n'), ((8532, 8556), 'numpy.array', 'np.array', (['geo'], {'dtype': '"""S"""'}), "(geo, dtype='S')\n", (8540, 8556), True, 'import numpy as np\n'), ((11050, 11076), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (11066, 11076), False, 'import os\n'), ((12334, 12367), 'sympde.topology.Line', 'Line', (['patch_name'], {'bounds1': 'bounds1'}), '(patch_name, bounds1=bounds1)\n', (12338, 12367), False, 'from sympde.topology import Domain, Line, Square, Cube, NCubeInterior\n'), ((16125, 16149), 'numpy.repeat', 'np.repeat', (['knots', 'counts'], {}), '(knots, counts)\n', (16134, 16149), True, 'import numpy as np\n'), ((17487, 17524), 'numpy.linspace', 'np.linspace', (['ub', 'ue', '(ncells[axis] + 1)'], {}), '(ub, ue, ncells[axis] + 1)\n', (17498, 17524), True, 'import numpy as np\n'), ((18485, 18509), 'numpy.repeat', 'np.repeat', (['knots', 'counts'], {}), '(knots, counts)\n', (18494, 18509), True, 'import numpy as np\n'), ((18992, 19018), 
'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (19008, 19018), False, 'import os\n'), ((20858, 20869), 'numpy.array', 'np.array', (['i'], {}), '(i)\n', (20866, 20869), True, 'import numpy as np\n'), ((20923, 20934), 'numpy.array', 'np.array', (['i'], {}), '(i)\n', (20931, 20934), True, 'import numpy as np\n'), ((2530, 2550), 'sympde.topology.Square', 'Square', ([], {'name': '"""Omega"""'}), "(name='Omega')\n", (2536, 2550), False, 'from sympde.topology import Domain, Line, Square, Cube, NCubeInterior\n'), ((12560, 12612), 'sympde.topology.Square', 'Square', (['patch_name'], {'bounds1': 'bounds1', 'bounds2': 'bounds2'}), '(patch_name, bounds1=bounds1, bounds2=bounds2)\n', (12566, 12612), False, 'from sympde.topology import Domain, Line, Square, Cube, NCubeInterior\n'), ((15030, 15067), 'numpy.linspace', 'np.linspace', (['ub', 'ue', '(ncells[axis] + 1)'], {}), '(ub, ue, ncells[axis] + 1)\n', (15041, 15067), True, 'import numpy as np\n'), ((2721, 2739), 'sympde.topology.Cube', 'Cube', ([], {'name': '"""Omega"""'}), "(name='Omega')\n", (2725, 2739), False, 'from sympde.topology import Domain, Line, Square, Cube, NCubeInterior\n'), ((5741, 5775), 'psydac.fem.tensor.TensorFemSpace', 'TensorFemSpace', (['*spaces'], {'comm': 'comm'}), '(*spaces, comm=comm)\n', (5755, 5775), False, 'from psydac.fem.tensor import TensorFemSpace\n'), ((12879, 12946), 'sympde.topology.Cube', 'Cube', (['patch_name'], {'bounds1': 'bounds1', 'bounds2': 'bounds2', 'bounds3': 'bounds3'}), '(patch_name, bounds1=bounds1, bounds2=bounds2, bounds3=bounds3)\n', (12883, 12946), False, 'from sympde.topology import Domain, Line, Square, Cube, NCubeInterior\n'), ((5591, 5633), 'psydac.fem.splines.SplineSpace', 'SplineSpace', ([], {'degree': 'p', 'knots': 'k', 'periodic': 'b'}), '(degree=p, knots=k, periodic=b)\n', (5602, 5633), False, 'from psydac.fem.splines import SplineSpace\n'), ((5853, 5931), 'psydac.mapping.discrete.SplineMapping.from_control_points', 'SplineMapping.from_control_points', (['tensor_space', "patch['points'][(...), :pdim]"], {}), "(tensor_space, patch['points'][(...), :pdim])\n", (5886, 5931), False, 'from psydac.mapping.discrete import SplineMapping, NurbsMapping\n'), ((17771, 17786), 'numpy.isnan', 'np.isnan', (['knots'], {}), '(knots)\n', (17779, 17786), True, 'import numpy as np\n'), ((6074, 6182), 'psydac.mapping.discrete.NurbsMapping.from_control_points_weights', 'NurbsMapping.from_control_points_weights', (['tensor_space', "patch['points'][(...), :pdim]", "patch['weights']"], {}), "(tensor_space, patch['points'][(...\n ), :pdim], patch['weights'])\n", (6114, 6182), False, 'from psydac.mapping.discrete import SplineMapping, NurbsMapping\n'), ((15338, 15353), 'numpy.isnan', 'np.isnan', (['knots'], {}), '(knots)\n', (15346, 15353), True, 'import numpy as np\n'), ((15912, 15923), 'numpy.abs', 'np.abs', (['tol'], {}), '(tol)\n', (15918, 15923), True, 'import numpy as np\n'), ((18272, 18283), 'numpy.abs', 'np.abs', (['tol'], {}), '(tol)\n', (18278, 18283), True, 'import numpy as np\n')] |
ok1zjf/AMNet | utils.py | 51b163eec63d6d1e2e3dbc140d19afdc7b4273ee | __author__ = 'Jiri Fajtl'
__email__ = 'ok1zjf@gmail.com'
__version__= '2.2'
__status__ = "Research"
__date__ = "28/1/2018"
__license__= "MIT License"
import os
import numpy as np
import glob
import subprocess
import platform
import sys
import pkg_resources
import torch
import PIL as Image
try:
import cv2
except ImportError:
print("WARNING: Could not load OpenCV python package. Some functionality may not be available.")
def list_files(path, extensions=[], sort=True, max_len=-1):
if os.path.isdir(path):
filenames = [os.path.join(path, fn) for fn in os.listdir(path) if
any([fn.endswith(ext) for ext in extensions])]
else:
print("ERROR. ", path,' is not a directory!')
return []
if sort:
filenames.sort()
if max_len>-1:
filenames = filenames[:max_len]
return filenames
def get_video_list(video_path, max_len=-1):
return list_files(video_path, extensions=['avi', 'flv', 'mpg', 'mp4'], sort=True, max_len=max_len)
def get_image_list(video_path, max_len=-1):
return list_files(video_path, extensions=['jpg', 'jpeg', 'png'], sort=True, max_len=max_len)
def get_split_files(dataset_path, splits_path, split_name, absolute_path=False):
path = os.path.join(dataset_path, splits_path, split_name)
files = glob.glob(path)
files.sort()
if not absolute_path:
files_out = []
for file in files:
_,filename = os.path.split(file)
files_out.append(filename)
return files_out
return files
def get_max_rc_weights(experiment_path):
log_filename = 'train_log_0.csv'
try:
f = open(os.path.join(experiment_path, log_filename), 'rt')
max_rc = 0
max_epoch = -1
max_mse = -1
for line in f:
toks = line.split(',')
if toks[0] == 'val':
epoch = toks[1]
try:
rc = float(toks[4])
if rc > max_rc:
max_rc = rc
max_epoch = int(epoch)
max_mse = float(toks[6])
                except (ValueError, IndexError):
pass
f.close()
chkpt_file = experiment_path + '/' + 'weights_' + str(max_epoch) + '.pkl'
if not os.path.isfile(chkpt_file):
print("WARNING: File ",chkpt_file," does not exists!")
return '', 0, 0, 0
return chkpt_file, max_rc, max_mse, max_epoch
    except Exception:
print('WARNING: Could not open ' + os.path.join(experiment_path, log_filename))
return '', 0, 0, 0
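# Usage sketch (hypothetical experiment directory): pick the checkpoint with the
# highest validation RC from a training run:
#   chkpt, rc, mse, epoch = get_max_rc_weights('data/lamem_ResNet50FC_lstm3_train_0')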
def get_split_index(split_filename):
filename, _ = os.path.splitext(split_filename)
id = int(filename.split('_')[-1])
return id
def get_weight_files(split_files, experiment_name, max_rc_checkpoints=True):
data_dir = 'data'
weight_files = []
for split_filename in split_files:
split_name,_ = os.path.splitext(split_filename)
_, split_id = split_name.split('_')
weight_files_all = os.path.join(data_dir, experiment_name+'_train_'+split_id+'/*.pkl')
files = glob.glob(weight_files_all)
if len(files) == 0:
# No trained model weights for this split
weight_files.append('')
continue
elif len(files) == 1:
weight_files.append(files[0])
else:
# Multiple weights
if max_rc_checkpoints:
weights_dir = os.path.join(data_dir, experiment_name + '_train_' + split_id)
print("Selecting model weights with the highest RC on validation set in ",weights_dir)
weight_file, max_rc, max_mse, max_epoch= get_max_rc_weights(weights_dir)
if weight_file != '':
                    print('Found: ', weight_file, ' RC=', max_rc, ' MSE=', max_mse, ' epoch=', max_epoch)
weight_files.append(weight_file)
continue
# Get the weights from the last training epoch
files.sort(key=lambda x: get_split_index(x), reverse=True)
weight_file=files[0]
weight_files.append(weight_file)
return weight_files
def run_command(command):
p = subprocess.Popen(command.split(),
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
return '\n'.join([ '\t'+line.decode("utf-8").strip() for line in p.stdout.readlines()])
def ge_pkg_versions():
dep_versions = {}
cmd = 'cat /proc/driver/nvidia/version'
display_driver = run_command(cmd)
dep_versions['display'] = display_driver
dep_versions['cuda'] = 'NA'
cuda_home = '/usr/local/cuda/'
if 'CUDA_HOME' in os.environ:
cuda_home = os.environ['CUDA_HOME']
cmd = cuda_home+'/version.txt'
if os.path.isfile(cmd):
cuda_version = run_command('cat '+cmd)
dep_versions['cuda'] = cuda_version
dep_versions['cudnn'] = torch.backends.cudnn.version()
dep_versions['platform'] = platform.platform()
dep_versions['python'] = sys.version_info[0]
dep_versions['torch'] = torch.__version__
dep_versions['numpy'] = np.__version__
dep_versions['PIL'] = Image.VERSION
dep_versions['OpenCV'] = 'NA'
if 'cv2' in sys.modules:
dep_versions['OpenCV'] = cv2.__version__
dep_versions['torchvision'] = pkg_resources.get_distribution("torchvision").version
return dep_versions
def print_pkg_versions():
print("Packages & system versions:")
print("----------------------------------------------------------------------")
versions = ge_pkg_versions()
for key, val in versions.items():
print(key,": ",val)
print("")
return
if __name__ == "__main__":
print_pkg_versions()
split_files = get_split_files('datasets/lamem', 'splits', 'test_*.txt')
print(split_files)
weight_files = get_weight_files(split_files, experiment_name='lamem_ResNet50FC_lstm3_last', max_rc_checkpoints=True)
# weight_files = get_weight_files(split_files, experiment_name='lamem_ResNet50FC_lstm3')
print(weight_files) | [((491, 510), 'os.path.isdir', 'os.path.isdir', (['path'], {}), '(path)\n', (504, 510), False, 'import os\n'), ((1248, 1299), 'os.path.join', 'os.path.join', (['dataset_path', 'splits_path', 'split_name'], {}), '(dataset_path, splits_path, split_name)\n', (1260, 1299), False, 'import os\n'), ((1312, 1327), 'glob.glob', 'glob.glob', (['path'], {}), '(path)\n', (1321, 1327), False, 'import glob\n'), ((2651, 2683), 'os.path.splitext', 'os.path.splitext', (['split_filename'], {}), '(split_filename)\n', (2667, 2683), False, 'import os\n'), ((4804, 4823), 'os.path.isfile', 'os.path.isfile', (['cmd'], {}), '(cmd)\n', (4818, 4823), False, 'import os\n'), ((4941, 4971), 'torch.backends.cudnn.version', 'torch.backends.cudnn.version', ([], {}), '()\n', (4969, 4971), False, 'import torch\n'), ((5004, 5023), 'platform.platform', 'platform.platform', ([], {}), '()\n', (5021, 5023), False, 'import platform\n'), ((2921, 2953), 'os.path.splitext', 'os.path.splitext', (['split_filename'], {}), '(split_filename)\n', (2937, 2953), False, 'import os\n'), ((3027, 3100), 'os.path.join', 'os.path.join', (['data_dir', "(experiment_name + '_train_' + split_id + '/*.pkl')"], {}), "(data_dir, experiment_name + '_train_' + split_id + '/*.pkl')\n", (3039, 3100), False, 'import os\n'), ((3111, 3138), 'glob.glob', 'glob.glob', (['weight_files_all'], {}), '(weight_files_all)\n', (3120, 3138), False, 'import glob\n'), ((5350, 5395), 'pkg_resources.get_distribution', 'pkg_resources.get_distribution', (['"""torchvision"""'], {}), "('torchvision')\n", (5380, 5395), False, 'import pkg_resources\n'), ((533, 555), 'os.path.join', 'os.path.join', (['path', 'fn'], {}), '(path, fn)\n', (545, 555), False, 'import os\n'), ((1447, 1466), 'os.path.split', 'os.path.split', (['file'], {}), '(file)\n', (1460, 1466), False, 'import os\n'), ((1656, 1699), 'os.path.join', 'os.path.join', (['experiment_path', 'log_filename'], {}), '(experiment_path, log_filename)\n', (1668, 1699), False, 'import os\n'), ((2287, 2313), 'os.path.isfile', 'os.path.isfile', (['chkpt_file'], {}), '(chkpt_file)\n', (2301, 2313), False, 'import os\n'), ((566, 582), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (576, 582), False, 'import os\n'), ((2525, 2568), 'os.path.join', 'os.path.join', (['experiment_path', 'log_filename'], {}), '(experiment_path, log_filename)\n', (2537, 2568), False, 'import os\n'), ((3460, 3522), 'os.path.join', 'os.path.join', (['data_dir', "(experiment_name + '_train_' + split_id)"], {}), "(data_dir, experiment_name + '_train_' + split_id)\n", (3472, 3522), False, 'import os\n')] |
HDembinski/aghast | python/aghast/aghast_generated/Slice.py | f3d45a6960033f48fb8f6b7e906cb36b9d9d8e95 | # automatically generated by the FlatBuffers compiler, do not modify
# namespace: aghast_generated
import flatbuffers
class Slice(object):
__slots__ = ["_tab"]
# Slice
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# Slice
def Start(self):
return self._tab.Get(
flatbuffers.number_types.Int64Flags,
self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(0),
)
# Slice
def Stop(self):
return self._tab.Get(
flatbuffers.number_types.Int64Flags,
self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(8),
)
# Slice
def Step(self):
return self._tab.Get(
flatbuffers.number_types.Int32Flags,
self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(16),
)
# Slice
def HasStart(self):
return self._tab.Get(
flatbuffers.number_types.BoolFlags,
self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(20),
)
# Slice
def HasStop(self):
return self._tab.Get(
flatbuffers.number_types.BoolFlags,
self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(21),
)
# Slice
def HasStep(self):
return self._tab.Get(
flatbuffers.number_types.BoolFlags,
self._tab.Pos + flatbuffers.number_types.UOffsetTFlags.py_type(22),
)
def CreateSlice(builder, start, stop, step, hasStart, hasStop, hasStep):
builder.Prep(8, 24)
builder.Pad(1)
builder.PrependBool(hasStep)
builder.PrependBool(hasStop)
builder.PrependBool(hasStart)
builder.PrependInt32(step)
builder.PrependInt64(stop)
builder.PrependInt64(start)
return builder.Offset()
| [((231, 264), 'flatbuffers.table.Table', 'flatbuffers.table.Table', (['buf', 'pos'], {}), '(buf, pos)\n', (254, 264), False, 'import flatbuffers\n'), ((406, 455), 'flatbuffers.number_types.UOffsetTFlags.py_type', 'flatbuffers.number_types.UOffsetTFlags.py_type', (['(0)'], {}), '(0)\n', (452, 455), False, 'import flatbuffers\n'), ((607, 656), 'flatbuffers.number_types.UOffsetTFlags.py_type', 'flatbuffers.number_types.UOffsetTFlags.py_type', (['(8)'], {}), '(8)\n', (653, 656), False, 'import flatbuffers\n'), ((808, 858), 'flatbuffers.number_types.UOffsetTFlags.py_type', 'flatbuffers.number_types.UOffsetTFlags.py_type', (['(16)'], {}), '(16)\n', (854, 858), False, 'import flatbuffers\n'), ((1013, 1063), 'flatbuffers.number_types.UOffsetTFlags.py_type', 'flatbuffers.number_types.UOffsetTFlags.py_type', (['(20)'], {}), '(20)\n', (1059, 1063), False, 'import flatbuffers\n'), ((1217, 1267), 'flatbuffers.number_types.UOffsetTFlags.py_type', 'flatbuffers.number_types.UOffsetTFlags.py_type', (['(21)'], {}), '(21)\n', (1263, 1267), False, 'import flatbuffers\n'), ((1421, 1471), 'flatbuffers.number_types.UOffsetTFlags.py_type', 'flatbuffers.number_types.UOffsetTFlags.py_type', (['(22)'], {}), '(22)\n', (1467, 1471), False, 'import flatbuffers\n')] |
AleksaLuka/Axelrod | axelrod/tests/strategies/test_mystrategy.py | 5f2fefcb2bf8f371ef489382f90f116b46ac1023 | import axelrod as axl
from .test_player import TestPlayer
C, D = axl.Action.C, axl.Action.D
class TestMyStrategy(TestPlayer):
name = "MyStrategy"
player = axl.mystrategy
expected_classifier = {
"memory_depth": 1,
"stochastic": False,
"long_run_time": False,
"inspects_source": False,
"manipulates_source": False,
"manipulates_state": False,
}
def test_strategy(self):
# First move is random.
actions = [(C, C), (C, D), (D, C)]
self.versus_test(
opponent=axl.Alternator(), expected_actions=actions, seed=1
)
actions = [(C, C), (C, D), (D, C)]
self.versus_test(
opponent=axl.Alternator(), expected_actions=actions, seed=2
)
actions = [(C, C), (C, C), (C, C)]
self.versus_test(
opponent=axl.Cooperator(), expected_actions=actions, seed=1
)
actions = [(C, D), (D, D), (D, D)]
self.versus_test(
opponent=axl.Defector(), expected_actions=actions, seed=2
)
| [((563, 579), 'axelrod.Alternator', 'axl.Alternator', ([], {}), '()\n', (577, 579), True, 'import axelrod as axl\n'), ((714, 730), 'axelrod.Alternator', 'axl.Alternator', ([], {}), '()\n', (728, 730), True, 'import axelrod as axl\n'), ((865, 881), 'axelrod.Cooperator', 'axl.Cooperator', ([], {}), '()\n', (879, 881), True, 'import axelrod as axl\n'), ((1016, 1030), 'axelrod.Defector', 'axl.Defector', ([], {}), '()\n', (1028, 1030), True, 'import axelrod as axl\n')] |
Gr1ph00n/staticwebanalyzer | analyzer/BannerTool.py | 8bf6337a77192b85913d75778830ccbb9006081f | #FILE NAME: BannerTool.py
#created by: Ciro Veneruso
#purpose: banner localization
#last edited by: Ciro Veneruso
#INSTALL: BeautifulSoup
#TODO: this code is a blob, must be refactored!!!!
import re
import mechanize
import socket
import urllib
from tools import BaseTool
from bs4 import BeautifulSoup
from pprint import pprint
from ipwhois import IPWhois, WhoisLookupError
from tld import get_tld
import urlparse
from tld.exceptions import TldIOError, TldDomainNotFound, TldBadUrl
from tools import ToolException
class BannerTool(BaseTool):
def __init__(self, config):
BaseTool.__init__(self, "BannerAnalyzer", config, needRefresh = True)
self.values = []
def run(self, browser):
try:
url = browser.url.replace('http://','')
print url+"\n"
#response = browser.open(url)
html = browser.httpResponse #response.get_data()
site_domain_name = get_tld(browser.url)
#print(site_domain_name)
soup = BeautifulSoup(html)
links = soup.findAll('a')
response_domain = ""
addr = ""
name = ""
state = ""
city = ""
description = ""
country = ""
foo_flag = 0
flag = 0
for link in links:
foo = link.findChild('img')
#print foo
if foo is not None:
foo_flag = 1
flag = 1
href = link.get('href')
if href is None:
continue
print(href+"\n")
					if href.startswith('/'):
						response_domain = "internal link"
						print("internal link\n")
					elif href.startswith("http://" + url):
						response_domain = "internal link"
						print("internal link\n")
					elif href.startswith("https://" + url):
						response_domain = "internal link"
						print("internal link\n")
					else:
						response_domain = "external link"
						print("external link... Geolocation:\n")
try:
banner_domain_name = get_tld(href)
print(banner_domain_name+"\n")
print(site_domain_name)
url = 'https://' + url if not banner_domain_name.startswith('http') else banner_domain_name.replace('http:', 'https:')
parsed = urlparse.urlparse(url)
hostname = "%s://%s" % (parsed.scheme, parsed.netloc)
url = url.split("//")[1]
url_s = url.split("/")[0]
ip = socket.gethostbyname(url_s)
#print(href)
#get ip by url
#ip = socket.gethostbyname(banner_domain_name)
#get information by ip
result = None
try:
obj = IPWhois(ip)
result = obj.lookup()
							except Exception as e:
continue
addr = result['nets'][0]['address'] if result['nets'][0]['address'] != None else 'None'
name = result['nets'][0]['name'] if result['nets'][0]['name'] != None else 'None'
state = result['nets'][0]['state'] if result['nets'][0]['state'] != None else 'None'
city = result['nets'][0]['city'] if result['nets'][0]['city'] != None else 'None'
description = result['nets'][0]['description'] if result['nets'][0]['description'] != None else 'None'
country = result['nets'][0]['country'] if result['nets'][0]['country'] != None else 'None'
'''
self.values.append(["Link analyzed",href])
self.values.append(["Response",response_domain])
self.values.append(["Address", addr])
self.values.append(["Name", name])
self.values.append(["State", state])
self.values.append(["City", city])
self.values.append(["Description", description])
self.values.append(["Country", country])
print('Name: ' + name + '\n' + 'Description: ' + description + '\n' + 'Address: ' +
addr + '\n' + 'Country: ' + country + '\n' + 'State: ' + state + '\n' + 'City: ' + city)
'''
temp = {
"Url" : url,
"Address" : addr,
"Name" : name,
"State" : state,
"City" : city,
"Description" : description,
"Country" : country,
"Response" : response_domain
}
self.values.append({ "Link analyzed %s" % (href) : temp })
except TldBadUrl as e:
print ("Bad URL!")
			if flag == 0:
				print("There are no extra-domain banners on this site")
			if foo_flag == 0:
				print("There are no banners on this site")
except WhoisLookupError as e:
raise ToolException(str(e))
return len(self.values) >= self.config.getInt("banner_count_treshold", 0)
def createModel(self):
return False, ["key","value"], self.values
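# Hedged sketch (illustrative only, not called by the analyzer): the ipwhois pattern
# used inside run() above. lookup() returns a dict whose 'nets' entries carry the
# registrant fields that are copied into the result rows.
def _example_whois_fields(ip="8.8.8.8"):
	result = IPWhois(ip).lookup()
	net = result['nets'][0]
	return {key: net.get(key) for key in ('name', 'description', 'address', 'city', 'state', 'country')}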
| [] |
meetps/rhea | rhea/build/toolflow/xilinx/__init__.py | f8a9a08fb5e14c5c4488ef68a2dff4d18222c2c0 |
from .ise import ISE
from .vivado import Vivado
| [] |
fredpan/Prosopagnosia_Web_Server | app/AccountManagment.py | b56b58eccdbbde6b158802d49f7bcc1b44b18b69 | # Copyright 2020 EraO Prosopagnosia Helper Dev Team, Liren Pan, Yixiao Hong, Hongzheng Xu, Stephen Huang, Tiancong Wang
#
# Supervised by Prof. Steve Mann (http://www.eecg.toronto.edu/~mann/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import mysql.connector
import re
import time
from app.sql.config.DbConfig import db_config
from flask import render_template, redirect, url_for, request, g, session
from flask_bcrypt import Bcrypt
from app import EmailSender as email_confirmation
from app import webapp
validUsernameChar = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
# The function used to establish connection to sql database
def connect_to_database():
'''
Function used to connect to database
:return:
'''
return mysql.connector.connect(user=db_config['user'], password=db_config['password'], host=db_config['host'],
database=db_config['database'], use_pure=True)
def get_database():
'''
function used to get database
:return:
'''
db = getattr(g, '_database', None)
if db is None:
db = g._database = connect_to_database()
return db
"""
#############################################################
Login Settings
############################################################
"""
@webapp.route('/login', methods=['GET', 'POST'])
def user_login():
'''
    This function handles GET/POST requests to "/login".
    It returns the rendered login page.
:return: the rendered "login_index.html"
'''
return render_template("/login_index.html", title="Welcome")
@webapp.route('/login_submit', methods=['POST'])
def login_submit():
'''
    This function handles POST requests to "/login_submit". It first reads the submitted username, password and the
    state of the "remember me" checkbox; based on whether "remember me" was checked it adjusts the session expiry
    time via webapp.permanent_session_lifetime. It then queries the database for the given username. If no matching
    row is found, or if the submitted password does not match the stored bcrypt hash, the function re-renders
    "login_index.html" with an error message; if all checks pass, it redirects to "/secure/index".
:return: /login_index.html or /secure/index
'''
session.permanent = True
bcrypt = Bcrypt(webapp)
username = request.form['username']
password = request.form['password']
remember = request.form.get('remember')
print(remember)
rememberMe = False
# if remember!=None and remember=="on":
if remember:
rememberMe = True
else:
session.clear()
webapp.permanent_session_lifetime = datetime.timedelta(milliseconds=0)
# password = bcrypt.generate_password_hash(password).decode("utf-8")
# bcrypt.check_password_hash
# connect to database
cnx = get_database()
cursor = cnx.cursor()
query = "SELECT password FROM user_info WHERE username = %s and active = 1"
cursor.execute(query, (username,))
results = cursor.fetchall()
if len(results) == 1:
hashed_pwd = results[0][0]
if bcrypt.check_password_hash(hashed_pwd, password):
session['authenticated'] = True
session['username'] = username
session['error'] = None
if rememberMe:
webapp.permanent_session_lifetime = datetime.timedelta(weeks=1)
return redirect(url_for('sensitive'))
session['username'] = username
session['error'] = "<=Error! Incorrect username or password!=>"
return render_template("/login_index.html", title="Main Page", username=username, error=session['error'])
"""
#############################################################
Sign up Settings
############################################################
"""
# Display an empty HTML form that allows users to fill the info and sign up.
@webapp.route('/signup', methods=['GET'])
def user_signup():
'''
    This function handles GET requests to "/signup".
    It returns the rendered signup page.
:return: the rendered "signup_index.html"
'''
return render_template("signup_index.html", title="Join Us!")
# Create a new account and save them in the database.
@webapp.route('/signup/save', methods=['POST'])
def sign_up_save():
'''
    This function handles POST requests to "/signup/save". It first reads the submitted username, password1 and
    password2, then checks the database for an existing user with that username. It also verifies that all required
    fields were provided, that the username and password formats are valid and that the two passwords match. If any
    of these checks fails, it re-renders "signup_index.html" with an error message; otherwise it inserts the new
    account into the database and returns "signup_succeed_index.html" to confirm that the account was created.
:return: "signup_index.html" or "signup_succeed_index.html"
'''
bcrypt = Bcrypt(webapp)
# need to trim the user name
username = request.form.get('username', "")
password1 = request.form.get('password1', "")
password2 = request.form.get('password2', "")
# connect to database
cnx = get_database()
cursor = cnx.cursor()
query = "SELECT COUNT(username) FROM user_info WHERE username = %s "
cursor.execute(query, (username,))
results = cursor.fetchall()
numberOfExistUser = results[0][0]
if username == "" or password1 == "" or password2 == "":
error_msg = "Error: All fields are required!"
return render_template("signup_index.html", title="Sign Up", error_msg=error_msg,
username=username, password1=password1, password2=password2)
if re.findall(r'\s+', username) != []:
error_msg = "Error: No space allowed in user name!"
return render_template("signup_index.html", title="Sign Up", error_msg=error_msg,
username=username, password1=password1, password2=password2)
if numberOfExistUser != 0:
error_msg = "Error: User name already exist!"
return render_template("signup_index.html", title="Sign Up", error_msg=error_msg,
username=username, password1=password1, password2=password2)
if not (password1 == password2):
error_msg = "Error: Two passwords not matching!"
return render_template("signup_index.html", title="Sign Up", error_msg=error_msg,
username=username, password1=password1, password2=password2)
if (len(username) > 20 or len(username) < 1) or not all(c in validUsernameChar for c in username):
print(len(username))
error_msg = "Error: Username violation, username must have length between 1 to 20, only letters and numbers allowed"
return render_template("signup_index.html", title="Sign Up", error_msg=error_msg,
username=username, password1=password1, password2=password2)
if len(password1) > 16 or len(password1) < 1:
error_msg = "Error: Password length violation"
return render_template("signup_index.html", title="Sign Up", error_msg=error_msg,
username=username, password1=password1, password2=password2)
ts = time.time()
timestamp = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
password = bcrypt.generate_password_hash(password1).decode("utf-8")
query = ''' INSERT INTO user_info (username,password,create_date,active)
VALUES (%s,%s, %s,1)
'''
cursor.execute(query, (username, password, timestamp))
cnx.commit()
# Add error catch here for sql
return render_template("signup_succeed_index.html", title="Sign Up Succeed", username=username, password=password1)
"""
#############################################################
Secure Index
############################################################
"""
@webapp.route('/secure/index', methods=['GET', 'POST'])
def sensitive():
'''
    This function handles GET/POST requests to "/secure/index". It first checks whether the user session contains
    the key "authenticated" with the value True, indicating the user has passed the login check. If not, the user
    is redirected to the login page. Otherwise the function looks up the "username" from the session in the
    database, stores the user's uid and create_date in the session and renders "secured_index.html".
:return: "/secure/index" or "/secured_index.html"
'''
if 'authenticated' not in session:
return redirect(url_for('user_login'))
# ==========Read user Info and sign in =========#
if session['authenticated'] == True:
# connect to database
cnx = get_database()
cursor = cnx.cursor()
query = "SELECT uid , create_date FROM user_info WHERE username = %s and active = 1"
cursor.execute(query, (session['username'],))
results = cursor.fetchall()
uid = results[0][0]
memberSince = results[0][1]
session['uid'] = uid
session['membersince'] = memberSince
return render_template("/secured_index.html", username=session['username'], membersince=session['membersince'])
else:
return redirect(url_for('user_login'))
@webapp.route('/logout', methods=['GET', 'POST'])
def logout():
'''
    This function handles GET/POST requests to "/logout". It clears the current user's session, resets the session
    lifetime, and then redirects the user back to the main page.
:return: /secure/index
'''
session.clear()
webapp.permanent_session_lifetime = datetime.timedelta(milliseconds=0)
return redirect(url_for("sensitive"))
"""
#############################################################
Send Email
############################################################
"""
# Create a new account and save them in the database.
@webapp.route('/signup/send_email', methods=['POST'])
def send_email():
'''
    This function handles POST requests to "/signup/send_email". It reads the submitted email address, username and
    password and validates the email address with a regular expression. If the address is valid, it calls
    "send_email" in the "EmailSender" module, which mails the registered username and password to the user, and
    re-renders "signup_succeed_index.html" with a success message; otherwise it re-renders the same page with an
    error message.
:return: “signup_succeed_index.html”
'''
# need to trim the user name
email = request.form.get('email', "")
username = request.form.get('username', "")
password = request.form.get('password', "")
if not re.match(r"^[A-Za-z0-9\.\+_-]+@[A-Za-z0-9\._-]+\.[a-zA-Z]*$", email):
error_msg = "Error: Not a correct email address!"
return render_template("signup_succeed_index.html", title="Sign Up Succeed", username=username,
password=password, error_msg=error_msg)
# send email
email_confirmation.send_email(email, username, password)
success_msg = "=================Email Sent!==================="
return render_template("signup_succeed_index.html", title="Sign Up Succeed", username=username, password=password,
success_msg=success_msg)
| [((1836, 1883), 'app.webapp.route', 'webapp.route', (['"""/login"""'], {'methods': "['GET', 'POST']"}), "('/login', methods=['GET', 'POST'])\n", (1848, 1883), False, 'from app import webapp\n'), ((2161, 2208), 'app.webapp.route', 'webapp.route', (['"""/login_submit"""'], {'methods': "['POST']"}), "('/login_submit', methods=['POST'])\n", (2173, 2208), False, 'from app import webapp\n'), ((4716, 4756), 'app.webapp.route', 'webapp.route', (['"""/signup"""'], {'methods': "['GET']"}), "('/signup', methods=['GET'])\n", (4728, 4756), False, 'from app import webapp\n'), ((5088, 5134), 'app.webapp.route', 'webapp.route', (['"""/signup/save"""'], {'methods': "['POST']"}), "('/signup/save', methods=['POST'])\n", (5100, 5134), False, 'from app import webapp\n'), ((9003, 9057), 'app.webapp.route', 'webapp.route', (['"""/secure/index"""'], {'methods': "['GET', 'POST']"}), "('/secure/index', methods=['GET', 'POST'])\n", (9015, 9057), False, 'from app import webapp\n'), ((10507, 10555), 'app.webapp.route', 'webapp.route', (['"""/logout"""'], {'methods': "['GET', 'POST']"}), "('/logout', methods=['GET', 'POST'])\n", (10519, 10555), False, 'from app import webapp\n'), ((11193, 11245), 'app.webapp.route', 'webapp.route', (['"""/signup/send_email"""'], {'methods': "['POST']"}), "('/signup/send_email', methods=['POST'])\n", (11205, 11245), False, 'from app import webapp\n'), ((2104, 2157), 'flask.render_template', 'render_template', (['"""/login_index.html"""'], {'title': '"""Welcome"""'}), "('/login_index.html', title='Welcome')\n", (2119, 2157), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((3153, 3167), 'flask_bcrypt.Bcrypt', 'Bcrypt', (['webapp'], {}), '(webapp)\n', (3159, 3167), False, 'from flask_bcrypt import Bcrypt\n'), ((3263, 3291), 'flask.request.form.get', 'request.form.get', (['"""remember"""'], {}), "('remember')\n", (3279, 3291), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((4387, 4489), 'flask.render_template', 'render_template', (['"""/login_index.html"""'], {'title': '"""Main Page"""', 'username': 'username', 'error': "session['error']"}), "('/login_index.html', title='Main Page', username=username,\n error=session['error'])\n", (4402, 4489), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((4976, 5030), 'flask.render_template', 'render_template', (['"""signup_index.html"""'], {'title': '"""Join Us!"""'}), "('signup_index.html', title='Join Us!')\n", (4991, 5030), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((6000, 6014), 'flask_bcrypt.Bcrypt', 'Bcrypt', (['webapp'], {}), '(webapp)\n', (6006, 6014), False, 'from flask_bcrypt import Bcrypt\n'), ((6063, 6095), 'flask.request.form.get', 'request.form.get', (['"""username"""', '""""""'], {}), "('username', '')\n", (6079, 6095), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((6112, 6145), 'flask.request.form.get', 'request.form.get', (['"""password1"""', '""""""'], {}), "('password1', '')\n", (6128, 6145), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((6162, 6195), 'flask.request.form.get', 'request.form.get', (['"""password2"""', '""""""'], {}), "('password2', '')\n", (6178, 6195), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((8323, 8334), 'time.time', 'time.time', ([], {}), '()\n', (8332, 8334), False, 'import time\n'), ((8745, 8857), 
'flask.render_template', 'render_template', (['"""signup_succeed_index.html"""'], {'title': '"""Sign Up Succeed"""', 'username': 'username', 'password': 'password1'}), "('signup_succeed_index.html', title='Sign Up Succeed',\n username=username, password=password1)\n", (8760, 8857), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((10859, 10874), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (10872, 10874), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((10915, 10949), 'datetime.timedelta', 'datetime.timedelta', ([], {'milliseconds': '(0)'}), '(milliseconds=0)\n', (10933, 10949), False, 'import datetime\n'), ((11966, 11995), 'flask.request.form.get', 'request.form.get', (['"""email"""', '""""""'], {}), "('email', '')\n", (11982, 11995), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((12011, 12043), 'flask.request.form.get', 'request.form.get', (['"""username"""', '""""""'], {}), "('username', '')\n", (12027, 12043), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((12059, 12091), 'flask.request.form.get', 'request.form.get', (['"""password"""', '""""""'], {}), "('password', '')\n", (12075, 12091), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((12429, 12485), 'app.EmailSender.send_email', 'email_confirmation.send_email', (['email', 'username', 'password'], {}), '(email, username, password)\n', (12458, 12485), True, 'from app import EmailSender as email_confirmation\n'), ((12565, 12701), 'flask.render_template', 'render_template', (['"""signup_succeed_index.html"""'], {'title': '"""Sign Up Succeed"""', 'username': 'username', 'password': 'password', 'success_msg': 'success_msg'}), "('signup_succeed_index.html', title='Sign Up Succeed',\n username=username, password=password, success_msg=success_msg)\n", (12580, 12701), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((3440, 3455), 'flask.session.clear', 'session.clear', ([], {}), '()\n', (3453, 3455), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((3500, 3534), 'datetime.timedelta', 'datetime.timedelta', ([], {'milliseconds': '(0)'}), '(milliseconds=0)\n', (3518, 3534), False, 'import datetime\n'), ((6587, 6726), 'flask.render_template', 'render_template', (['"""signup_index.html"""'], {'title': '"""Sign Up"""', 'error_msg': 'error_msg', 'username': 'username', 'password1': 'password1', 'password2': 'password2'}), "('signup_index.html', title='Sign Up', error_msg=error_msg,\n username=username, password1=password1, password2=password2)\n", (6602, 6726), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((6762, 6790), 're.findall', 're.findall', (['"""\\\\s+"""', 'username'], {}), "('\\\\s+', username)\n", (6772, 6790), False, 'import re\n'), ((6873, 7012), 'flask.render_template', 'render_template', (['"""signup_index.html"""'], {'title': '"""Sign Up"""', 'error_msg': 'error_msg', 'username': 'username', 'password1': 'password1', 'password2': 'password2'}), "('signup_index.html', title='Sign Up', error_msg=error_msg,\n username=username, password1=password1, password2=password2)\n", (6888, 7012), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((7141, 7280), 'flask.render_template', 'render_template', (['"""signup_index.html"""'], {'title': '"""Sign Up"""', 'error_msg': 
'error_msg', 'username': 'username', 'password1': 'password1', 'password2': 'password2'}), "('signup_index.html', title='Sign Up', error_msg=error_msg,\n username=username, password1=password1, password2=password2)\n", (7156, 7280), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((7418, 7557), 'flask.render_template', 'render_template', (['"""signup_index.html"""'], {'title': '"""Sign Up"""', 'error_msg': 'error_msg', 'username': 'username', 'password1': 'password1', 'password2': 'password2'}), "('signup_index.html', title='Sign Up', error_msg=error_msg,\n username=username, password1=password1, password2=password2)\n", (7433, 7557), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((7858, 7997), 'flask.render_template', 'render_template', (['"""signup_index.html"""'], {'title': '"""Sign Up"""', 'error_msg': 'error_msg', 'username': 'username', 'password1': 'password1', 'password2': 'password2'}), "('signup_index.html', title='Sign Up', error_msg=error_msg,\n username=username, password1=password1, password2=password2)\n", (7873, 7997), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((8146, 8285), 'flask.render_template', 'render_template', (['"""signup_index.html"""'], {'title': '"""Sign Up"""', 'error_msg': 'error_msg', 'username': 'username', 'password1': 'password1', 'password2': 'password2'}), "('signup_index.html', title='Sign Up', error_msg=error_msg,\n username=username, password1=password1, password2=password2)\n", (8161, 8285), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((10342, 10450), 'flask.render_template', 'render_template', (['"""/secured_index.html"""'], {'username': "session['username']", 'membersince': "session['membersince']"}), "('/secured_index.html', username=session['username'],\n membersince=session['membersince'])\n", (10357, 10450), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((10970, 10990), 'flask.url_for', 'url_for', (['"""sensitive"""'], {}), "('sensitive')\n", (10977, 10990), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((12104, 12175), 're.match', 're.match', (['"""^[A-Za-z0-9\\\\.\\\\+_-]+@[A-Za-z0-9\\\\._-]+\\\\.[a-zA-Z]*$"""', 'email'], {}), "('^[A-Za-z0-9\\\\.\\\\+_-]+@[A-Za-z0-9\\\\._-]+\\\\.[a-zA-Z]*$', email)\n", (12112, 12175), False, 'import re\n'), ((12247, 12379), 'flask.render_template', 'render_template', (['"""signup_succeed_index.html"""'], {'title': '"""Sign Up Succeed"""', 'username': 'username', 'password': 'password', 'error_msg': 'error_msg'}), "('signup_succeed_index.html', title='Sign Up Succeed',\n username=username, password=password, error_msg=error_msg)\n", (12262, 12379), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((8351, 8386), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['ts'], {}), '(ts)\n', (8382, 8386), False, 'import datetime\n'), ((9796, 9817), 'flask.url_for', 'url_for', (['"""user_login"""'], {}), "('user_login')\n", (9803, 9817), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((10481, 10502), 'flask.url_for', 'url_for', (['"""user_login"""'], {}), "('user_login')\n", (10488, 10502), False, 'from flask import render_template, redirect, url_for, request, g, session\n'), ((4193, 4220), 'datetime.timedelta', 'datetime.timedelta', ([], {'weeks': '(1)'}), '(weeks=1)\n', 
(4211, 4220), False, 'import datetime\n'), ((4249, 4269), 'flask.url_for', 'url_for', (['"""sensitive"""'], {}), "('sensitive')\n", (4256, 4269), False, 'from flask import render_template, redirect, url_for, request, g, session\n')] |
priyablue/lidar_navigation | scripts/slice.py | 39cd44a44043fa001c9d797ddea6c19e3376276c | import math
from point2d import Point2D
def to_point(rads, dist):
x = math.cos(rads) * dist
y = math.sin(rads) * dist
return Point2D(x, y)
class Slice(object):
def __init__(self, begin, end):
self.__begin = begin
self.__end = end
self.__begin_rad = math.radians(self.__begin)
self.__end_rad = math.radians(self.__end)
self.__begin_point = None
self.__end_point = None
# Calculate the angle halfway between the begin and end
self.__mid_rad = math.radians(self.__begin + ((self.__end - self.__begin) / 2.0))
self.__nearest_point = None
@property
def nearest(self):
return self.__nearest_point
def __contains__(self, point):
return self.__begin <= point.angle <= self.__end
def begin_point(self, max_dist):
return to_point(self.__begin_rad, max_dist)
def end_point(self, max_dist):
return to_point(self.__end_rad, max_dist)
def add_point(self, point):
# See if point is closer than the previously closest point
if point.origin_dist < self.__nearest_point.origin_dist:
self.__nearest_point = point
def reset(self, max_dist):
self.__nearest_point = to_point(self.__mid_rad, max_dist)
def __str__(self):
return "Begin: {} End: {} Nearest: {}".format(self.__begin, self.__end, self.__nearest_point)
| [((140, 153), 'point2d.Point2D', 'Point2D', (['x', 'y'], {}), '(x, y)\n', (147, 153), False, 'from point2d import Point2D\n'), ((77, 91), 'math.cos', 'math.cos', (['rads'], {}), '(rads)\n', (85, 91), False, 'import math\n'), ((107, 121), 'math.sin', 'math.sin', (['rads'], {}), '(rads)\n', (115, 121), False, 'import math\n'), ((295, 321), 'math.radians', 'math.radians', (['self.__begin'], {}), '(self.__begin)\n', (307, 321), False, 'import math\n'), ((347, 371), 'math.radians', 'math.radians', (['self.__end'], {}), '(self.__end)\n', (359, 371), False, 'import math\n'), ((529, 591), 'math.radians', 'math.radians', (['(self.__begin + (self.__end - self.__begin) / 2.0)'], {}), '(self.__begin + (self.__end - self.__begin) / 2.0)\n', (541, 591), False, 'import math\n')] |
WernerDreier/audio-korpora-pipeline | src/audio_korpora_pipeline/inputadapter/adapters.py | ac171cdfb0663c7b6250c06cc9c70a951b908251 | import concurrent
import os
import re
import shutil
import xml.etree.ElementTree as ET # TODO do we have this as requirement?
from concurrent.futures import as_completed
from pathlib import Path
import ffmpeg
import pandas as pd
import webrtcvad
from audio_korpora_pipeline.baseobjects import FileHandlingObject
from audio_korpora_pipeline.inputadapter.audiosplit.splitter import Splitter
from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, \
MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, \
MediaSessionActor, Sex, \
MediaSessionActors, MediaSession
class Adapter(FileHandlingObject):
def __init__(self, config):
super(Adapter, self).__init__()
def toMetamodel(self) -> MediaSession:
raise NotImplementedError("Please use a subclass")
def skipAlreadyProcessedFiles(self):
    skip = self.config['global'].get('skipAlreadyProcessedFiles')
    if skip is None:
self.logger.warn("No config setting for skipAlreadyProcessedFiles set. Assuming True")
return True
return skip
class UntranscribedMediaSplittingAdapter(Adapter):
AUDIO_SPLIT_AGRESSIVENESS = 3 # webrtcvad 1 (low), 3 (max)
ADAPTERNAME = "MediaSplittingAdapter"
mediaAnnotationBundles = []
mediaSessionActors = set() # using a set so we don't have duplets
def __init__(self, config):
super(UntranscribedMediaSplittingAdapter, self).__init__(config=config)
self.config = config
self.mediaSessionActors.add(MediaSessionActor("UNKNOWN", Sex.UNKNOWN, None))
def _splitMonoRawAudioToVoiceSectionsThread(self, file, outputpath):
self.logger.debug("Splitting file into chunks: {}".format(self._getFilenameWithExtension(file)))
splitter = Splitter()
vad = webrtcvad.Vad(int(self.AUDIO_SPLIT_AGRESSIVENESS))
basename = self._getFilenameWithoutExtension(file)
audiochunkPathsForThisfile = []
try:
audio, sample_rate = splitter.read_wave(file)
frames = splitter.frame_generator(30, audio, sample_rate)
frames = list(frames)
segments = splitter.vad_collector(sample_rate, 30, 300, vad, frames)
for i, segment in enumerate(segments):
path = os.path.join(outputpath, basename + '_chunk_{:05d}.wav'.format(i))
self.logger.debug("Write chunk {} of file {}".format(i, file))
splitter.write_wave(path, segment, sample_rate)
audiochunkPathsForThisfile.append(path)
# write staging complete file
stagingPath = os.path.join(outputpath, basename + ".stagingComplete")
with open(stagingPath, 'a'):
os.utime(stagingPath, None)
self.logger.debug("Finished splitting file {}".format(file))
except Exception as excep:
self.logger.warn("Could split file into chunks {}. Skipping".format(file), exc_info=excep)
return (False, str(file), []) # returning an empty list, as no success here
return (True, str(file), audiochunkPathsForThisfile)
def _convertMediafileToMonoAudioThread(self, filenumber, totalNumberOfFiles, singleFilepathToProcess, outputPath):
self.logger.debug(
"Processing file {}/{} on path {}".format(filenumber + 1, totalNumberOfFiles, singleFilepathToProcess))
nextFilename = os.path.join(outputPath, self._getFilenameWithoutExtension(singleFilepathToProcess) + ".wav")
try:
(ffmpeg
.input(singleFilepathToProcess)
.output(nextFilename, format='wav', acodec='pcm_s16le', ac=1, ar='16k')
.overwrite_output()
.run()
)
except ffmpeg.Error as ffmpgError:
self.logger.warn("Ffmpeg rose an error", exc_info=ffmpgError)
self.logger.warn("Due to error of ffmpeg skipped file {}".format(singleFilepathToProcess))
return (False, str(singleFilepathToProcess), str(nextFilename))
except Exception as e:
self.logger.warn("Got an error while using ffmpeg for file {}".format(singleFilepathToProcess), exc_info=e)
return (False, str(singleFilepathToProcess), str(nextFilename))
return (True, str(singleFilepathToProcess), str(nextFilename))
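  # Note on the ffmpeg-python chain above: it is built lazily and only executes on
  # .run(); the arguments translate to roughly
  #   ffmpeg -i <input> -f wav -acodec pcm_s16le -ac 1 -ar 16k -y <output>.wav
  # i.e. a 16 kHz, 16-bit, mono WAV written into the staging directory.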
def createMediaSession(self, bundles):
session = MediaSession(self.ADAPTERNAME, self.mediaSessionActors, bundles)
return session
def createMediaAnnotationBundles(self, audiochunks):
annotationBundles = []
for index, filepath in enumerate(audiochunks):
bundle = MediaAnnotationBundleWithoutTranscription(identifier=filepath) # we do not have any written ressources
bundle.setMediaFile(filepath)
annotationBundles.append(bundle)
return annotationBundles
def splitAudioToChunks(self, filesToChunk, outputPath):
if ((filesToChunk == None) or (len(filesToChunk) == 0)):
self.logger.info("Nothing to split, received empty wav-filenamelist")
return []
successfullyChunkedFiles = []
with concurrent.futures.ThreadPoolExecutor(max_workers=None) as executor:
futures = []
for filenumber, file in enumerate(filesToChunk):
futures.append(
executor.submit(self._splitMonoRawAudioToVoiceSectionsThread, file, outputPath))
for future in as_completed(futures):
if (future.result()[0] == False):
self.logger.warning("Couldnt split audiofile {}, removing from list".format(future.result()[1]))
else:
successfullyChunkedFiles.extend(future.result()[2])
self.logger.debug("Splitting Audio is done {}".format(future.result()))
self.logger.debug("Finished splitting {} wav files".format(len(filesToChunk)))
return successfullyChunkedFiles
def determineWavFilesToChunk(self, baseFilesToChunk, stagingChunkPath):
allStageIndicatorFilesFullpath = set(self._getAllMediaFilesInBasepath(stagingChunkPath, {".stagingComplete"}))
allExistingChunkedFilesFullpath = set(self._getAllMediaFilesInBasepath(stagingChunkPath, {".wav"}))
allStageIndicatorFilesDictionary = self._toFilenameDictionary(allStageIndicatorFilesFullpath)
allBaseFilesDictionary = self._toFilenameDictionary(baseFilesToChunk)
stagingCompleteCorrectKeys = set(allBaseFilesDictionary.keys()).intersection(
set(allStageIndicatorFilesDictionary.keys()))
stagingIncompleteCorrectKeys = set(allBaseFilesDictionary.keys()).difference(
set(allStageIndicatorFilesDictionary.keys()))
stagingComplete = []
for fullpath in allExistingChunkedFilesFullpath:
if any(self._getFilenameWithoutExtension(fullpath).startswith(cm) for cm in stagingCompleteCorrectKeys):
stagingComplete.append(fullpath)
stagingIncomplete = [allBaseFilesDictionary[key] for key in stagingIncompleteCorrectKeys]
self.logger.debug("Got {} files not yet chunked".format(len(stagingIncomplete)))
self.logger.debug("Got {} files chunked".format(len(stagingComplete)))
return stagingIncomplete, stagingComplete
def convertMediaFilesToMonoAudio(self, filesToProcess, outputpath, adapterName):
if (filesToProcess == None or len(filesToProcess) == 0):
self.logger.debug("No files to convert for {}, skipping".format(adapterName))
return []
successfulFilenames = []
with concurrent.futures.ThreadPoolExecutor(max_workers=None) as executor:
futures = []
for filenumber, currentFile in enumerate(filesToProcess):
futures.append(
executor.submit(self._convertMediafileToMonoAudioThread, filenumber, len(filesToProcess),
currentFile, outputpath))
for future in as_completed(futures):
if (future.result()[0] == False):
self.logger.warning("Couldnt process audiofile {}, removing from list".format(future.result()[1]))
else:
successfulFilenames.append(future.result()[2])
self.logger.debug("Processing Audio is done {} for Converter {}".format(future.result(), adapterName))
return successfulFilenames
def _toFilenameDictionary(self, list):
if (list == None or len(list) == 0):
self.logger.debug("Got nothing in list, returning empty dictionary")
return dict()
listDict = dict()
for fullpath in list:
listDict[self._getFilenameWithoutExtension(fullpath)] = fullpath
self.logger.debug("Created dictionary of files of length {}".format(len(listDict)))
return listDict
def determineFilesToConvertToMonoFromGivenLists(self, alreadyStagedFiles, originalFiles, adaptername):
dictionaryOfOriginalFilepaths = self._toFilenameDictionary(originalFiles)
dictionaryOfStagedFilepaths = self._toFilenameDictionary(alreadyStagedFiles)
notYetProcessedKeys = set(dictionaryOfOriginalFilepaths.keys()).difference(set(dictionaryOfStagedFilepaths.keys()))
alreadyProcessedKeys = set(dictionaryOfOriginalFilepaths.keys()).intersection(
set(dictionaryOfStagedFilepaths.keys()))
fullpathsToNotYetProcessed = [dictionaryOfOriginalFilepaths[key] for key in notYetProcessedKeys]
fullpathsProcessed = [dictionaryOfStagedFilepaths[key] for key in alreadyProcessedKeys]
self.logger.debug("Got {} files not yet processed for corpus {}".format(len(notYetProcessedKeys), adaptername))
self.logger.debug("Got {} files already processed for corpus {}".format(len(alreadyProcessedKeys), adaptername))
return fullpathsToNotYetProcessed, fullpathsProcessed
def _preprocess_workflow_with_splitting(self, filesAlreadyProcessed, filesToProcess, monoPath, chunkPath,
adaptername):
filesSuccessfullyProcessed = self.convertMediaFilesToMonoAudio(filesToProcess, monoPath, adaptername)
baseFilesToChunk = []
baseFilesToChunk = baseFilesToChunk + filesSuccessfullyProcessed + filesAlreadyProcessed
# split mono audio to chunks
filesToChunk, filesAlreadyChunked = self.determineWavFilesToChunk(baseFilesToChunk,
chunkPath)
filesSuccessfullyChunked = self.splitAudioToChunks(filesToChunk, chunkPath)
# add chunks to media session
mediaBundleFiles = [] + filesSuccessfullyChunked + filesAlreadyChunked
mediaAnnotationbundles = self.createMediaAnnotationBundles(mediaBundleFiles)
mediaSession = self.createMediaSession(mediaAnnotationbundles)
return mediaSession
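# Hedged sketch (not used by the adapters above): the frame-level webrtcvad call the
# Splitter is built around. Frames must be 10, 20 or 30 ms of 16-bit mono PCM at
# 8/16/32/48 kHz, and the aggressiveness 0..3 matches AUDIO_SPLIT_AGRESSIVENESS.
def _example_vad_is_speech(pcm_frame, sample_rate=16000, aggressiveness=3):
  vad = webrtcvad.Vad(aggressiveness)
  return vad.is_speech(pcm_frame, sample_rate)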
class UntranscribedVideoAdapter(UntranscribedMediaSplittingAdapter):
ADAPTERNAME = "UntranscribedVideoAdapter"
def __init__(self, config):
super(UntranscribedVideoAdapter, self).__init__(config=config)
self.config = config
def toMetamodel(self):
self.logger.debug("Untranscribed Video Korpus")
# convert video to mono audio
filesToProcess, filesAlreadyProcessed = self._determineVideoFilesToConvertToMono()
return self._preprocess_workflow_with_splitting(filesAlreadyProcessed, filesToProcess,
self._validateStagingMonoPath(), self._validateStagingChunksPath(),
self.ADAPTERNAME)
def _validateKorpusPath(self):
korpus_path = self.config['untranscribed_videos_input_adapter']['korpus_path']
if not os.path.isdir(korpus_path):
raise IOError("Could not read korpus path" + korpus_path)
return korpus_path
def _validateStagingMonoPath(self):
workdir = self.config['global']['workdir']
if not os.path.isdir(workdir):
raise IOError("Could not read workdir path" + workdir)
workdir = Path(workdir).joinpath("untranscribed_video_staging_mono")
workdir.mkdir(parents=True, exist_ok=True)
return str(workdir)
def _validateStagingChunksPath(self):
workdir = self.config['global']['workdir']
if not os.path.isdir(workdir):
raise IOError("Could not read workdir path" + workdir)
workdir = Path(workdir).joinpath("untranscribed_video_staging_chunks")
workdir.mkdir(parents=True, exist_ok=True)
return str(workdir)
def _determineVideoFilesToConvertToMono(self):
originalFiles = set(self._getAllMediaFilesInBasepath(self._validateKorpusPath(), {".mp4"}))
alreadyStagedFiles = set(self._getAllMediaFilesInBasepath(self._validateStagingMonoPath(), {".wav"}))
self.logger.debug("Got {} original untranscribed mp4 files to process".format(len(originalFiles)))
return self.determineFilesToConvertToMonoFromGivenLists(alreadyStagedFiles, originalFiles, self.ADAPTERNAME)
class ChJugendspracheAdapter(UntranscribedMediaSplittingAdapter):
ADAPTERNAME = "CHJugendspracheAdapter"
def __init__(self, config):
super(ChJugendspracheAdapter, self).__init__(config=config)
self.config = config
def toMetamodel(self):
self.logger.debug("CH-Jugendsprache Korpus")
# convert audio to mono audio
filesToProcess, filesAlreadyProcessed = self._determineChJugendspracheFilesToConvertToMono()
return self._preprocess_workflow_with_splitting(filesAlreadyProcessed, filesToProcess,
self._validateStagingMonoPath(), self._validateStagingChunksPath(),
self.ADAPTERNAME)
def _determineChJugendspracheFilesToConvertToMono(self):
originalFiles = set(self._getAllMediaFilesInBasepath(self._validateKorpusPath(), {".WAV", ".wav"}))
alreadyStagedFiles = set(self._getAllMediaFilesInBasepath(self._validateStagingMonoPath(), {".wav"}))
self.logger.debug("Got {} original jugendsprache files to process".format(len(originalFiles)))
return self.determineFilesToConvertToMonoFromGivenLists(alreadyStagedFiles, originalFiles, self.ADAPTERNAME)
def _validateStagingMonoPath(self):
workdir = self.config['global']['workdir']
if not os.path.isdir(workdir):
raise IOError("Could not read workdir path" + workdir)
workdir = Path(workdir).joinpath("ch_jugensprache_staging_mono")
workdir.mkdir(parents=True, exist_ok=True)
return str(workdir)
def _validateStagingChunksPath(self):
workdir = self.config['global']['workdir']
if not os.path.isdir(workdir):
raise IOError("Could not read workdir path" + workdir)
workdir = Path(workdir).joinpath("ch_jugensprache_staging_chunks")
workdir.mkdir(parents=True, exist_ok=True)
return str(workdir)
def _validateKorpusPath(self):
korpus_path = self.config['ch_jugendsprache_input_adapter']['korpus_path']
if not os.path.isdir(korpus_path):
raise IOError("Could not read korpus path" + korpus_path)
return korpus_path
class ArchimobAdapter(UntranscribedMediaSplittingAdapter):
"""
ArchimobAdapter
"""
ADAPTERNAME = "Archimob"
def __init__(self, config):
super(ArchimobAdapter, self).__init__(config=config)
self.config = config
def _validateKorpusPath(self):
korpus_path = self.config['archimob_input_adapter']['korpus_path']
if not os.path.isdir(korpus_path):
raise IOError("Could not read korpus path" + korpus_path)
return korpus_path
def _transcription_pause_tag_symbol(self):
symbol = self.config['archimob_input_adapter']['transcription_pause_tag_symbol']
if not symbol:
self.logger.warn("No symbol for transcription pause tag configured, falling back to default, which is '@'-Symbol")
symbol = '@'
return symbol
def _transcription_vocal_tag_symbol(self):
symbol = self.config['archimob_input_adapter']['transcription_vocal_tag_symbol']
if not symbol:
self.logger.warn("No symbol for transcription pause tag configured, falling back to default, which is '#'-Symbol")
symbol = '#'
return symbol
def _validateWorkdir(self):
workdir = self.config['global']['workdir']
if not os.path.isdir(workdir):
raise IOError("Could not read workdir path" + workdir)
workdir = Path(workdir).joinpath("archimob_staging")
workdir.mkdir(parents=True, exist_ok=True)
return str(workdir)
def _determineArchimobFilesToProcess(self):
originalFiles = set(self._getAllMediaFilesInBasepath(self._validateKorpusPath(), {".wav"}))
originalFiles = self._fixOriginalDatasetFlawsIfNecessary(originalFiles)
alreadyStagedFiles = set(self._getAllMediaFilesInBasepath(self._validateWorkdir(), {".wav"}))
self.logger.debug("Got {} original archimob files to process".format(len(originalFiles)))
return self.determineFilesToConvertToMonoFromGivenLists(alreadyStagedFiles, originalFiles, self.ADAPTERNAME)
def toMetamodel(self):
self.logger.debug("Archimob V2 Korpus")
# convert chunks to mono audio
filesToProcess, filesAlreadyProcessed = self._determineArchimobFilesToProcess()
filesSuccessfullyProcessed = self.convertMediaFilesToMonoAudio(filesToProcess, self._validateWorkdir(),
self.ADAPTERNAME)
filesForMediaBundle = []
filesForMediaBundle = filesForMediaBundle + filesSuccessfullyProcessed + filesAlreadyProcessed
# add chunks to media session
mediaAnnotationbundles = self.createMediaAnnotationBundles(filesForMediaBundle)
mediaSession = self.createMediaSession(mediaAnnotationbundles)
return mediaSession
def createMediaSession(self, bundles):
actors = self._createMediaSessionActorsFromBundles(bundles)
session = MediaSession(self.ADAPTERNAME, actors, bundles)
return session
def createMediaAnnotationBundles(self, filesForMediaBundle):
allXmlOriginalTranscriptionFiles = self._archimobOriginalTranscriptionFiles(self._validateKorpusPath())
transcriptionsPerSpeaker = self._extract(allXmlOriginalTranscriptionFiles)
mediaFilesAndTranscription = self._onlyTranscriptionsWithMediaFilesAndViceVersa(transcriptionsPerSpeaker,
filesForMediaBundle)
mediaAnnotationBundles = self._createActualMediaAnnotationBundles(mediaFilesAndTranscription)
return mediaAnnotationBundles
def _fixOriginalDatasetFlawsIfNecessary(self, originalFiles):
    # As of Archimob release V2 there are some minor flaws in the data, which are treated sequentially
if (self._fixForDuplicateWavs1063Necessary(originalFiles)):
originalFiles = self._fixForDuplicateWavs1063(originalFiles)
if (self._fixForWrongFilenames1082Necessary(originalFiles)):
originalFiles = self._fixForWrongFilenames1082(originalFiles)
return originalFiles
def _fixForDuplicateWavs1063Necessary(self, originalFiles):
# This flaw is simply, that within 1063 there exists another folder 1063 containing all files again
existingPathsForDoubled1063 = list(
filter(lambda file: os.path.sep + "1063" + os.path.sep + "1063" + os.path.sep in file, originalFiles))
fixNecessary = len(existingPathsForDoubled1063) > 0
self.logger.info("Found {} files of speaker 1063 which are duplicates. They will be ignored".format(
len(existingPathsForDoubled1063)))
return fixNecessary
def _fixForDuplicateWavs1063(self, originalFiles):
# fix is simply by removing the files in question from list
pathsWithout1063duplicates = list(
filter(lambda file: not (os.path.sep + "1063" + os.path.sep + "1063" + os.path.sep in file), originalFiles))
originalFiles = pathsWithout1063duplicates
return originalFiles
def _fixForWrongFilenames1082Necessary(self, originalFiles):
regexForFindingWrongNames = "(^\d{4}_\d)(d\d{4}_.*\.wav)" # like 1082_2d1082_2_TLI_3.wav
onlyFilenames = [os.path.basename(filename) for filename in originalFiles]
for filename in onlyFilenames:
m = re.search(regexForFindingWrongNames, filename)
if (not (m is None)):
return True
return False
def _fixForWrongFilenames1082(self, originalFiles):
fixedFiles = originalFiles.copy()
regexForFindingWrongFullpaths = "(.*\\" + os.path.sep + ")(\d{4}_\d)(d\d{4}_.*\.wav)" # like /home/somebody/files/1082/1082_2d1082_2_TLI_3.wav
for filename in originalFiles:
m = re.search(regexForFindingWrongFullpaths, filename)
if (not (m is None)):
newFilename = m.group(1) + m.group(3)
self.logger.debug(
"Fix 1082: Renaming file {} from {} to {}".format(m.group(2) + m.group(3), filename, newFilename))
try:
shutil.move(filename, newFilename)
fixedFiles.append(newFilename)
except Exception as inst:
self.logger.warn(
"Could not move file {} to {}, skipping and just removing from usable filenames".format(filename,
newFilename),
exc_info=inst)
fixedFiles.remove(filename)
return fixedFiles
def _archimobOriginalTranscriptionFiles(self, path):
xmlOriginalFiles = list(Path(path).glob("**/*.xml"))
self.logger.debug("Found {} original xml files for archimob".format(len(xmlOriginalFiles)))
return xmlOriginalFiles
def _extract(self, allXmlOriginalTranscriptionFiles):
transcriptionsPerSpeaker = []
with concurrent.futures.ThreadPoolExecutor(max_workers=None) as executor:
futures = []
for filenumber, file in enumerate(allXmlOriginalTranscriptionFiles):
futures.append(executor.submit(self._extractSingleXmlFileThread, file))
for future in as_completed(futures):
if (future.result()[0] == False):
self.logger.warning("Couldnt extract metadata for file {}, removing from list".format(future.result()[1]))
else:
transcriptionsPerSpeaker.append(
(future.result()[1], future.result()[2])) # tuple of original file and transcription dataframe
self.logger.debug("Extracting metadata for speaker finished {}".format(future.result()))
self.logger.debug("Finished metadata extraction for all {} xml files".format(len(allXmlOriginalTranscriptionFiles)))
return transcriptionsPerSpeaker
def _extractSingleXmlFileThread(self, xmlFile):
namespaceprefix = "{http://www.tei-c.org/ns/1.0}"
try:
tree = ET.parse(xmlFile)
root = tree.getroot()
ch_datacolumns = pd.DataFrame(columns=['Filename', 'transcript'])
transcriptionForSpeaker = pd.DataFrame(columns=ch_datacolumns.columns)
tagsToIgnore = set([namespaceprefix + tag for tag in {"gap", "incident", "kinesic", "other"}])
for utteranceTag in root.iter(namespaceprefix + 'u'):
media = utteranceTag.attrib['start']
filename = media.split('#')[1]
ch_transcript = [""]
for element in utteranceTag:
extractedWord = ""
if (namespaceprefix + "w" == element.tag):
extractedWord = self._extractWordTag(element)
if (namespaceprefix + "pause" == element.tag):
extractedWord = self._extractPauseTag(element)
if (namespaceprefix + "vocal" == element.tag):
extractedWord = self._extractVocalTag(namespaceprefix, element)
if (namespaceprefix + "del" == element.tag):
extractedWord = self._extractDeletionTag(element)
if (namespaceprefix + "unclear" == element.tag):
extractedWord = self._extractUnclearTag(namespaceprefix, element)
if (element.tag in tagsToIgnore):
self.logger.debug(
"Found tag {} which is in ignore list, ignoring the whole utterance {}".format(element.tag, filename))
break
if (extractedWord):
cleanedWord = self._cleanExtractedWord(extractedWord)
if (cleanedWord):
ch_transcript.append(cleanedWord)
try:
actualTranscript = " ".join(ch_transcript).strip()
if (not actualTranscript or (self._transcription_pause_tag_symbol() == actualTranscript)):
self.logger.debug("Skipping empty transcription for filename {}".format(filename))
continue
transcriptionForSpeaker = transcriptionForSpeaker.append(
{'Filename': filename, 'transcript': actualTranscript}, ignore_index=True)
transcriptionForSpeaker = self._cleanSpecialCaseWhereTwoSentencesPerFileExist(transcriptionForSpeaker)
except Exception as e:
self.logger.warn("Couldn't append single utterance for filename {}".format(filename), exc_info=e)
continue
# writing is just for manual checking
transcriptionForSpeaker.to_csv(
os.path.join(self._getFullFilenameWithoutExtension(xmlFile) + "_transcript_CH.csv"),
header=True, index=False, encoding='utf-8')
return True, xmlFile, transcriptionForSpeaker
except Exception as e:
self.logger.warn("Couldn't extract metadata for xml file {}".format(xmlFile), exc_info=e)
return False, xmlFile, None
def _extractWordTag(self, element):
return element.text
def _extractPauseTag(self, element):
return self._transcription_pause_tag_symbol()
def _extractVocalTag(self, namespaceprefix, element):
desc = element.find(namespaceprefix + "desc")
if desc is not None:
return self._transcription_vocal_tag_symbol() + desc.text
return ""
def _extractDeletionTag(self, element):
truncatedTextWithPotentialSlash = element.text
if truncatedTextWithPotentialSlash:
truncatedText = truncatedTextWithPotentialSlash.replace("/", "")
return truncatedText
return ""
def _extractUnclearTag(self, namespaceprefix, element):
if element is not None:
wordsWithinUnclearTag = element.findall(namespaceprefix + 'w')
unclearText = []
for word in wordsWithinUnclearTag:
unclearText.append(word.text)
return " ".join(unclearText)
return ""
def _cleanExtractedWord(self, extractedWord):
# replace all tokens with gravis with their counterpart
# remove all chars not in allowed list
    # Note: q, x and y are not allowed, as those do not occur in the ArchiMob transcriptions!
allowed_chars = {
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'r', 's', 't', 'u', 'v', 'w', 'z',
'ä', 'ö', 'ü',
' '
}
allowed_chars.add(self._transcription_pause_tag_symbol())
allowed_chars.add(self._transcription_vocal_tag_symbol())
whitespace_regex = re.compile(r'[ \t]+')
extractedWord = extractedWord.lower()
extractedWord = extractedWord.replace('á', 'a')
extractedWord = extractedWord.replace('à', 'a')
extractedWord = extractedWord.replace('â', 'a')
extractedWord = extractedWord.replace('ç', 'c')
extractedWord = extractedWord.replace('é', 'e')
extractedWord = extractedWord.replace('è', 'e')
extractedWord = extractedWord.replace('ê', 'e')
extractedWord = extractedWord.replace('í', 'i')
extractedWord = extractedWord.replace('ì', 'i')
extractedWord = extractedWord.replace('î', 'i')
extractedWord = extractedWord.replace('ñ', 'n')
extractedWord = extractedWord.replace('ó', 'o')
extractedWord = extractedWord.replace('ò', 'o')
extractedWord = extractedWord.replace('ô', 'o')
extractedWord = extractedWord.replace('ú', 'u')
extractedWord = extractedWord.replace('ù', 'u')
extractedWord = extractedWord.replace('ǜ', 'u')
extractedWord = extractedWord.replace('û', 'u')
extractedWord = extractedWord.replace('ș', 's')
extractedWord = extractedWord.replace('ş', 's')
extractedWord = extractedWord.replace('ß', 'ss')
extractedWord = extractedWord.replace('-', ' ')
# Those should not exist anymore, however, be safe
extractedWord = extractedWord.replace('–', ' ')
extractedWord = extractedWord.replace('/', ' ')
extractedWord = whitespace_regex.sub(' ', extractedWord)
extractedWord = ''.join([char for char in extractedWord if char in allowed_chars])
extractedWord = whitespace_regex.sub(' ', extractedWord)
extractedWord = extractedWord.strip()
return extractedWord
def _onlyTranscriptionsWithMediaFilesAndViceVersa(self, transcriptionsPerSpeaker, filesForMediaBundle):
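    # Match transcriptions against the audio files that actually exist on disk, keyed by the
    # extension-less filename, and return only the entries present in both sets.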
if not transcriptionsPerSpeaker or not filesForMediaBundle:
return []
existingMediaFilesTuples = [(self._getFilenameWithoutExtension(mediafile), mediafile) for mediafile in
filesForMediaBundle]
existingMediaFiles, existingMediaFilesFullpath = zip(*existingMediaFilesTuples)
# combine all transcriptions
allTranscriptions = pd.concat([transcription[1] for transcription in transcriptionsPerSpeaker])
if any("-" in filename for filename in allTranscriptions.Filename) \
and not any("-" in filename for filename in existingMediaFiles):
self.logger.debug(
"Found filenames with dash (-) instead of underscore (_) but only filenames with underscore. Automatically fixing this...")
allTranscriptions.Filename = allTranscriptions.Filename.str.replace("-", "_")
# Find all files that exist in both sets
# TODO: Performance not good for 70k files
allMatchingTranscriptions = allTranscriptions[allTranscriptions.Filename.isin(existingMediaFiles)].copy()
allMatchingTranscriptions["FullpathFilename"] = ""
allMatchingTranscriptions.set_index("Filename", inplace=True)
for filenumber, existingFile in enumerate(existingMediaFiles):
allMatchingTranscriptions.loc[existingFile, "FullpathFilename"] = existingMediaFilesFullpath[filenumber]
return allMatchingTranscriptions[["FullpathFilename", "transcript"]].copy()
def _createActualMediaAnnotationBundles(self, mediaFilesAndTranscription):
bundles = []
for fileAndTranscription in mediaFilesAndTranscription.itertuples(index=False):
bundle = MediaAnnotationBundle(fileAndTranscription.FullpathFilename)
speakerId = self._speakerIdFromFullpath(fileAndTranscription.FullpathFilename)
bundle.setMediaFile(MediaFile(speakerId))
written_resource = WrittenResource(fileAndTranscription.transcript, speakerId, languageCode="CH",
annotationType=WrittenResource.DIETH_WITHOUT_GRAVIS)
bundle.setWrittenResource(written_resource)
bundles.append(bundle)
self.logger.debug("Created {} mediaAnnotationBundles out of {} transcriptions".format(len(bundles), len(
mediaFilesAndTranscription)))
return bundles
def _speakerIdFromFullpath(self, fullpathFilename):
return self._getFilenameWithoutExtension(fullpathFilename).split("_")[0]
def _createMediaSessionActorsFromBundles(self, bundles):
speakerIds = set([speaker.writtenResource.actorRef for speaker in bundles])
actors = [MediaSessionActor(speakerId, Sex.UNKNOWN, None) for speakerId in speakerIds]
return MediaSessionActors(actors)
def _cleanSpecialCaseWhereTwoSentencesPerFileExist(self, transcriptionForSpeaker):
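    # If the two most recent rows refer to the same audio file, merge their transcripts into a
    # single row so that every audio file keeps exactly one transcription.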
if transcriptionForSpeaker is None or len(transcriptionForSpeaker) < 2:
return transcriptionForSpeaker
lastFilename = transcriptionForSpeaker.iloc[-1]["Filename"]
filenameBefore = transcriptionForSpeaker.iloc[-2]["Filename"]
if lastFilename == filenameBefore:
lastTranscription = transcriptionForSpeaker.iloc[-1]["transcript"]
transcriptionBefore = transcriptionForSpeaker.iloc[-2]["transcript"]
newTranscript = transcriptionBefore + " " + lastTranscription
transcriptionForSpeaker.drop(transcriptionForSpeaker.tail(2).index, inplace=True)
transcriptionForSpeaker = transcriptionForSpeaker.append(
{'Filename': lastFilename, 'transcript': newTranscript}, ignore_index=True)
self.logger.info(
"Found a case {} where two sentences '{}' and '{}' are within one audio-file, merging them together".format(
lastFilename,
transcriptionBefore, lastTranscription))
return transcriptionForSpeaker
class CommonVoiceAdapter(Adapter):
RELATIVE_PATH_TO_AUDIO = "clips"
LANGUAGECODE_DE = "de_DE"
ADAPTERNAME = "CommonVoiceDE"
mediaAnnotationBundles = []
  mediaSessionActors = set()  # using a set so we don't have duplicates
def __init__(self, config):
super(CommonVoiceAdapter, self).__init__(config=config)
self.config = config
def toMetamodel(self):
self.logger.debug("Created CommonVoice Adapter")
self.audiofilenames = self._readExistingAudioFiles()
self.speakermetadata = self._readExistingSpeakerMetadata()
self._persistMetamodel()
self._buildMediaSession()
return self.mediaSession
def _validateKorpusPath(self):
korpus_path = self.config['common_voice_input_adapter']['korpus_path']
if not os.path.isdir(korpus_path):
raise IOError("Could not read korpus path" + korpus_path)
return korpus_path
def _existingAudioFileFullpath(self, filename):
return os.path.join(self._validateKorpusPath(), self.RELATIVE_PATH_TO_AUDIO, filename)
def _readExistingAudioFiles(self):
fullpath = os.path.join(self._validateKorpusPath(), self.RELATIVE_PATH_TO_AUDIO)
for file in os.listdir(fullpath):
if file.endswith(".mp3"):
currentfile = MediaAnnotationBundle(self._existingAudioFileFullpath(file))
self.mediaAnnotationBundles.append(currentfile)
self.logger.debug("Found {} audiofiles to process".format(len(self.mediaAnnotationBundles)))
pass
def _readExistingSpeakerMetadata(self, ):
existing_audio_identifier = self._getFilenamesFromMediaAnnotationBundles()
common_voice_valid_metadata = self._getCommonVoiceValidMetadata(
existing_audio_identifier, self._validateKorpusPath())
self._enrichWithTranscription(common_voice_valid_metadata)
self._extractMediaSessionActors(common_voice_valid_metadata)
def _enrichWithTranscription(self, common_voice_valid_metadata):
self.mediaAnnotationBundles_dictionary_withoutExtension = {self._getFilenameWithoutExtension(x.identifier): x for x
in self.mediaAnnotationBundles}
self.mediaAnnotationBundles_dictionary_withExtension = {self._getFilenameWithExtension(x.identifier): x for x in
self.mediaAnnotationBundles}
common_voice_valid_metadata.apply(self._enrichWithTranscriptionInner, axis=1)
pass
def _enrichWithTranscriptionInner(self, row):
currentMediaAnnotationBundle = self.mediaAnnotationBundles_dictionary_withoutExtension.get(row.path,
self.mediaAnnotationBundles_dictionary_withExtension.get(
row.path))
currentMediaAnnotationBundle.setWrittenResource(
WrittenResource(row.sentence, row.client_id, self.LANGUAGECODE_DE))
currentMediaAnnotationBundle.setMediaFile(MediaFile(row.client_id))
self.logger.debug(
"Found matching media-annotation bundle for identifier {} and path {}".format(row.client_id, row.path))
def _extractMediaSessionActors(self, common_voice_valid_metadata):
common_voice_valid_metadata.apply(self._createMediaSessionActorFromRow, axis=1)
self.logger.debug("Found {} Speakers".format(len(self.mediaSessionActors)))
pass
def _createMediaSessionActorFromRow(self, row):
self.mediaSessionActors.add(MediaSessionActor(row.client_id, Sex.toSexEnum(row.gender), row.age))
pass
def _getCommonVoiceValidMetadata(self, existing_audio_identifier,
korpus_path):
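    # Concatenate the official CommonVoice split files (dev/test/train/validated) and keep only
    # the rows whose audio clip is actually present on disk.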
commonvoice_valid_metadatafilenames = ["dev.tsv", "test.tsv", "train.tsv", "validated.tsv"]
combined_csv = pd.concat(
[pd.read_csv(os.path.join(korpus_path, f), sep="\t", header=0) for f in commonvoice_valid_metadatafilenames])
common_voice_valid_metadata = combined_csv[combined_csv.path.isin(existing_audio_identifier)]
common_voice_valid_metadata = self._fixChangeInDataFormatCommonVoice(common_voice_valid_metadata, combined_csv)
return common_voice_valid_metadata
def _getFilenamesFromMediaAnnotationBundles(self):
return [os.path.splitext(os.path.basename(base.identifier))[0] for base in
self.mediaAnnotationBundles]
def _getFilenamesFromMediaAnnotationBundlesWithExtension(self):
return [os.path.basename(base.identifier) for base in self.mediaAnnotationBundles]
def _persistMetamodel(self):
# TODO actual persisting of working json
# Actual json output
# print(json.dumps(self.mediaAnnotationBundles, default=lambda o: o.__dict__, sort_keys=True, indent=4))
pass
def _buildMediaSession(self):
actors = MediaSessionActors(self.mediaSessionActors)
session = MediaSession(self.ADAPTERNAME, actors, self.mediaAnnotationBundles)
# TODO Validate
self.mediaSession = session
pass
def _fixChangeInDataFormatCommonVoice(self, common_voice_valid_metadata, combined_csv):
if (len(common_voice_valid_metadata) == 0):
self.logger.debug(
"CommonVoice tsv-files seem to have filename-extension set (new fileformat). Trying matching with extension")
common_voice_valid_metadata = combined_csv[
combined_csv.path.isin(self._getFilenamesFromMediaAnnotationBundlesWithExtension())]
self.logger.debug(
"CommonVoice Valid metadata length is: {}".format(len(common_voice_valid_metadata)))
return common_voice_valid_metadata
| [((1766, 1776), 'audio_korpora_pipeline.inputadapter.audiosplit.splitter.Splitter', 'Splitter', ([], {}), '()\n', (1774, 1776), False, 'from audio_korpora_pipeline.inputadapter.audiosplit.splitter import Splitter\n'), ((4142, 4206), 'audio_korpora_pipeline.metamodel.mediasession.MediaSession', 'MediaSession', (['self.ADAPTERNAME', 'self.mediaSessionActors', 'bundles'], {}), '(self.ADAPTERNAME, self.mediaSessionActors, bundles)\n', (4154, 4206), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((5117, 5138), 'concurrent.futures._base.as_completed', 'as_completed', (['futures'], {}), '(futures)\n', (5129, 5138), False, 'from concurrent.futures._base import as_completed\n'), ((7461, 7482), 'concurrent.futures._base.as_completed', 'as_completed', (['futures'], {}), '(futures)\n', (7473, 7482), False, 'from concurrent.futures._base import as_completed\n'), ((17081, 17128), 'audio_korpora_pipeline.metamodel.mediasession.MediaSession', 'MediaSession', (['self.ADAPTERNAME', 'actors', 'bundles'], {}), '(self.ADAPTERNAME, actors, bundles)\n', (17093, 17128), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((21115, 21136), 'concurrent.futures._base.as_completed', 'as_completed', (['futures'], {}), '(futures)\n', (21127, 21136), False, 'from concurrent.futures._base import as_completed\n'), ((26032, 26053), 're.compile', 're.compile', (['"""[ \\\\t]+"""'], {}), "('[ \\\\t]+')\n", (26042, 26053), False, 'import re\n'), ((28168, 28243), 'pandas.concat', 'pd.concat', (['[transcription[1] for transcription in transcriptionsPerSpeaker]'], {}), '([transcription[1] for transcription in transcriptionsPerSpeaker])\n', (28177, 28243), True, 'import pandas as pd\n'), ((30423, 30449), 'audio_korpora_pipeline.metamodel.mediasession.MediaSessionActors', 'MediaSessionActors', (['actors'], {}), '(actors)\n', (30441, 30449), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((32684, 32704), 'os.listdir', 'os.listdir', (['fullpath'], {}), '(fullpath)\n', (32694, 32704), False, 'import os\n'), ((36287, 36330), 'audio_korpora_pipeline.metamodel.mediasession.MediaSessionActors', 'MediaSessionActors', (['self.mediaSessionActors'], {}), '(self.mediaSessionActors)\n', (36305, 36330), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((36345, 36412), 'audio_korpora_pipeline.metamodel.mediasession.MediaSession', 'MediaSession', (['self.ADAPTERNAME', 'actors', 'self.mediaAnnotationBundles'], {}), '(self.ADAPTERNAME, actors, self.mediaAnnotationBundles)\n', (36357, 36412), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((1529, 1576), 'audio_korpora_pipeline.metamodel.mediasession.MediaSessionActor', 'MediaSessionActor', (['"""UNKNOWN"""', 'Sex.UNKNOWN', 'None'], {}), 
"('UNKNOWN', Sex.UNKNOWN, None)\n", (1546, 1576), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((2515, 2570), 'os.path.join', 'os.path.join', (['outputpath', "(basename + '.stagingComplete')"], {}), "(outputpath, basename + '.stagingComplete')\n", (2527, 2570), False, 'import os\n'), ((4375, 4437), 'audio_korpora_pipeline.metamodel.mediasession.MediaAnnotationBundleWithoutTranscription', 'MediaAnnotationBundleWithoutTranscription', ([], {'identifier': 'filepath'}), '(identifier=filepath)\n', (4416, 4437), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((4839, 4894), 'concurrent.futures.ThreadPoolExecutor', 'concurrent.futures.ThreadPoolExecutor', ([], {'max_workers': 'None'}), '(max_workers=None)\n', (4876, 4894), False, 'import concurrent\n'), ((7111, 7166), 'concurrent.futures.ThreadPoolExecutor', 'concurrent.futures.ThreadPoolExecutor', ([], {'max_workers': 'None'}), '(max_workers=None)\n', (7148, 7166), False, 'import concurrent\n'), ((11011, 11037), 'os.path.isdir', 'os.path.isdir', (['korpus_path'], {}), '(korpus_path)\n', (11024, 11037), False, 'import os\n'), ((11223, 11245), 'os.path.isdir', 'os.path.isdir', (['workdir'], {}), '(workdir)\n', (11236, 11245), False, 'import os\n'), ((11551, 11573), 'os.path.isdir', 'os.path.isdir', (['workdir'], {}), '(workdir)\n', (11564, 11573), False, 'import os\n'), ((13547, 13569), 'os.path.isdir', 'os.path.isdir', (['workdir'], {}), '(workdir)\n', (13560, 13569), False, 'import os\n'), ((13871, 13893), 'os.path.isdir', 'os.path.isdir', (['workdir'], {}), '(workdir)\n', (13884, 13893), False, 'import os\n'), ((14222, 14248), 'os.path.isdir', 'os.path.isdir', (['korpus_path'], {}), '(korpus_path)\n', (14235, 14248), False, 'import os\n'), ((14684, 14710), 'os.path.isdir', 'os.path.isdir', (['korpus_path'], {}), '(korpus_path)\n', (14697, 14710), False, 'import os\n'), ((15504, 15526), 'os.path.isdir', 'os.path.isdir', (['workdir'], {}), '(workdir)\n', (15517, 15526), False, 'import os\n'), ((19277, 19303), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (19293, 19303), False, 'import os\n'), ((19380, 19426), 're.search', 're.search', (['regexForFindingWrongNames', 'filename'], {}), '(regexForFindingWrongNames, filename)\n', (19389, 19426), False, 'import re\n'), ((19778, 19828), 're.search', 're.search', (['regexForFindingWrongFullpaths', 'filename'], {}), '(regexForFindingWrongFullpaths, filename)\n', (19787, 19828), False, 'import re\n'), ((20854, 20909), 'concurrent.futures.ThreadPoolExecutor', 'concurrent.futures.ThreadPoolExecutor', ([], {'max_workers': 'None'}), '(max_workers=None)\n', (20891, 20909), False, 'import concurrent\n'), ((21834, 21851), 'xml.etree.ElementTree.parse', 'ET.parse', (['xmlFile'], {}), '(xmlFile)\n', (21842, 21851), True, 'import xml.etree.ElementTree as ET\n'), ((21903, 21951), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['Filename', 'transcript']"}), "(columns=['Filename', 'transcript'])\n", (21915, 21951), True, 'import pandas as pd\n'), ((21984, 22028), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'ch_datacolumns.columns'}), '(columns=ch_datacolumns.columns)\n', (21996, 22028), True, 'import pandas as pd\n'), ((29411, 
29471), 'audio_korpora_pipeline.metamodel.mediasession.MediaAnnotationBundle', 'MediaAnnotationBundle', (['fileAndTranscription.FullpathFilename'], {}), '(fileAndTranscription.FullpathFilename)\n', (29432, 29471), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((29630, 29766), 'audio_korpora_pipeline.metamodel.mediasession.WrittenResource', 'WrittenResource', (['fileAndTranscription.transcript', 'speakerId'], {'languageCode': '"""CH"""', 'annotationType': 'WrittenResource.DIETH_WITHOUT_GRAVIS'}), "(fileAndTranscription.transcript, speakerId, languageCode=\n 'CH', annotationType=WrittenResource.DIETH_WITHOUT_GRAVIS)\n", (29645, 29766), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((30335, 30382), 'audio_korpora_pipeline.metamodel.mediasession.MediaSessionActor', 'MediaSessionActor', (['speakerId', 'Sex.UNKNOWN', 'None'], {}), '(speakerId, Sex.UNKNOWN, None)\n', (30352, 30382), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((32288, 32314), 'os.path.isdir', 'os.path.isdir', (['korpus_path'], {}), '(korpus_path)\n', (32301, 32314), False, 'import os\n'), ((34426, 34492), 'audio_korpora_pipeline.metamodel.mediasession.WrittenResource', 'WrittenResource', (['row.sentence', 'row.client_id', 'self.LANGUAGECODE_DE'], {}), '(row.sentence, row.client_id, self.LANGUAGECODE_DE)\n', (34441, 34492), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((34540, 34564), 'audio_korpora_pipeline.metamodel.mediasession.MediaFile', 'MediaFile', (['row.client_id'], {}), '(row.client_id)\n', (34549, 34564), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((35946, 35979), 'os.path.basename', 'os.path.basename', (['base.identifier'], {}), '(base.identifier)\n', (35962, 35979), False, 'import os\n'), ((2614, 2641), 'os.utime', 'os.utime', (['stagingPath', 'None'], {}), '(stagingPath, None)\n', (2622, 2641), False, 'import os\n'), ((11322, 11335), 'pathlib.Path', 'Path', (['workdir'], {}), '(workdir)\n', (11326, 11335), False, 'from pathlib import Path\n'), ((11650, 11663), 'pathlib.Path', 'Path', (['workdir'], {}), '(workdir)\n', (11654, 11663), False, 'from pathlib import Path\n'), ((13646, 13659), 'pathlib.Path', 'Path', (['workdir'], {}), '(workdir)\n', (13650, 13659), False, 'from pathlib import Path\n'), ((13970, 13983), 'pathlib.Path', 'Path', (['workdir'], {}), '(workdir)\n', (13974, 13983), False, 'from pathlib import Path\n'), ((15603, 15616), 'pathlib.Path', 'Path', (['workdir'], {}), '(workdir)\n', (15607, 15616), False, 'from pathlib import Path\n'), ((29583, 29603), 'audio_korpora_pipeline.metamodel.mediasession.MediaFile', 'MediaFile', (['speakerId'], {}), '(speakerId)\n', (29592, 29603), False, 'from audio_korpora_pipeline.metamodel.mediasession 
import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((35059, 35084), 'audio_korpora_pipeline.metamodel.mediasession.Sex.toSexEnum', 'Sex.toSexEnum', (['row.gender'], {}), '(row.gender)\n', (35072, 35084), False, 'from audio_korpora_pipeline.metamodel.mediasession import MediaAnnotationBundle, MediaAnnotationBundleWithoutTranscription, WrittenResource, MediaFile, MediaSessionActor, Sex, MediaSessionActors, MediaSession\n'), ((20064, 20098), 'shutil.move', 'shutil.move', (['filename', 'newFilename'], {}), '(filename, newFilename)\n', (20075, 20098), False, 'import shutil\n'), ((20601, 20611), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (20605, 20611), False, 'from pathlib import Path\n'), ((35341, 35369), 'os.path.join', 'os.path.join', (['korpus_path', 'f'], {}), '(korpus_path, f)\n', (35353, 35369), False, 'import os\n'), ((35776, 35809), 'os.path.basename', 'os.path.basename', (['base.identifier'], {}), '(base.identifier)\n', (35792, 35809), False, 'import os\n'), ((3359, 3396), 'ffmpeg.input', 'ffmpeg.input', (['singleFilepathToProcess'], {}), '(singleFilepathToProcess)\n', (3371, 3396), False, 'import ffmpeg\n')] |
anaikawadi/svbrdf-estimation | development/multiImage_pytorch/experiment.py | c977aa8448b2131af3960895afd1105d29e5484a | import matplotlib.pyplot as plt
import math
import shutil
import torch
from accelerate import Accelerator
from tensorboardX import SummaryWriter
from cli import parse_args
from dataset import SvbrdfDataset
from losses import MixedLoss, MixedLoss2, MixedLoss3
from models import MultiViewModel, SingleViewModel
from pathlib import Path
from persistence import Checkpoint
from renderers import LocalRenderer, RednerRenderer
import utils
import environment as env
import numpy as np
import sys
from PIL import Image
class Identity(torch.nn.Module):
def __init__(self):
super(Identity, self).__init__()
def forward(self, x):
return x
args = parse_args()
clean_training = args.mode == 'train' and args.retrain
# Load the checkpoint
checkpoint_dir = Path(args.model_dir)
checkpoint = Checkpoint()
if not clean_training:
checkpoint = Checkpoint.load(checkpoint_dir)
# Immediatly restore the arguments if we have a valid checkpoint
if checkpoint.is_valid():
args = checkpoint.restore_args(args)
# Make the result reproducible
utils.enable_deterministic_random_engine()
# Determine the device
accelerator = Accelerator()
device = accelerator.device
# Create the model
model = MultiViewModel(use_coords=args.use_coords).to(device)
if checkpoint.is_valid():
model = checkpoint.restore_model_state(model)
elif args.mode == 'test':
print("No model found in the model directory but it is required for testing.")
exit(1)
# TODO: Choose a random number for the used input image count if we are training and we don't request it to be fix (see fixImageNb for reference)
data = SvbrdfDataset(data_directory=args.input_dir,
image_size=args.image_size, scale_mode=args.scale_mode, input_image_count=args.image_count, used_input_image_count=args.used_image_count,
use_augmentation=True, mix_materials=args.mode == 'train',
no_svbrdf=args.no_svbrdf_input, is_linear=args.linear_input)
epoch_start = 0
# model.generator.delete()
# model = torch.nn.Sequential(
# *list(model.children())[:-8],
# )
# print(*list(model.parameters()))
if args.mode == 'train':
validation_split = 0.01
print("Using {:.2f} % of the data for validation".format(
round(validation_split * 100.0, 2)))
training_data, validation_data = torch.utils.data.random_split(data, [int(math.ceil(
len(data) * (1.0 - validation_split))), int(math.floor(len(data) * validation_split))])
print("Training samples: {:d}.".format(len(training_data)))
print("Validation samples: {:d}.".format(len(validation_data)))
training_dataloader = torch.utils.data.DataLoader(
training_data, batch_size=8, pin_memory=True, shuffle=True)
validation_dataloader = torch.utils.data.DataLoader(
validation_data, batch_size=8, pin_memory=True, shuffle=False)
batch_count = int(math.ceil(len(training_data) /
training_dataloader.batch_size))
# Train as many epochs as specified
epoch_end = args.epochs
print("Training from epoch {:d} to {:d}".format(epoch_start, epoch_end))
# Set up the optimizer
# TODO: Use betas=(0.5, 0.999)
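    # Randomly initialise 5 light directions: sample positive components, normalise to unit
    # length, then remap x/y to [-1, 1] so z stays positive (lights above the surface).
    # Note that after the remap the vectors are no longer exactly unit length.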
L = torch.FloatTensor(5, 3).uniform_(0.2, 1.0)
L = L / torch.linalg.norm(L, ord=2, dim=-1, keepdim=True)
L[:, :2] = 2.0 * L[:, :2] - 1.0
V = torch.FloatTensor(1, 3).uniform_(0.2, 1.0)
V = V / torch.linalg.norm(V, ord=2, dim=-1, keepdim=True)
V[:, :2] = 2.0 * V[:, :2] - 1.0
scenes = env.generate_specific_scenes(5, L, L)
L.requires_grad = True
VIP = [L]
# V.requires_grad = True
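    # Only the light parameters in VIP are optimised; the pretrained network weights are not
    # handed to the optimizer and therefore stay fixed while the rendering loss drives L.
    # V is initialised above but left out of the optimisation (its requires_grad is commented out).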
optimizer = torch.optim.Adam(VIP, lr=0.1)
model, optimizer, training_dataloader, validation_dataloader = accelerator.prepare(
model, optimizer, training_dataloader, validation_dataloader)
# print("scene", scene.camera)
# TODO: Use scheduler if necessary
#scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min')
# Set up the loss
loss_renderer = LocalRenderer()
loss_function = MixedLoss2(loss_renderer, scenes)
# Setup statistics stuff
statistics_dir = checkpoint_dir / "logs"
if clean_training and statistics_dir.exists():
# Nuke the stats dir
shutil.rmtree(statistics_dir)
statistics_dir.mkdir(parents=True, exist_ok=True)
writer = SummaryWriter(str(statistics_dir.absolute()))
last_batch_inputs = None
# Clear checkpoint in order to free up some memory
checkpoint.purge()
lights = []
losses = []
for epoch in range(epoch_start, epoch_end):
for i, batch in enumerate(training_dataloader):
# Unique index of this batch
print("Ldet", (L.detach().numpy())[0])
lights.append(((L.detach().numpy())[0]).tolist())
scenes = env.generate_specific_scenes(5, L, L)
print("L", L)
# if(epoch_end - epoch < 3):
loss_function = MixedLoss2(loss_renderer, scenes)
# else:
# loss_function = MixedLoss2(loss_renderer, scene[0])
batch_index = epoch * batch_count + i
# Construct inputs
batch_inputs = batch["inputs"].to(device)
batch_svbrdf = batch["svbrdf"].to(device)
# Perform a step
optimizer.zero_grad()
outputs = model(batch_inputs)
print("batch_inputs", batch_inputs.size())
print("batch_svbrdfs", batch_svbrdf.size())
print("batch_outputs", outputs.size())
loss = loss_function(outputs, batch_svbrdf)
accelerator.backward(loss)
optimizer.step()
print("Epoch {:d}, Batch {:d}, loss: {:f}".format(
epoch, i + 1, loss.item()))
losses.append((epoch, loss.item()))
# Statistics
writer.add_scalar("loss", loss.item(), batch_index)
last_batch_inputs = batch_inputs
lights.append(((L.detach().numpy())[0]).tolist())
with open('/content/experiment1/losses/loss.txt', "w") as text_file:
text_file.write(str(losses))
print("lights1", lights)
# print(len(lights))
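    # Keep every 10th recorded light position to thin out the 3D scatter plot further below.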
lights2 = []
for j in range(len(lights)):
if j%10 == 0:
lights2.append(lights[j])
# print("lights2", lights)
# l=np.array(lights)
l = np.array(lights2)
renderer = LocalRenderer()
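    # Render the ground-truth and predicted SVBRDFs under the optimised light (and once more
    # after shifting it), and save the renders plus the individual SVBRDF maps as figures.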
rendered_scene = env.generate_specific_scenes(1, L.detach(), L.detach())
img = renderer.render(rendered_scene[0], batch_svbrdf[0])
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img[0].detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/render1.png')
img = renderer.render(rendered_scene[0], outputs[0])
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img[0].detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/render2.png')
# print("size", batch_inputs.size())
    L = torch.add(L, 5)  # torch.add is not in-place; reassign so the shifted light is actually used in the render below
print("L", L)
rendered_scene = env.generate_specific_scenes(1, L, L)
img = renderer.render(rendered_scene[0], batch_svbrdf[0])
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img[0].detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/render3.png')
print("size", batch_inputs[0][0].size())
img = batch_inputs[0][0]
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/render4.png')
print("size", batch_inputs[0][0].size())
normals, diffuse, roughness, specular = utils.unpack_svbrdf(outputs[0])
img = normals
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/output_normal.png')
img = diffuse
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/output_diffuse.png')
img = roughness
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/output_roughness.png')
img = specular
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/output_specular.png')
print("size", batch_inputs[0][0].size())
normals, diffuse, roughness, specular = utils.unpack_svbrdf(batch_svbrdf[0])
img = normals
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/target_normal.png')
img = diffuse
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/target_diffuse.png')
img = roughness
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/target_roughness.png')
img = specular
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/target_specular.png')
images = [Image.open(x) for x in ['/content/experiment1/figures/target_normal.png', '/content/experiment1/figures/target_diffuse.png', '/content/experiment1/figures/target_roughness.png', '/content/experiment1/figures/target_specular.png']]
widths, heights = zip(*(i.size for i in images))
total_width = sum(widths)
max_height = max(heights)
new_im = Image.new('RGB', (total_width, max_height))
x_offset = 0
for im in images:
new_im.paste(im, (x_offset,0))
x_offset += im.size[0]
new_im.save('/content/experiment1/figures/target_svbrdf.png')
images = [Image.open(x) for x in ['/content/experiment1/figures/output_normal.png', '/content/experiment1/figures/output_diffuse.png', '/content/experiment1/figures/output_roughness.png', '/content/experiment1/figures/output_specular.png']]
widths, heights = zip(*(i.size for i in images))
total_width = sum(widths)
max_height = max(heights)
new_im = Image.new('RGB', (total_width, max_height))
x_offset = 0
for im in images:
new_im.paste(im, (x_offset,0))
x_offset += im.size[0]
new_im.save('/content/experiment1/figures/output_svbrdf.png')
print("size", batch_inputs[0][0].size())
normals, diffuse, roughness, specular = utils.unpack_svbrdf(outputs[0])
img = normals
fig = plt.figure(frameon=False)
# fig.set_size_inches(w,h)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# print("shape", img.size())
ax.imshow(img.detach().permute(1,2,0), aspect='auto')
fig.savefig('/content/experiment1/figures/output_normal.png')
print("lights3", l)
fig = plt.figure()
ax = fig.add_subplot(projection='3d')
ax.scatter([0.0], [0.0], [0.0], marker='o', c='r')
# v = V.detach().numpy()
ax.scatter(l[:,0], l[:,1], l[:,2], marker='.', c='g')
# ax.scatter(v[:,0], v[:,1], v[:,2], marker='^', c='b')
ax.set_xlim(-8, 8)
ax.set_ylim(-8, 8)
ax.set_zlim(-8., 8.)
ax.set_xlabel('X')
ax.set_ylabel('Y')
ax.set_zlabel('Z')
# plt.show()
plt.savefig('/content/experiment1/figures/light.png')
plt.show()
# if epoch % args.save_frequency == 0:
# Checkpoint.save(checkpoint_dir, args, model, optimizer, epoch)
# if epoch % args.validation_frequency == 0 and len(validation_data) > 0:
# model.eval()
# val_loss = 0.0
# batch_count_val = 0
# for batch in validation_dataloader:
# # Construct inputs
# batch_inputs = batch["inputs"].to(device)
# batch_svbrdf = batch["svbrdf"].to(device)
# outputs = model(batch_inputs)
# val_loss += loss_function(outputs, batch_svbrdf).item()
# batch_count_val += 1
# val_loss /= batch_count_val
# print("Epoch {:d}, validation loss: {:f}".format(epoch, val_loss))
# writer.add_scalar("val_loss", val_loss, epoch * batch_count)
# model.train()
| [((675, 687), 'cli.parse_args', 'parse_args', ([], {}), '()\n', (685, 687), False, 'from cli import parse_args\n'), ((784, 804), 'pathlib.Path', 'Path', (['args.model_dir'], {}), '(args.model_dir)\n', (788, 804), False, 'from pathlib import Path\n'), ((818, 830), 'persistence.Checkpoint', 'Checkpoint', ([], {}), '()\n', (828, 830), False, 'from persistence import Checkpoint\n'), ((1068, 1110), 'utils.enable_deterministic_random_engine', 'utils.enable_deterministic_random_engine', ([], {}), '()\n', (1108, 1110), False, 'import utils\n'), ((1149, 1162), 'accelerate.Accelerator', 'Accelerator', ([], {}), '()\n', (1160, 1162), False, 'from accelerate import Accelerator\n'), ((1625, 1943), 'dataset.SvbrdfDataset', 'SvbrdfDataset', ([], {'data_directory': 'args.input_dir', 'image_size': 'args.image_size', 'scale_mode': 'args.scale_mode', 'input_image_count': 'args.image_count', 'used_input_image_count': 'args.used_image_count', 'use_augmentation': '(True)', 'mix_materials': "(args.mode == 'train')", 'no_svbrdf': 'args.no_svbrdf_input', 'is_linear': 'args.linear_input'}), "(data_directory=args.input_dir, image_size=args.image_size,\n scale_mode=args.scale_mode, input_image_count=args.image_count,\n used_input_image_count=args.used_image_count, use_augmentation=True,\n mix_materials=args.mode == 'train', no_svbrdf=args.no_svbrdf_input,\n is_linear=args.linear_input)\n", (1638, 1943), False, 'from dataset import SvbrdfDataset\n'), ((871, 902), 'persistence.Checkpoint.load', 'Checkpoint.load', (['checkpoint_dir'], {}), '(checkpoint_dir)\n', (886, 902), False, 'from persistence import Checkpoint\n'), ((2651, 2742), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['training_data'], {'batch_size': '(8)', 'pin_memory': '(True)', 'shuffle': '(True)'}), '(training_data, batch_size=8, pin_memory=True,\n shuffle=True)\n', (2678, 2742), False, 'import torch\n'), ((2776, 2870), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['validation_data'], {'batch_size': '(8)', 'pin_memory': '(True)', 'shuffle': '(False)'}), '(validation_data, batch_size=8, pin_memory=True,\n shuffle=False)\n', (2803, 2870), False, 'import torch\n'), ((3541, 3578), 'environment.generate_specific_scenes', 'env.generate_specific_scenes', (['(5)', 'L', 'L'], {}), '(5, L, L)\n', (3569, 3578), True, 'import environment as env\n'), ((3670, 3699), 'torch.optim.Adam', 'torch.optim.Adam', (['VIP'], {'lr': '(0.1)'}), '(VIP, lr=0.1)\n', (3686, 3699), False, 'import torch\n'), ((4061, 4076), 'renderers.LocalRenderer', 'LocalRenderer', ([], {}), '()\n', (4074, 4076), False, 'from renderers import LocalRenderer, RednerRenderer\n'), ((4098, 4131), 'losses.MixedLoss2', 'MixedLoss2', (['loss_renderer', 'scenes'], {}), '(loss_renderer, scenes)\n', (4108, 4131), False, 'from losses import MixedLoss, MixedLoss2, MixedLoss3\n'), ((6369, 6386), 'numpy.array', 'np.array', (['lights2'], {}), '(lights2)\n', (6377, 6386), True, 'import numpy as np\n'), ((6402, 6417), 'renderers.LocalRenderer', 'LocalRenderer', ([], {}), '()\n', (6415, 6417), False, 'from renderers import LocalRenderer, RednerRenderer\n'), ((6567, 6592), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (6577, 6592), True, 'import matplotlib.pyplot as plt\n'), ((6633, 6668), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (6641, 6668), True, 'import matplotlib.pyplot as plt\n'), ((6931, 6956), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': 
'(False)'}), '(frameon=False)\n', (6941, 6956), True, 'import matplotlib.pyplot as plt\n'), ((6997, 7032), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (7005, 7032), True, 'import matplotlib.pyplot as plt\n'), ((7277, 7292), 'torch.add', 'torch.add', (['L', '(5)'], {}), '(L, 5)\n', (7286, 7292), False, 'import torch\n'), ((7332, 7369), 'environment.generate_specific_scenes', 'env.generate_specific_scenes', (['(1)', 'L', 'L'], {}), '(1, L, L)\n', (7360, 7369), True, 'import environment as env\n'), ((7442, 7467), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (7452, 7467), True, 'import matplotlib.pyplot as plt\n'), ((7508, 7543), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (7516, 7543), True, 'import matplotlib.pyplot as plt\n'), ((7822, 7847), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (7832, 7847), True, 'import matplotlib.pyplot as plt\n'), ((7888, 7923), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (7896, 7923), True, 'import matplotlib.pyplot as plt\n'), ((8204, 8235), 'utils.unpack_svbrdf', 'utils.unpack_svbrdf', (['outputs[0]'], {}), '(outputs[0])\n', (8223, 8235), False, 'import utils\n'), ((8264, 8289), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (8274, 8289), True, 'import matplotlib.pyplot as plt\n'), ((8330, 8365), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (8338, 8365), True, 'import matplotlib.pyplot as plt\n'), ((8595, 8620), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (8605, 8620), True, 'import matplotlib.pyplot as plt\n'), ((8661, 8696), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (8669, 8696), True, 'import matplotlib.pyplot as plt\n'), ((8929, 8954), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (8939, 8954), True, 'import matplotlib.pyplot as plt\n'), ((8995, 9030), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (9003, 9030), True, 'import matplotlib.pyplot as plt\n'), ((9264, 9289), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (9274, 9289), True, 'import matplotlib.pyplot as plt\n'), ((9330, 9365), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (9338, 9365), True, 'import matplotlib.pyplot as plt\n'), ((9659, 9695), 'utils.unpack_svbrdf', 'utils.unpack_svbrdf', (['batch_svbrdf[0]'], {}), '(batch_svbrdf[0])\n', (9678, 9695), False, 'import utils\n'), ((9724, 9749), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (9734, 9749), True, 'import matplotlib.pyplot as plt\n'), ((9790, 9825), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (9798, 9825), True, 'import matplotlib.pyplot as plt\n'), ((10055, 10080), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (10065, 10080), True, 'import matplotlib.pyplot as plt\n'), ((10121, 10156), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 
1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (10129, 10156), True, 'import matplotlib.pyplot as plt\n'), ((10389, 10414), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (10399, 10414), True, 'import matplotlib.pyplot as plt\n'), ((10455, 10490), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (10463, 10490), True, 'import matplotlib.pyplot as plt\n'), ((10724, 10749), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (10734, 10749), True, 'import matplotlib.pyplot as plt\n'), ((10790, 10825), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (10798, 10825), True, 'import matplotlib.pyplot as plt\n'), ((11398, 11441), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(total_width, max_height)'], {}), "('RGB', (total_width, max_height))\n", (11407, 11441), False, 'from PIL import Image\n'), ((11994, 12037), 'PIL.Image.new', 'Image.new', (['"""RGB"""', '(total_width, max_height)'], {}), "('RGB', (total_width, max_height))\n", (12003, 12037), False, 'from PIL import Image\n'), ((12306, 12337), 'utils.unpack_svbrdf', 'utils.unpack_svbrdf', (['outputs[0]'], {}), '(outputs[0])\n', (12325, 12337), False, 'import utils\n'), ((12366, 12391), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'frameon': '(False)'}), '(frameon=False)\n', (12376, 12391), True, 'import matplotlib.pyplot as plt\n'), ((12432, 12467), 'matplotlib.pyplot.Axes', 'plt.Axes', (['fig', '[0.0, 0.0, 1.0, 1.0]'], {}), '(fig, [0.0, 0.0, 1.0, 1.0])\n', (12440, 12467), True, 'import matplotlib.pyplot as plt\n'), ((12699, 12711), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (12709, 12711), True, 'import matplotlib.pyplot as plt\n'), ((13120, 13173), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""/content/experiment1/figures/light.png"""'], {}), "('/content/experiment1/figures/light.png')\n", (13131, 13173), True, 'import matplotlib.pyplot as plt\n'), ((13178, 13188), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (13186, 13188), True, 'import matplotlib.pyplot as plt\n'), ((1219, 1261), 'models.MultiViewModel', 'MultiViewModel', ([], {'use_coords': 'args.use_coords'}), '(use_coords=args.use_coords)\n', (1233, 1261), False, 'from models import MultiViewModel, SingleViewModel\n'), ((3272, 3321), 'torch.linalg.norm', 'torch.linalg.norm', (['L'], {'ord': '(2)', 'dim': '(-1)', 'keepdim': '(True)'}), '(L, ord=2, dim=-1, keepdim=True)\n', (3289, 3321), False, 'import torch\n'), ((3432, 3481), 'torch.linalg.norm', 'torch.linalg.norm', (['V'], {'ord': '(2)', 'dim': '(-1)', 'keepdim': '(True)'}), '(V, ord=2, dim=-1, keepdim=True)\n', (3449, 3481), False, 'import torch\n'), ((4295, 4324), 'shutil.rmtree', 'shutil.rmtree', (['statistics_dir'], {}), '(statistics_dir)\n', (4308, 4324), False, 'import shutil\n'), ((11039, 11052), 'PIL.Image.open', 'Image.open', (['x'], {}), '(x)\n', (11049, 11052), False, 'from PIL import Image\n'), ((11635, 11648), 'PIL.Image.open', 'Image.open', (['x'], {}), '(x)\n', (11645, 11648), False, 'from PIL import Image\n'), ((3212, 3235), 'torch.FloatTensor', 'torch.FloatTensor', (['(5)', '(3)'], {}), '(5, 3)\n', (3229, 3235), False, 'import torch\n'), ((3372, 3395), 'torch.FloatTensor', 'torch.FloatTensor', (['(1)', '(3)'], {}), '(1, 3)\n', (3389, 3395), False, 'import torch\n'), ((4858, 4895), 'environment.generate_specific_scenes', 'env.generate_specific_scenes', (['(5)', 'L', 'L'], {}), 
'(5, L, L)\n', (4886, 4895), True, 'import environment as env\n'), ((4991, 5024), 'losses.MixedLoss2', 'MixedLoss2', (['loss_renderer', 'scenes'], {}), '(loss_renderer, scenes)\n', (5001, 5024), False, 'from losses import MixedLoss, MixedLoss2, MixedLoss3\n')] |
amazon-research/long-short-term-transformer | src/rekognition_online_action_detection/engines/__init__.py | a425be4b52ab68fddd85c91d26571e4cdfe8379a | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
from .engines import do_train, do_inference
from .lstr.lstr_trainer import *
from .lstr.lstr_inference import *
| [] |
nuagenetworks/nuage-tempest-plugin | nuage_tempest_plugin/tests/api/test_nuage_ports.py | ac1bfb0709c7bbaf04017af3050fb3ed1ad1324a | # Copyright 2017 NOKIA
# All Rights Reserved.
from netaddr import IPNetwork
import testtools
from tempest.common import waiters
from tempest.lib import exceptions
from tempest.scenario import manager
from tempest.test import decorators
from nuage_tempest_plugin.lib.test.nuage_test import NuageAdminNetworksTest
from nuage_tempest_plugin.lib.test.nuage_test import NuageBaseTest
from nuage_tempest_plugin.lib.topology import Topology
from nuage_tempest_plugin.lib.utils import constants
from nuage_tempest_plugin.services.nuage_client import NuageRestClient
CONF = Topology.get_conf()
LOG = Topology.get_logger(__name__)
class PortsTest(NuageBaseTest, NuageAdminNetworksTest,
manager.NetworkScenarioTest):
@classmethod
def setup_clients(cls):
super(PortsTest, cls).setup_clients()
cls.vsd_client = NuageRestClient()
def show_port(self, port_id):
"""Wrapper utility that shows a given port."""
body = self.ports_client.show_port(port_id)
return body['port']
def _create_server(self, name, network, port_id=None):
keypair = self.create_keypair()
network = {'uuid': network['id']}
if port_id is not None:
network['port'] = port_id
return self.create_server(
name=name,
networks=[network],
key_name=keypair['name'],
wait_until='ACTIVE')
def _delete_server(self, server_id, clients=None):
if clients is None:
clients = self.os_primary
clients.servers_client.delete_server(server_id)
waiters.wait_for_server_termination(clients.servers_client, server_id)
@decorators.attr(type='smoke')
def test_nuage_dhcp_port_create_check_status(self):
network = self.create_network()
self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=24)
filters = {
'device_owner': 'network:dhcp:nuage',
'network_id': network['id']
}
dhcp_port = self.ports_client.list_ports(**filters)['ports'][0]
self.assertEqual('ACTIVE', dhcp_port['status'])
@decorators.attr(type='smoke')
def test_nuage_dhcp_port_with_router_detach_check_status(self):
network = self.create_network()
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=24)
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"],
cleanup=False)
self.routers_client.remove_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
filters = {
'device_owner': 'network:dhcp:nuage',
'network_id': network['id']
}
dhcp_port = self.ports_client.list_ports(**filters)['ports'][0]
self.assertEqual('ACTIVE', dhcp_port['status'])
@decorators.attr(type='smoke')
def test_nuage_port_create_show_check_status(self):
network = self.create_network()
self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=24)
port = self.create_port(network)
self.assertEqual('DOWN', port['status'])
port = self.show_port(port['id'])
# state has to remain DOWN as long as port is not bound
self.assertEqual('DOWN', port['status'])
@decorators.attr(type='smoke')
def test_nuage_port_create_server_create_delete_check_status(self):
network = self.create_network()
self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=24)
port = self.create_port(network)
server = self._create_server('s1', network, port['id'])
port = self.show_port(port['id'])
self.assertEqual('ACTIVE', port['status'])
self._delete_server(server['id'])
port = self.show_port(port['id'])
self.assertEqual('DOWN', port['status'])
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_negative(self):
# Set up resources
# Base resources
if self.is_dhcp_agent_present():
raise self.skipException(
'Cannot run this test case when DHCP agent is enabled')
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
subnet2 = self.create_subnet(network, cidr=IPNetwork("20.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet2, "Unable to create second subnet")
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "20.0.0.4",
"subnet_id": subnet2["id"]
}
]
# Fail
msg = "Port can't have multiple IPv4 IPs of different subnets"
self.assertRaisesRegex(exceptions.BadRequest,
msg,
self.create_port,
network=network, fixed_ips=fixed_ips)
@testtools.skipIf(Topology.before_nuage('5.4'), 'Unsupported pre-5.4')
def test_nuage_os_managed_subnet_port_create_with_nuage_policy_negative(
self):
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
msg = ("Cannot use VSP policy groups on OS managed subnets,"
" use neutron security groups instead.")
self.assertRaisesRegex(exceptions.BadRequest,
msg,
self.create_port,
network=network,
nuage_policy_groups=['Random_value'])
@testtools.skipIf(Topology.before_nuage('5.4'), 'Unsupported pre-5.4')
def test_nuage_os_managed_subnet_port_update_with_nuage_policy_negative(
self):
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
port = self.create_port(network=network)
self.assertIsNotNone(port, "Unable to create port")
msg = ("Cannot use VSP policy groups on OS managed subnets,"
" use neutron security groups instead.")
self.assertRaisesRegex(exceptions.BadRequest,
msg,
self.update_port,
port=port,
nuage_policy_groups=['Random_value'])
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_negative(self):
if self.is_dhcp_agent_present():
raise self.skipException(
'Multiple subnets in a network not supported when DHCP agent '
'is enabled.')
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
subnet2 = self.create_subnet(network, cidr=IPNetwork("20.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet2, "Unable to create second subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
self.create_router_interface(router_id=router["id"],
subnet_id=subnet2["id"])
# Create port
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
port = self.update_port(port=port, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to update port")
self.assertEqual(port["fixed_ips"][0]["ip_address"], "10.0.0.5",
message="The port did not update properly.")
# Update to subnet2 should fail
fixed_ips = [
{
"ip_address": "20.0.0.4",
"subnet_id": subnet2["id"]
}
]
try:
self.update_port(port=port, fixed_ips=fixed_ips)
self.fail("Exception expected when updating to"
" a different subnet!")
except exceptions.BadRequest as e:
if "Updating fixed ip of port" in e._error_string:
pass
else:
# Differentiate between VSD failure and update failure
LOG.debug(e._error_string)
self.fail("A different NuageBadRequest exception"
" was expected for this operation.")
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_same_subnet_l2(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.L2_DOMAIN,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_same_subnet_l2(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.L2_DOMAIN,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
port = self.update_port(port=port, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_same_subnet_l3(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4']
vip_mismatch = False
mac_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
if nuage_vport_vip['MAC'] != port['mac_address']:
mac_mismatch = True
self.assertEqual(vip_mismatch, False)
self.assertEqual(mac_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_same_subnet_l3_no_security(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips,
port_security_enabled=False)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4']
vip_mismatch = False
mac_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
if nuage_vport_vip['MAC'] != port['mac_address']:
mac_mismatch = True
self.assertEqual(vip_mismatch, False)
self.assertEqual(mac_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_same_subnet_l3_no_security(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.5',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network,
fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
port = self.update_port(port=port, fixed_ips=fixed_ips,
allowed_address_pairs=[],
security_groups=[],
port_security_enabled=False)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4']
vip_mismatch = False
mac_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
if nuage_vport_vip['MAC'] != port['mac_address']:
mac_mismatch = True
self.assertEqual(vip_mismatch, False)
self.assertEqual(mac_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_same_subnet_l3(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
port = self.update_port(port=port, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4']
vip_mismatch = False
mac_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
if nuage_vport_vip['MAC'] != port['mac_address']:
mac_mismatch = True
self.assertEqual(vip_mismatch, False)
self.assertEqual(mac_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_same_subnet_l2_with_aap(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.50',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.L2_DOMAIN,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_same_subnet_l2_with_aap(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.L2_DOMAIN,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.50',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.update_port(port=port,
fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_same_subnet_l3_with_aap(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4', allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_same_subnet_l3_with_aap_outside_cidr(
self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '1.1.1.5',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4']
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_same_subnet_l3_with_aap(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.update_port(port=port, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4', allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_same_subnet_l3_with_aap_with_vm(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.10',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
self._create_server(name='vm-' + network['name'],
network=network, port_id=port['id'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.6",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.7',
'mac_address': 'fe:a0:36:4b:c8:70'},
{'ip_address': '10.0.0.10',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.update_port(port=port, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address'],
allowed_address_pairs[1]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
@decorators.attr(type='smoke')
    def test_nuage_port_update_aap_to_fixed_ips_l3_with_vm(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
self._create_server(name='vm-' + network['name'],
network=network, port_id=port['id'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.6",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.7',
'mac_address': 'fe:a0:36:4b:c8:70'},
{'ip_address': '10.0.0.10',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.update_port(port=port, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address'],
allowed_address_pairs[1]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ip_with_vm_and_conflict_with_aap_neg(
self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.10',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
self._create_server(name='vm-' + network['name'],
network=network, port_id=port['id'])
fixed_ips = [
{
"ip_address": "10.0.0.8",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
        # Build fixed IPs that conflict with the allowed address pair
        # (10.0.0.6) of the second port created above
fixed_ips = [
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.6",
"subnet_id": subnet["id"]
}
]
# below update will fail with proper roll back
try:
self.update_port(port=port, fixed_ips=fixed_ips)
self.fail("Exception expected when updating to"
" a different subnet!")
except exceptions.BadRequest as e:
if ('Bad request: The IP Address 10.0.0.6 is'
' currently in use by subnet' in e._error_string):
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
pass
else:
# Differentiate between VSD failure and update failure
LOG.debug(e._error_string)
self.fail("A different NuageBadRequest exception"
" was expected for this operation.")
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ip_same_as_aap(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.6",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
mac_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
if nuage_vport_vip['MAC'] != port['mac_address']:
mac_mismatch = True
self.assertEqual(vip_mismatch, False)
self.assertEqual(mac_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_update_fixed_ips_same_as_aap(self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
fixed_ips = [
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.6",
"subnet_id": subnet["id"]
}
]
port = self.update_port(port=port, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = [fixed_ips[0]["ip_address"],
allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
mac_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
if nuage_vport_vip['MAC'] != port['mac_address']:
mac_mismatch = True
self.assertEqual(vip_mismatch, False)
self.assertEqual(mac_mismatch, False)
@decorators.attr(type='smoke')
def test_nuage_port_create_fixed_ips_same_subnet_with_aap_router_attach(
self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.create_port(network=network, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.L2_DOMAIN,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED,
nuage_vport[0]['addressSpoofing'])
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"])
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
valid_vips = ['10.0.0.4', allowed_address_pairs[0]['ip_address']]
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
@decorators.attr(type='smoke')
@testtools.skipIf(Topology.before_nuage('5.4'), 'Unsupported pre-5.4')
def test_nuage_port_update_fixed_ips_same_subnet_with_aap_router_detach(
self):
# Set up resources
# Base resources
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
router = self.create_router(
admin_state_up=True,
external_network_id=CONF.network.public_network_id)
self.assertIsNotNone(router, "Unable to create router")
# Attach subnet
self.create_router_interface(router_id=router["id"],
subnet_id=subnet["id"], cleanup=False)
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
}
]
port = self.create_port(network=network, fixed_ips=fixed_ips)
self.assertIsNotNone(port, "Unable to create port on network")
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.SUBNETWORK,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
# update within subnet should succeed
fixed_ips = [
{
"ip_address": "10.0.0.4",
"subnet_id": subnet["id"]
},
{
"ip_address": "10.0.0.5",
"subnet_id": subnet["id"]
}
]
allowed_address_pairs = [{'ip_address': '10.0.0.6',
'mac_address': 'fe:a0:36:4b:c8:70'}]
port = self.update_port(port=port, fixed_ips=fixed_ips,
allowed_address_pairs=allowed_address_pairs)
self.assertIsNotNone(port, "Unable to update port")
nuage_vport = self.vsd_client.get_vport(
constants.SUBNETWORK,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
valid_vips = ['10.0.0.4', allowed_address_pairs[0]['ip_address']]
nuage_vport_vips = self.vsd_client.get_virtual_ip(
constants.VPORT,
nuage_vport[0]['ID'])
vip_mismatch = False
if valid_vips and not nuage_vport_vips:
vip_mismatch = True
for nuage_vport_vip in nuage_vport_vips:
if nuage_vport_vip['virtualIP'] not in valid_vips:
vip_mismatch = True
self.assertEqual(vip_mismatch, False)
self.admin_routers_client.remove_router_interface(
router['id'],
subnet_id=subnet['id'])
vsd_vport_parent = self.vsd_client.get_global_resource(
constants.L2_DOMAIN,
filters='externalID',
filter_value=subnet['id'])[0]
nuage_vport = self.vsd_client.get_vport(
constants.L2_DOMAIN,
vsd_vport_parent['ID'],
filters='externalID',
filter_value=port['id'])
self.assertEqual(constants.ENABLED if Topology.from_nuage('5.4')
else constants.INHERITED,
nuage_vport[0]['addressSpoofing'])
@decorators.attr(type='smoke')
@testtools.skipIf(Topology.before_nuage('5.4'), 'Unsupported pre-5.4')
def test_delete_unbound_port_with_hanging_vminterface(self):
# OPENSTACK-2797
network = self.create_network()
self.assertIsNotNone(network, "Unable to create network")
subnet = self.create_subnet(network, cidr=IPNetwork("10.0.0.0/24"),
mask_bits=28)
self.assertIsNotNone(subnet, "Unable to create subnet")
port = self.create_port(network=network, cleanup=False)
self.addCleanup(self._try_delete,
self.manager.ports_client.delete_port,
port['id'])
# Find vport
l2domain = self.vsd.get_l2domain(by_subnet_id=subnet['id'])
vport = self.vsd.get_vport(l2domain=l2domain, by_port_id=port['id'])
# Create "Fake" VM interface to simulate following behavior:
# -> Port is being bound -> VM created -> port deleted ->
# Port not bound but leftover VM on VSD
vminterface = self.vsd.vspk.NUVMInterface(
name='test-fip-vm', vport_id=vport.id,
external_id=self.vsd.external_id(port['id']),
mac='E6:04:AA:7A:AA:86', ip_address='10.0.0.10')
vm = self.vsd.vspk.NUVM(name='test-port-delete-vm',
uuid='1339f7f4-f7a0-445f-b257-8dbfaf0d6fc8',
external_id=self.vsd.external_id(
'1339f7f4-f7a0-445f-b257-8dbfaf0d6fc8'),
interfaces=[vminterface])
# Impersonate tenant user for appropriate permissions on VM
self.vsd.session().impersonate(port['tenant_id'],
self.default_netpartition_name)
self.vsd.session().user.create_child(vm)
self.vsd.session().stop_impersonate()
# Delete port, VM should be deleted in this request
self.delete_port(port)
# Verify that vport is deleted
vport = self.vsd.get_vport(l2domain=l2domain, by_port_id=port['id'])
self.assertIsNone(vport, 'Vport not deleted by Port delete statement')
| [((569, 588), 'nuage_tempest_plugin.lib.topology.Topology.get_conf', 'Topology.get_conf', ([], {}), '()\n', (586, 588), False, 'from nuage_tempest_plugin.lib.topology import Topology\n'), ((595, 624), 'nuage_tempest_plugin.lib.topology.Topology.get_logger', 'Topology.get_logger', (['__name__'], {}), '(__name__)\n', (614, 624), False, 'from nuage_tempest_plugin.lib.topology import Topology\n'), ((1668, 1697), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (1683, 1697), False, 'from tempest.test import decorators\n'), ((2156, 2185), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (2171, 2185), False, 'from tempest.test import decorators\n'), ((3135, 3164), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (3150, 3164), False, 'from tempest.test import decorators\n'), ((3620, 3649), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (3635, 3649), False, 'from tempest.test import decorators\n'), ((4207, 4236), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (4222, 4236), False, 'from tempest.test import decorators\n'), ((7364, 7393), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (7379, 7393), False, 'from tempest.test import decorators\n'), ((10182, 10211), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (10197, 10211), False, 'from tempest.test import decorators\n'), ((11499, 11528), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (11514, 11528), False, 'from tempest.test import decorators\n'), ((13424, 13453), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (13439, 13453), False, 'from tempest.test import decorators\n'), ((15717, 15746), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (15732, 15746), False, 'from tempest.test import decorators\n'), ((18081, 18110), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (18096, 18110), False, 'from tempest.test import decorators\n'), ((21405, 21434), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (21420, 21434), False, 'from tempest.test import decorators\n'), ((24309, 24338), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (24324, 24338), False, 'from tempest.test import decorators\n'), ((25841, 25870), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (25856, 25870), False, 'from tempest.test import decorators\n'), ((28014, 28043), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (28029, 28043), False, 'from tempest.test import decorators\n'), ((30395, 30424), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (30410, 30424), False, 'from tempest.test import decorators\n'), ((32758, 32787), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (32773, 32787), False, 'from tempest.test import decorators\n'), ((35750, 35779), 'tempest.test.decorators.attr', 
'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (35765, 35779), False, 'from tempest.test import decorators\n'), ((39968, 39997), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (39983, 39997), False, 'from tempest.test import decorators\n'), ((44171, 44200), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (44186, 44200), False, 'from tempest.test import decorators\n'), ((49204, 49233), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (49219, 49233), False, 'from tempest.test import decorators\n'), ((51784, 51813), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (51799, 51813), False, 'from tempest.test import decorators\n'), ((55634, 55663), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (55649, 55663), False, 'from tempest.test import decorators\n'), ((58508, 58537), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (58523, 58537), False, 'from tempest.test import decorators\n'), ((62282, 62311), 'tempest.test.decorators.attr', 'decorators.attr', ([], {'type': '"""smoke"""'}), "(type='smoke')\n", (62297, 62311), False, 'from tempest.test import decorators\n'), ((844, 861), 'nuage_tempest_plugin.services.nuage_client.NuageRestClient', 'NuageRestClient', ([], {}), '()\n', (859, 861), False, 'from nuage_tempest_plugin.services.nuage_client import NuageRestClient\n'), ((1591, 1661), 'tempest.common.waiters.wait_for_server_termination', 'waiters.wait_for_server_termination', (['clients.servers_client', 'server_id'], {}), '(clients.servers_client, server_id)\n', (1626, 1661), False, 'from tempest.common import waiters\n'), ((5570, 5598), 'nuage_tempest_plugin.lib.topology.Topology.before_nuage', 'Topology.before_nuage', (['"""5.4"""'], {}), "('5.4')\n", (5591, 5598), False, 'from nuage_tempest_plugin.lib.topology import Topology\n'), ((6423, 6451), 'nuage_tempest_plugin.lib.topology.Topology.before_nuage', 'Topology.before_nuage', (['"""5.4"""'], {}), "('5.4')\n", (6444, 6451), False, 'from nuage_tempest_plugin.lib.topology import Topology\n'), ((58560, 58588), 'nuage_tempest_plugin.lib.topology.Topology.before_nuage', 'Topology.before_nuage', (['"""5.4"""'], {}), "('5.4')\n", (58581, 58588), False, 'from nuage_tempest_plugin.lib.topology import Topology\n'), ((62334, 62362), 'nuage_tempest_plugin.lib.topology.Topology.before_nuage', 'Topology.before_nuage', (['"""5.4"""'], {}), "('5.4')\n", (62355, 62362), False, 'from nuage_tempest_plugin.lib.topology import Topology\n'), ((1835, 1859), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (1844, 1859), False, 'from netaddr import IPNetwork\n'), ((2344, 2368), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (2353, 2368), False, 'from netaddr import IPNetwork\n'), ((3302, 3326), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (3311, 3326), False, 'from netaddr import IPNetwork\n'), ((3803, 3827), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (3812, 3827), False, 'from netaddr import IPNetwork\n'), ((4654, 4678), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (4663, 4678), False, 'from netaddr import IPNetwork\n'), ((4845, 4869), 
'netaddr.IPNetwork', 'IPNetwork', (['"""20.0.0.0/24"""'], {}), "('20.0.0.0/24')\n", (4854, 4869), False, 'from netaddr import IPNetwork\n'), ((5877, 5901), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (5886, 5901), False, 'from netaddr import IPNetwork\n'), ((6730, 6754), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (6739, 6754), False, 'from netaddr import IPNetwork\n'), ((7849, 7873), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (7858, 7873), False, 'from netaddr import IPNetwork\n'), ((8040, 8064), 'netaddr.IPNetwork', 'IPNetwork', (['"""20.0.0.0/24"""'], {}), "('20.0.0.0/24')\n", (8049, 8064), False, 'from netaddr import IPNetwork\n'), ((10484, 10508), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (10493, 10508), False, 'from netaddr import IPNetwork\n'), ((11801, 11825), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (11810, 11825), False, 'from netaddr import IPNetwork\n'), ((13726, 13750), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (13735, 13750), False, 'from netaddr import IPNetwork\n'), ((16031, 16055), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (16040, 16055), False, 'from netaddr import IPNetwork\n'), ((18395, 18419), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (18404, 18419), False, 'from netaddr import IPNetwork\n'), ((21707, 21731), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (21716, 21731), False, 'from netaddr import IPNetwork\n'), ((24620, 24644), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (24629, 24644), False, 'from netaddr import IPNetwork\n'), ((26152, 26176), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (26161, 26176), False, 'from netaddr import IPNetwork\n'), ((28325, 28349), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (28334, 28349), False, 'from netaddr import IPNetwork\n'), ((30732, 30756), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (30741, 30756), False, 'from netaddr import IPNetwork\n'), ((33069, 33093), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (33078, 33093), False, 'from netaddr import IPNetwork\n'), ((36069, 36093), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (36078, 36093), False, 'from netaddr import IPNetwork\n'), ((40273, 40297), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (40282, 40297), False, 'from netaddr import IPNetwork\n'), ((44504, 44528), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (44513, 44528), False, 'from netaddr import IPNetwork\n'), ((49502, 49526), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (49511, 49526), False, 'from netaddr import IPNetwork\n'), ((52083, 52107), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (52092, 52107), False, 'from netaddr import IPNetwork\n'), ((55969, 55993), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (55978, 55993), False, 'from netaddr import IPNetwork\n'), ((58918, 58942), 'netaddr.IPNetwork', 'IPNetwork', 
(['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (58927, 58942), False, 'from netaddr import IPNetwork\n'), ((62138, 62164), 'nuage_tempest_plugin.lib.topology.Topology.from_nuage', 'Topology.from_nuage', (['"""5.4"""'], {}), "('5.4')\n", (62157, 62164), False, 'from nuage_tempest_plugin.lib.topology import Topology\n'), ((62634, 62658), 'netaddr.IPNetwork', 'IPNetwork', (['"""10.0.0.0/24"""'], {}), "('10.0.0.0/24')\n", (62643, 62658), False, 'from netaddr import IPNetwork\n')] |
thanosa/coding-challenges | advent_of_code/2019/11_space_police/aoc_2019_11.py | a10b0de51da076a4bcc798b4a3d5a08e29c5af01 | ''' Advent of code 2019 Day 11 - Space police '''
from typing import NamedTuple
from enum import Enum
INPUT_FILE = __file__.replace('.py', '.dat')
def to_number(digits: list) -> int:
return int(''.join(map(str, digits)))
def to_list(number: int) -> list:
return [int(i) for i in str(number)]
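# Parameter modes of an Intcode instruction are encoded in the digits above
# the two-digit opcode, least-significant first: 0 = position, 1 = immediate,
# 2 = relative.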
def get_modes(instruction: int, parameter_count: int = 3) -> list:
params = instruction // 100
string = str(params).zfill(parameter_count)
return list(reversed(to_list(string)))
def get_dict(lst: list):
return {k: v for k,v in enumerate(lst)}
def get_value(code: dict, key: int):
if key in code:
return code[key]
else:
return 0
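# Intcode interpreter written as a generator: opcode 3 pops the next value
# from `inputs`, opcode 4 yields each output, opcode 9 shifts the relative
# base and opcode 99 halts. The program is stored as a dict so that reads and
# writes beyond the initial memory work transparently.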
def run_program(code: dict, inputs: list):
code = code.copy()
output = 0
pos = 0
base = 0
counter = 0
while (code[pos] % 100) != 99:
instruction = code[pos + 0]
params = []
for i in range(3):
try:
param = code[pos + 1 + i]
            except KeyError:
param = None
params.append(param)
operation = instruction % 100
modes = get_modes(instruction)
values = [0] * 2
# Addition
if operation == 1:
for i in range(2):
if modes[i] == 0:
values[i] = get_value(code, params[i])
elif modes[i] == 1:
values[i] = params[i]
elif modes[i] == 2:
values[i] = get_value(code, params[i] + base)
if modes[2] == 0:
code[params[2]] = values[0] + values[1]
else:
code[params[2] + base] = values[0] + values[1]
pos += 4
# Multiplication
elif operation == 2:
for i in range(2):
if modes[i] == 0:
values[i] = get_value(code, params[i])
elif modes[i] == 1:
values[i] = params[i]
elif modes[i] == 2:
values[i] = get_value(code, params[i] + base)
if modes[2] == 0:
code[params[2]] = values[0] * values[1]
else:
code[params[2] + base] = values[0] * values[1]
pos += 4
# Store input
elif operation == 3:
if modes[0] == 0:
code[params[0]] = inputs.pop(0)
elif modes[0] == 2:
code[params[0] + base] = inputs.pop(0)
else:
raise RuntimeError("fail")
pos += 2
# Get output
elif operation == 4:
if modes[0] == 0:
values[0] = get_value(code, params[0])
elif modes[0] == 1:
values[0] = params[0]
elif modes[0] == 2:
values[0] = get_value(code, params[0] + base)
yield values[0]
pos += 2
# Jump if true
elif operation == 5:
for i in range(2):
if modes[i] == 0:
values[i] = get_value(code, params[i])
elif modes[i] == 1:
values[i] = params[i]
elif modes[i] == 2:
values[i] = get_value(code, params[i] + base)
if values[0] != 0:
pos = values[1]
else:
pos += 3
# Jump if false
elif operation == 6:
for i in range(2):
if modes[i] == 0:
values[i] = get_value(code, params[i])
elif modes[i] == 1:
values[i] = params[i]
elif modes[i] == 2:
values[i] = get_value(code, params[i] + base)
if values[0] == 0:
pos = values[1]
else:
pos += 3
# Less than
elif operation == 7:
for i in range(2):
if modes[i] == 0:
values[i] = get_value(code, params[i])
elif modes[i] == 1:
values[i] = params[i]
elif modes[i] == 2:
values[i] = get_value(code, params[i] + base)
if values[0] < values[1]:
if modes[2] == 0:
code[params[2]] = 1
else:
code[params[2] + base] = 1
else:
if modes[2] == 0:
code[params[2]] = 0
else:
code[params[2] + base] = 0
pos += 4
# Equals
elif operation == 8:
for i in range(2):
if modes[i] == 0:
values[i] = get_value(code, params[i])
elif modes[i] == 1:
values[i] = params[i]
elif modes[i] == 2:
values[i] = get_value(code, params[i] + base)
if values[0] == values[1]:
if modes[2] == 0:
code[params[2]] = 1
else:
code[params[2] + base] = 1
else:
if modes[2] == 0:
code[params[2]] = 0
else:
code[params[2] + base] = 0
pos += 4
# Relative base shift
elif operation == 9:
i = 0
if modes[i] == 0:
values[i] = get_value(code, params[i])
elif modes[i] == 1:
values[i] = params[i]
elif modes[i] == 2:
values[i] = get_value(code, params[i] + base)
base += values[i]
pos += 2
else:
raise RuntimeError(f"error in operation: {pos}")
class Point(NamedTuple):
X: int
Y: int
class Direction(Enum):
UP = 0
LEFT = 1
DOWN = 2
RIGHT = 3
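# Hull painting robot: feeds the colour of the current panel to the Intcode
# brain, reads back a paint instruction (0 = black, 1 = white) and a turn
# instruction (0 = left, 1 = right), paints, turns and moves one step until
# the program halts. Returns the dict of painted panels.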
def run_robot(code: dict, start_on_white: bool = False) -> dict:
DIRECTIONS_COUNT = 4
direction = Direction.UP
panels = {}
seen = set()
color = []
position = Point(0, 0)
if start_on_white:
panels[position] = 1
brain = run_program(code, color)
while True:
try:
# Sense the color on the point. Default is black (0).
if position in panels:
color.append(panels[position])
else:
color.append(0)
paint = next(brain)
rotation = next(brain)
            if paint not in (0, 1) or rotation not in (0, 1):
raise RuntimeError(f"Failed to read paint: {paint}, rotation: {rotation}")
# Paints the panel.
panels[position] = paint
# Keeps track of all visited points.
seen.add(position)
# Turn left (0) or right (1).
if rotation == 0:
direction = Direction((direction.value + 1) % DIRECTIONS_COUNT)
elif rotation == 1:
direction = Direction((direction.value - 1) % DIRECTIONS_COUNT)
# Move a step forward.
if direction == Direction.UP:
position = Point(position.X, position.Y - 1)
elif direction == Direction.LEFT:
position = Point(position.X - 1, position.Y)
elif direction == Direction.DOWN:
position = Point(position.X, position.Y + 1)
elif direction == Direction.RIGHT:
position = Point(position.X + 1, position.Y)
else:
raise RuntimeError(f"Wrong direction: {direction}")
except StopIteration:
return panels
def print_panels(panels: dict):
min_x = min(panels, key=lambda panel: panel.X).X
max_x = max(panels, key=lambda panel: panel.X).X
min_y = min(panels, key=lambda panel: panel.Y).Y
max_y = max(panels, key=lambda panel: panel.Y).Y
print(f"{min_x} {max_x} {min_y} {max_y}")
for y in range(min_y, max_y + 1):
row = []
for x in range(min_x, max_x + 1):
point = Point(x, y)
if point in panels:
if panels[Point(x, y)] == 1:
row.append("#")
else:
row.append(" ")
else:
row.append(" ")
print(''.join(row))
# Read the input
with open(INPUT_FILE) as f:
input_dict = get_dict(list(map(int, f.read().strip().split(','))))
# Part 1 solution
panels_count = len(run_robot(input_dict))
print(f"Part 1: {panels_count}")
# Part 2 solution
panels = run_robot(input_dict, True)
print(f"Part 2:")
print_panels(panels)
| [] |
jkrueger1/nicos | nicos_mlz/mira/setups/mezeiflip.py | 5f4ce66c312dedd78995f9d91e8a6e3c891b262b | description = 'Mezei spin flipper using TTI power supply'
group = 'optional'
tango_base = 'tango://miractrl.mira.frm2:10000/mira/'
devices = dict(
dct1 = device('nicos.devices.entangle.PowerSupply',
description = 'current in first channel of supply (flipper current)',
tangodevice = tango_base + 'tti1/out1',
timeout = 1,
precision = 0.01,
),
dct2 = device('nicos.devices.entangle.PowerSupply',
description = 'current in second channel of supply (compensation current)',
tangodevice = tango_base + 'tti1/out2',
timeout = 1,
precision = 0.01,
),
flip = device('nicos.devices.polarized.MezeiFlipper',
description = 'Mezei flipper before sample (in shielding table)',
flip = 'dct1',
corr = 'dct2',
),
)
| [] |
hxri/mars | mars/learn/cluster/_k_means_init.py | f7864f00911883b94800b63856f0e57648d3d9b4 | # Copyright 1999-2021 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from ... import opcodes
from ... import tensor as mt
from ...core import OutputType, recursive_tile
from ...core.operand import OperandStage
from ...serialization.serializables import KeyField, Int32Field
from ...tensor.array_utils import as_same_device, device
from ...tensor.core import TensorOrder
from ...tensor.random import RandomStateField
from ...utils import has_unknown_shape
from ..metrics import euclidean_distances
from ..operands import LearnOperand, LearnOperandMixin
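# k-means++ seeding (Arthur & Vassilvitskii, 2007) expressed with Mars
# tensors: the first center is picked uniformly at random, every following
# center is chosen among `n_local_trials` candidates sampled with probability
# proportional to the squared distance to the nearest center picked so far,
# keeping the candidate that minimizes the resulting potential.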
def _kmeans_plus_plus_init(X,
x_squared_norms,
random_state,
n_clusters: int,
n_local_trials: int = None):
n_samples, n_features = X.shape
centers = mt.empty((n_clusters, n_features), dtype=X.dtype)
assert x_squared_norms is not None, 'x_squared_norms None in _k_init'
# Set the number of local seeding trials if none is given
if n_local_trials is None:
# This is what Arthur/Vassilvitskii tried, but did not report
# specific results for other than mentioning in the conclusion
# that it helped.
n_local_trials = 2 + int(np.log(n_clusters))
# Pick first center randomly
center_id = random_state.randint(n_samples)
if X.issparse(): # pragma: no cover
centers[0] = X[center_id].todense()
else:
centers[0] = X[center_id]
# Initialize list of closest distances and calculate current potential
closest_dist_sq = euclidean_distances(
centers[0, mt.newaxis], X, Y_norm_squared=x_squared_norms,
squared=True)
current_pot = closest_dist_sq.sum()
# Pick the remaining n_clusters-1 points
for c in range(1, n_clusters):
# Choose center candidates by sampling with probability proportional
# to the squared distance to the closest existing center
rand_vals = random_state.random_sample(n_local_trials) * current_pot
candidate_ids = mt.searchsorted(closest_dist_sq.cumsum(),
rand_vals)
# XXX: numerical imprecision can result in a candidate_id out of range
candidate_ids = mt.clip(candidate_ids, None, closest_dist_sq.size - 1)
# Compute distances to center candidates
distance_to_candidates = euclidean_distances(
X[candidate_ids], X, Y_norm_squared=x_squared_norms, squared=True)
# update closest distances squared and potential for each candidate
distance_to_candidates = mt.minimum(closest_dist_sq, distance_to_candidates)
candidates_pot = distance_to_candidates.sum(axis=1)
# Decide which candidate is the best
best_candidate = mt.argmin(candidates_pot)
current_pot = candidates_pot[best_candidate]
closest_dist_sq = distance_to_candidates[best_candidate]
best_candidate = candidate_ids[best_candidate]
# Permanently add best center candidate found in local tries
if X.issparse(): # pragma: no cover
c_center = X[best_candidate].todense()
else:
c_center = X[best_candidate]
centers[c] = c_center
return centers
class KMeansPlusPlusInit(LearnOperand, LearnOperandMixin):
_op_type_ = opcodes.KMEANS_PLUS_PLUS_INIT
_x = KeyField('x')
_n_clusters = Int32Field('n_clusters')
_x_squared_norms = KeyField('x_squared_norms')
_state = RandomStateField('state')
_n_local_trials = Int32Field('n_local_trials')
def __init__(self, x=None, n_clusters=None, x_squared_norms=None,
state=None, n_local_trials=None, output_types=None, **kw):
super().__init__(_x=x, _n_clusters=n_clusters, _x_squared_norms=x_squared_norms,
_state=state, _n_local_trials=n_local_trials,
_output_types=output_types, **kw)
if self._output_types is None:
self._output_types = [OutputType.tensor]
@property
def x(self):
return self._x
@property
def n_clusters(self):
return self._n_clusters
@property
def x_squared_norms(self):
return self._x_squared_norms
@property
def state(self):
return self._state
@property
def n_local_trials(self):
return self._n_local_trials
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._x = self._inputs[0]
self._x_squared_norms = self._inputs[-1]
def __call__(self):
inputs = [self._x, self._x_squared_norms]
kw = {
'shape': (self._n_clusters, self._x.shape[1]),
'dtype': self._x.dtype,
'order': TensorOrder.C_ORDER
}
return self.new_tileable(inputs, kws=[kw])
@classmethod
def _tile_one_chunk(cls, op: "KMeansPlusPlusInit"):
out = op.outputs[0]
chunk_op = op.copy().reset_key()
chunk_kw = out.params.copy()
chunk_kw['index'] = (0, 0)
chunk_inputs = [op.x.chunks[0], op.x_squared_norms.chunks[0]]
chunk = chunk_op.new_chunk(chunk_inputs, kws=[chunk_kw])
kw = out.params
kw['chunks'] = [chunk]
kw['nsplits'] = tuple((s,) for s in out.shape)
new_op = op.copy()
return new_op.new_tileables(op.inputs, kws=[kw])
@classmethod
def tile(cls, op: "KMeansPlusPlusInit"):
if len(op.x.chunks) == 1:
assert len(op.x_squared_norms.chunks) == 1
return cls._tile_one_chunk(op)
else:
return (yield from cls._tile_k_init(op))
@classmethod
def _tile_k_init(cls, op: "KMeansPlusPlusInit"):
X = op.x
n_clusters = op.n_clusters
x_squared_norms = op.x_squared_norms
random_state = op.state
n_local_trials = op.n_local_trials
centers = _kmeans_plus_plus_init(X, x_squared_norms, random_state,
n_clusters, n_local_trials)
return (yield from recursive_tile(centers))
@classmethod
def execute(cls, ctx, op: "KMeansPlusPlusInit"):
try:
from sklearn.cluster._kmeans import _kmeans_plusplus
except ImportError: # pragma: no cover
try:
from sklearn.cluster._kmeans import _k_init
except ImportError:
from sklearn.cluster.k_means_ import _k_init
def _kmeans_plusplus(*args, **kwargs):
return _k_init(*args, **kwargs), None
(x, x_squared_norms), device_id, _ = as_same_device(
[ctx[inp.key] for inp in op.inputs], device=op.device, ret_extra=True)
with device(device_id):
ctx[op.outputs[0].key] = _kmeans_plusplus(
x, op.n_clusters, x_squared_norms=x_squared_norms, random_state=op.state,
n_local_trials=op.n_local_trials)[0]
###############################################################################
# Initialization heuristic
def _k_init(X, n_clusters, x_squared_norms, random_state, n_local_trials=None):
"""Init n_clusters seeds according to k-means++
Parameters
----------
X : array or sparse matrix, shape (n_samples, n_features)
The data to pick seeds for. To avoid memory copy, the input data
should be double precision (dtype=np.float64).
n_clusters : integer
The number of seeds to choose
x_squared_norms : array, shape (n_samples,)
Squared Euclidean norm of each data point.
random_state : int, RandomState instance
The generator used to initialize the centers. Use an int to make the
randomness deterministic.
See :term:`Glossary <random_state>`.
n_local_trials : integer, optional
The number of seeding trials for each center (except the first),
of which the one reducing inertia the most is greedily chosen.
Set to None to make the number of trials depend logarithmically
on the number of seeds (2+log(k)); this is the default.
Notes
-----
Selects initial cluster centers for k-mean clustering in a smart way
to speed up convergence. see: Arthur, D. and Vassilvitskii, S.
"k-means++: the advantages of careful seeding". ACM-SIAM symposium
on Discrete algorithms. 2007
Version ported from http://www.stanford.edu/~darthur/kMeansppTest.zip,
which is the implementation used in the aforementioned paper.
"""
op = KMeansPlusPlusInit(x=X, n_clusters=n_clusters, x_squared_norms=x_squared_norms,
state=random_state, n_local_trials=n_local_trials)
return op()
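# Illustrative usage sketch (not part of the original module): seeding three
# centers with the k-means++ initializer above. The tensor returned here is
# lazy; calling .execute() on it under an active mars session materializes it.
def _example_k_init():
    rs = np.random.RandomState(0)
    X = mt.tensor(rs.rand(100, 4), chunk_size=50)
    x_squared_norms = (X ** 2).sum(axis=1)
    return _k_init(X, n_clusters=3, x_squared_norms=x_squared_norms,
                   random_state=rs)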
class KMeansScalablePlusPlusInit(LearnOperand, LearnOperandMixin):
_op_type_ = opcodes.KMEANS_SCALABLE_PLUS_PLUS_INIT
_x = KeyField('x')
_n_clusters = Int32Field('n_clusters')
_x_squared_norms = KeyField('x_squared_norms')
_state = RandomStateField('state')
_init_iter = Int32Field('init_iter')
_oversampling_factor = Int32Field('oversampling_factor')
def __init__(self, x=None, n_clusters=None, x_squared_norms=None,
state=None, init_iter=None, oversampling_factor=None,
output_types=None, **kw):
super().__init__(_x=x, _n_clusters=n_clusters, _x_squared_norms=x_squared_norms,
_state=state, _init_iter=init_iter,
_oversampling_factor=oversampling_factor,
_output_types=output_types, **kw)
if self._output_types is None:
self._output_types = [OutputType.tensor]
@property
def x(self):
return self._x
@property
def n_clusters(self):
return self._n_clusters
@property
def x_squared_norms(self):
return self._x_squared_norms
@property
def state(self):
return self._state
@property
def init_iter(self):
return self._init_iter
@property
def oversampling_factor(self):
return self._oversampling_factor
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
if self._x is not None:
self._x = self._inputs[0]
if self._x_squared_norms is not None:
self._x_squared_norms = self._inputs[-1]
def __call__(self):
inputs = [self._x, self._x_squared_norms]
kw = {
'shape': (self._n_clusters, self._x.shape[1]),
'dtype': self._x.dtype,
'order': TensorOrder.C_ORDER
}
return self.new_tileable(inputs, kws=[kw])
@classmethod
def tile(cls, op: "KMeansScalablePlusPlusInit"):
if has_unknown_shape(*op.inputs):
yield
x = mt.tensor(op.x)
x_squared_norms = mt.atleast_2d(op.x_squared_norms)
out = op.outputs[0]
random_state = op.state
rs = mt.random.RandomState.from_numpy(random_state)
n_samples, n_features = x.shape
n_clusters = op.n_clusters
# step 1, sample a centroid
centers = x[random_state.randint(n_samples, size=1)]
for _ in range(op.init_iter):
distances = euclidean_distances(
x, centers, X_norm_squared=x_squared_norms, squared=True)
# calculate the cost of data with respect to current centers
cost = mt.sum(mt.min(distances, axis=1))
# calculate the distribution to sample new centers
distribution = mt.full(len(distances), 1 / len(distances))
mt.true_divide(mt.min(distances, axis=1), cost,
where=cost != 0, out=distribution)
# pick new centers
new_centers_size = op.oversampling_factor * n_clusters
new_centers = x[rs.choice(n_samples, new_centers_size, p=distribution)]
centers = mt.concatenate([centers, new_centers])
# rechunk centers into one chunk
centers = (yield from recursive_tile(centers)).rechunk(centers.shape)
distances = yield from recursive_tile(euclidean_distances(
x, centers, X_norm_squared=x_squared_norms, squared=True))
map_index_to_chunks = {}
# calculate weight for each chunk
for c in distances.chunks:
map_chunk_op = KMeansScalablePlusPlusInit(stage=OperandStage.map)
map_chunk_kw = {
'shape': (len(centers),),
'dtype': np.dtype(np.int64),
'order': TensorOrder.C_ORDER,
'index': c.index
}
map_chunk = map_chunk_op.new_chunk([c], kws=[map_chunk_kw])
map_index_to_chunks[c.index] = map_chunk
combine_chunks = []
for i in range(distances.chunk_shape[0]):
map_chunks = [map_index_to_chunks[i, j]
for j in range(distances.chunk_shape[1])]
combine_chunk_op = KMeansScalablePlusPlusInit(stage=OperandStage.combine)
combine_chunk_kw = {
'shape': (len(centers),),
'dtype': np.dtype(np.int64),
'order': TensorOrder.C_ORDER,
'index': (i,)
}
combine_chunk = combine_chunk_op.new_chunk(
map_chunks, kws=[combine_chunk_kw])
combine_chunks.append(combine_chunk)
reduce_chunk_op = KMeansScalablePlusPlusInit(n_clusters=op.n_clusters,
state=random_state,
stage=OperandStage.reduce)
reduce_chunk_kw = out.params
reduce_chunk_kw['index'] = (0, 0)
reduce_chunk = reduce_chunk_op.new_chunk([centers.chunks[0]] + combine_chunks,
kws=[reduce_chunk_kw])
new_op = op.copy()
kw = out.params
kw['chunks'] = [reduce_chunk]
kw['nsplits'] = tuple((s,) for s in out.shape)
return new_op.new_tileables(op.inputs, kws=[kw])
@classmethod
def _execute_map(cls, ctx, op: "KMeansScalablePlusPlusInit"):
distances = ctx[op.inputs[0].key]
min_distance_ids = np.argmin(distances, axis=1)
min_distances = distances[range(len(distances)), min_distance_ids]
ctx[op.outputs[0].key] = (min_distances, min_distance_ids)
@classmethod
def _execute_combine(cls, ctx, op: "KMeansScalablePlusPlusInit"):
out = op.outputs[0]
all_distances, all_min_distance_ids = tuple(zip(*(ctx[inp.key] for inp in op.inputs)))
distances = np.stack(all_distances).T
min_distance_ids = np.stack(all_min_distance_ids).T
combined_min_distance_id = np.argmin(distances, axis=1)
min_distance_ids = min_distance_ids[range(len(distances)), combined_min_distance_id]
count = np.bincount(min_distance_ids)
result = np.zeros(out.shape[0], dtype=np.int64)
result[:len(count)] = count
ctx[out.key] = result
@classmethod
def _execute_reduce(cls, ctx, op: "KMeansScalablePlusPlusInit"):
from sklearn.cluster import KMeans
inputs = [ctx[inp.key] for inp in op.inputs]
count = np.zeros(inputs[1].shape[0], dtype=np.int64)
for inp in inputs[1:]:
count += inp
weight = count / count.sum()
centers = inputs[0]
kmeans = KMeans(n_clusters=op.n_clusters, n_init=1,
random_state=op.state)
kmeans.fit(centers, sample_weight=weight)
ctx[op.outputs[0].key] = kmeans.cluster_centers_
@classmethod
def execute(cls, ctx, op: "KMeansScalablePlusPlusInit"):
if op.stage == OperandStage.map:
return cls._execute_map(ctx, op)
elif op.stage == OperandStage.combine:
return cls._execute_combine(ctx, op)
else:
return cls._execute_reduce(ctx, op)
def _scalable_k_init(X, n_clusters, x_squared_norms, random_state,
oversampling_factor=2, init_iter=5):
op = KMeansScalablePlusPlusInit(x=X, n_clusters=n_clusters,
x_squared_norms=x_squared_norms,
state=random_state, init_iter=init_iter,
oversampling_factor=oversampling_factor)
return op()
| [((14667, 14695), 'numpy.argmin', 'np.argmin', (['distances'], {'axis': '(1)'}), '(distances, axis=1)\n', (14676, 14695), True, 'import numpy as np\n'), ((15191, 15219), 'numpy.argmin', 'np.argmin', (['distances'], {'axis': '(1)'}), '(distances, axis=1)\n', (15200, 15219), True, 'import numpy as np\n'), ((15329, 15358), 'numpy.bincount', 'np.bincount', (['min_distance_ids'], {}), '(min_distance_ids)\n', (15340, 15358), True, 'import numpy as np\n'), ((15376, 15414), 'numpy.zeros', 'np.zeros', (['out.shape[0]'], {'dtype': 'np.int64'}), '(out.shape[0], dtype=np.int64)\n', (15384, 15414), True, 'import numpy as np\n'), ((15682, 15726), 'numpy.zeros', 'np.zeros', (['inputs[1].shape[0]'], {'dtype': 'np.int64'}), '(inputs[1].shape[0], dtype=np.int64)\n', (15690, 15726), True, 'import numpy as np\n'), ((15867, 15932), 'sklearn.cluster.KMeans', 'KMeans', ([], {'n_clusters': 'op.n_clusters', 'n_init': '(1)', 'random_state': 'op.state'}), '(n_clusters=op.n_clusters, n_init=1, random_state=op.state)\n', (15873, 15932), False, 'from sklearn.cluster import KMeans\n'), ((15069, 15092), 'numpy.stack', 'np.stack', (['all_distances'], {}), '(all_distances)\n', (15077, 15092), True, 'import numpy as np\n'), ((15122, 15152), 'numpy.stack', 'np.stack', (['all_min_distance_ids'], {}), '(all_min_distance_ids)\n', (15130, 15152), True, 'import numpy as np\n'), ((1787, 1805), 'numpy.log', 'np.log', (['n_clusters'], {}), '(n_clusters)\n', (1793, 1805), True, 'import numpy as np\n'), ((7295, 7423), 'sklearn.cluster._kmeans._kmeans_plusplus', '_kmeans_plusplus', (['x', 'op.n_clusters'], {'x_squared_norms': 'x_squared_norms', 'random_state': 'op.state', 'n_local_trials': 'op.n_local_trials'}), '(x, op.n_clusters, x_squared_norms=x_squared_norms,\n random_state=op.state, n_local_trials=op.n_local_trials)\n', (7311, 7423), False, 'from sklearn.cluster._kmeans import _kmeans_plusplus\n'), ((12951, 12969), 'numpy.dtype', 'np.dtype', (['np.int64'], {}), '(np.int64)\n', (12959, 12969), True, 'import numpy as np\n'), ((13574, 13592), 'numpy.dtype', 'np.dtype', (['np.int64'], {}), '(np.int64)\n', (13582, 13592), True, 'import numpy as np\n'), ((7049, 7073), 'sklearn.cluster.k_means_._k_init', '_k_init', (['*args'], {}), '(*args, **kwargs)\n', (7056, 7073), False, 'from sklearn.cluster.k_means_ import _k_init\n')] |
ojones/wikipedia_parser | wikipedia_parser/infobox/wikitext_parser.py | db548290fbc392299bba8adfda9fe18baa1e66fe | import re
from wikipedia_parser.infobox import clean_text as clean_help
from wikipedia_parser.infobox import wikitext_helpers as wtext_help
from wikipedia_parser.third_party_adapters import parserfromhell_adapter as adapter
__author__ = 'oswaldjones'
def get_simple_text(wtext, key, clean=True):
text = None
keys = key if type(key) is list else [key]
template_dict = adapter.template_dict(wtext)
wtext_lines = wtext_help.get_wtext_lines(wtext)
if keys:
for possible_key in keys:
# try getting from parserfromhell
if not text and template_dict:
text = template_dict.get(possible_key)
# final attempt if still no text
if not text and wtext_lines:
                matched_line = wtext_help.find_key_val_line(wtext_lines, possible_key)
if matched_line:
key_val = matched_line.strip(' \t\n\r').split("=", 1)
if len(key_val) == 2:
text = key_val[1].strip()
if text and clean:
text = clean_help.clean_text(text)
return text
def extract_page_links(wtext, key):
links = []
keys = key if type(key) is list else [key]
template_dict = adapter.template_dict(wtext)
wtext_lines = wtext_help.get_wtext_lines(wtext)
if keys:
for possible_key in keys:
# try parserfromhell
if not links and template_dict:
if template_dict.get(possible_key):
                    matches = re.findall(r"\[\[(.*?)\]\]", template_dict.get(possible_key))
links = [link.split("|", 1)[0] for link in matches]
# final attempt if still no links
if not links and wtext_lines:
matched_line = wtext_help.find_key_val_line(wtext_lines, possible_key)
if matched_line:
key_val = matched_line.strip(' \t\n\r').split("=")
if len(key_val) == 2:
                    matches = re.findall(r"\[\[(.*?)\]\]", key_val[1].strip())
links = [link.split("|", 1)[0] for link in matches]
return links
| [((386, 414), 'wikipedia_parser.third_party_adapters.parserfromhell_adapter.template_dict', 'adapter.template_dict', (['wtext'], {}), '(wtext)\n', (407, 414), True, 'from wikipedia_parser.third_party_adapters import parserfromhell_adapter as adapter\n'), ((433, 466), 'wikipedia_parser.infobox.wikitext_helpers.get_wtext_lines', 'wtext_help.get_wtext_lines', (['wtext'], {}), '(wtext)\n', (459, 466), True, 'from wikipedia_parser.infobox import wikitext_helpers as wtext_help\n'), ((1234, 1262), 'wikipedia_parser.third_party_adapters.parserfromhell_adapter.template_dict', 'adapter.template_dict', (['wtext'], {}), '(wtext)\n', (1255, 1262), True, 'from wikipedia_parser.third_party_adapters import parserfromhell_adapter as adapter\n'), ((1281, 1314), 'wikipedia_parser.infobox.wikitext_helpers.get_wtext_lines', 'wtext_help.get_wtext_lines', (['wtext'], {}), '(wtext)\n', (1307, 1314), True, 'from wikipedia_parser.infobox import wikitext_helpers as wtext_help\n'), ((1066, 1093), 'wikipedia_parser.infobox.clean_text.clean_text', 'clean_help.clean_text', (['text'], {}), '(text)\n', (1087, 1093), True, 'from wikipedia_parser.infobox import clean_text as clean_help\n'), ((778, 827), 'wikipedia_parser.infobox.wikitext_helpers.find_key_val_line', 'wtext_help.find_key_val_line', (['wtext', 'possible_key'], {}), '(wtext, possible_key)\n', (806, 827), True, 'from wikipedia_parser.infobox import wikitext_helpers as wtext_help\n'), ((1776, 1831), 'wikipedia_parser.infobox.wikitext_helpers.find_key_val_line', 'wtext_help.find_key_val_line', (['wtext_lines', 'possible_key'], {}), '(wtext_lines, possible_key)\n', (1804, 1831), True, 'from wikipedia_parser.infobox import wikitext_helpers as wtext_help\n')] |
threefoldtech/threebot_prebuilt | sandbox/lib/jumpscale/JumpscaleLibs/clients/graphql/GraphQLFactory.py | 1f0e1c65c14cef079cd80f73927d7c8318755c48 | from .GraphQLClient import GraphQLClient
from Jumpscale import j
JSConfigs = j.baseclasses.object_config_collection
class GraphQLFactory(JSConfigs):
__jslocation__ = "j.clients.graphql"
_CHILDCLASS = GraphQLClient
| [] |
tumb1er/django-video-transcoding | src/video_transcoding/defaults.py | 54c85fb4a3b58b3f3b82e461b2f54f3c8dd5fcc6 | from os import getenv as e
from kombu import Queue
CELERY_APP_NAME = 'video_transcoding'
VIDEO_TRANSCODING_CELERY_CONF = {
'broker_url': e('VIDEO_TRANSCODING_CELERY_BROKER_URL',
'amqp://guest:guest@rabbitmq:5672/'),
'result_backend': e('VIDEO_TRANSCODING_CELERY_RESULT_BACKEND', None),
'task_default_exchange': CELERY_APP_NAME,
'task_default_exchange_type': 'topic',
'task_default_queue': CELERY_APP_NAME,
'worker_prefetch_multiplier': 1,
'worker_concurrency': e('VIDEO_TRANSCODING_CELERY_CONCURRENCY'),
'task_acks_late': True,
'task_reject_on_worker_lost': True,
'task_queues': [
Queue(CELERY_APP_NAME, routing_key=CELERY_APP_NAME),
]
}
# Directory for large output files
VIDEO_TEMP_DIR = '/tmp'
# Download source before processing
VIDEO_DOWNLOAD_SOURCE = bool(int(e('VIDEO_DOWNLOAD_SOURCE', 0)))
# A list of WebDAV endpoints for storing video results
VIDEO_ORIGINS = e('VIDEO_ORIGINS',
'http://storage.localhost:8080/videos/').split(',')
# Video streamer public urls (comma-separated)
VIDEO_EDGES = e('VIDEO_EDGES', 'http://storage.localhost:8080/').split(',')
# Edge video manifest url template
VIDEO_URL = '{edge}/hls/{filename}1080p.mp4/index.m3u8'
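# Illustrative sketch (not part of the original settings): how the manifest
# template above is expected to be filled in for a single transcoded video.
def example_manifest_url(filename: str) -> str:
    return VIDEO_URL.format(edge=VIDEO_EDGES[0], filename=filename)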
# Output source files checksum
CHECKSUM_SOURCE = bool(int(e('CHECKSUM_SOURCE', 0)))
| [((146, 223), 'os.getenv', 'e', (['"""VIDEO_TRANSCODING_CELERY_BROKER_URL"""', '"""amqp://guest:guest@rabbitmq:5672/"""'], {}), "('VIDEO_TRANSCODING_CELERY_BROKER_URL', 'amqp://guest:guest@rabbitmq:5672/')\n", (147, 223), True, 'from os import getenv as e\n'), ((267, 317), 'os.getenv', 'e', (['"""VIDEO_TRANSCODING_CELERY_RESULT_BACKEND"""', 'None'], {}), "('VIDEO_TRANSCODING_CELERY_RESULT_BACKEND', None)\n", (268, 317), True, 'from os import getenv as e\n'), ((514, 555), 'os.getenv', 'e', (['"""VIDEO_TRANSCODING_CELERY_CONCURRENCY"""'], {}), "('VIDEO_TRANSCODING_CELERY_CONCURRENCY')\n", (515, 555), True, 'from os import getenv as e\n'), ((654, 705), 'kombu.Queue', 'Queue', (['CELERY_APP_NAME'], {'routing_key': 'CELERY_APP_NAME'}), '(CELERY_APP_NAME, routing_key=CELERY_APP_NAME)\n', (659, 705), False, 'from kombu import Queue\n'), ((845, 874), 'os.getenv', 'e', (['"""VIDEO_DOWNLOAD_SOURCE"""', '(0)'], {}), "('VIDEO_DOWNLOAD_SOURCE', 0)\n", (846, 874), True, 'from os import getenv as e\n'), ((949, 1008), 'os.getenv', 'e', (['"""VIDEO_ORIGINS"""', '"""http://storage.localhost:8080/videos/"""'], {}), "('VIDEO_ORIGINS', 'http://storage.localhost:8080/videos/')\n", (950, 1008), True, 'from os import getenv as e\n'), ((1100, 1150), 'os.getenv', 'e', (['"""VIDEO_EDGES"""', '"""http://storage.localhost:8080/"""'], {}), "('VIDEO_EDGES', 'http://storage.localhost:8080/')\n", (1101, 1150), True, 'from os import getenv as e\n'), ((1313, 1336), 'os.getenv', 'e', (['"""CHECKSUM_SOURCE"""', '(0)'], {}), "('CHECKSUM_SOURCE', 0)\n", (1314, 1336), True, 'from os import getenv as e\n')] |
jmboettcher/fall2019_sentiment_in_alternative_words | wordSenseByContext.py | d88fd0ed7d1396bb3755431d6aff85b880ffe149 | from collections import defaultdict
from nltk.tokenize import sent_tokenize
from nltk.corpus import wordnet as wn
from nltk.corpus import semcor as sc
from nltk.corpus import stopwords
import mywordtokenizer
class SenseContextWordDict:
def __init__(self):
self.dictionary = self._create_dictionary()
def _create_dictionary(self):
dictionary = defaultdict(lambda: defaultdict(int))
myStopWords = stopwords.words('english')
for sentence in sc.tagged_sents(tag='sem'):
plainWordSent = []
taggedWordSent = []
self._make_word_lists(plainWordSent, taggedWordSent, sentence)
for taggedItemTuple in taggedWordSent:
self._update_tagged_item_entry(myStopWords, dictionary, plainWordSent, taggedItemTuple[0],taggedItemTuple[1])
return dictionary
def _make_word_lists(self, plainWordSent, taggedWordSent, sentence):
for i in range(0,len(sentence)):
item = sentence[i]
if(type(item)) == list:
plainWordSent.append(item[0])
else:
if type(item.label()) == str:
plainWordSent.append(item.leaves()[0])
else:
plainWordSent.append(item.label().name())
taggedWordSent.append([item, i])
def _update_tagged_item_entry(self, myStopWords,dictionary,plainWordSent,taggedItem,taggedItemPosition):
for j in range(0,len(plainWordSent)):
word = plainWordSent[j]
if taggedItem.label().name() != word:
taggedSynset = taggedItem.label().synset()
splitUp = word.split("_")
for thisword in splitUp:
wordTokened = mywordtokenizer.simple(thisword)
if len(wordTokened) > 0:
word = wordTokened[0]
if word not in myStopWords:
dictionary[taggedSynset][word]+=1
dictionary[taggedSynset][".total."]+=1
dictionary[taggedSynset][".totalNoStops."]+=1
elif abs(j - taggedItemPosition) == 1:
dictionary[taggedSynset][word]+=1
dictionary[taggedSynset][".total."]+=1
    def getMostLikelySynset(self, word, sentence):
        """Find the synset of a word that best fits the sentence context.
        Parameters
        ----------
        word : str
            The string representing a given word.
        sentence : list of str
            The context words surrounding the given word.
        Returns
        -------
        the WordNet synset whose stored context words best cover the
        sentence, or None if no match is found.
"""
myStopWords = stopwords.words('english')
highestCoverageSyn = self._synset_search(".totalNoStops.", myStopWords, word, sentence)
if highestCoverageSyn is None:
highestCoverageSyn = self._synset_search(".total.", [], word, sentence)
return highestCoverageSyn
    def _synset_search(self, totalToUse, exclusionSet, word, sentence):
        """Search the word's synsets for the one with the best context coverage.
        Parameters
        ----------
        totalToUse : str
            Which stored total to normalize by (".total." or ".totalNoStops.").
        exclusionSet : list of str
            Context words to ignore (e.g. stopwords).
        word : str
            The string representing a given word.
        sentence : list of str
            The context words surrounding the given word.
        Returns
        -------
        the WordNet synset with the highest context coverage, or None.
"""
myMap = self.dictionary
highestCoverage = 0
highestCoverageSyn = None
for syn in wn.synsets(word):
totalContextWordMatches = 0
totalSet = myMap[syn][totalToUse]
if totalSet > 0:
for contextWord in sentence:
if contextWord != word and contextWord not in exclusionSet:
totalContextWordMatches += myMap[syn][contextWord]
coverage = totalContextWordMatches / totalSet
if coverage > highestCoverage:
highestCoverage = coverage
highestCoverageSyn = syn
return highestCoverageSyn
def listAlternatives(self, word, sentence):
synonyms = set([])
mostLikelySynset = self.getMostLikelySynset(word, sentence)
if not mostLikelySynset is None:
for synonym in mostLikelySynset.lemmas():
synonyms.add(synonym.name())
return synonyms
def mostFrequentAlternative(self, word, sentence):
mostLikelySynset = self.getMostLikelySynset(word, sentence)
highestCount = 0
mostFrequentAlternative = None
if not mostLikelySynset is None:
for synonym in mostLikelySynset.lemmas():
count = synonym.count()
if count > highestCount:
mostFrequentAlternative = synonym.name()
highestCount = count
return mostFrequentAlternative
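# Illustrative usage sketch (not part of the original module): requires the
# NLTK wordnet, semcor and stopwords corpora; building the dictionary walks
# all of semcor, so construction is slow.
def _example_sense_lookup():
    scwd = SenseContextWordDict()
    sentence = ['i', 'deposited', 'money', 'at', 'the', 'bank']
    return (scwd.getMostLikelySynset('bank', sentence),
            scwd.listAlternatives('bank', sentence))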
"""===================================================================
Place all function calls below the following conditional so that they
are called only if this module is called with
`python wordSenseByContext.py`
No functions should execute if it is instead imported with
import wordSenseByContext
in the interactive shell.
"""
if __name__ == '__main__':
pass
| [((430, 456), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (445, 456), False, 'from nltk.corpus import stopwords\n'), ((481, 507), 'nltk.corpus.semcor.tagged_sents', 'sc.tagged_sents', ([], {'tag': '"""sem"""'}), "(tag='sem')\n", (496, 507), True, 'from nltk.corpus import semcor as sc\n'), ((2641, 2667), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (2656, 2667), False, 'from nltk.corpus import stopwords\n'), ((3351, 3367), 'nltk.corpus.wordnet.synsets', 'wn.synsets', (['word'], {}), '(word)\n', (3361, 3367), True, 'from nltk.corpus import wordnet as wn\n'), ((390, 406), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (401, 406), False, 'from collections import defaultdict\n'), ((1756, 1788), 'mywordtokenizer.simple', 'mywordtokenizer.simple', (['thisword'], {}), '(thisword)\n', (1778, 1788), False, 'import mywordtokenizer\n')] |
tjwalch/django-restshop | paymentmethods/stripejs/tests.py | 569b57a5694e76a365556d7c4c9a97dd293d96c6 | import decimal
from unittest import mock
from django.conf import settings
from django.test import modify_settings
from rest_framework import test
from rest_framework.reverse import reverse
import stripe
from restshop import serializers
from restshop.models import Order
from paymentmethods.stripejs.models import StripeInvoice
import restshop.exceptions
from restshop.tests.test_product import products_and_price
@modify_settings(INSTALLED_APPS={
'append': 'restshop.paymentmethods.stripejs'
})
class StripeTest(test.APITestCase):
def setUp(self):
stripe.api_key = settings.STRIPE_API_KEY
self.order = Order.objects.create(
email='tester@test.com',
)
self.order.items.create(
description='test purchase',
price='1000',
vat='250',
quantity=3,
product=products_and_price(1000).skus.all()[0]
)
session = self.client.session
session['order_id'] = self.order.pk
session.save()
def get_token(self):
return stripe.Token.create(card={
"number": '4242424242424242',
"exp_month": 12,
"exp_year": 2016,
"cvc": '123'
}).id
def test_pay(self):
response = self.client.post(
reverse(
'order-pay',
args=['stripejs.stripeinvoice']
),
{
'stripeToken': self.get_token(),
'order': serializers.OrderSerializer(instance=self.order).data
}
)
self.assertEqual(201, response.status_code, response.data)
self.assertEqual(0,
decimal.Decimal(response.data['owed']) -
decimal.Decimal(response.data['paid']))
order = Order.objects.get()
self.assertEqual(
Order.STATUS.completed,
order.status
)
self.assertEqual(
decimal.Decimal('3750.00'),
order.invoices.all()[0].paid
)
@mock.patch('stripe.Charge.create')
def test_card_error(self, create_mock):
create_mock.side_effect = stripe.CardError('fail!', '', '402')
si = StripeInvoice.objects.create(
order=self.order,
owed=self.order.amount,
stripeToken=self.get_token(),
)
try:
si.authorize()
except restshop.exceptions.PaymentFailed as e:
self.assertEqual('fail!', e.detail)
else:
            self.fail('PaymentFailed was not raised')
def test_cancel_auth(self):
si = StripeInvoice.objects.create(
order=self.order,
owed=self.order.amount,
stripeToken=self.get_token(),
)
self.assertRaises(
restshop.exceptions.InvalidOperation,
si.cancel_auth
)
self.assertTrue(si.authorize())
self.assertTrue(si.cancel_auth())
si.refresh_from_db()
self.assertEqual(2, si.events.all().count())
self.assertEqual(StripeInvoice.STATUS.canceled, si.status)
| [((418, 496), 'django.test.modify_settings', 'modify_settings', ([], {'INSTALLED_APPS': "{'append': 'restshop.paymentmethods.stripejs'}"}), "(INSTALLED_APPS={'append': 'restshop.paymentmethods.stripejs'})\n", (433, 496), False, 'from django.test import modify_settings\n'), ((2052, 2086), 'unittest.mock.patch', 'mock.patch', (['"""stripe.Charge.create"""'], {}), "('stripe.Charge.create')\n", (2062, 2086), False, 'from unittest import mock\n'), ((631, 676), 'restshop.models.Order.objects.create', 'Order.objects.create', ([], {'email': '"""tester@test.com"""'}), "(email='tester@test.com')\n", (651, 676), False, 'from restshop.models import Order\n'), ((1812, 1831), 'restshop.models.Order.objects.get', 'Order.objects.get', ([], {}), '()\n', (1829, 1831), False, 'from restshop.models import Order\n'), ((2165, 2201), 'stripe.CardError', 'stripe.CardError', (['"""fail!"""', '""""""', '"""402"""'], {}), "('fail!', '', '402')\n", (2181, 2201), False, 'import stripe\n'), ((1062, 1171), 'stripe.Token.create', 'stripe.Token.create', ([], {'card': "{'number': '4242424242424242', 'exp_month': 12, 'exp_year': 2016, 'cvc': '123'}"}), "(card={'number': '4242424242424242', 'exp_month': 12,\n 'exp_year': 2016, 'cvc': '123'})\n", (1081, 1171), False, 'import stripe\n'), ((1303, 1356), 'rest_framework.reverse.reverse', 'reverse', (['"""order-pay"""'], {'args': "['stripejs.stripeinvoice']"}), "('order-pay', args=['stripejs.stripeinvoice'])\n", (1310, 1356), False, 'from rest_framework.reverse import reverse\n'), ((1967, 1993), 'decimal.Decimal', 'decimal.Decimal', (['"""3750.00"""'], {}), "('3750.00')\n", (1982, 1993), False, 'import decimal\n'), ((1690, 1728), 'decimal.Decimal', 'decimal.Decimal', (["response.data['owed']"], {}), "(response.data['owed'])\n", (1705, 1728), False, 'import decimal\n'), ((1756, 1794), 'decimal.Decimal', 'decimal.Decimal', (["response.data['paid']"], {}), "(response.data['paid'])\n", (1771, 1794), False, 'import decimal\n'), ((1492, 1540), 'restshop.serializers.OrderSerializer', 'serializers.OrderSerializer', ([], {'instance': 'self.order'}), '(instance=self.order)\n', (1519, 1540), False, 'from restshop import serializers\n'), ((867, 891), 'restshop.tests.test_product.products_and_price', 'products_and_price', (['(1000)'], {}), '(1000)\n', (885, 891), False, 'from restshop.tests.test_product import products_and_price\n')] |
tnnt-devteam/python-backend | tnnt/uniqdeaths.py | 1ecb0ddaccf176726739b64212831d038a7463a0 | from tnnt.settings import UNIQUE_DEATH_REJECTIONS, UNIQUE_DEATH_NORMALIZATIONS
import re
def normalize(death):
# Given a death string, apply normalizations from settings.
for regtuple in UNIQUE_DEATH_NORMALIZATIONS:
death = re.sub(regtuple[0], regtuple[1], death)
return death
def reject(death):
# Given a death string, return True if it should be excluded as a
# unique death and False if not.
for regex in UNIQUE_DEATH_REJECTIONS:
if re.search(regex, death) is not None:
return True
return False
def compile_unique_deaths(gameQS):
# Given a QuerySet of Game objects, return a set containing strings of all
# the unique deaths from those games after rejections and normalizations are
# applied.
# This is primarily for aggregation, and runs somewhat faster than it would
# if we wanted to return the players who got a death and when. This is a
# post 2021 TODO.
# First, get all unique, un-normalized deaths.
raw_uniq_deaths = \
gameQS.values_list('death', flat=True).distinct()
# Then apply normalizations and rejections, and turn it into a set
# to automatically remove any duplicates produced by normalization.
return set(normalize(d) for d in raw_uniq_deaths if not reject(d))
# post 2021 TODO (sketched below): showing unique deaths of a player or clan:
# 1. list(Game.objects.values_list('death', 'player__name', 'endtime'))
# 2. iterate through list, filtering any death for which reject is True, and
# normalizing all death strings.
# 3. sort by first death, then endtime.
# 4. filter again by taking only the first player/endtime for each death and
# ignoring later ones.
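# Illustrative sketch of the TODO above (not part of the original module):
# first (player, endtime) per normalized death, assuming Game exposes the
# 'death', 'player__name' and 'endtime' fields used below.
def example_unique_deaths_with_attribution(gameQS):
    rows = list(gameQS.values_list('death', 'player__name', 'endtime'))
    kept = [(normalize(d), p, t) for d, p, t in rows if not reject(d)]
    kept.sort(key=lambda r: (r[0], r[2]))
    first_per_death = {}
    for death, player, endtime in kept:
        first_per_death.setdefault(death, (player, endtime))
    return first_per_death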
| [((241, 280), 're.sub', 're.sub', (['regtuple[0]', 'regtuple[1]', 'death'], {}), '(regtuple[0], regtuple[1], death)\n', (247, 280), False, 'import re\n'), ((478, 501), 're.search', 're.search', (['regex', 'death'], {}), '(regex, death)\n', (487, 501), False, 'import re\n')] |
anubhavvardhan/qutip | qutip/graph.py | daf384840efbb44b86e39d8bda64d907d9f6b47f | # This file is part of QuTiP: Quantum Toolbox in Python.
#
# Copyright (c) 2011 and later, Paul D. Nation and Robert J. Johansson.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the QuTiP: Quantum Toolbox in Python nor the names
# of its contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
"""
This module contains a collection of graph theory routines used mainly
to reorder matrices for iterative steady state solvers.
"""
__all__ = ['graph_degree', 'column_permutation', 'breadth_first_search',
'reverse_cuthill_mckee', 'maximum_bipartite_matching',
'weighted_bipartite_matching']
import numpy as np
import scipy.sparse as sp
from qutip.cy.graph_utils import (
_breadth_first_search, _node_degrees,
_reverse_cuthill_mckee, _maximum_bipartite_matching,
_weighted_bipartite_matching)
def graph_degree(A):
"""
Returns the degree for the nodes (rows) of a symmetric
graph in sparse CSR or CSC format, or a qobj.
Parameters
----------
A : qobj, csr_matrix, csc_matrix
Input quantum object or csr_matrix.
Returns
-------
degree : array
Array of integers giving the degree for each node (row).
"""
if not (sp.isspmatrix_csc(A) or sp.isspmatrix_csr(A)):
raise TypeError('Input must be CSC or CSR sparse matrix.')
return _node_degrees(A.indices, A.indptr, A.shape[0])
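# Illustrative usage sketch (added for clarity, not part of the original QuTiP
# module): node degrees of a small symmetric graph stored in CSR format.
def _example_graph_degree():
    A = sp.csr_matrix(np.array([[0, 1, 1],
                                [1, 0, 0],
                                [1, 0, 0]]))
    return graph_degree(A)  # one degree per row, here [2, 1, 1]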
def breadth_first_search(A, start):
"""
Breadth-First-Search (BFS) of a graph in CSR or CSC matrix format starting
from a given node (row). Takes Qobjs and CSR or CSC matrices as inputs.
This function requires a matrix with symmetric structure.
    Use A+trans(A) if the original matrix is not symmetric or if you are unsure.
Parameters
----------
A : csc_matrix, csr_matrix
Input graph in CSC or CSR matrix format
start : int
Staring node for BFS traversal.
Returns
-------
order : array
Order in which nodes are traversed from starting node.
levels : array
Level of the nodes in the order that they are traversed.
"""
if not (sp.isspmatrix_csc(A) or sp.isspmatrix_csr(A)):
raise TypeError('Input must be CSC or CSR sparse matrix.')
num_rows = A.shape[0]
start = int(start)
order, levels = _breadth_first_search(A.indices, A.indptr, num_rows, start)
    # since not every node may be reached by the search, drop the unused
    # entries from the output arrays
return order[order != -1], levels[levels != -1]
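# Illustrative usage sketch (added for clarity, not part of the original QuTiP
# module): BFS over a path graph 0-1-2 starting from node 0.
def _example_breadth_first_search():
    A = sp.csr_matrix(np.array([[0, 1, 0],
                                [1, 0, 1],
                                [0, 1, 0]]))
    order, levels = breadth_first_search(A, 0)
    return order, levels  # expected order [0, 1, 2] with levels [0, 1, 2]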
def column_permutation(A):
"""
Finds the non-symmetric column permutation of A such that the columns
are given in ascending order according to the number of nonzero entries.
This is sometimes useful for decreasing the fill-in of sparse LU
factorization.
Parameters
----------
A : csc_matrix
Input sparse CSC sparse matrix.
Returns
-------
perm : array
Array of permuted row and column indices.
"""
if not sp.isspmatrix_csc(A):
A = sp.csc_matrix(A)
count = np.diff(A.indptr)
perm = np.argsort(count)
return perm
def reverse_cuthill_mckee(A, sym=False):
"""
Returns the permutation array that orders a sparse CSR or CSC matrix
in Reverse-Cuthill McKee ordering. Since the input matrix must be
symmetric, this routine works on the matrix A+Trans(A) if the sym flag is
set to False (Default).
It is assumed by default (*sym=False*) that the input matrix is not
symmetric. This is because it is faster to do A+Trans(A) than it is to
check for symmetry for a generic matrix. If you are guaranteed that the
matrix is symmetric in structure (values of matrix element do not matter)
then set *sym=True*
Parameters
----------
A : csc_matrix, csr_matrix
Input sparse CSC or CSR sparse matrix format.
sym : bool {False, True}
Flag to set whether input matrix is symmetric.
Returns
-------
perm : array
Array of permuted row and column indices.
Notes
-----
This routine is used primarily for internal reordering of Lindblad
superoperators for use in iterative solver routines.
References
----------
E. Cuthill and J. McKee, "Reducing the Bandwidth of Sparse Symmetric
Matrices", ACM '69 Proceedings of the 1969 24th national conference,
(1969).
"""
if not (sp.isspmatrix_csc(A) or sp.isspmatrix_csr(A)):
raise TypeError('Input must be CSC or CSR sparse matrix.')
nrows = A.shape[0]
if not sym:
A = A + A.transpose()
return _reverse_cuthill_mckee(A.indices, A.indptr, nrows)
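# Illustrative usage sketch (added for clarity, not part of the original QuTiP
# module): RCM reordering of a small symmetric matrix to reduce its bandwidth.
def _example_reverse_cuthill_mckee():
    A = sp.csr_matrix(np.array([[1, 0, 0, 1],
                                [0, 1, 1, 1],
                                [0, 1, 1, 0],
                                [1, 1, 0, 1]]))
    perm = reverse_cuthill_mckee(A, sym=True)
    return A[perm, :][:, perm]  # symmetrically permuted copy of A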
def maximum_bipartite_matching(A, perm_type='row'):
"""
    Returns an array of row or column permutations that removes zero
    elements from the diagonal of a nonsingular square CSC sparse matrix. Such
a permutation is always possible provided that the matrix is nonsingular.
This function looks at the structure of the matrix only.
The input matrix will be converted to CSC matrix format if
necessary.
Parameters
----------
A : sparse matrix
Input matrix
perm_type : str {'row', 'column'}
Type of permutation to generate.
Returns
-------
perm : array
Array of row or column permutations.
Notes
-----
This function relies on a maximum cardinality bipartite matching algorithm
based on a breadth-first search (BFS) of the underlying graph[1]_.
References
----------
I. S. Duff, K. Kaya, and B. Ucar, "Design, Implementation, and
Analysis of Maximum Transversal Algorithms", ACM Trans. Math. Softw.
38, no. 2, (2011).
"""
nrows = A.shape[0]
if A.shape[0] != A.shape[1]:
raise ValueError(
'Maximum bipartite matching requires a square matrix.')
if sp.isspmatrix_csr(A) or sp.isspmatrix_coo(A):
A = A.tocsc()
elif not sp.isspmatrix_csc(A):
raise TypeError("matrix must be in CSC, CSR, or COO format.")
if perm_type == 'column':
A = A.transpose().tocsc()
perm = _maximum_bipartite_matching(A.indices, A.indptr, nrows)
if np.any(perm == -1):
raise Exception('Possibly singular input matrix.')
return perm
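# Illustrative usage sketch (added for clarity, not part of the original QuTiP
# module): a row permutation for an anti-diagonal matrix; reordering the rows
# with the returned permutation is intended to leave no zeros on the diagonal.
def _example_maximum_bipartite_matching():
    A = sp.csc_matrix(np.array([[0, 1],
                                [1, 0]]))
    return maximum_bipartite_matching(A, perm_type='row')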
def weighted_bipartite_matching(A, perm_type='row'):
"""
Returns an array of row permutations that attempts to maximize
the product of the ABS values of the diagonal elements in
a nonsingular square CSC sparse matrix. Such a permutation is
always possible provided that the matrix is nonsingular.
This function looks at both the structure and ABS values of the
underlying matrix.
Parameters
----------
A : csc_matrix
Input matrix
perm_type : str {'row', 'column'}
Type of permutation to generate.
Returns
-------
perm : array
Array of row or column permutations.
Notes
-----
This function uses a weighted maximum cardinality bipartite matching
algorithm based on breadth-first search (BFS). The columns are weighted
according to the element of max ABS value in the associated rows and
are traversed in descending order by weight. When performing the BFS
traversal, the row associated to a given column is the one with maximum
weight. Unlike other techniques[1]_, this algorithm does not guarantee the
product of the diagonal is maximized. However, this limitation is offset
by the substantially faster runtime of this method.
References
----------
I. S. Duff and J. Koster, "The design and use of algorithms for
permuting large entries to the diagonal of sparse matrices", SIAM J.
Matrix Anal. and Applics. 20, no. 4, 889 (1997).
"""
nrows = A.shape[0]
if A.shape[0] != A.shape[1]:
raise ValueError('weighted_bfs_matching requires a square matrix.')
if sp.isspmatrix_csr(A) or sp.isspmatrix_coo(A):
A = A.tocsc()
elif not sp.isspmatrix_csc(A):
raise TypeError("matrix must be in CSC, CSR, or COO format.")
if perm_type == 'column':
A = A.transpose().tocsc()
perm = _weighted_bipartite_matching(
np.asarray(np.abs(A.data), dtype=float),
A.indices, A.indptr, nrows)
if np.any(perm == -1):
raise Exception('Possibly singular input matrix.')
return perm
| [((2857, 2903), 'qutip.cy.graph_utils._node_degrees', '_node_degrees', (['A.indices', 'A.indptr', 'A.shape[0]'], {}), '(A.indices, A.indptr, A.shape[0])\n', (2870, 2903), False, 'from qutip.cy.graph_utils import _breadth_first_search, _node_degrees, _reverse_cuthill_mckee, _maximum_bipartite_matching, _weighted_bipartite_matching\n'), ((3798, 3857), 'qutip.cy.graph_utils._breadth_first_search', '_breadth_first_search', (['A.indices', 'A.indptr', 'num_rows', 'start'], {}), '(A.indices, A.indptr, num_rows, start)\n', (3819, 3857), False, 'from qutip.cy.graph_utils import _breadth_first_search, _node_degrees, _reverse_cuthill_mckee, _maximum_bipartite_matching, _weighted_bipartite_matching\n'), ((4549, 4566), 'numpy.diff', 'np.diff', (['A.indptr'], {}), '(A.indptr)\n', (4556, 4566), True, 'import numpy as np\n'), ((4578, 4595), 'numpy.argsort', 'np.argsort', (['count'], {}), '(count)\n', (4588, 4595), True, 'import numpy as np\n'), ((6090, 6140), 'qutip.cy.graph_utils._reverse_cuthill_mckee', '_reverse_cuthill_mckee', (['A.indices', 'A.indptr', 'nrows'], {}), '(A.indices, A.indptr, nrows)\n', (6112, 6140), False, 'from qutip.cy.graph_utils import _breadth_first_search, _node_degrees, _reverse_cuthill_mckee, _maximum_bipartite_matching, _weighted_bipartite_matching\n'), ((7599, 7654), 'qutip.cy.graph_utils._maximum_bipartite_matching', '_maximum_bipartite_matching', (['A.indices', 'A.indptr', 'nrows'], {}), '(A.indices, A.indptr, nrows)\n', (7626, 7654), False, 'from qutip.cy.graph_utils import _breadth_first_search, _node_degrees, _reverse_cuthill_mckee, _maximum_bipartite_matching, _weighted_bipartite_matching\n'), ((7663, 7681), 'numpy.any', 'np.any', (['(perm == -1)'], {}), '(perm == -1)\n', (7669, 7681), True, 'import numpy as np\n'), ((9791, 9809), 'numpy.any', 'np.any', (['(perm == -1)'], {}), '(perm == -1)\n', (9797, 9809), True, 'import numpy as np\n'), ((4486, 4506), 'scipy.sparse.isspmatrix_csc', 'sp.isspmatrix_csc', (['A'], {}), '(A)\n', (4503, 4506), True, 'import scipy.sparse as sp\n'), ((4520, 4536), 'scipy.sparse.csc_matrix', 'sp.csc_matrix', (['A'], {}), '(A)\n', (4533, 4536), True, 'import scipy.sparse as sp\n'), ((7349, 7369), 'scipy.sparse.isspmatrix_csr', 'sp.isspmatrix_csr', (['A'], {}), '(A)\n', (7366, 7369), True, 'import scipy.sparse as sp\n'), ((7373, 7393), 'scipy.sparse.isspmatrix_coo', 'sp.isspmatrix_coo', (['A'], {}), '(A)\n', (7390, 7393), True, 'import scipy.sparse as sp\n'), ((9393, 9413), 'scipy.sparse.isspmatrix_csr', 'sp.isspmatrix_csr', (['A'], {}), '(A)\n', (9410, 9413), True, 'import scipy.sparse as sp\n'), ((9417, 9437), 'scipy.sparse.isspmatrix_coo', 'sp.isspmatrix_coo', (['A'], {}), '(A)\n', (9434, 9437), True, 'import scipy.sparse as sp\n'), ((2732, 2752), 'scipy.sparse.isspmatrix_csc', 'sp.isspmatrix_csc', (['A'], {}), '(A)\n', (2749, 2752), True, 'import scipy.sparse as sp\n'), ((2756, 2776), 'scipy.sparse.isspmatrix_csr', 'sp.isspmatrix_csr', (['A'], {}), '(A)\n', (2773, 2776), True, 'import scipy.sparse as sp\n'), ((3614, 3634), 'scipy.sparse.isspmatrix_csc', 'sp.isspmatrix_csc', (['A'], {}), '(A)\n', (3631, 3634), True, 'import scipy.sparse as sp\n'), ((3638, 3658), 'scipy.sparse.isspmatrix_csr', 'sp.isspmatrix_csr', (['A'], {}), '(A)\n', (3655, 3658), True, 'import scipy.sparse as sp\n'), ((5893, 5913), 'scipy.sparse.isspmatrix_csc', 'sp.isspmatrix_csc', (['A'], {}), '(A)\n', (5910, 5913), True, 'import scipy.sparse as sp\n'), ((5917, 5937), 'scipy.sparse.isspmatrix_csr', 'sp.isspmatrix_csr', (['A'], {}), '(A)\n', (5934, 5937), True, 'import 
scipy.sparse as sp\n'), ((7430, 7450), 'scipy.sparse.isspmatrix_csc', 'sp.isspmatrix_csc', (['A'], {}), '(A)\n', (7447, 7450), True, 'import scipy.sparse as sp\n'), ((9474, 9494), 'scipy.sparse.isspmatrix_csc', 'sp.isspmatrix_csc', (['A'], {}), '(A)\n', (9491, 9494), True, 'import scipy.sparse as sp\n'), ((9704, 9718), 'numpy.abs', 'np.abs', (['A.data'], {}), '(A.data)\n', (9710, 9718), True, 'import numpy as np\n')] |
Pierre-Thibault/neo-insert-imports | test/source_dir/comments_blank_lines_code.py | c20399d5666b2c3590be7f40c8be1130343bbadc | # comments------------------
def a(x):
print x
if True:
a(10) | [] |
super-resolution/Locan | locan/data/hulls/__init__.py | 94ed7759f7d7ceddee7c7feaabff80010cfedf30 | """
Hull objects of localization data.
Submodules:
-----------
.. autosummary::
:toctree: ./
hull
alpha_shape
"""
from locan.data.hulls.alpha_shape import *
from locan.data.hulls.hull import *
__all__ = []
__all__.extend(hull.__all__)
__all__.extend(alpha_shape.__all__)
| [] |
tschoonj/cgat-daisy | tests/test_workflow_build_combinations.py | f85a2c82ca04f352aad00660cfc14a9aa6773168 | import pytest
from daisy.workflow import build_combinations
def test_one_option():
assert build_combinations(
{"option1": ["value1", "value2"]}) == \
[{'option1': 'value1'},
{'option1': 'value2'}]
def test_two_options():
assert build_combinations(
{'option1': ["value1", "value2"],
'option2': 'valueA'}) == \
[{'option2': 'valueA', 'option1': 'value1'},
{'option2': 'valueA', 'option1': 'value2'}]
def test_two_options_multiple_values():
assert build_combinations(
{'option1': ["value1", "value2"],
'option2': ["valueA", "valueB"]}) == \
[{'option1': 'value1', 'option2': 'valueA'},
{'option1': 'value1', 'option2': 'valueB'},
{'option1': 'value2', 'option2': 'valueA'},
{'option1': 'value2', 'option2': 'valueB'}]
def test_complex_values():
assert build_combinations(
{'option1': [{"value1": [1, 2, 3]},
{"value2": [4, 5, 6]}]}) == \
[{'option1': {'value1': [1, 2, 3]}},
{'option1': {'value2': [4, 5, 6]}}]
def test_groupby_design(tmp_path):
design_file = tmp_path / "design.tsv"
with open(design_file, "w") as outf:
outf.write("label\tc_option1\tc_option2\n")
outf.write("label1\tvalue1\tvalueA\n")
outf.write("label2\tvalue1\tvalueB\n")
outf.write("label3\tvalue2\tvalueA\n")
outf.write("label4\tvalue2\tvalueB\n")
assert build_combinations(
{"groupby": "file",
"label": "label",
"input": design_file,
"option1": "c_option1",
"option2": "c_option2"}) == \
[{'option1': 'value1', 'option2': 'valueA', "name": "label1"},
{'option1': 'value1', 'option2': 'valueB', "name": "label2"},
{'option1': 'value2', 'option2': 'valueA', "name": "label3"},
{'option1': 'value2', 'option2': 'valueB', "name": "label4"}]
def test_groupby_design_with_constant_option(tmp_path):
design_file = tmp_path / "design.tsv"
with open(design_file, "w") as outf:
outf.write("label\tc_option1\tc_option2\n")
outf.write("label1\tvalue1\tvalueA\n")
outf.write("label2\tvalue1\tvalueB\n")
outf.write("label3\tvalue2\tvalueA\n")
outf.write("label4\tvalue2\tvalueB\n")
assert build_combinations(
{"groupby": "file",
"label": "label",
"input": design_file,
"option1": "c_option1",
"option2": "c_option2",
"option3": "valueX"}) == \
[{'option1': 'value1', 'option2': 'valueA', "name": "label1", "option3": "valueX"},
{'option1': 'value1', 'option2': 'valueB', "name": "label2", "option3": "valueX"},
{'option1': 'value2', 'option2': 'valueA', "name": "label3", "option3": "valueX"},
{'option1': 'value2', 'option2': 'valueB', "name": "label4", "option3": "valueX"}]
def test_groupby_design_with_combinatorial_option(tmp_path):
design_file = tmp_path / "design.tsv"
with open(design_file, "w") as outf:
outf.write("label\tc_option1\tc_option2\n")
outf.write("label1\tvalue1\tvalueA\n")
outf.write("label2\tvalue1\tvalueB\n")
outf.write("label3\tvalue2\tvalueA\n")
outf.write("label4\tvalue2\tvalueB\n")
assert build_combinations(
{"groupby": "file",
"label": "label",
"input": design_file,
"option1": "c_option1",
"option2": "c_option2",
"option3": ["valueX", "valueY"]}) == \
[{'option1': 'value1', 'option2': 'valueA', "name": "label1", "option3": "valueX"},
{'option1': 'value1', 'option2': 'valueA', "name": "label1", "option3": "valueY"},
{'option1': 'value1', 'option2': 'valueB', "name": "label2", "option3": "valueX"},
{'option1': 'value1', 'option2': 'valueB', "name": "label2", "option3": "valueY"},
{'option1': 'value2', 'option2': 'valueA', "name": "label3", "option3": "valueX"},
{'option1': 'value2', 'option2': 'valueA', "name": "label3", "option3": "valueY"},
{'option1': 'value2', 'option2': 'valueB', "name": "label4", "option3": "valueX"},
{'option1': 'value2', 'option2': 'valueB', "name": "label4", "option3": "valueY"}]
def test_groupby_regex(tmp_path):
assert build_combinations(
{"groupby": "regex",
"files_a": ["{}/data_0.a".format(tmp_path),
"{}/data_1.a".format(tmp_path)],
"files_b": ["{}/data_0.b".format(tmp_path),
"{}/data_1.b".format(tmp_path)],
"files_a_regex": r"data_(\d+).a",
"files_b_regex": r"data_(\d+).b"}) == \
[{'files_a': "{}/data_0.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'name': "0"},
{'files_a': "{}/data_1.a".format(tmp_path),
'files_b': "{}/data_1.b".format(tmp_path),
'name': "1"}]
def test_groupby_regex_filters_when_data_point_missing(tmp_path):
assert build_combinations(
{"groupby": "regex",
"files_a": ["{}/data_0.a".format(tmp_path)],
"files_b": ["{}/data_0.b".format(tmp_path),
"{}/data_1.b".format(tmp_path)],
"files_a_regex": r"data_(\d+).a",
"files_b_regex": r"data_(\d+).b"}) == \
[{'files_a': "{}/data_0.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'name': "0"}]
def test_groupby_regex_with_constant(tmp_path):
assert build_combinations(
{"groupby": "regex",
"files_x": "x.y",
"files_a": ["{}/data_0.a".format(tmp_path),
"{}/data_1.a".format(tmp_path)],
"files_b": ["{}/data_0.b".format(tmp_path),
"{}/data_1.b".format(tmp_path)],
"files_a_regex": r"data_(\d+).a",
"files_b_regex": r"data_(\d+).b"}) == \
[
{'files_a': "{}/data_0.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'files_x': "x.y",
'name': "0"},
{'files_a': "{}/data_1.a".format(tmp_path),
'files_b': "{}/data_1.b".format(tmp_path),
'files_x': "x.y",
'name': "1"},
]
def test_groupby_regex_with_combinatorial_option(tmp_path):
assert build_combinations(
{"groupby": "regex",
"files_x": ["y.x", "z.x"],
"files_a": ["{}/data_0.a".format(tmp_path),
"{}/data_1.a".format(tmp_path)],
"files_b": ["{}/data_0.b".format(tmp_path),
"{}/data_1.b".format(tmp_path)],
"files_a_regex": r"data_(\d+).a",
"files_b_regex": r"data_(\d+).b"}) == \
[
{'files_a': "{}/data_0.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'files_x': "y.x",
'name': "0"},
{'files_a': "{}/data_0.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'files_x': "z.x",
'name': "0"},
{'files_a': "{}/data_1.a".format(tmp_path),
'files_b': "{}/data_1.b".format(tmp_path),
'files_x': "y.x",
'name': "1"},
{'files_a': "{}/data_1.a".format(tmp_path),
'files_b': "{}/data_1.b".format(tmp_path),
'files_x': "z.x",
'name': "1"},
]
def test_groupby_named_regex(tmp_path):
assert build_combinations(
{"groupby": "regex",
"files_a": ["{}/data_0.a".format(tmp_path),
"{}/data_1.a".format(tmp_path)],
"files_b": ["{}/data_0.b".format(tmp_path),
"{}/data_1.b".format(tmp_path)],
"files_a_regex": r"data_(?P<key1>\d+).a",
"files_b_regex": r"data_(?P<key1>\d+).b"}) == \
[{'files_a': "{}/data_0.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'name': "0"},
{'files_a': "{}/data_1.a".format(tmp_path),
'files_b': "{}/data_1.b".format(tmp_path),
'name': "1"}]
def test_groupby_named_regex_paired(tmp_path):
assert build_combinations(
{"groupby": "regex",
"files_a": ["{}/data_0_2.a".format(tmp_path),
"{}/data_0_3.a".format(tmp_path),
"{}/data_1_2.a".format(tmp_path),
"{}/data_1_3.a".format(tmp_path)],
"files_b": ["{}/data_0.b".format(tmp_path),
"{}/data_1.b".format(tmp_path)],
"files_a_regex": r"data_(?P<key1>\d+)_(?P<key2>\d+).a",
"files_b_regex": r"data_(?P<key1>\d+).b"}) == \
[{'files_a': "{}/data_0_2.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'name': "0_2"},
{'files_a': "{}/data_0_3.a".format(tmp_path),
'files_b': "{}/data_0.b".format(tmp_path),
'name': "0_3"},
{'files_a': "{}/data_1_2.a".format(tmp_path),
'files_b': "{}/data_1.b".format(tmp_path),
'name': "1_2"},
{'files_a': "{}/data_1_3.a".format(tmp_path),
'files_b': "{}/data_1.b".format(tmp_path),
'name': "1_3"}]
| [((96, 149), 'daisy.workflow.build_combinations', 'build_combinations', (["{'option1': ['value1', 'value2']}"], {}), "({'option1': ['value1', 'value2']})\n", (114, 149), False, 'from daisy.workflow import build_combinations\n'), ((265, 339), 'daisy.workflow.build_combinations', 'build_combinations', (["{'option1': ['value1', 'value2'], 'option2': 'valueA'}"], {}), "({'option1': ['value1', 'value2'], 'option2': 'valueA'})\n", (283, 339), False, 'from daisy.workflow import build_combinations\n'), ((508, 598), 'daisy.workflow.build_combinations', 'build_combinations', (["{'option1': ['value1', 'value2'], 'option2': ['valueA', 'valueB']}"], {}), "({'option1': ['value1', 'value2'], 'option2': ['valueA',\n 'valueB']})\n", (526, 598), False, 'from daisy.workflow import build_combinations\n'), ((874, 953), 'daisy.workflow.build_combinations', 'build_combinations', (["{'option1': [{'value1': [1, 2, 3]}, {'value2': [4, 5, 6]}]}"], {}), "({'option1': [{'value1': [1, 2, 3]}, {'value2': [4, 5, 6]}]})\n", (892, 953), False, 'from daisy.workflow import build_combinations\n'), ((1452, 1583), 'daisy.workflow.build_combinations', 'build_combinations', (["{'groupby': 'file', 'label': 'label', 'input': design_file, 'option1':\n 'c_option1', 'option2': 'c_option2'}"], {}), "({'groupby': 'file', 'label': 'label', 'input':\n design_file, 'option1': 'c_option1', 'option2': 'c_option2'})\n", (1470, 1583), False, 'from daisy.workflow import build_combinations\n'), ((2312, 2468), 'daisy.workflow.build_combinations', 'build_combinations', (["{'groupby': 'file', 'label': 'label', 'input': design_file, 'option1':\n 'c_option1', 'option2': 'c_option2', 'option3': 'valueX'}"], {}), "({'groupby': 'file', 'label': 'label', 'input':\n design_file, 'option1': 'c_option1', 'option2': 'c_option2', 'option3':\n 'valueX'})\n", (2330, 2468), False, 'from daisy.workflow import build_combinations\n'), ((3295, 3463), 'daisy.workflow.build_combinations', 'build_combinations', (["{'groupby': 'file', 'label': 'label', 'input': design_file, 'option1':\n 'c_option1', 'option2': 'c_option2', 'option3': ['valueX', 'valueY']}"], {}), "({'groupby': 'file', 'label': 'label', 'input':\n design_file, 'option1': 'c_option1', 'option2': 'c_option2', 'option3':\n ['valueX', 'valueY']})\n", (3313, 3463), False, 'from daisy.workflow import build_combinations\n')] |
katnoria/world-models | src/train_vae.py | 6584f35fa9508c991050ddc9c17f5862a00008fe | # class Encoder:
# pass
# class Decoder:
# pass
# class VariationAutoEncoder:
# pass
import os
os.environ['CUDA_VISIBLE_DEVICES'] = "0"
import pickle
import logging
from glob import glob
import numpy as np
from time import time
from datetime import datetime
from PIL import Image
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow import keras
if not os.path.exists("logs"):
os.makedirs("logs")
today = datetime.now().strftime('%Y%m%d')
# Create logger
logger = logging.getLogger("worldmodels")
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
# Uncomment to enable console logger
streamhandler = logging.StreamHandler()
streamhandler.setFormatter(formatter)
streamhandler.setLevel(logging.DEBUG)
logger.addHandler(streamhandler)
filehandler = logging.FileHandler(filename='logs/dataset.{}.log'.format(today))
filehandler.setFormatter(formatter)
filehandler.setLevel(logging.DEBUG)
logger.addHandler(filehandler)
AUTOTUNE = tf.data.experimental.AUTOTUNE
def load_preprocess_image(fname, resize_to=[64,64]):
image = tf.io.read_file(fname)
image = tf.image.decode_jpeg(image, channels=3)
# image = tf.image.resize(image, [64, 64])
image = tf.image.resize(image, resize_to)
image /= 255.0
return image
INPUT_SHAPE = (64,64,3)
# INPUT_SHAPE = (128,128,3)
LATENT_DIM = 32
encoder_input = keras.Input(shape=(INPUT_SHAPE), name='encoder_input_image')
x = keras.layers.Conv2D(32, 4, strides=(2,2), activation='relu', name='conv-1')(encoder_input)
x = keras.layers.Conv2D(64, 4, strides=(2,2), activation='relu', name='conv-2')(x)
x = keras.layers.Conv2D(128, 4, strides=(2,2), activation='relu', name='conv-3')(x)
x = keras.layers.Conv2D(256, 4, strides=(2,2), activation='relu', name='conv-4')(x)
# x = keras.layers.Conv2D(512, 4, strides=(2,2), activation='relu', name='conv-5')(x)
encoder_last_conv_shape = K.int_shape(x)[1:]
logger.info("encoder_last_conv_shape: {}".format(encoder_last_conv_shape))
x = keras.layers.Flatten()(x)
mu = keras.layers.Dense(LATENT_DIM, activation='linear', name="mean")(x)
logvar = keras.layers.Dense(LATENT_DIM, activation='linear', name="variance")(x)
encoder = keras.Model(encoder_input, [mu, logvar], name='encoder')
encoder.summary()
def sample(args):
mean, logvar = args
    # reparameterization trick: allows gradients to pass through the sample
# 1. sample from unit gaussian, then
# 2. multiply it with standard deviation and add mean
e = tf.random.normal(shape=(K.shape(mean)[0], LATENT_DIM))
    return e * tf.math.exp(0.5 * logvar) + mean
sampled_latent_vector = keras.layers.Lambda(sample)([mu, logvar])
decoder_input = keras.layers.Input(shape=K.int_shape(sampled_latent_vector)[1:], name='decoder_input')
x = keras.layers.Dense(np.prod(encoder_last_conv_shape))(decoder_input)
x = keras.layers.Reshape((1,1,np.prod(encoder_last_conv_shape)))(x)
x = keras.layers.Conv2DTranspose(128, kernel_size=5, strides=(2,2), activation='relu')(x)
x = keras.layers.Conv2DTranspose(64, kernel_size=5, strides=(2,2), activation='relu')(x)
x = keras.layers.Conv2DTranspose(32, kernel_size=6, strides=(2,2), activation='relu')(x)
# x = keras.layers.Conv2DTranspose(32, kernel_size=4, strides=(2,2), activation='relu')(x)
decoder_output = keras.layers.Conv2DTranspose(3, kernel_size=6, strides=(2,2))(x)
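# note: no activation on the final layer; the training loss consumes raw logits via sigmoid_cross_entropy_with_logits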
decoder = keras.Model(decoder_input, decoder_output, name='decoder')
decoder.summary()
# Taken from tensorflow VAE example
def log_normal_pdf(sample, mean, logvar):
log2pi = tf.math.log(2. * np.pi)
return tf.reduce_sum(
-.5 * ((sample - mean) ** 2. * tf.exp(-logvar) + logvar + log2pi), axis=1)
@tf.function
def calculate_loss(mean, logvar, labels, decoded_logits):
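    # single-sample Monte Carlo estimate of the negative ELBO: log p(x|z) + log p(z) - log q(z|x), negated and averaged over the batch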
xent_loss = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels, logits=decoded_logits)
z = sample([mean, logvar])
logpx_z = -tf.reduce_sum(xent_loss, axis=[1,2,3])
logpz = log_normal_pdf(z, 0., 0.)
logqz_x = log_normal_pdf(z, mean, logvar)
loss = -tf.reduce_mean(logpx_z + logpz - logqz_x)
return loss
class VAE(keras.Model):
def __init__(self, encoder, decoder):
super(VAE, self).__init__()
self.encoder = encoder
self.decoder = decoder
def train_vars(self):
return self.encoder.trainable_variables + self.decoder.trainable_variables
def encode(self, x):
encoded = self.encoder(x)
return encoded
def decode(self, z, apply_sigmoid=False):
logits = self.decoder(z)
if apply_sigmoid:
return tf.sigmoid(logits)
return logits
@tf.function
def train_step(train_x, model, optimizer):
with tf.GradientTape() as tape:
# use training inputs to approximate the posterior
mean, logvar = model.encode(train_x)
# sample latent vector from the learned mean and variance
latent_z = sample([mean, logvar])
# decode z
decoded_logits = model.decode(latent_z)
# calculate loss
loss = calculate_loss(mean, logvar, labels=train_x, decoded_logits=decoded_logits)
# calculate gradients
gradients = tape.gradient(loss, model.trainable_variables)
# apply gradients
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
return loss
def train(fnames, output_dirname="output", epochs=600, save_every_pct=0.3, print_every_pct=0.05):
logger.info('Total files: {}'.format(len(fnames)))
path_ds = tf.data.Dataset.from_tensor_slices(fnames)
image_ds = path_ds.map(load_preprocess_image, num_parallel_calls=AUTOTUNE)
# Dataset
BATCH_SIZE = 64
SHUFFLE_BUFFER_SIZE = len(fnames)
train_dataset = image_ds \
.shuffle(SHUFFLE_BUFFER_SIZE) \
.repeat() \
.batch(BATCH_SIZE) \
.prefetch(buffer_size=AUTOTUNE)
if not os.path.exists(output_dirname):
os.makedirs('{}/ckpt'.format(output_dirname))
os.makedirs('{}/imgs'.format(output_dirname))
# Number of training epochs
# EPOCHS = 600
logger.info('Training epochs: {}'.format(epochs))
# Initialize the Variational Autoencoder model
model = VAE(encoder, decoder)
# Define optimizer
optimizer = keras.optimizers.Adam(1e-4)
# keep track of losses
losses = []
# How often to print the loss
print_every = max(int(print_every_pct * epochs), 1)
# Model Checkpoint
# Save model and optimizer
ckpt = tf.train.Checkpoint(optimizer=optimizer, model=model)
# Set save path and how many checkpoints to save
checkpoint_path = '{}/ckpt/'.format(output_dirname)
logger.info('Checkpoints will be stored at {}'.format(checkpoint_path))
manager = tf.train.CheckpointManager(ckpt, checkpoint_path, max_to_keep=2)
# Load the latest checkpoint and restore
latest_ckpt = manager.latest_checkpoint
ckpt.restore(latest_ckpt)
if latest_ckpt:
logger.info('Restored from {}'.format(latest_ckpt))
else:
logger.info('Training from scratch')
# How often to save the checkpoint
save_every = max(int(save_every_pct * epochs), 1)
# We are now ready to start the training loop
elapsed_loop_time = time()
for epoch in range(0, epochs):
for train_x in train_dataset:
loss = train_step(train_x, model, optimizer)
losses.append(loss)
if epoch % print_every == 0:
now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            logger.info('{}:Epoch {}/{}: train loss {} in {} seconds'.format(now, epoch, epochs, losses[-1], time()-elapsed_loop_time))
elapsed_loop_time = time()
if epoch % save_every == 0:
save_path = manager.save()
logger.info('Saved checkpoint for step {}:{}'.format(epoch, save_path))
# Final Save
save_path = manager.save()
logger.info('Saved checkpoint for step {}'.format(save_path))
if __name__ == "__main__":
# Toons
# fnames = glob('{}/*.png'.format("/mnt/bigdrive/datasets/cartoonset/cartoonset10k/"))
# train(fnames, output_dirname="toons128")
# Car racing
fnames = glob('{}/*.png'.format("/mnt/bigdrive/projects/public_repos/world-models/src/imgs/"))
train(fnames, output_dirname="car_racing")
| [((566, 598), 'logging.getLogger', 'logging.getLogger', (['"""worldmodels"""'], {}), "('worldmodels')\n", (583, 598), False, 'import logging\n'), ((656, 688), 'logging.getLogger', 'logging.getLogger', (['"""worldmodels"""'], {}), "('worldmodels')\n", (673, 688), False, 'import logging\n'), ((701, 773), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(name)-12s %(levelname)-8s %(message)s"""'], {}), "('%(asctime)s %(name)-12s %(levelname)-8s %(message)s')\n", (718, 773), False, 'import logging\n'), ((860, 883), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (881, 883), False, 'import logging\n'), ((1576, 1634), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': 'INPUT_SHAPE', 'name': '"""encoder_input_image"""'}), "(shape=INPUT_SHAPE, name='encoder_input_image')\n", (1587, 1634), False, 'from tensorflow import keras\n'), ((2384, 2440), 'tensorflow.keras.Model', 'keras.Model', (['encoder_input', '[mu, logvar]'], {'name': '"""encoder"""'}), "(encoder_input, [mu, logvar], name='encoder')\n", (2395, 2440), False, 'from tensorflow import keras\n'), ((3545, 3603), 'tensorflow.keras.Model', 'keras.Model', (['decoder_input', 'decoder_output'], {'name': '"""decoder"""'}), "(decoder_input, decoder_output, name='decoder')\n", (3556, 3603), False, 'from tensorflow import keras\n'), ((465, 487), 'os.path.exists', 'os.path.exists', (['"""logs"""'], {}), "('logs')\n", (479, 487), False, 'import os\n'), ((493, 512), 'os.makedirs', 'os.makedirs', (['"""logs"""'], {}), "('logs')\n", (504, 512), False, 'import os\n'), ((1286, 1308), 'tensorflow.io.read_file', 'tf.io.read_file', (['fname'], {}), '(fname)\n', (1301, 1308), True, 'import tensorflow as tf\n'), ((1321, 1360), 'tensorflow.image.decode_jpeg', 'tf.image.decode_jpeg', (['image'], {'channels': '(3)'}), '(image, channels=3)\n', (1341, 1360), True, 'import tensorflow as tf\n'), ((1420, 1453), 'tensorflow.image.resize', 'tf.image.resize', (['image', 'resize_to'], {}), '(image, resize_to)\n', (1435, 1453), True, 'import tensorflow as tf\n'), ((1641, 1717), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(32)', '(4)'], {'strides': '(2, 2)', 'activation': '"""relu"""', 'name': '"""conv-1"""'}), "(32, 4, strides=(2, 2), activation='relu', name='conv-1')\n", (1660, 1717), False, 'from tensorflow import keras\n'), ((1736, 1812), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(64)', '(4)'], {'strides': '(2, 2)', 'activation': '"""relu"""', 'name': '"""conv-2"""'}), "(64, 4, strides=(2, 2), activation='relu', name='conv-2')\n", (1755, 1812), False, 'from tensorflow import keras\n'), ((1819, 1896), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(128)', '(4)'], {'strides': '(2, 2)', 'activation': '"""relu"""', 'name': '"""conv-3"""'}), "(128, 4, strides=(2, 2), activation='relu', name='conv-3')\n", (1838, 1896), False, 'from tensorflow import keras\n'), ((1903, 1980), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(256)', '(4)'], {'strides': '(2, 2)', 'activation': '"""relu"""', 'name': '"""conv-4"""'}), "(256, 4, strides=(2, 2), activation='relu', name='conv-4')\n", (1922, 1980), False, 'from tensorflow import keras\n'), ((2095, 2109), 'tensorflow.keras.backend.int_shape', 'K.int_shape', (['x'], {}), '(x)\n', (2106, 2109), True, 'import tensorflow.keras.backend as K\n'), ((2193, 2215), 'tensorflow.keras.layers.Flatten', 'keras.layers.Flatten', ([], {}), '()\n', (2213, 2215), False, 'from tensorflow import keras\n'), ((2224, 2288), 'tensorflow.keras.layers.Dense', 
'keras.layers.Dense', (['LATENT_DIM'], {'activation': '"""linear"""', 'name': '"""mean"""'}), "(LATENT_DIM, activation='linear', name='mean')\n", (2242, 2288), False, 'from tensorflow import keras\n'), ((2301, 2369), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['LATENT_DIM'], {'activation': '"""linear"""', 'name': '"""variance"""'}), "(LATENT_DIM, activation='linear', name='variance')\n", (2319, 2369), False, 'from tensorflow import keras\n'), ((2807, 2834), 'tensorflow.keras.layers.Lambda', 'keras.layers.Lambda', (['sample'], {}), '(sample)\n', (2826, 2834), False, 'from tensorflow import keras\n'), ((3097, 3185), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(128)'], {'kernel_size': '(5)', 'strides': '(2, 2)', 'activation': '"""relu"""'}), "(128, kernel_size=5, strides=(2, 2), activation\n ='relu')\n", (3125, 3185), False, 'from tensorflow import keras\n'), ((3187, 3274), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(64)'], {'kernel_size': '(5)', 'strides': '(2, 2)', 'activation': '"""relu"""'}), "(64, kernel_size=5, strides=(2, 2), activation=\n 'relu')\n", (3215, 3274), False, 'from tensorflow import keras\n'), ((3276, 3363), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(32)'], {'kernel_size': '(6)', 'strides': '(2, 2)', 'activation': '"""relu"""'}), "(32, kernel_size=6, strides=(2, 2), activation=\n 'relu')\n", (3304, 3363), False, 'from tensorflow import keras\n'), ((3469, 3531), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(3)'], {'kernel_size': '(6)', 'strides': '(2, 2)'}), '(3, kernel_size=6, strides=(2, 2))\n', (3497, 3531), False, 'from tensorflow import keras\n'), ((3714, 3738), 'tensorflow.math.log', 'tf.math.log', (['(2.0 * np.pi)'], {}), '(2.0 * np.pi)\n', (3725, 3738), True, 'import tensorflow as tf\n'), ((3933, 4010), 'tensorflow.nn.sigmoid_cross_entropy_with_logits', 'tf.nn.sigmoid_cross_entropy_with_logits', ([], {'labels': 'labels', 'logits': 'decoded_logits'}), '(labels=labels, logits=decoded_logits)\n', (3972, 4010), True, 'import tensorflow as tf\n'), ((5697, 5739), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['fnames'], {}), '(fnames)\n', (5731, 5739), True, 'import tensorflow as tf\n'), ((6434, 6463), 'tensorflow.keras.optimizers.Adam', 'keras.optimizers.Adam', (['(0.0001)'], {}), '(0.0001)\n', (6455, 6463), False, 'from tensorflow import keras\n'), ((6664, 6717), 'tensorflow.train.Checkpoint', 'tf.train.Checkpoint', ([], {'optimizer': 'optimizer', 'model': 'model'}), '(optimizer=optimizer, model=model)\n', (6683, 6717), True, 'import tensorflow as tf\n'), ((6917, 6981), 'tensorflow.train.CheckpointManager', 'tf.train.CheckpointManager', (['ckpt', 'checkpoint_path'], {'max_to_keep': '(2)'}), '(ckpt, checkpoint_path, max_to_keep=2)\n', (6943, 6981), True, 'import tensorflow as tf\n'), ((7405, 7411), 'time.time', 'time', ([], {}), '()\n', (7409, 7411), False, 'from time import time\n'), ((522, 536), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (534, 536), False, 'from datetime import datetime\n'), ((2976, 3008), 'numpy.prod', 'np.prod', (['encoder_last_conv_shape'], {}), '(encoder_last_conv_shape)\n', (2983, 3008), True, 'import numpy as np\n'), ((4057, 4097), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['xent_loss'], {'axis': '[1, 2, 3]'}), '(xent_loss, axis=[1, 2, 3])\n', (4070, 4097), True, 'import tensorflow as tf\n'), ((4192, 4233), 'tensorflow.reduce_mean', 
'tf.reduce_mean', (['(logpx_z + logpz - logqz_x)'], {}), '(logpx_z + logpz - logqz_x)\n', (4206, 4233), True, 'import tensorflow as tf\n'), ((4865, 4882), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (4880, 4882), True, 'import tensorflow as tf\n'), ((6063, 6093), 'os.path.exists', 'os.path.exists', (['output_dirname'], {}), '(output_dirname)\n', (6077, 6093), False, 'import os\n'), ((2754, 2773), 'tensorflow.math.exp', 'tf.math.exp', (['logvar'], {}), '(logvar)\n', (2765, 2773), True, 'import tensorflow as tf\n'), ((2891, 2925), 'tensorflow.keras.backend.int_shape', 'K.int_shape', (['sampled_latent_vector'], {}), '(sampled_latent_vector)\n', (2902, 2925), True, 'import tensorflow.keras.backend as K\n'), ((3055, 3087), 'numpy.prod', 'np.prod', (['encoder_last_conv_shape'], {}), '(encoder_last_conv_shape)\n', (3062, 3087), True, 'import numpy as np\n'), ((4749, 4767), 'tensorflow.sigmoid', 'tf.sigmoid', (['logits'], {}), '(logits)\n', (4759, 4767), True, 'import tensorflow as tf\n'), ((7837, 7843), 'time.time', 'time', ([], {}), '()\n', (7841, 7843), False, 'from time import time\n'), ((2708, 2721), 'tensorflow.keras.backend.shape', 'K.shape', (['mean'], {}), '(mean)\n', (2715, 2721), True, 'import tensorflow.keras.backend as K\n'), ((7629, 7643), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (7641, 7643), False, 'from datetime import datetime\n'), ((3801, 3816), 'tensorflow.exp', 'tf.exp', (['(-logvar)'], {}), '(-logvar)\n', (3807, 3816), True, 'import tensorflow as tf\n'), ((7778, 7784), 'time.time', 'time', ([], {}), '()\n', (7782, 7784), False, 'from time import time\n')] |
harryzcy/canvas-file-syncer | login.py | 16b98ee164df8570605b1a274c02f0dc7403730e | import time
from config import get_password, get_username
from playwright.sync_api import Page
def login(page: Page, url: str, landing_url: str):
raise RuntimeError("default login not supported")
def login_kenan_flagler(page: Page, url: str, landing_url: str) -> None:
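    # SSO flow: ONYEN login button, then email, then password, then an optional SMS code for 2FA, then the "don't show this again" prompt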
page.goto(url)
page.wait_for_load_state('load')
if page.url.startswith(landing_url):
return
with page.expect_navigation():
page.locator("text=ONYEN Login").click()
time.sleep(0.5)
page.locator("input[type=email]").fill(get_username())
with page.expect_navigation():
page.locator("input[type=submit]").click()
time.sleep(1)
page.locator("input[type=password]").fill(get_password())
with page.expect_navigation():
page.click('input[type=submit]')
if page.url.endswith("/login"):
# 2-factor auth
page.locator("div[role=\"button\"]:has-text(\"Text\")").click()
print("Enter code: ", end="")
code = input()
code = code.strip()
page.locator("[aria-label=\"Code\"]").fill(code)
with page.expect_navigation():
page.locator("text=Verify").click()
page.locator("[aria-label=\"Don\\'t\\ show\\ this\\ again\"]").check()
page.locator("text=Yes").click()
time.sleep(0.5)
assert page.url.startswith(landing_url)
| [((480, 495), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (490, 495), False, 'import time\n'), ((646, 659), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (656, 659), False, 'import time\n'), ((1291, 1306), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (1301, 1306), False, 'import time\n'), ((539, 553), 'config.get_username', 'get_username', ([], {}), '()\n', (551, 553), False, 'from config import get_password, get_username\n'), ((706, 720), 'config.get_password', 'get_password', ([], {}), '()\n', (718, 720), False, 'from config import get_password, get_username\n')] |
xa4a/multitidal | multitidal/client_lib.py | 26f757f12464e8f935c0389c6356b97cfaa9f03f | import asyncio
import json
import os
import pty
import shutil
import sys
import tty
import termios
import time
import threading
import tornado.iostream
from tornado.ioloop import IOLoop
from tornado.websocket import websocket_connect
ioloop = tornado.ioloop.IOLoop.instance()
SSH_LOGIN = "root"
SSH_PASSWORD = "algorave"
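# "ls -l" plus Enter, a right-arrow escape (\x1bOC), then Ctrl-A ("\x01") followed by "0": GNU screen's shortcut for switching to window 0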
SCREEN_TO_SCREEN_0_SEQ = b"ls -l\r\x1bOC" + b"\x010" # ^A 0
async def send_stdin_to_ws_task(ws, on_finish_cb):
print("mangling terminal")
try:
fn = os.dup(sys.stdin.fileno())
inp = tornado.iostream.PipeIOStream(fn)
mode = termios.tcgetattr(sys.stdin.fileno())
tty.setraw(fn)
while True:
try:
print("reading stdin", end="\r\n")
content = await inp.read_bytes(100, partial=True)
print("read stdin", end="\r\n")
# content = await self.inp.read_bytes(100, partial=True)
except tornado.iostream.StreamClosedError:
print("Stdin closed", end="\r\n")
# await self.finish()
ioloop.add_callback(on_finish_cb)
break
print(f"stdin: {content}", end="\r\n")
if content[0] == 3 or not content: # CTRL-C
print("Got a ^C", end="\r\n")
ioloop.add_callback(on_finish_cb)
break
ioloop.add_callback(
ws.write_message,
json.dumps(
{
"client_command": "keystrokes",
"keystrokes": [int(x) for x in content],
}
),
)
print("no exc", end="\r\n")
except asyncio.CancelledError:
print("stdin read task cancelled", end="\r\n")
except Exception as e: # pylint: disable=broad-except
print(f"Exception: {e}")
finally:
inp.close()
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, mode)
print("finally")
async def run_ssh(host, port, login=SSH_LOGIN, password=SSH_PASSWORD):
os.environ["SSHPASS"] = password
ssh_cmd = [
"ssh",
"-o",
"PreferredAuthentications=password",
"-o",
"PubkeyAuthentication=no",
"-o",
"StrictHostKeyChecking=no", # Skip fingerpint warning.
f"{login}@{host}",
"-p",
str(port),
]
sshpass_cmd = [shutil.which("sshpass"), "-e"] + ssh_cmd
args = sshpass_cmd
print(" ".join(args))
e = threading.Event()
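    # the event guards the window-switch prefix: it is prepended to the very first chunk of stdin only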
def stdin_read(fd):
if not e.is_set():
e.set()
return SCREEN_TO_SCREEN_0_SEQ + os.read(fd, 1024)
b = os.read(fd, 1024)
return b
def master_read(fd):
b = os.read(fd, 1024)
return b
# Let Web UI connect to screen 0 first.
time.sleep(3)
res = pty.spawn(args, master_read=master_read, stdin_read=stdin_read)
print(f"ssh returned {res}")
class Client:
mode: str
def __init__(self, url, timeout):
self.url = url
self.timeout = timeout
self.ioloop = IOLoop.instance()
self.ws = None
self.send_stdin_task = None
async def connect(self):
print("trying to connect")
try:
self.ws = await websocket_connect(self.url)
except Exception as e: # pylint: disable=broad-except
print(f"connection error: {str(e)}")
else:
print("connected")
# await self.ws.write_message({'client': self.i})
self.mode = "idle"
self.ioloop.spawn_callback(self.run_idle)
self.ioloop.spawn_callback(self.run)
def finish_ws(self):
if self.ws:
self.ws.close()
self.ws = None
async def finish(self):
if self.send_stdin_task:
await self.stop_idle()
self.finish_ws()
self.ioloop.stop()
async def run_idle(self):
assert not self.send_stdin_task
print("running idle, spawning task")
self.send_stdin_task = asyncio.create_task(
send_stdin_to_ws_task(self.ws, self.finish)
)
async def stop_idle(self):
assert self.send_stdin_task
self.send_stdin_task.cancel()
await self.send_stdin_task
self.send_stdin_task = None
@staticmethod
async def run_ssh(host, port):
# Blocks ioloop
await run_ssh(host, port)
async def run(self):
while True:
msg = await self.ws.read_message()
if msg is None:
print("server left, terminating", end="\r\n")
self.ioloop.add_callback(self.finish)
return
msg = json.loads(msg)
print(f"got msg: {msg}", end="\r\n")
if "mode" not in msg:
continue
if msg["mode"] == "ssh":
host, port = msg["ssh"]["host"], msg["ssh"]["port"]
print(f"Connecting to ssh {host}:{port}...", end="\r\n")
await self.stop_idle()
await self.run_ssh(host, port)
print("restarting idle task")
self.finish_ws()
await self.connect()
break
| [((2501, 2518), 'threading.Event', 'threading.Event', ([], {}), '()\n', (2516, 2518), False, 'import threading\n'), ((2823, 2836), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2833, 2836), False, 'import time\n'), ((2847, 2910), 'pty.spawn', 'pty.spawn', (['args'], {'master_read': 'master_read', 'stdin_read': 'stdin_read'}), '(args, master_read=master_read, stdin_read=stdin_read)\n', (2856, 2910), False, 'import pty\n'), ((629, 643), 'tty.setraw', 'tty.setraw', (['fn'], {}), '(fn)\n', (639, 643), False, 'import tty\n'), ((1911, 1964), 'termios.tcsetattr', 'termios.tcsetattr', (['sys.stdin', 'termios.TCSADRAIN', 'mode'], {}), '(sys.stdin, termios.TCSADRAIN, mode)\n', (1928, 1964), False, 'import termios\n'), ((2666, 2683), 'os.read', 'os.read', (['fd', '(1024)'], {}), '(fd, 1024)\n', (2673, 2683), False, 'import os\n'), ((2739, 2756), 'os.read', 'os.read', (['fd', '(1024)'], {}), '(fd, 1024)\n', (2746, 2756), False, 'import os\n'), ((3089, 3106), 'tornado.ioloop.IOLoop.instance', 'IOLoop.instance', ([], {}), '()\n', (3104, 3106), False, 'from tornado.ioloop import IOLoop\n'), ((500, 518), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (516, 518), False, 'import sys\n'), ((601, 619), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (617, 619), False, 'import sys\n'), ((2402, 2425), 'shutil.which', 'shutil.which', (['"""sshpass"""'], {}), "('sshpass')\n", (2414, 2425), False, 'import shutil\n'), ((4707, 4722), 'json.loads', 'json.loads', (['msg'], {}), '(msg)\n', (4717, 4722), False, 'import json\n'), ((2635, 2652), 'os.read', 'os.read', (['fd', '(1024)'], {}), '(fd, 1024)\n', (2642, 2652), False, 'import os\n'), ((3273, 3300), 'tornado.websocket.websocket_connect', 'websocket_connect', (['self.url'], {}), '(self.url)\n', (3290, 3300), False, 'from tornado.websocket import websocket_connect\n')] |
olds/ha_gehome | custom_components/ge_home/entities/common/ge_water_heater.py | 5cb24deab64bcade45861da0497a84631845922c | import abc
import logging
from typing import Any, Dict, List, Optional
from homeassistant.components.water_heater import WaterHeaterEntity
from homeassistant.const import (
TEMP_FAHRENHEIT,
TEMP_CELSIUS
)
from gehomesdk import ErdCode, ErdMeasurementUnits
from ...const import DOMAIN
from .ge_erd_entity import GeEntity
_LOGGER = logging.getLogger(__name__)
class GeWaterHeater(GeEntity, WaterHeaterEntity, metaclass=abc.ABCMeta):
"""Mock temperature/operation mode supporting device as a water heater"""
@property
def heater_type(self) -> str:
raise NotImplementedError
@property
def operation_list(self) -> List[str]:
raise NotImplementedError
@property
def unique_id(self) -> str:
return f"{DOMAIN}_{self.serial_or_mac}_{self.heater_type}"
@property
def name(self) -> Optional[str]:
return f"{self.serial_or_mac} {self.heater_type.title()}"
@property
def temperature_unit(self):
measurement_system = self.appliance.get_erd_value(ErdCode.TEMPERATURE_UNIT)
if measurement_system == ErdMeasurementUnits.METRIC:
return TEMP_CELSIUS
return TEMP_FAHRENHEIT
@property
def supported_features(self):
raise NotImplementedError
| [((340, 367), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (357, 367), False, 'import logging\n')] |
zyuchuan/scrapy | scrapy/contracts/default.py | ce24f53957b41877319a5ffc6cf26f0a18baaec2 | import json
from scrapy.item import BaseItem
from scrapy.http import Request
from scrapy.exceptions import ContractFail
from scrapy.contracts import Contract
# contracts
class UrlContract(Contract):
""" Contract to set the url of the request (mandatory)
@url http://scrapy.org
"""
name = 'url'
def adjust_request_args(self, args):
args['url'] = self.args[0]
return args
class CallbackKeywordArgumentsContract(Contract):
""" Contract to set the keyword arguments for the request.
The value should be a JSON-encoded dictionary, e.g.:
@cb_kwargs {"arg1": "some value"}
"""
name = 'cb_kwargs'
def adjust_request_args(self, args):
args['cb_kwargs'] = json.loads(' '.join(self.args))
return args
class ReturnsContract(Contract):
""" Contract to check the output of a callback
general form:
@returns request(s)/item(s) [min=1 [max]]
e.g.:
@returns request
@returns request 2
@returns request 2 10
@returns request 0 10
"""
name = 'returns'
objects = {
'request': Request,
'requests': Request,
'item': (BaseItem, dict),
'items': (BaseItem, dict),
}
def __init__(self, *args, **kwargs):
super(ReturnsContract, self).__init__(*args, **kwargs)
assert len(self.args) in [1, 2, 3]
self.obj_name = self.args[0] or None
self.obj_type = self.objects[self.obj_name]
try:
self.min_bound = int(self.args[1])
except IndexError:
self.min_bound = 1
try:
self.max_bound = int(self.args[2])
except IndexError:
self.max_bound = float('inf')
def post_process(self, output):
occurrences = 0
for x in output:
if isinstance(x, self.obj_type):
occurrences += 1
assertion = (self.min_bound <= occurrences <= self.max_bound)
if not assertion:
if self.min_bound == self.max_bound:
expected = self.min_bound
else:
expected = '%s..%s' % (self.min_bound, self.max_bound)
raise ContractFail("Returned %s %s, expected %s" % \
(occurrences, self.obj_name, expected))
class ScrapesContract(Contract):
""" Contract to check presence of fields in scraped items
@scrapes page_name page_body
"""
name = 'scrapes'
def post_process(self, output):
for x in output:
if isinstance(x, (BaseItem, dict)):
missing = [arg for arg in self.args if arg not in x]
if missing:
raise ContractFail(
"Missing fields: %s" % ", ".join(missing))
| [((2209, 2297), 'scrapy.exceptions.ContractFail', 'ContractFail', (["('Returned %s %s, expected %s' % (occurrences, self.obj_name, expected))"], {}), "('Returned %s %s, expected %s' % (occurrences, self.obj_name,\n expected))\n", (2221, 2297), False, 'from scrapy.exceptions import ContractFail\n')] |
pabloduque0/cnn_deconv_viz | networks/metrics.py | 3fc3d8a9dbad8e8e28d4df4023bdb438e4c9cf85 | from keras import backend as K
import tensorflow as tf
import numpy as np
def custom_dice_coefficient(y_true, y_pred, recall_weight=0.3):
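    # Dice score with a recall bonus: dice + dice * lesion_recall * recall_weight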
recall_weight = tf.Variable(recall_weight, dtype=tf.float32)
regular_dice = dice_coefficient(y_true, y_pred)
recall = lession_recall(y_true, y_pred)
recall = tf.cast(recall, dtype=tf.float32)
recall_addition = recall * regular_dice * recall_weight
return regular_dice + recall_addition
def lession_recall(y_true, y_pred):
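    # lesion-wise recall: the fraction of connected components in y_true that overlap the prediction at all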
conn_comp_true = tf.contrib.image.connected_components(tf.cast(tf.squeeze(y_true, axis=[-1]), tf.bool))
conn_comp_pred = conn_comp_true * tf.cast(tf.squeeze(y_pred, axis=[-1]), tf.int32)
n_conn_comp_true, _ = tf.unique(K.flatten(conn_comp_true))
n_conn_comp_pred, _ = tf.unique(K.flatten(conn_comp_pred))
n_conn_comp_true = tf.size(input=n_conn_comp_true) - 1
n_conn_comp_pred = tf.size(input=n_conn_comp_pred) - 1
recall = tf.cond(pred=tf.equal(n_conn_comp_pred, tf.Variable(0)),
true_fn=lambda: tf.Variable(1.0, dtype=tf.float64), false_fn=lambda: n_conn_comp_pred / n_conn_comp_true)
return recall
def thresholded_dice(y_true, y_pred):
y_true = tf.math.floor(y_true + 0.6)
return dice_coefficient(y_true, y_pred)
def thresholded_dice_loss(y_true, y_pred):
return -thresholded_dice(y_true, y_pred)
def custom_dice_coefficient_loss(y_true, y_pred):
return -custom_dice_coefficient(y_true, y_pred)
def dice_coefficient(y_true, y_pred, smooth=0.1):
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_pred_f * y_true_f)
return (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
def dice_coefficient_loss(y_true, y_pred):
return -dice_coefficient(y_true, y_pred)
def sigmoid(x):
return 1. / (1. + K.exp(-x))
def segmentation_recall(y_true, y_pred):
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
recall = K.sum(y_pred_f * y_true_f) / tf.cast(K.sum(y_true_f), tf.float32)
return recall
def weighted_crossentropy_pixelwise(y_true, y_pred):
y_pred = tf.clip_by_value(y_pred, 1e-7, 1 - 1e-7)
y_pred = K.log(y_pred / (1 - y_pred))
wmh_indexes = np.where(y_true == 1.0)
weights = np.repeat(1.0, 240 * 240)
weights = np.reshape(weights, (1, 240, 240, 1))
weights[wmh_indexes] = 5000.0
crossentropy = (y_true * weights * -K.log(sigmoid(y_pred)) + (1 - y_true * weights) * -K.log(1 - sigmoid(y_pred)))
return crossentropy
def prediction_count(y_true, y_pred):
return tf.math.count_nonzero(y_pred)
def label_count(y_true, y_pred):
return tf.math.count_nonzero(y_true)
def prediction_sum(y_true, y_pred):
return tf.reduce_sum(input_tensor=y_pred)
def label_sum(y_true, y_pred):
return tf.reduce_sum(input_tensor=y_true)
custom_dice_coef = custom_dice_coefficient
custom_dice_loss = custom_dice_coefficient_loss
dice_coef = dice_coefficient
dice_coef_loss = dice_coefficient_loss
weighted_crossentropy = weighted_crossentropy_pixelwise
predicted_count = prediction_count
predicted_sum = prediction_sum
ground_truth_count = label_count
ground_truth_sum = label_sum
pixel_recall = segmentation_recall
obj_recall = lession_recall | [((160, 204), 'tensorflow.Variable', 'tf.Variable', (['recall_weight'], {'dtype': 'tf.float32'}), '(recall_weight, dtype=tf.float32)\n', (171, 204), True, 'import tensorflow as tf\n'), ((314, 347), 'tensorflow.cast', 'tf.cast', (['recall'], {'dtype': 'tf.float32'}), '(recall, dtype=tf.float32)\n', (321, 347), True, 'import tensorflow as tf\n'), ((1197, 1224), 'tensorflow.math.floor', 'tf.math.floor', (['(y_true + 0.6)'], {}), '(y_true + 0.6)\n', (1210, 1224), True, 'import tensorflow as tf\n'), ((1528, 1545), 'keras.backend.flatten', 'K.flatten', (['y_true'], {}), '(y_true)\n', (1537, 1545), True, 'from keras import backend as K\n'), ((1561, 1578), 'keras.backend.flatten', 'K.flatten', (['y_pred'], {}), '(y_pred)\n', (1570, 1578), True, 'from keras import backend as K\n'), ((1599, 1625), 'keras.backend.sum', 'K.sum', (['(y_pred_f * y_true_f)'], {}), '(y_pred_f * y_true_f)\n', (1604, 1625), True, 'from keras import backend as K\n'), ((1910, 1927), 'keras.backend.flatten', 'K.flatten', (['y_true'], {}), '(y_true)\n', (1919, 1927), True, 'from keras import backend as K\n'), ((1943, 1960), 'keras.backend.flatten', 'K.flatten', (['y_pred'], {}), '(y_pred)\n', (1952, 1960), True, 'from keras import backend as K\n'), ((2128, 2170), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['y_pred', '(1e-07)', '(1 - 1e-07)'], {}), '(y_pred, 1e-07, 1 - 1e-07)\n', (2144, 2170), True, 'import tensorflow as tf\n'), ((2182, 2210), 'keras.backend.log', 'K.log', (['(y_pred / (1 - y_pred))'], {}), '(y_pred / (1 - y_pred))\n', (2187, 2210), True, 'from keras import backend as K\n'), ((2230, 2253), 'numpy.where', 'np.where', (['(y_true == 1.0)'], {}), '(y_true == 1.0)\n', (2238, 2253), True, 'import numpy as np\n'), ((2268, 2293), 'numpy.repeat', 'np.repeat', (['(1.0)', '(240 * 240)'], {}), '(1.0, 240 * 240)\n', (2277, 2293), True, 'import numpy as np\n'), ((2308, 2345), 'numpy.reshape', 'np.reshape', (['weights', '(1, 240, 240, 1)'], {}), '(weights, (1, 240, 240, 1))\n', (2318, 2345), True, 'import numpy as np\n'), ((2575, 2604), 'tensorflow.math.count_nonzero', 'tf.math.count_nonzero', (['y_pred'], {}), '(y_pred)\n', (2596, 2604), True, 'import tensorflow as tf\n'), ((2651, 2680), 'tensorflow.math.count_nonzero', 'tf.math.count_nonzero', (['y_true'], {}), '(y_true)\n', (2672, 2680), True, 'import tensorflow as tf\n'), ((2730, 2764), 'tensorflow.reduce_sum', 'tf.reduce_sum', ([], {'input_tensor': 'y_pred'}), '(input_tensor=y_pred)\n', (2743, 2764), True, 'import tensorflow as tf\n'), ((2809, 2843), 'tensorflow.reduce_sum', 'tf.reduce_sum', ([], {'input_tensor': 'y_true'}), '(input_tensor=y_true)\n', (2822, 2843), True, 'import tensorflow as tf\n'), ((720, 745), 'keras.backend.flatten', 'K.flatten', (['conn_comp_true'], {}), '(conn_comp_true)\n', (729, 745), True, 'from keras import backend as K\n'), ((783, 808), 'keras.backend.flatten', 'K.flatten', (['conn_comp_pred'], {}), '(conn_comp_pred)\n', (792, 808), True, 'from keras import backend as K\n'), ((833, 864), 'tensorflow.size', 'tf.size', ([], {'input': 'n_conn_comp_true'}), '(input=n_conn_comp_true)\n', (840, 864), True, 'import tensorflow as tf\n'), ((892, 923), 'tensorflow.size', 'tf.size', ([], {'input': 'n_conn_comp_pred'}), '(input=n_conn_comp_pred)\n', (899, 923), True, 'import tensorflow as tf\n'), ((1975, 2001), 'keras.backend.sum', 'K.sum', (['(y_pred_f * y_true_f)'], {}), '(y_pred_f * y_true_f)\n', (1980, 2001), True, 'from keras import backend as K\n'), ((555, 584), 'tensorflow.squeeze', 'tf.squeeze', (['y_true'], 
{'axis': '[-1]'}), '(y_true, axis=[-1])\n', (565, 584), True, 'import tensorflow as tf\n'), ((642, 671), 'tensorflow.squeeze', 'tf.squeeze', (['y_pred'], {'axis': '[-1]'}), '(y_pred, axis=[-1])\n', (652, 671), True, 'import tensorflow as tf\n'), ((1842, 1851), 'keras.backend.exp', 'K.exp', (['(-x)'], {}), '(-x)\n', (1847, 1851), True, 'from keras import backend as K\n'), ((2012, 2027), 'keras.backend.sum', 'K.sum', (['y_true_f'], {}), '(y_true_f)\n', (2017, 2027), True, 'from keras import backend as K\n'), ((982, 996), 'tensorflow.Variable', 'tf.Variable', (['(0)'], {}), '(0)\n', (993, 996), True, 'import tensorflow as tf\n'), ((1036, 1070), 'tensorflow.Variable', 'tf.Variable', (['(1.0)'], {'dtype': 'tf.float64'}), '(1.0, dtype=tf.float64)\n', (1047, 1070), True, 'import tensorflow as tf\n'), ((1669, 1684), 'keras.backend.sum', 'K.sum', (['y_true_f'], {}), '(y_true_f)\n', (1674, 1684), True, 'from keras import backend as K\n'), ((1687, 1702), 'keras.backend.sum', 'K.sum', (['y_pred_f'], {}), '(y_pred_f)\n', (1692, 1702), True, 'from keras import backend as K\n')] |
kk0501/spider | scrapy_template/scrapy_template/pipelines.py | 404540a76922885f9dd12f9a513f5ec88b0d2072 | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exceptions import DropItem
from hashlib import md5
from scrapy import log
from twisted.enterprise import adbapi
from scrapy_template.items import ScrapyTemplateItem
class ScrapyTemplatePipeline(object):
def __init__(self, dbpool):
self.urls_seen = set()
self.dbpool = dbpool
@classmethod
def from_settings(cls, settings):
dbargs = dict(
host=settings['MYSQL_HOST'],
db=settings['MYSQL_DBNAME'],
user=settings['MYSQL_USER'],
passwd=settings['MYSQL_PASSWD'],
charset='utf8',
use_unicode=True,
)
dbpool = adbapi.ConnectionPool('MySQLdb', **dbargs)
return cls(dbpool)
def process_item(self, item, spider):
if isinstance(item, ScrapyTemplateItem):
if item['url'] in self.urls_seen:
raise DropItem("Duplicate item found: %s" % item['url'])
else:
self.urls_seen.add(item['url'])
d = self.dbpool.runInteraction(self._do_upsert, item, spider)
d.addErrback(self._handle_error, item, spider)
d.addBoth(lambda _: item)
return d
else:
return item
def _do_upsert(self, conn, item, spider):
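        # despite the name, this only inserts: the item is written when no row with the same guid exists yet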
guid = self._get_id(item)
conn.execute("""SELECT EXISTS(
SELECT 1 FROM example WHERE guid = %s
)""", (guid, ))
ret = conn.fetchone()[0]
if not ret:
conn.execute("""
INSERT INTO example (category, name, color, images, price, url, guid)
VALUES (%s, %s, %s, %s, %s, %s, %s)
""", (item['category'], item['name'], item['color'],
item['images'], item['price'], item['url'], guid))
spider.log("Item stored in db: %s %r" % (guid, item))
def _handle_error(self, failure, item, spider):
log.err(failure)
def _get_id(self, item):
return md5(item['url']).hexdigest() | [((834, 876), 'twisted.enterprise.adbapi.ConnectionPool', 'adbapi.ConnectionPool', (['"""MySQLdb"""'], {}), "('MySQLdb', **dbargs)\n", (855, 876), False, 'from twisted.enterprise import adbapi\n'), ((2103, 2119), 'scrapy.log.err', 'log.err', (['failure'], {}), '(failure)\n', (2110, 2119), False, 'from scrapy import log\n'), ((1064, 1114), 'scrapy.exceptions.DropItem', 'DropItem', (["('Duplicate item found: %s' % item['url'])"], {}), "('Duplicate item found: %s' % item['url'])\n", (1072, 1114), False, 'from scrapy.exceptions import DropItem\n'), ((2165, 2181), 'hashlib.md5', 'md5', (["item['url']"], {}), "(item['url'])\n", (2168, 2181), False, 'from hashlib import md5\n')] |
willferreira/multilabel-stance-detection | run_training_size_bootstrap.py | ddc0ed9caa26b63f40e89a377f1738e83fcb7724 | import click
import pickle
import numpy as np
from collections import defaultdict
from utils import reset_seeds, get_dataset, load_embeddings
from mlp_multilabel_wrapper import PowersetKerasWrapper, MultiOutputKerasWrapper
from mlp_utils import CrossLabelDependencyLoss
def get_random_sample(dataset_name='bbc', train_frac=0.25):
# get model runner specific dataset
_, _, y_train, y_test = get_dataset(dataset_name)
X_train, X_test = load_embeddings(dataset_name)
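    # stratified subsample: group training rows by their joined label vector and draw the same fraction from each group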
grps = y_train.apply(lambda v: ''.join(map(str, v)), axis=1).to_frame(0).groupby(0)[0]
train_idx = grps.apply(lambda g: g.sample(frac=train_frac)).index.get_level_values(1)
X_train_sample = X_train.loc[train_idx, :]
y_train_sample = y_train.loc[train_idx, :]
return X_train_sample, X_test, y_train_sample, y_test
def _get_label_set(y):
return set(y.apply(lambda v: ''.join(map(str, v)), axis=1).values)
@click.command()
@click.option('--n-samples', default=10)
@click.option('--dataset-name', default='moral-dataset-MeToo')
def run(n_samples, dataset_name):
mlp_cld_bootstrap_results = defaultdict(lambda: defaultdict(list))
mlp_powerset_bootstrap_results = defaultdict(lambda: defaultdict(list))
mlp_labels_bootstrap_results = defaultdict(lambda: defaultdict(list))
reset_seeds()
for i in range(n_samples):
print('Running bootstrap sample: {}'.format(i + 1))
for f in np.arange(0.1, 1.1, 0.1):
X_train, X_test, y_train, y_test = get_random_sample(dataset_name, train_frac=f)
print('Training set size: {}'.format(X_train.shape))
print('Test set size: {}'.format(X_test.shape))
mlp_powerset_model = PowersetKerasWrapper(columns=y_train.columns)
mlp_powerset_model.fit(X_train.values, y_train.values)
y_pred_mlp = mlp_powerset_model.predict(X_test.values)
mlp_powerset_bootstrap_results[i][f].append(y_pred_mlp)
cld_loss = CrossLabelDependencyLoss(alpha=0.2)
mlp_cld_model = MultiOutputKerasWrapper(columns=y_train.columns, loss=cld_loss)
mlp_cld_model.fit(X_train.values, y_train.values)
y_pred_cld = mlp_cld_model.predict(X_test.values)
mlp_cld_bootstrap_results[i][f].append(y_pred_cld)
mlp_labels_bootstrap_results[i][f].append((_get_label_set(y_train), _get_label_set(y_test)))
with open('training_size_bootstrap_{}.pkl'.format(dataset_name), 'wb') as f:
pickle.dump({'cld': dict(mlp_cld_bootstrap_results),
'powerset': dict(mlp_powerset_bootstrap_results),
'labels': dict(mlp_labels_bootstrap_results)}, f)
if __name__ == '__main__':
run()
| [((912, 927), 'click.command', 'click.command', ([], {}), '()\n', (925, 927), False, 'import click\n'), ((929, 968), 'click.option', 'click.option', (['"""--n-samples"""'], {'default': '(10)'}), "('--n-samples', default=10)\n", (941, 968), False, 'import click\n'), ((970, 1031), 'click.option', 'click.option', (['"""--dataset-name"""'], {'default': '"""moral-dataset-MeToo"""'}), "('--dataset-name', default='moral-dataset-MeToo')\n", (982, 1031), False, 'import click\n'), ((400, 425), 'utils.get_dataset', 'get_dataset', (['dataset_name'], {}), '(dataset_name)\n', (411, 425), False, 'from utils import reset_seeds, get_dataset, load_embeddings\n'), ((448, 477), 'utils.load_embeddings', 'load_embeddings', (['dataset_name'], {}), '(dataset_name)\n', (463, 477), False, 'from utils import reset_seeds, get_dataset, load_embeddings\n'), ((1292, 1305), 'utils.reset_seeds', 'reset_seeds', ([], {}), '()\n', (1303, 1305), False, 'from utils import reset_seeds, get_dataset, load_embeddings\n'), ((1414, 1438), 'numpy.arange', 'np.arange', (['(0.1)', '(1.1)', '(0.1)'], {}), '(0.1, 1.1, 0.1)\n', (1423, 1438), True, 'import numpy as np\n'), ((1118, 1135), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1129, 1135), False, 'from collections import defaultdict\n'), ((1194, 1211), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1205, 1211), False, 'from collections import defaultdict\n'), ((1268, 1285), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1279, 1285), False, 'from collections import defaultdict\n'), ((1693, 1738), 'mlp_multilabel_wrapper.PowersetKerasWrapper', 'PowersetKerasWrapper', ([], {'columns': 'y_train.columns'}), '(columns=y_train.columns)\n', (1713, 1738), False, 'from mlp_multilabel_wrapper import PowersetKerasWrapper, MultiOutputKerasWrapper\n'), ((1965, 2000), 'mlp_utils.CrossLabelDependencyLoss', 'CrossLabelDependencyLoss', ([], {'alpha': '(0.2)'}), '(alpha=0.2)\n', (1989, 2000), False, 'from mlp_utils import CrossLabelDependencyLoss\n'), ((2029, 2092), 'mlp_multilabel_wrapper.MultiOutputKerasWrapper', 'MultiOutputKerasWrapper', ([], {'columns': 'y_train.columns', 'loss': 'cld_loss'}), '(columns=y_train.columns, loss=cld_loss)\n', (2052, 2092), False, 'from mlp_multilabel_wrapper import PowersetKerasWrapper, MultiOutputKerasWrapper\n')] |
Shuailong/CCGSupertagging | code/evaluate.py | 891a6a477a4a05daeb847d4a4c33a1bc929d97b2 | #!/usr/bin/env python
# encoding: utf-8
"""
evaluate.py
Created by Shuailong on 2016-12-2.
Evaluate model accuracy on test set.
"""
from __future__ import print_function
from time import time
from keras.models import load_model
import os
from utils import true_accuracy
from dataset import get_data
from train import MODEL_FILE, MODEL_DIR
from train import data_generator
def main():
start_time = time()
print('\nGetting data...')
data = get_data(force=False)
X_test = data['X_test']
X_test_feats = data['X_test_feats']
y_test = data['y_test']
tag_size = len(data['tag_index'])
print('\nLoading models...')
model = load_model(os.path.join(MODEL_DIR, MODEL_FILE), custom_objects={'true_accuracy': true_accuracy})
print('\nEvaluating...')
_, true_acc = model.evaluate_generator(data_generator(X_test, X_test_feats, y_test, tag_size),
val_samples=len(X_test))
print('Test accuracy: {}.'.format(true_acc))
seconds = time() - start_time
minutes = seconds / 60
print('[Finished in {} seconds ({} minutes)]'.format(str(round(seconds, 1)),
str(round(minutes, 1))))
if __name__ == '__main__':
main()
| [((410, 416), 'time.time', 'time', ([], {}), '()\n', (414, 416), False, 'from time import time\n'), ((459, 480), 'dataset.get_data', 'get_data', ([], {'force': '(False)'}), '(force=False)\n', (467, 480), False, 'from dataset import get_data\n'), ((672, 707), 'os.path.join', 'os.path.join', (['MODEL_DIR', 'MODEL_FILE'], {}), '(MODEL_DIR, MODEL_FILE)\n', (684, 707), False, 'import os\n'), ((831, 885), 'train.data_generator', 'data_generator', (['X_test', 'X_test_feats', 'y_test', 'tag_size'], {}), '(X_test, X_test_feats, y_test, tag_size)\n', (845, 885), False, 'from train import data_generator\n'), ((1019, 1025), 'time.time', 'time', ([], {}), '()\n', (1023, 1025), False, 'from time import time\n')] |
Cloudlock/bravado | setup.py | bacf49ea9d791ec9f564a3a141c77995d2f395b0 | #!/usr/bin/env python
# Copyright (c) 2013, Digium, Inc.
# Copyright (c) 2014-2016, Yelp, Inc.
import os
from setuptools import setup
import bravado
setup(
name="bravado",
# cloudlock version, no twisted dependency
version=bravado.version + "cl",
license="BSD 3-Clause License",
description="Library for accessing Swagger-enabled API's",
long_description=open(os.path.join(os.path.dirname(__file__),
"README.rst")).read(),
author="Digium, Inc. and Yelp, Inc.",
author_email="opensource+bravado@yelp.com",
url="https://github.com/Yelp/bravado",
packages=["bravado"],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
],
install_requires=[
"bravado-core >= 4.2.2",
"yelp_bytes",
"python-dateutil",
"pyyaml",
"requests",
"six",
],
extras_require={
},
)
| [((402, 427), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (417, 427), False, 'import os\n')] |
devdattakulkarni/test-solum | solum/api/controllers/v1/assembly.py | 4e9ddb82d217116aa2c30a6f2581080cbdfae325 | # Copyright 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
from pecan import rest
import wsme
import wsmeext.pecan as wsme_pecan
from solum.api.controllers.v1.datamodel import assembly
import solum.api.controllers.v1.userlog as userlog_controller
from solum.api.handlers import assembly_handler
from solum.common import exception
from solum.common import request
from solum import objects
from solum.openstack.common.gettextutils import _
class AssemblyController(rest.RestController):
"""Manages operations on a single assembly."""
def __init__(self, assembly_id):
super(AssemblyController, self).__init__()
self._id = assembly_id
@pecan.expose()
def _lookup(self, primary_key, *remainder):
if remainder and not remainder[-1]:
remainder = remainder[:-1]
if primary_key == 'logs':
logs = userlog_controller.UserlogsController(self._id)
return logs, remainder
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose(assembly.Assembly)
def get(self):
"""Return this assembly."""
request.check_request_for_https()
handler = assembly_handler.AssemblyHandler(
pecan.request.security_context)
return assembly.Assembly.from_db_model(handler.get(self._id),
pecan.request.host_url)
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose(assembly.Assembly, body=assembly.Assembly)
def put(self, data):
"""Modify this assembly."""
handler = assembly_handler.AssemblyHandler(
pecan.request.security_context)
res = handler.update(self._id,
data.as_dict(objects.registry.Assembly))
return assembly.Assembly.from_db_model(res, pecan.request.host_url)
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose(status_code=204)
def delete(self):
"""Delete this assembly."""
handler = assembly_handler.AssemblyHandler(
pecan.request.security_context)
return handler.delete(self._id)
class AssembliesController(rest.RestController):
"""Manages operations on the assemblies collection."""
@pecan.expose()
def _lookup(self, assembly_id, *remainder):
if remainder and not remainder[-1]:
remainder = remainder[:-1]
return AssemblyController(assembly_id), remainder
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose(assembly.Assembly, body=assembly.Assembly,
status_code=201)
def post(self, data):
"""Create a new assembly."""
js_data = data.as_dict(objects.registry.Assembly)
if data.plan_uri is not wsme.Unset:
plan_uri = data.plan_uri
if plan_uri.startswith(pecan.request.host_url):
pl_uuid = plan_uri.split('/')[-1]
pl = objects.registry.Plan.get_by_uuid(
pecan.request.security_context, pl_uuid)
js_data['plan_id'] = pl.id
else:
# TODO(asalkeld) we are not hosting the plan so
# download the plan and insert it into our db.
raise exception.BadRequest(reason=_(
'The plan was not hosted in solum'))
if js_data.get('plan_id') is None:
raise exception.BadRequest(reason=_(
'The plan was not given or could not be found'))
handler = assembly_handler.AssemblyHandler(
pecan.request.security_context)
return assembly.Assembly.from_db_model(
handler.create(js_data), pecan.request.host_url)
@exception.wrap_wsme_pecan_controller_exception
@wsme_pecan.wsexpose([assembly.Assembly])
def get_all(self):
"""Return all assemblies, based on the query provided."""
request.check_request_for_https()
handler = assembly_handler.AssemblyHandler(
pecan.request.security_context)
return [assembly.Assembly.from_db_model(assm, pecan.request.host_url)
for assm in handler.get_all()]
| [((1201, 1215), 'pecan.expose', 'pecan.expose', ([], {}), '()\n', (1213, 1215), False, 'import pecan\n'), ((1541, 1579), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['assembly.Assembly'], {}), '(assembly.Assembly)\n', (1560, 1579), True, 'import wsmeext.pecan as wsme_pecan\n'), ((1972, 2034), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['assembly.Assembly'], {'body': 'assembly.Assembly'}), '(assembly.Assembly, body=assembly.Assembly)\n', (1991, 2034), True, 'import wsmeext.pecan as wsme_pecan\n'), ((2435, 2471), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', ([], {'status_code': '(204)'}), '(status_code=204)\n', (2454, 2471), True, 'import wsmeext.pecan as wsme_pecan\n'), ((2782, 2796), 'pecan.expose', 'pecan.expose', ([], {}), '()\n', (2794, 2796), False, 'import pecan\n'), ((3044, 3123), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['assembly.Assembly'], {'body': 'assembly.Assembly', 'status_code': '(201)'}), '(assembly.Assembly, body=assembly.Assembly, status_code=201)\n', (3063, 3123), True, 'import wsmeext.pecan as wsme_pecan\n'), ((4298, 4338), 'wsmeext.pecan.wsexpose', 'wsme_pecan.wsexpose', (['[assembly.Assembly]'], {}), '([assembly.Assembly])\n', (4317, 4338), True, 'import wsmeext.pecan as wsme_pecan\n'), ((1643, 1676), 'solum.common.request.check_request_for_https', 'request.check_request_for_https', ([], {}), '()\n', (1674, 1676), False, 'from solum.common import request\n'), ((1695, 1759), 'solum.api.handlers.assembly_handler.AssemblyHandler', 'assembly_handler.AssemblyHandler', (['pecan.request.security_context'], {}), '(pecan.request.security_context)\n', (1727, 1759), False, 'from solum.api.handlers import assembly_handler\n'), ((2114, 2178), 'solum.api.handlers.assembly_handler.AssemblyHandler', 'assembly_handler.AssemblyHandler', (['pecan.request.security_context'], {}), '(pecan.request.security_context)\n', (2146, 2178), False, 'from solum.api.handlers import assembly_handler\n'), ((2316, 2376), 'solum.api.controllers.v1.datamodel.assembly.Assembly.from_db_model', 'assembly.Assembly.from_db_model', (['res', 'pecan.request.host_url'], {}), '(res, pecan.request.host_url)\n', (2347, 2376), False, 'from solum.api.controllers.v1.datamodel import assembly\n'), ((2548, 2612), 'solum.api.handlers.assembly_handler.AssemblyHandler', 'assembly_handler.AssemblyHandler', (['pecan.request.security_context'], {}), '(pecan.request.security_context)\n', (2580, 2612), False, 'from solum.api.handlers import assembly_handler\n'), ((4053, 4117), 'solum.api.handlers.assembly_handler.AssemblyHandler', 'assembly_handler.AssemblyHandler', (['pecan.request.security_context'], {}), '(pecan.request.security_context)\n', (4085, 4117), False, 'from solum.api.handlers import assembly_handler\n'), ((4436, 4469), 'solum.common.request.check_request_for_https', 'request.check_request_for_https', ([], {}), '()\n', (4467, 4469), False, 'from solum.common import request\n'), ((4488, 4552), 'solum.api.handlers.assembly_handler.AssemblyHandler', 'assembly_handler.AssemblyHandler', (['pecan.request.security_context'], {}), '(pecan.request.security_context)\n', (4520, 4552), False, 'from solum.api.handlers import assembly_handler\n'), ((1400, 1447), 'solum.api.controllers.v1.userlog.UserlogsController', 'userlog_controller.UserlogsController', (['self._id'], {}), '(self._id)\n', (1437, 1447), True, 'import solum.api.controllers.v1.userlog as userlog_controller\n'), ((4582, 4643), 'solum.api.controllers.v1.datamodel.assembly.Assembly.from_db_model', 'assembly.Assembly.from_db_model', 
(['assm', 'pecan.request.host_url'], {}), '(assm, pecan.request.host_url)\n', (4613, 4643), False, 'from solum.api.controllers.v1.datamodel import assembly\n'), ((3482, 3556), 'solum.objects.registry.Plan.get_by_uuid', 'objects.registry.Plan.get_by_uuid', (['pecan.request.security_context', 'pl_uuid'], {}), '(pecan.request.security_context, pl_uuid)\n', (3515, 3556), False, 'from solum import objects\n'), ((3966, 4015), 'solum.openstack.common.gettextutils._', '_', (['"""The plan was not given or could not be found"""'], {}), "('The plan was not given or could not be found')\n", (3967, 4015), False, 'from solum.openstack.common.gettextutils import _\n'), ((3816, 3853), 'solum.openstack.common.gettextutils._', '_', (['"""The plan was not hosted in solum"""'], {}), "('The plan was not hosted in solum')\n", (3817, 3853), False, 'from solum.openstack.common.gettextutils import _\n')] |
mjain2/ottertune | server/website/website/migrations/0003_background_task_optimization.py | 011e896bf89df831fb1189b1ab4c9a7d7dca420a | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2018-08-02 07:58
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('website', '0002_enable_compression'),
]
operations = [
migrations.AddField(
model_name='workload',
name='status',
field=models.IntegerField(choices=[(1, 'MODIFIED'), (2, 'PROCESSING'), (3, 'PROCESSED')], default=1, editable=False),
)
]
| [((434, 548), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(1, 'MODIFIED'), (2, 'PROCESSING'), (3, 'PROCESSED')]", 'default': '(1)', 'editable': '(False)'}), "(choices=[(1, 'MODIFIED'), (2, 'PROCESSING'), (3,\n 'PROCESSED')], default=1, editable=False)\n", (453, 548), False, 'from django.db import migrations, models\n')] |
bobbyluig/6.A01 | src/agility/usc/settings.py | 16dd8963951eca4a1312a15c216d0cc3c117d063 | from agility.usc.enumeration import uscSerialMode, ChannelMode, HomeMode
from agility.usc.reader import BytecodeReader
class UscSettings:
def __init__(self):
self.servosAvailable = 6
self.servoPeriod = 156
self.miniMaestroServoPeriod = 80000
self.servoMultiplier = 1
self.serialMode = uscSerialMode.SERIAL_MODE_UART_DETECT_BAUD_RATE
self.fixedBaudRate = 9600
self.enableCrc = False
self.neverSuspend = False
self.serialDeviceNumber = 12
self.miniSscOffset = 0
self.serialTimeout = 0
self.scriptDone = True
self.channelSettings = []
self.enablePullups = True
self.scriptInconsistent = False
self.script = None
self.bytecodeProgram = None
def __len__(self):
return len(self.channelSettings)
def setAndCompileScript(self, script):
self.script = None
reader = BytecodeReader()
self.bytecodeProgram = reader.read(script, len(self) != 6)
self.script = script
class ChannelSetting:
def __init__(self):
self.name = ''
self.mode = ChannelMode.Servo
self.homeMode = HomeMode.Off
self.home = 6000
self.minimum = 3968
self.maximum = 8000
self.neutral = 6000
self.range = 1905
self.speed = 0
self.acceleration = 0
| [((932, 948), 'agility.usc.reader.BytecodeReader', 'BytecodeReader', ([], {}), '()\n', (946, 948), False, 'from agility.usc.reader import BytecodeReader\n')] |
jtom38/invmon-api | invmonInfra/domain/__init__.py | 28f163bef47ee5c95bac0f40198e25e44090758f | from .dbApiInterface import DbApiTableInterface
from .dbApiTableInterface import DbApiTableInterface
from .cacheInterface import CacheInterface
from .loggerInterface import LoggerInterface
from .envReaderInterface import EnvReaderInterface
from .driverInterface import DriverInterface
from .jobsInterface import JobsInterface
from .jobsInventoryInterface import JobsInventoryInterface
from .emailInterface import EmailInterface
from .sqlTableInterface import SqlTableInterface | [] |
ameenetemady/DeepPep | app/app8_18mix/h_noSeqSearch.py | 121826309667f1290fa1121746a2992943d0927b | import sys
import csv
import os
sys.path.append('../../')
import h_lib
import h_lib_noSeqSearch
in_strFastaFilename = '{!s}/data/protein/18mix/18mix_db_plus_contaminants_20081209.fasta'.format(os.environ.get('HOME'))
in_strPeptideFilename = '{!s}/data/protein/18mix/18_mixtures_peptide_identification.txt'.format(os.environ.get('HOME'))
out_strOutputBaseDir = './sparseData_h'
out_strFile = out_strOutputBaseDir + "/h_noSeqSearch.csv"
YInfo = h_lib.getPeptides(in_strPeptideFilename, "\t", 0, 2)
###assuming proteins are already broken to individual files under in_strProtRefsDir
#XMatchProb = h_lib.getYInfo(YInfo, in_strProtRefsDir, strXMatchProb_filename, True)
XMatchProb = h_lib_noSeqSearch.getXInfo(YInfo, in_strPeptideFilename, "\t", 0, 1)
YMatchProbCount = h_lib.getPeptideProteinMatches(YInfo, XMatchProb)
h_lib.updateXMatchingProbabilities(XMatchProb, YMatchProbCount)
XPred = h_lib.getAccumulatedXMatchingProbabilities(XMatchProb)
XPred.sort()
with open(out_strFile, "w") as bfFile:
for row in XPred:
bfFile.write('{!s},{:.6f}\n'.format(row[0], row[1]))
print("result saved in:" + out_strFile)
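# Each output row is "<protein id>,<accumulated matching probability>" per the format
# string above, e.g. (illustrative) "P02768,0.873200".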
| [((33, 58), 'sys.path.append', 'sys.path.append', (['"""../../"""'], {}), "('../../')\n", (48, 58), False, 'import sys\n'), ((447, 499), 'h_lib.getPeptides', 'h_lib.getPeptides', (['in_strPeptideFilename', '"""\t"""', '(0)', '(2)'], {}), "(in_strPeptideFilename, '\\t', 0, 2)\n", (464, 499), False, 'import h_lib\n'), ((682, 750), 'h_lib_noSeqSearch.getXInfo', 'h_lib_noSeqSearch.getXInfo', (['YInfo', 'in_strPeptideFilename', '"""\t"""', '(0)', '(1)'], {}), "(YInfo, in_strPeptideFilename, '\\t', 0, 1)\n", (708, 750), False, 'import h_lib_noSeqSearch\n'), ((769, 818), 'h_lib.getPeptideProteinMatches', 'h_lib.getPeptideProteinMatches', (['YInfo', 'XMatchProb'], {}), '(YInfo, XMatchProb)\n', (799, 818), False, 'import h_lib\n'), ((819, 882), 'h_lib.updateXMatchingProbabilities', 'h_lib.updateXMatchingProbabilities', (['XMatchProb', 'YMatchProbCount'], {}), '(XMatchProb, YMatchProbCount)\n', (853, 882), False, 'import h_lib\n'), ((891, 945), 'h_lib.getAccumulatedXMatchingProbabilities', 'h_lib.getAccumulatedXMatchingProbabilities', (['XMatchProb'], {}), '(XMatchProb)\n', (933, 945), False, 'import h_lib\n'), ((195, 217), 'os.environ.get', 'os.environ.get', (['"""HOME"""'], {}), "('HOME')\n", (209, 217), False, 'import os\n'), ((315, 337), 'os.environ.get', 'os.environ.get', (['"""HOME"""'], {}), "('HOME')\n", (329, 337), False, 'import os\n')] |
ashokn414/python_floating_conversions | normalizer.py | 7a132c703272e6651daf555816171f04ee5b5555 | # for normalization we need to have the maxima of x and y values with the help of which
# we can normalise the given values
import csv
filename = "values.csv"
fields = []
rows = []
with open(filename,'r') as csvfile:
reader = csv.reader(csvfile)
fields = next(reader)
for row in reader:
rows.append(row)
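# Worked example of the normalisation below (illustrative numbers): with norm = 50,
# the point (25, -10) maps to (25/50, -10/50) = (0.5, -0.2), while (60, 5) is
# rejected because 60 lies outside the [-50, 50] bound.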
norm = 50
#a = float(input("enter the x coordinate:"))
#b = float(input("enter the y coordinate:"))
for row in rows:
    # each CSV row is expected to hold an (x, y) coordinate pair
    a = float(row[0])
    b = float(row[1])
    if (a > norm or b > norm or a < -(norm) or b < -(norm)):
        print("the value given is invalid/out of bound")
    else:
        a = a/norm
        b = b/norm
        print("the normalized values are "+str(a)+","+str(b)) | [((238, 257), 'csv.reader', 'csv.reader', (['csvfile'], {}), '(csvfile)\n', (248, 257), False, 'import csv\n')]
jiwalker-usgs/pyGDP | pygdp/fwgs.py | dca4789fb0c53c889d6fa1b38ec867bc939a2d04 | from pygdp import _execute_request
from pygdp import _get_geotype
from owslib.util import log
def submitFeatureWeightedGridStatistics(geoType, dataSetURI, varID, startTime, endTime, attribute, value, gmlIDs,
verbose, coverage, delim, stat, grpby, timeStep, summAttr, weighted, WFS_URL, outputfname, sleepSecs):
"""
Makes a featureWeightedGridStatistics algorithm call.
The web service interface implemented is summarized here:
https://my.usgs.gov/confluence/display/GeoDataPortal/Generating+Area+Weighted+Statistics+Of+A+Gridded+Dataset+For+A+Set+Of+Vector+Polygon+Features
Note that varID and stat can be a list of strings.
"""
# test for dods:
dataSetURI = _execute_request.dodsReplace(dataSetURI)
log.info('Generating feature collection.')
featureCollection = _get_geotype._getFeatureCollectionGeoType(geoType, attribute, value, gmlIDs, WFS_URL)
if featureCollection is None:
return
processid = 'gov.usgs.cida.gdp.wps.algorithm.FeatureWeightedGridStatisticsAlgorithm'
if weighted==False:
processid = 'gov.usgs.cida.gdp.wps.algorithm.FeatureGridStatisticsAlgorithm'
solo_inputs = [("FEATURE_ATTRIBUTE_NAME",attribute),
("DATASET_URI", dataSetURI),
("TIME_START",startTime),
("TIME_END",endTime),
("REQUIRE_FULL_COVERAGE",str(coverage).lower()),
("DELIMITER",delim),
("GROUP_BY", grpby),
("SUMMARIZE_TIMESTEP", str(timeStep).lower()),
("SUMMARIZE_FEATURE_ATTRIBUTE",str(summAttr).lower()),
("FEATURE_COLLECTION", featureCollection)]
if isinstance(stat, list):
num_stats=len(stat)
if num_stats > 7:
raise Exception('Too many statistics were submitted.')
else:
num_stats=1
if isinstance(varID, list):
num_varIDs=len(varID)
else:
num_varIDs=1
inputs = [('','')]*(len(solo_inputs)+num_varIDs+num_stats)
count=0
rmvCnt=0
for solo_input in solo_inputs:
if solo_input[1]!=None:
inputs[count] = solo_input
count+=1
else:
rmvCnt+=1
del inputs[count:count+rmvCnt]
if num_stats > 1:
for stat_in in stat:
if stat_in not in ["MEAN", "MINIMUM", "MAXIMUM", "VARIANCE", "STD_DEV", "SUM", "COUNT"]:
raise Exception('The statistic %s is not in the allowed list: "MEAN", "MINIMUM", "MAXIMUM", "VARIANCE", "STD_DEV", "SUM", "COUNT"' % stat_in)
inputs[count] = ("STATISTICS",stat_in)
count+=1
elif num_stats == 1:
if stat not in ["MEAN", "MINIMUM", "MAXIMUM", "VARIANCE", "STD_DEV", "SUM", "COUNT"]:
raise Exception('The statistic %s is not in the allowed list: "MEAN", "MINIMUM", "MAXIMUM", "VARIANCE", "STD_DEV", "SUM", "COUNT"' % stat)
inputs[count] = ("STATISTICS",stat)
count+=1
if num_varIDs > 1:
for var in varID:
inputs[count] = ("DATASET_ID",var)
count+=1
elif num_varIDs == 1:
inputs[count] = ("DATASET_ID",varID)
output = "OUTPUT"
return _execute_request._executeRequest(processid, inputs, output, verbose, outputfname, sleepSecs)
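# Illustrative call (keyword names follow the signature above; the dataset URI,
# feature attribute and values are placeholders, not a tested invocation):
#
#   submitFeatureWeightedGridStatistics(
#       geoType='sample:CONUS_states', dataSetURI='dods://example.usgs.gov/thredds/dodsC/prism',
#       varID='ppt', startTime='1900-01-01', endTime='1900-12-31',
#       attribute='STATE', value='Wisconsin', gmlIDs=None, verbose=True,
#       coverage=True, delim='COMMA', stat='MEAN', grpby='STATISTIC',
#       timeStep=False, summAttr=False, weighted=True, WFS_URL=None,
#       outputfname='fwgs_out.csv', sleepSecs=10)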
| [((744, 784), 'pygdp._execute_request.dodsReplace', '_execute_request.dodsReplace', (['dataSetURI'], {}), '(dataSetURI)\n', (772, 784), False, 'from pygdp import _execute_request\n'), ((794, 836), 'owslib.util.log.info', 'log.info', (['"""Generating feature collection."""'], {}), "('Generating feature collection.')\n", (802, 836), False, 'from owslib.util import log\n'), ((866, 955), 'pygdp._get_geotype._getFeatureCollectionGeoType', '_get_geotype._getFeatureCollectionGeoType', (['geoType', 'attribute', 'value', 'gmlIDs', 'WFS_URL'], {}), '(geoType, attribute, value, gmlIDs,\n WFS_URL)\n', (907, 955), False, 'from pygdp import _get_geotype\n'), ((3384, 3480), 'pygdp._execute_request._executeRequest', '_execute_request._executeRequest', (['processid', 'inputs', 'output', 'verbose', 'outputfname', 'sleepSecs'], {}), '(processid, inputs, output, verbose,\n outputfname, sleepSecs)\n', (3416, 3480), False, 'from pygdp import _execute_request\n')] |
rbanffy/bjoern | tests/pure-req.py | b177b62aa626cee97972a2e73f8543e6d86b5eb7 | import sys
import socket
conn = socket.create_connection(('0.0.0.0', 8080))
msgs = [
# 0 Keep-Alive, Transfer-Encoding chunked
'GET / HTTP/1.1\r\nConnection: Keep-Alive\r\n\r\n',
# 1,2,3 Close, EOF "encoding"
'GET / HTTP/1.1\r\n\r\n',
'GET / HTTP/1.1\r\nConnection: close\r\n\r\n',
'GET / HTTP/1.0\r\nConnection: Keep-Alive\r\n\r\n',
# 4 Bad Request
'GET /%20%20% HTTP/1.1\r\n\r\n',
# 5 Bug #14
'GET /%20abc HTTP/1.0\r\n\r\n',
# 6 Content-{Length, Type}
'GET / HTTP/1.0\r\nContent-Length: 11\r\n'
'Content-Type: text/blah\r\nContent-Fype: bla\r\n'
'Content-Tength: bla\r\n\r\nhello world',
# 7 POST memory leak
'POST / HTTP/1.0\r\nContent-Length: 1000\r\n\r\n%s' % ('a'*1000),
# 8,9 CVE-2015-0219
'GET / HTTP/1.1\r\nFoo_Bar: bad\r\n\r\n',
'GET / HTTP/1.1\r\nFoo-Bar: good\r\nFoo_Bar: bad\r\n\r\n'
]
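# usage (illustrative): `python pure-req.py 0` sends msgs[0], the Keep-Alive request,
# to a server expected on 0.0.0.0:8080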
conn.send(msgs[int(sys.argv[1])].encode())
while 1:
data = conn.recv(100)
if not data: break
print(repr(data))
if data.endswith(b'0\r\n\r\n'):
if raw_input('new request? Y/n') == 'n':
exit()
conn.send(b'GET / HTTP/1.1\r\nConnection: Keep-Alive\r\n\r\n')
| [((33, 76), 'socket.create_connection', 'socket.create_connection', (["('0.0.0.0', 8080)"], {}), "(('0.0.0.0', 8080))\n", (57, 76), False, 'import socket\n')] |
gtfarng/Odoo_migrade | apps/odoo/lib/odoo-10.0.post20170615-py2.7.egg/odoo/addons/stock/models/web_planner.py | 9cc28fae4c379e407645248a29d22139925eafe7 | # -*- coding: utf-8 -*-
from odoo import models
class PlannerInventory(models.Model):
_inherit = 'web.planner'
def _get_planner_application(self):
planner = super(PlannerInventory, self)._get_planner_application()
planner.append(['planner_inventory', 'Inventory Planner'])
return planner
| [] |
Wizard-Of-Chaos/WizardBot | wizbot.py | 75a2e482c7d7921e9a06dde4d210c68330c6fbe2 | #WIZARD BOT IS LIVE
import calendar
import discord as dc
from discord.ext.commands import Bot
from discord.ext import commands
from functools import partial
import asyncio  # referenced below as asyncio.TimeoutError
import time
from random import randint
from datetime import datetime
from guildconfig import GuildConfig
from rolesaver import RoleSaver
#initializes bot, sets up command sign
bot = commands.Bot(command_prefix = '!')
bot.remove_command('help')
guild_config = GuildConfig(bot, 'config.pkl')
role_saver = RoleSaver(bot, 'roles.pkl')
#GAME STUFF
class Monster:
def __init__(self, speed, damage, health, dmg_type):
self.spd = speed
self.dmg = damage
self.hp = health
self.dmg_type = dmg_type
self.is_alive = True
#All integers.
#Last one is 1 or 0 - there are two damage types. Magical and physical.
#Physical is 0, Magical is 1.
    #make_attack returns a string "<damage type> <damage>" - the type (0 or 1) first, then the damage amount, e.g. "0 15".
#ACCESSORS
def health(self):
return self.hp
def speed(self):
return self.spd
def damage(self):
return self.dmg
def life(self):
return self.is_alive
#MUTATORS
def take_hit(self, damage):
self.hp = self.hp - damage
if self.hp <= 0:
self.is_alive = False
def make_attack(self):
attack = ""
attack += str(self.dmg_type)
attack += " "
attack += str(self.dmg)
return attack
class Player:
def __init__(self):
self.hp = 100 #Classic!
self.dmg = 10
self.shield = 0
self.s_dur = 0
self.is_alive = True
#Player has four shield conditions.
#0 - has no shield. 1 - Physical shield. 2 - Magical shield. 3 - Both.
#ACCESSORS
def damage(self):
return self.dmg
def life(self):
return self.is_alive
def shield_type(self):
return self.shield
def shield_dur(self):
return self.s_dur
def health(self):
return self.hp
#MUTATORS
def take_hit(self, damage):
self.hp = self.hp - damage
if self.hp <= 0:
self.is_alive = False
def shield_hit(self):
self.s_dur = self.s_dur - 1
if self.s_dur == 0:
self.shield = 0
#Kills your shield when the durability hits 0.
def heal(self, heal):
self.hp = self.hp + heal
def dangerify(self, damage):
self.dmg = self.dmg + damage
def get_shield(self, shield):
#This one's a bit tricky. The shield is 0 or 1 - Physical or magical.
#It then updates the player's shield accordingly.
if shield == 0:
if self.shield == 0:
self.shield = 1
self.s_dur = 10
if self.shield == 2:
self.shield = 3
self.s_dur = 5
elif shield == 1:
if self.shield == 0:
self.shield = 2
self.s_dur = 10
if self.shield == 1:
self.shield = 3
self.s_dur = 5
    #Shield durability goes to 5, regardless of what it was before, on picking up a SECOND shield.
#Other four cases don't need to be covered.
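# Quick illustration of how the two classes above interact (not executed by the bot;
# the numbers are made up):
#   goblin = Monster(8, 12, 60, 0)   # speed 8, physical (type 0) damage 12, 60 hp
#   hero = Player()
#   attk_type, attk_dmg = goblin.make_attack().split(" ")   # -> "0", "12"
#   hero.take_hit(int(attk_dmg))     # hero.health() drops from 100 to 88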
#WIZBOT OLD STUFF ENDS HERE
#FUNCTIONS HERE
def get_token():
with open('token.dat', 'r') as tokenfile:
return ''.join(
chr(int(''.join(c), 16))
for c in zip(*[iter(tokenfile.read().strip())]*2)
)
def monthdelta(date, delta):
m, y = (date.month+delta) % 12, date.year + ((date.month)+delta-1) // 12
if not m: m = 12
d = min(date.day, calendar.monthrange(y, m)[1])
return date.replace(day=d, month=m, year=y)
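# e.g. monthdelta(datetime(2020, 3, 31), -1) steps back one month and clamps the day
# to February's length, giving 2020-02-29.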
async def get_last_seen(member, pendant=None):
lastseen = None
for channel in member.guild.text_channels:
lastmsg = await channel.history(limit=None, after=pendant).get(author__name=member.display_name)
if lastmsg and (lastseen is None or lastseen < lastmsg.created_at):
lastseen = lastmsg.created_at
return lastseen
#START OF EVENTS
@bot.event
async def on_ready():
print(f'{bot.user} has connected to Discord!')
@bot.event
async def on_message(message):
if message.content == "EAT THAT HORSE!":
await message.channel.send(":horse:")
await bot.process_commands(message)
@bot.event
async def on_message_edit(bfr, aft):
if bfr.author == bot.user:
return
if not hasattr(bfr.channel, 'guild'):
return
guild_id = bfr.channel.guild.id
if guild_id in guild_config.mod_channels:
embed = dc.Embed(color=dc.Color.gold(), timestamp=aft.created_at)
embed.set_author(
name=f'@{bfr.author} edited a message in #{bfr.channel}:',
icon_url=bfr.author.avatar_url,
)
embed.add_field(name='**Before:**', value=bfr.content, inline=False)
embed.add_field(name='**After:**', value=aft.content, inline=False)
embed.add_field(name='**MESSAGE ID:**', value=f'`{aft.id}`')
embed.add_field(name='**USER ID:**', value=f'`{bfr.author.id}`')
await bot.get_channel(guild_config.mod_channels[guild_id]['msglog']).send(
embed=embed
)
@bot.event
async def on_message_delete(msg):
if not hasattr(msg.channel, 'guild'):
return
guild_id = msg.channel.guild.id
if guild_id in guild_config.mod_channels:
embed = dc.Embed(
color=dc.Color.darker_grey(),
timestamp=msg.created_at,
description=msg.content,
)
embed.set_author(
name=f'@{msg.author} deleted a message in #{msg.channel}:',
icon_url=msg.author.avatar_url,
)
embed.add_field(name='**MESSAGE ID:**', value=f'`{msg.id}`')
embed.add_field(name='**USER ID:**', value=f'`{msg.author.id}`')
await bot.get_channel(guild_config.mod_channels[guild_id]['msglog']).send(
embed=embed
)
@bot.event
async def on_member_join(member):
guild = member.guild
if guild.id in guild_config.mod_channels:
await role_saver.load_roles(member)
embed = dc.Embed(
color=dc.Color.green(),
timestamp=datetime.utcnow(),
description=f':green_circle: **{member}** has joined **{guild}**!\n'
f'The guild now has {len(guild.members)} members!\n'
f'This account was created on `{member.created_at.strftime("%d/%m/%Y %H:%M:%S")}`'
)
embed.set_author(name=f'A user has joined the server!')
embed.set_thumbnail(url=member.avatar_url)
embed.add_field(name='**USER ID:**', value=f'`{member.id}`')
await bot.get_channel(guild_config.mod_channels[guild.id]['usrlog']).send(
embed=embed
)
@bot.event
async def on_member_remove(member):
guild = member.guild
if guild.id in guild_config.mod_channels:
role_saver.save_roles(member)
timestamp = datetime.utcnow()
lastseen = await get_last_seen(member, monthdelta(timestamp, -1)) # Moved grabbing last seen to a function
if lastseen is not None:
lastseenmsg = f'This user was last seen on `{lastseen.strftime("%d/%m/%Y %H:%M:%S")}`'
else:
lastseenmsg = 'This user has not spoken for at least 1 month!'
embed = dc.Embed(
color=dc.Color.red(),
timestamp=timestamp,
description=f':red_circle: **{member}** has left **{guild}**!\n'
f'The guild now has {len(guild.members)} members!\n{lastseenmsg}'
)
embed.set_author(name=f'A user left or got beaned!')
embed.set_thumbnail(url=member.avatar_url)
embed.add_field(
name='**ROLES SNAGGED:**',
value=(', '.join(
f'`{guild.get_role(role).name}`'
for role in role_saver.get_roles(member)
)
or None),
inline=False)
embed.add_field(name='**USER ID:**', value=f'`{member.id}`')
await bot.get_channel(guild_config.mod_channels[guild.id]['usrlog']).send(
embed=embed
)
@bot.event
async def on_member_update(bfr, aft): # Log role and nickname changes
guild = bfr.guild
if guild.id in guild_config.mod_channels:
changetype = None
if bfr.nick != aft.nick:
changetype = 'Nickname Update:'
changelog = f'**{bfr}** had their nickname changed to **{aft.nick}**'
if bfr.roles != aft.roles:
changetype = 'Role Update:'
diffrole = next(iter(set(aft.roles) ^ set(bfr.roles)))
difftype = 'added' if len(bfr.roles) < len(aft.roles) else 'removed'
changelog = f'**{aft}** had the following role {difftype}: `{diffrole.name}`'
if changetype is not None:
embed = dc.Embed(
color=dc.Color.blue(),
timestamp=datetime.utcnow(),
description=changelog,
)
embed.set_author(name=changetype, icon_url=aft.avatar_url)
embed.add_field(name='**USER ID:**', value=f'`{aft.id}`', inline=False)
await bot.get_channel(guild_config.mod_channels[guild.id]['usrlog']).send(
embed=embed
)
@bot.event
async def on_user_update(bfr, aft): # Log avatar, name, discrim changes
for guild in bot.guilds:
if guild.get_member(bfr.id) is not None:
changetype = None
if bfr.name != aft.name:
changetype = 'Username Update:'
changelog = f'@{bfr} has changed their username to {aft}'
if bfr.discriminator != aft.discriminator:
changetype = 'Discriminator Update:'
changelog = (
f'@{bfr} had their discriminator changed from '
f'{bfr.discriminator} to {aft.discriminator}'
)
if bfr.avatar != aft.avatar:
changetype = 'Avatar Update:'
changelog = f'@{bfr} has changed their avatar to:'
if changetype is not None:
embed = dc.Embed(
color=dc.Color.purple(),
timestamp=datetime.utcnow(),
description=changelog,
)
embed.set_author(name=changetype, icon_url=bfr.avatar_url)
if changetype.startswith('Avatar'):
embed.set_thumbnail(url=f'{aft.avatar_url}')
embed.add_field(name='**USER ID:**', value=f'`{aft.id}`', inline=False)
await bot.get_channel(guild_config.mod_channels[guild.id]['usrlog']).send(
embed=embed
)
#END OF EVENTS
@bot.command()
async def slap(ctx, arg):
await ctx.send("You have slapped {1}!" .format(ctx, arg))
@bot.command()
async def hello(ctx):
await ctx.send("Hello, World!")
@bot.command()
async def echo(ctx, arg):
await ctx.send(arg)
@bot.command()
async def roll(ctx, arg):
value = randint(1, int(arg))
await ctx.send("You have rolled a {1}!" .format(ctx, value))
@bot.command()
async def help(ctx):
embed = dc.Embed(
color=ctx.author.color,
timestamp=ctx.message.created_at,
description=f'It seems you have asked about the Homestuck and Hiveswap Discord Utility Bot:tm:.'
f'This is a bot designed to cater to the server\'s moderation, utility, and statistic '
f'tracking needs. If the functions herein described are not performing to the degree '
f'that is claimed, please direct your attention to Wizard of Chaos#2459.\n\n'
f'**Command List:**',
)
embed.set_author(name='Help message', icon_url=bot.user.avatar_url)
embed.add_field(name='`help`', value='Display this message.', inline=False)
embed.add_field(
name='`info [username]`',
value='Grabs user information. Leave username empty to get your own info.',
inline=False
)
embed.add_field(name='`ping`', value='Pong!', inline=False)
embed.add_field(
name='`config (msglog|usrlog)`',
value='(Manage Server only) Sets the appropriate log channel.',
inline=False
)
await ctx.send(embed=embed)
@bot.command()
async def info(ctx, member : str=None):
if member is not None:
for gmember in ctx.guild.members:
if member == gmember.display_name:
member = gmember
break
else:
await ctx.send(
'It seems that user can\'t be found. Please check your spelling. '
'Alternatively, try adding double quotes ("") around the name.'
)
return
else:
member = ctx.author
timestamp = datetime.utcnow()
lastseen = await get_last_seen(member, monthdelta(timestamp, -1))
if lastseen is not None:
lastseenmsg = lastseen.strftime("%d/%m/%Y %H:%M:%S")
else:
lastseenmsg = 'This user has not spoken for at least 1 month!'
embed = dc.Embed(color=member.color, timestamp=timestamp)
embed.set_author(name=f'Information for {member}')
embed.set_thumbnail(url=member.avatar_url)
embed.add_field(name='User ID:', value=f'{member.id}')
embed.add_field(name='Last Seen:', value=lastseenmsg, inline=False)
embed.add_field(name='Account Created On:', value=member.created_at.strftime('%d/%m/%Y %H:%M:%S'))
embed.add_field(name='Guild Joined On:', value=member.joined_at.strftime('%d/%m/%Y %H:%M:%S'))
embed.add_field(name='Roles:', value=', '.join(f'`{role.name}`' for role in member.roles[1:]), inline=False)
if ctx.author != member:
msg = 'It seems you\'re a bit of a stalker, aren\'t you?'
else:
msg = None
await ctx.send(msg, embed=embed)
@bot.command()
async def ping(ctx):
await ctx.send(f'Pong, <@!{ctx.message.author.id}>!')
@bot.group()
async def config(ctx):
if ctx.invoked_subcommand is None:
await ctx.send(
'It seems that you have attempted to run a nonexistent command. '
'Would you like to try again? Redos are free, you know.'
)
@config.command()
async def usrlog(ctx):
if ctx.author.guild_permissions.manage_guild == True:
await ctx.send(guild_config.setlog(ctx, 'usrlog'))
else:
await ctx.send("It seems that you don't have the appropriate permissions for this command.")
@config.command()
async def msglog(ctx):
if ctx.author.guild_permissions.manage_guild == True:
await ctx.send(guild_config.setlog(ctx, 'msglog'))
else:
await ctx.send("It seems that you don't have the appropriate permissions for this command.")
#GAME EVENT
#ABANDON ALL HOPE YE WHO GO BELOW HERE
@bot.command()
async def rogue_game(ctx):
await ctx.send("Game started! Choose a starting buff - 'Health' or 'Damage'.")
def check(m):
if m.author == ctx.author:
return m.content == "Health" or m.content == "Damage" or m.content == "CMSC280 FREE PASS"
else:
return False
gamer = Player() #Initializing player class
msg = await bot.wait_for("message", check=check)
if msg.content == "Health":
await ctx.send("+25 HP!")
gamer.heal(25)
elif msg.content == "Damage":
await ctx.send("+5 Damage!")
gamer.dangerify(5)
elif msg.content == "CMSC280 FREE PASS":
await ctx.send("Free shield!")
gamer.get_shield(1)
gamer.get_shield(0)
await ctx.send("OPTIONS: You can 'Block', 'Dodge' or 'Attack' a monster. Alternatively, you may 'Die'.")
slain_enemies = 0
def continue_check(m): #Check used several times
if m.author == ctx.author:
return m.content == "Yes" or m.content == "No"
else:
return False
while gamer.life() == True:
game_roll = randint(1, 1) #placeholder
if game_roll == 1:
#Monster speed is between 5 and 12.
#Monster health is between 40 and 120.
#Monster damage is between 5 and 20.
#Monster damage type is random one or the other (physical or magical).
m_speed = randint(5, 12)
m_hp = randint(40, 120)
m_dmg = randint(5, 20)
m_type = randint(0, 1)
danger = Monster(m_speed, m_dmg, m_hp, m_type) #Initializing monster class
print(f"Monster generated.")
await ctx.send("There is a beast, and you must tenderize it!")
while danger.life() == True:
await ctx.send("Monsters speed is {1}, damage {2}, health {3}." .format(ctx, danger.speed(), danger.damage(), danger.health()))
m_attk_str = danger.make_attack()
m_attk = m_attk_str.split(" ")
if "0" in m_attk:
await ctx.send("The monster is about to bite you!")
elif "1" in m_attk:
await ctx.send("The monster is about to breathe fire at you!")
def game_response(m): #Player response
if m.author == ctx.author:
return m.content == "Block" or m.content == "Dodge" or m.content == "Attack" or m.content == "Die"
else:
return False
#Reactions to the monster's attack
try:
g_msg = await bot.wait_for("message",timeout=m_speed, check=game_response)
if g_msg.content == "Block":
if "0" in m_attk:
if gamer.shield_type() == 1 or gamer.shield_type() == 3:
gamer.shield_hit()
await ctx.send("You block the attack!")
if gamer.shield_type() == 0:
await ctx.send("Your shield shatters from the force of the blow.")
else:
await ctx.send("You try to block it, but your shield isn't rated for this kind of damage!")
bp_damage = int(m_attk[1])
gamer.take_hit(bp_damage)
curhp = gamer.health()
await ctx.send("Your health is {1}." .format(ctx, curhp))
if "1" in m_attk:
if gamer.shield_type() == 2 or gamer.shield_type() == 3:
gamer.shield_hit()
await ctx.send("You block the attack!")
if gamer.shield_type() == 0:
await ctx.send("Your shield falls to pieces in a burst of multicolored light.")
else:
await ctx.send("The magical assault burns right through your shield!")
bm_damage = int(m_attk[1])
gamer.take_hit(bm_damage)
curhp = gamer.health()
await ctx.send("Your health is {1}." .format(ctx, curhp))
if g_msg.content == "Dodge":
await ctx.send("You roll to one side, avoiding some of the damage!")
d_damage = int(m_attk[1])
hit = d_damage - randint(5, 18)
gamer.take_hit(hit)
await ctx.send("Your health is {1}." .format(ctx, gamer.health()))
if g_msg.content == "Attack":
await ctx.send("You strike at the monster, but in doing so, expose yourself to the blow!") #Heh. Expose yourself. Good one, me.
a_damage = int(m_attk[1])
hit = a_damage + randint(5, 10)
gamer.take_hit(hit)
danger.take_hit(gamer.damage())
await ctx.send("Your health is {1}." .format(ctx, gamer.health()))
if g_msg.content == "Die":
await ctx.send("You die before the blow hits, confusing the monster.")
gamer.take_hit(gamer.health())
except asyncio.TimeoutError:
await ctx.send("You didn't move fast enough! The attack lands!")
t_damage = int(m_attk[1])
gamer.take_hit(t_damage)
await ctx.send("Your health is {1}." .format(ctx, gamer.health()))
if gamer.life() == False:
break
await ctx.send("The monster rears back! Quickly, hit the thing!")
def attack_response(m):
if m.author == ctx.author:
return m.content == "Attack"
else:
return False
try:
a_msg = await bot.wait_for("message", timeout=m_speed, check=attack_response)
if a_msg.content == "Attack":
await ctx.send("You hit the monster!")
danger.take_hit(gamer.damage())
except asyncio.TimeoutError:
await ctx.send("You didn't move fast enough!")
#Right, by this point, the monster has attacked, and the player has attacked.
#Need to check if the player is dead or not.
if gamer.life() == False:
break
#Only other option now is that the monster is still alive, requiring another turn, or it's dead, in which case...
#We should end up here, outside the loop.
if gamer.life() == True: #Necessary. Can break above loop without being alive, due to 'Die'.
await ctx.send("The monster has been defeated.")
slain_enemies = slain_enemies + 1
lootroll = randint(0, 4)
#Five cases. 0 - nothing. 1 - Physical shield. 2 - Magic shield. 3 - Health. 4 - Damage.
if lootroll == 0:
await ctx.send("The monster dropped nothing.")
if lootroll == 1:
await ctx.send("In the monster's digestive tract, you find a metal shield!")
gamer.get_shield(0)
if lootroll == 2:
await ctx.send("In the monster's spleen, you find a runic shield, glowing with spellcraft!")
gamer.get_shield(1)
if lootroll == 3:
healthroll = randint(5, 30)
await ctx.send("The monster's blood is a powerful restorative! You heal for {1}." .format(ctx, healthroll))
gamer.heal(healthroll)
if lootroll == 4:
dmgroll = randint(3, 12)
await ctx.send("You monster's bones make an excellent weapon! Your damage increases by {1}." .format(ctx, dmgroll))
gamer.dangerify(dmgroll)
#Loot handled. Looping again after describing player stats.
await ctx.send("Your health is {1} and your damage is {2}." .format(ctx, gamer.health(), gamer.damage()))
if gamer.shield_type() == 0:
await ctx.send("You have no shield.")
elif gamer.shield_type() == 1:
await ctx.send("You have a sturdy metal shield. It can take {1} more hits." .format(ctx, gamer.shield_dur()))
elif gamer.shield_type() == 2:
await ctx.send("You have a rune-inscribed shield. It can take {1} more hits." .format(ctx, gamer.shield_dur()))
elif gamer.shield_type() == 3:
await ctx.send("You have an inscribed metal shield. Powerful! It can take {1} more hits." .format(ctx, gamer.shield_dur()))
await ctx.send("Continue?")
con_msg = await bot.wait_for("message", check=continue_check)
if con_msg.content == "No":
break
#End of combat loop. Player is dead.
if game_roll == 2:
await ctx.send("You encounter a great and terrible wizard.")
await ctx.send("Continue?")
con_msg = await bot.wait_for("message", check=continue_check)
if game_roll == 3:
await ctx.send("You stumble into a trap!")
await ctx.send("Continue?")
con_msg = await bot.wait_for("message", check=continue_check)
if game_roll == 4:
await ctx.send("Rocks fall, everyone dies.")
await ctx.send("Continue?")
con_msg = await bot.wait_for("message", check=continue_check)
if game_roll == 5:
await ctx.send("A man just walks up and punches you. What a jerk.")
await ctx.send("Continue?")
con_msg = await bot.wait_for("message", check=continue_check)
#Placeholder maneuvers. Plan to expand game later with more events.
#Get duel working for demo
await ctx.send("You have died. Nice try, though! You killed {1} monsters." .format(ctx, slain_enemies))
@bot.command()
#Shoutout to my friend Janine for helping me cut this beast of a function in half.
async def duel(ctx, *, member: dc.Member):  # Member converter so the m.author == duelee check below can match
await ctx.send("You have challenged {1} to a duel! How do you respond {1}?".format(ctx, member))
duelee = member # Discord member, shown as 'Wizard of Chaos#2459' or similar
player1 = Player()
dueler = ctx.author # ditto
player2 = Player()
def filter_tokens(msg, tokens):
"""Returns a list of tokens from the sequence that appear in the message."""
text = msg.content.strip().lower()
return [t for t in tokens if t in text]
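    # e.g. a reply of "I'll parry from the left!" yields ['parry'] against the
    # defenses tuple and ['left'] against the dirs tuple defined further down.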
def check(m): # Check if duel is accepted
return m.author == duelee and bool(filter_tokens(m, ('accept', 'decline')))
try:
msg = await bot.wait_for("message", timeout=20, check=check)
tokens = filter_tokens(msg, ('accept', 'decline'))
if len(tokens) > 1:
await ctx.send("Your indecision has weirded out your opponent. Good job.")
return
if 'decline' == tokens[0]:
await ctx.send("You have declined the challenge, everyone judges you.") #Coward.
return
if 'accept' == tokens[0]:
await ctx.send("You have accepted the duel!")
except asyncio.TimeoutError:
await ctx.send("{1} appears to be absent. Coward.".format(ctx, duelee))
return
await ctx.send(
"The duel has begun. The three attacks are 'critical strike', 'power attack', and 'flurry'. "
"You can hit someone from the 'left' or the 'right', or just not pick a direction. "
"You can also 'die'."
)
await ctx.send(
"Critical strikes cannot be parried. "
"Power attacks cannot be parried or blocked. "
"Flurries cannot be blocked or dodged effectively."
)
#Slightly more in-depth explanation:
#Critical strikes are blocked from the same direction they came in.
#Attempting to roll in any direction other than the opposite of the incoming attack results in a hit.
#Critical strikes cannot be parried, like, at all.
#Flurries must be parried from the same direction. They can be dodged for reduced damage. They cannot be blocked.
#Power attacks cannot be blocked or parried and MUST be dodged, to the opposite of the incoming direction.
#Dodges have to go in the opposite direction or they fail.
#Attack / defense checks based on incoming messages
def attack_check(m, a):
return m.author == a and bool(filter_tokens(m, attacks))
def defense_check(m, a):
return m.author == a and bool(filter_tokens(m, defenses))
atk_time = 5 # Reaction time for players in seconds, set to 10 for demo, 5 during actual play
attacks = ("critical strike", "flurry", "power attack", "die")
defenses = ("parry", "dodge", "block", "die")
dirs = ("left", "right")
while True: # External infinite loop.
for actor1, actor2, stats1, stats2 in ((duelee, dueler, player1, player2), (dueler, duelee, player2, player1)): # Turn order loop.
if not(player2.life() and player1.life()): # Check if either player died during any turn.
await ctx.send("{1} wins!".format(ctx, duelee if player1.life() else dueler))
return
await ctx.send("It's {1}'s turn to attack.".format(ctx, actor1))
try:
a1_msg = await bot.wait_for("message", timeout=20, check=lambda m: attack_check(m, actor1))
except asyncio.TimeoutError:
await ctx.send("{1} does nothing.".format(ctx, actor1))
continue
attack_tokens = filter_tokens(a1_msg, attacks)
attack_dirs = filter_tokens(a1_msg, dirs)
if len(attack_tokens) > 1 or len(attack_dirs) > 1:
await ctx.send("{1} has wasted too much time on indecisive action and got confused!".format(ctx, actor1))
continue
attack_token = attack_tokens[0]
attack_dir = attack_dirs[0] if attack_dirs else "top"
if "die" == attack_token:
await ctx.send("{1} screams that {2} will never understand their pain, then slits their wrists!".format(ctx, actor1, actor2))
stats1.take_hit(100) # It's no surprise the emo movement failed, no surprise at all.
continue
await ctx.send("{1} throws out a {2} from the {3}!".format(ctx, actor1, attack_token, attack_dir))
try:
a2_msg = await bot.wait_for("message", timeout=atk_time, check=lambda m: defense_check(m, actor2))
except asyncio.TimeoutError:
await ctx.send("{1} doesn't move fast enough, and gets hit!".format(ctx, actor2))
stats2.take_hit((20, 15, 10)[attacks.index(attack_token)])
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
continue
defense_tokens = filter_tokens(a2_msg, defenses)
defense_dirs = filter_tokens(a2_msg, dirs)
if len(defense_tokens) > 1 or len(defense_dirs) > 1:
await ctx.send("{1} doesn't get their act together fast enough and gets hit!".format(ctx, actor2))
stats2.take_hit((20, 15, 10)[attacks.index(attack_token)])
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, player2.health()))
continue
defense_token = defense_tokens[0]
defense_dir = defense_dirs[0] if defense_dirs else "top"
if "die" == defense_token:
await ctx.send("{1} accepts their fate and allows the blow to crush their skull!".format(ctx, actor2))
stats2.take_hit(100)
continue
# A whole bunch of if/elif/else chains. Asyncio REALLY does not like when you try to call outside functions.
# CRITICAL STRIKE:
if "critical strike" == attack_token:
if "left" == attack_dir:
if "block" == defense_token:
if "left" == defense_dir:
await ctx.send("{1} blocks the strike.".format(ctx, actor2))
else:
await ctx.send("{1} tries to block, but misses the direction of the blow!".format(ctx, actor2))
elif "parry" == defense_token:
await ctx.send("{1} attempts to parry, but the blow is too precisely aimed!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
if "left" == defense_dir:
await ctx.send("{1} tries to roll out of the way, but rolls straight into the blow!".format(ctx, actor2))
stats2.take_hit(40)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "right" == defense_token:
await ctx.send("{1} dodges the blow.".format(ctx, actor2))
else:
await ctx.send("{1} misses the dodge.".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "right" == attack_dir:
if "block" == defense_token:
if "right" == defense_dir:
await ctx.send("{1} blocks the strike.".format(ctx, actor2))
else:
await ctx.send("{1} tries to block, but misses the direction of the blow!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "parry" == defense_token:
await ctx.send("{1} attempts to parry, but the blow is too precisely aimed!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
if "right" == defense_dir:
await ctx.send("{1} tries to roll out of the way, but rolls straight into the blow!".format(ctx, actor2))
stats2.take_hit(40)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "left" == defense_dir:
await ctx.send("{1} dodges the blow.".format(ctx, actor2))
else:
await ctx.send("{1} misses the dodge.".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
if "block" == defense_token:
if defense_dir != "top":
await ctx.send("{1} fails to block the central strike!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
await ctx.send("{1} blocks the strike.".format(ctx, actor2))
elif "parry" == defense_token:
await ctx.send("{1} attempts to parry, but the blow is too precisely aimed!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
if defense_dir != "top":
await ctx.send("{1} tries to roll, but gets slapped anyway!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
await ctx.send("{1} dodges the blow.".format(ctx, actor2))
#All critical strike maneuvers handled.
#FLURRY:
if "flurry" == attack_token:
if "left" == attack_dir:
if "block" == defense_token:
await ctx.send("{1} attempts to block the blows, but there's just too many!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "parry" == defense_token:
if "left" == defense_dir:
await ctx.send("{1} easily parries the attacks, redirecting them onto {2}!".format(ctx, actor2, actor1))
stats1.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor1, stats1.health()))
else:
await ctx.send("{1} tries to parry, but misjudges the direction and gets hit!".format(ctx, actor2))
stats2.take_hit(15)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
if "left" == defense_dir:
await ctx.send("{1} tries to roll out of the way, but rolls straight into the blows!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "right" == defense_dir:
await ctx.send("{1} dodges most of the blows, but takes one across the back!".format(ctx, actor2))
stats2.take_hit(5)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
await ctx.send("{1} misses the dodge.".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "right" == attack_dir:
if "block" == defense_token:
await ctx.send("{1} attempts to block the blows, but there's just too many!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "parry" == defense_token:
if "right" == defense_dir:
await ctx.send("{1} easily parries the attacks, redirecting them onto {2}!".format(ctx, actor2, actor1))
stats1.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor1, stats1.health()))
else:
await ctx.send("{1} tries to parry, but misjudges the direction and gets hit!".format(ctx, actor2))
stats2.take_hit(15)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
if "right" == defense_dir:
await ctx.send("{1} tries to roll out of the way, but rolls straight into the blows!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "left" == defense_dir:
await ctx.send("{1} dodges most of the blows, but takes one across the back!".format(ctx, actor2))
stats2.take_hit(5)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
await ctx.send("{1} misses the dodge.".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
if "block" == defense_token:
await ctx.send("{1} attempts to block the blows, but there's just too many!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "parry" == defense_token:
if defense_dir != "top":
await ctx.send("{1} tries to parry, but misjudges the direction and gets hit!".format(ctx, actor2))
stats2.take_hit(5)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
await ctx.send("{1} easily parries the attacks, redirecting them onto {2}!".format(ctx, actor2, actor1))
stats1.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor1, stats1.health()))
elif "dodge" == defense_token:
if defense_dir != "top":
await ctx.send("{1} tries to roll, but gets slapped anyway!".format(ctx, actor2))
stats2.take_hit(15)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
await ctx.send("{1} dodges most of the blows, but takes one hit anyway!".format(ctx, actor2))
stats2.take_hit(5)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
#Flurry maneuvers handled.
#POWER ATTACK:
if "power attack" == attack_token:
if "left" == attack_dir:
if "block" == defense_token:
await ctx.send("{1} tries to block, but the blow is too much!".format(ctx, actor2))
stats2.take_hit(10)
elif "parry" == defense_token:
await ctx.send("{1} attempts to parry, but the blow is too much!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
if "left" == defense_dir:
await ctx.send("{1} tries to roll out of the way, but rolls straight into the blow!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "right" == defense_dir:
await ctx.send("{1} dodges the blow.".format(ctx, actor2))
else:
await ctx.send("{1} misses the dodge.".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "right" == attack_dir:
if "block" == defense_token:
await ctx.send("{1} tries to block, but the blow is too much!".format(ctx, actor2))
stats2.take_hit(10)
elif "parry" == defense_token:
await ctx.send("{1} attempts to parry, but the blow is too much!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
if "right" == defense_dir:
await ctx.send("{1} tries to roll out of the way, but rolls straight into the blow!".format(ctx, actor2))
stats2.take_hit(20)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "left" == defense_dir:
await ctx.send("{1} dodges the blow.".format(ctx, actor2))
else:
await ctx.send("{1} misses the dodge.".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
if "block" == defense_token:
await ctx.send("{1} tries to block, but the blow is too much!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "parry" == defense_token:
await ctx.send("{1} attempts to parry, but the blow is too much!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
elif "dodge" == defense_token:
                    if defense_dir != "top":
await ctx.send("{1} tries to roll, but gets slapped anyway!".format(ctx, actor2))
stats2.take_hit(10)
await ctx.send("{1} 's health is {2}.".format(ctx, actor2, stats2.health()))
else:
await ctx.send("{1} dodges the blow.".format(ctx, actor2))
# Power attacks handled.
# All attacks handled. Next player's attack.
#END DUEL
if __name__ == '__main__':
bot.run(get_token()) | [((397, 429), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': '"""!"""'}), "(command_prefix='!')\n", (409, 429), False, 'from discord.ext import commands\n'), ((474, 504), 'guildconfig.GuildConfig', 'GuildConfig', (['bot', '"""config.pkl"""'], {}), "(bot, 'config.pkl')\n", (485, 504), False, 'from guildconfig import GuildConfig\n'), ((518, 545), 'rolesaver.RoleSaver', 'RoleSaver', (['bot', '"""roles.pkl"""'], {}), "(bot, 'roles.pkl')\n", (527, 545), False, 'from rolesaver import RoleSaver\n'), ((11325, 11762), 'discord.Embed', 'dc.Embed', ([], {'color': 'ctx.author.color', 'timestamp': 'ctx.message.created_at', 'description': 'f"""It seems you have asked about the Homestuck and Hiveswap Discord Utility Bot:tm:.This is a bot designed to cater to the server\'s moderation, utility, and statistic tracking needs. If the functions herein described are not performing to the degree that is claimed, please direct your attention to Wizard of Chaos#2459.\n\n**Command List:**"""'}), '(color=ctx.author.color, timestamp=ctx.message.created_at,\n description=\n f"""It seems you have asked about the Homestuck and Hiveswap Discord Utility Bot:tm:.This is a bot designed to cater to the server\'s moderation, utility, and statistic tracking needs. If the functions herein described are not performing to the degree that is claimed, please direct your attention to Wizard of Chaos#2459.\n\n**Command List:**"""\n )\n', (11333, 11762), True, 'import discord as dc\n'), ((12937, 12954), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (12952, 12954), False, 'from datetime import datetime\n'), ((13208, 13257), 'discord.Embed', 'dc.Embed', ([], {'color': 'member.color', 'timestamp': 'timestamp'}), '(color=member.color, timestamp=timestamp)\n', (13216, 13257), True, 'import discord as dc\n'), ((7057, 7074), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (7072, 7074), False, 'from datetime import datetime\n'), ((16066, 16079), 'random.randint', 'randint', (['(1)', '(1)'], {}), '(1, 1)\n', (16073, 16079), False, 'from random import randint\n'), ((3687, 3712), 'calendar.monthrange', 'calendar.monthrange', (['y', 'm'], {}), '(y, m)\n', (3706, 3712), False, 'import calendar\n'), ((16374, 16388), 'random.randint', 'randint', (['(5)', '(12)'], {}), '(5, 12)\n', (16381, 16388), False, 'from random import randint\n'), ((16408, 16424), 'random.randint', 'randint', (['(40)', '(120)'], {}), '(40, 120)\n', (16415, 16424), False, 'from random import randint\n'), ((16445, 16459), 'random.randint', 'randint', (['(5)', '(20)'], {}), '(5, 20)\n', (16452, 16459), False, 'from random import randint\n'), ((16481, 16494), 'random.randint', 'randint', (['(0)', '(1)'], {}), '(0, 1)\n', (16488, 16494), False, 'from random import randint\n'), ((4668, 4683), 'discord.Color.gold', 'dc.Color.gold', ([], {}), '()\n', (4681, 4683), True, 'import discord as dc\n'), ((5511, 5533), 'discord.Color.darker_grey', 'dc.Color.darker_grey', ([], {}), '()\n', (5531, 5533), True, 'import discord as dc\n'), ((6261, 6277), 'discord.Color.green', 'dc.Color.green', ([], {}), '()\n', (6275, 6277), True, 'import discord as dc\n'), ((6301, 6318), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (6316, 6318), False, 'from datetime import datetime\n'), ((7455, 7469), 'discord.Color.red', 'dc.Color.red', ([], {}), '()\n', (7467, 7469), True, 'import discord as dc\n'), ((22189, 22202), 'random.randint', 'randint', (['(0)', '(4)'], {}), '(0, 4)\n', (22196, 22202), False, 'from 
random import randint\n'), ((8992, 9007), 'discord.Color.blue', 'dc.Color.blue', ([], {}), '()\n', (9005, 9007), True, 'import discord as dc\n'), ((9035, 9052), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (9050, 9052), False, 'from datetime import datetime\n'), ((22834, 22848), 'random.randint', 'randint', (['(5)', '(30)'], {}), '(5, 30)\n', (22841, 22848), False, 'from random import randint\n'), ((23084, 23098), 'random.randint', 'randint', (['(3)', '(12)'], {}), '(3, 12)\n', (23091, 23098), False, 'from random import randint\n'), ((10297, 10314), 'discord.Color.purple', 'dc.Color.purple', ([], {}), '()\n', (10312, 10314), True, 'import discord as dc\n'), ((10346, 10363), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (10361, 10363), False, 'from datetime import datetime\n'), ((19618, 19632), 'random.randint', 'randint', (['(5)', '(18)'], {}), '(5, 18)\n', (19625, 19632), False, 'from random import randint\n'), ((20062, 20076), 'random.randint', 'randint', (['(5)', '(10)'], {}), '(5, 10)\n', (20069, 20076), False, 'from random import randint\n')] |
henryoliver/data-structures | stack.py | eb3d709543ace5197236164998b8295e72187cb0 | class Stack:
def __init__(self):
self.stack = []
self.minMaxStack = []
# O(1) time | O(1) space
def peek(self):
if (len(self.stack)):
return self.stack[-1]
return None
# O(1) time | O(1) space
def pop(self):
if (len(self.stack)):
self.minMaxStack.pop()
return self.stack.pop()
return None
# Procedure
# O(1) time | O(1) space
def push(self, value):
minNumber = value
maxNumber = value
if (len(self.minMaxStack)):
lastMinMax = self.minMaxStack[-1]
minNumber = min(lastMinMax[0], minNumber)
maxNumber = max(lastMinMax[1], maxNumber)
self.stack.append(value)
self.minMaxStack.append((minNumber, maxNumber))
print(self.stack)
print(self.minMaxStack)
# O(1) time | O(1) space
def getMin(self):
if (len(self.minMaxStack)):
return self.minMaxStack[-1][0]
return None
# O(1) time | O(1) space
def getMax(self):
if (len(self.minMaxStack)):
return self.minMaxStack[-1][1]
return None
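# Minimal usage sketch (illustrative, not part of the original module):
#   s = Stack()
#   s.push(5); s.push(2); s.push(9)
#   s.getMin(), s.getMax()   # -> (2, 9)
#   s.pop()                  # -> 9; the tracked min/max fall back to (2, 5)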
| [] |
javiergarridomellado/proyectodai | ProyectoDAI/settings.py | 64944d10f543c3094630056906b5f101a73bdd7b | """
Django settings for TusPachangas project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import django
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
TEMPLATE_PATH = os.path.join(BASE_DIR, 'templates')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '26*swq94+rg+-2tc2es6j&d#&(g4@@xe7vh1hu1)6*z^v@pd2q'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'registration', #add in the registration package
'rest_framework',
'restaurante',
'easy_maps',
)
if django.VERSION < (1, 7):
INSTALLED_APPS += (
'south',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'ProyectoDAI.urls'
WSGI_APPLICATION = 'ProyectoDAI.wsgi.application'
TEMPLATE_DIRS = (TEMPLATE_PATH,)
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
ON_HEROKU = os.environ.get('PORT')
if ON_HEROKU:
DATABASE_URL='postgres://kytzveedsclzaf:eIJAAuElYvSxPK-vmSdXG9Hjv8@ec2-107-21-219-235.compute-1.amazonaws.com:5432/df9sfr7a9b8vjf'
DATABASES = {'default': dj_database_url.config(default=DATABASE_URL)}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_PATH = os.path.join(BASE_DIR,'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
STATIC_PATH,
)
#Media
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
| [((438, 473), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (450, 473), False, 'import os\n'), ((2074, 2096), 'os.environ.get', 'os.environ.get', (['"""PORT"""'], {}), "('PORT')\n", (2088, 2096), False, 'import os\n'), ((2600, 2632), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (2612, 2632), False, 'import os\n'), ((2741, 2772), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media"""'], {}), "(BASE_DIR, 'media')\n", (2753, 2772), False, 'import os\n'), ((394, 419), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (409, 419), False, 'import os\n'), ((2016, 2052), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""db.sqlite3"""'], {}), "(BASE_DIR, 'db.sqlite3')\n", (2028, 2052), False, 'import os\n'), ((2268, 2312), 'dj_database_url.config', 'dj_database_url.config', ([], {'default': 'DATABASE_URL'}), '(default=DATABASE_URL)\n', (2290, 2312), False, 'import dj_database_url\n')] |
JustinACoder/H22-GR3-UnrealAI | Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/python/eager/test.py | 361eb9ef1147f8a2991e5f98c4118cd823184adf | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing tfe code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops as _ops
from tensorflow.python.platform import test as _test
from tensorflow.python.platform.test import * # pylint: disable=wildcard-import
# TODO(akshayka): Do away with this file.
def main(argv=None):
_ops.enable_eager_execution()
_test.main(argv)
| [((1118, 1147), 'tensorflow.python.framework.ops.enable_eager_execution', '_ops.enable_eager_execution', ([], {}), '()\n', (1145, 1147), True, 'from tensorflow.python.framework import ops as _ops\n'), ((1151, 1167), 'tensorflow.python.platform.test.main', '_test.main', (['argv'], {}), '(argv)\n', (1161, 1167), True, 'from tensorflow.python.platform import test as _test\n')] |
monokim/CheesyBullets | util.py | eeb5a79a69936701ff7962b846e6310f7df91cb0 | import time
import pyautogui
import win32gui
def get_screen_rect(caption='CheesyBullets'):
hwnd = win32gui.FindWindow(None, caption)
rect = win32gui.GetWindowRect(hwnd)
    screen_rect = (rect[0], rect[1], rect[2] - rect[0], rect[3] - rect[1])  # (x, y, width, height) form; note the raw rect is what gets returned below
return rect
class Timer():
def __init__(self):
self.times = []
self.cnt = 0
def set_timer(self, name="timer"):
flag = False
for i, t in enumerate(self.times):
if t[1] == name:
flag = True
t[0] = time.time()
break
if flag == False:
self.times.append([time.time(), name])
def print_time(self, name="timer"):
flag = False
for i, t in enumerate(self.times):
if t[1] == name:
flag = True
print(name + " takes (%.5f)s" % (time.time() - t[0]))
break
if flag == False:
raise Exception("There is no timer")
def delete_timer(self, name = None):
for i, t in enumerate(self.times):
if t[1] == name:
self.times.pop(i)
break
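# Example use of the Timer helper (illustrative):
#   t = Timer()
#   t.set_timer("frame")
#   ... do some work ...
#   t.print_time("frame")    # prints e.g. "frame takes (0.01234)s"
#   t.delete_timer("frame")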
| [((103, 137), 'win32gui.FindWindow', 'win32gui.FindWindow', (['None', 'caption'], {}), '(None, caption)\n', (122, 137), False, 'import win32gui\n'), ((149, 177), 'win32gui.GetWindowRect', 'win32gui.GetWindowRect', (['hwnd'], {}), '(hwnd)\n', (171, 177), False, 'import win32gui\n'), ((538, 549), 'time.time', 'time.time', ([], {}), '()\n', (547, 549), False, 'import time\n'), ((630, 641), 'time.time', 'time.time', ([], {}), '()\n', (639, 641), False, 'import time\n'), ((861, 872), 'time.time', 'time.time', ([], {}), '()\n', (870, 872), False, 'import time\n')] |
mbrcknl/graph-refine | graph-to-graph/elf_correlate.py | 78c74f18127db53606f18f775a5a50de86bc6b97 | #
# Copyright 2020, Data61, CSIRO (ABN 41 687 119 230)
#
# SPDX-License-Identifier: BSD-2-Clause
#
import re
import graph_refine.syntax as syntax
import graph_refine.problem as problem
import graph_refine.stack_logic as stack_logic
from graph_refine.syntax import true_term, false_term, mk_not
from graph_refine.check import *
import graph_refine.search as search
import elf_parser
import graph_refine.target_objects as target_objects
from imm_utils import *
from elf_file import *
from addr_utils import *
from call_graph_utils import gFuncsCalled
from dot_utils import toDot,toGraph
from addr_utils import gToPAddrP,callNodes
def loadCounts(dir_name):
    #loop_counts.py must define a dict called loops_by_fs
context = {}
execfile('%s/loop_counts.py' % dir_name,context)
#we should have a dict of addr -> bound
assert 'loops_by_fs' in context
lbfs = context['loops_by_fs']
return lbfs
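# An illustrative loop_counts.py accepted by loadCounts (names, addresses and bounds
# are made up) would define:
#   loops_by_fs = {'memcpy': {0xf0012a4: (64, 'hand-derived bound', 0)}}
# i.e. function name -> loop head address -> (bound, description, worker_id).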
class immFunc (Borg):
def __init__(self,elf_fun=None,load_counts=False):
Borg.__init__(self)
if not elf_fun:
return
self.elf_fun = elf_fun
self.name = elf_fun.name
self.addr = elf_fun.addr
self.g_f = elf_fun.g_f
self.asm_fs = elfFile().asm_fs
self.imm_nodes = {}
self.bbs = {}
self.loaded_loop_counts = False
self.parse_only = False
self.loop_bounds = {}
# dict of f-> loop_heads -> (bound, description)
self.loops_by_fs = {}
#f -> p_n
self.p_entries = {}
if load_counts:
self.loaded_loops_by_fs = loadCounts(elfFile().dir_name)
self.loaded_loop_counts = True
def process(self):
if self.bbs != {}:
return
self.makeBinGraph()
self.loopheads = {}
self.findLoopheads()
lbfs = self.loops_by_fs
if self.loaded_loop_counts:
self.bin_loops_by_fs = self.loaded_loops_by_fs
print 'loaded loop counts from file'
else:
#build bin_loops_by_fs from loops_by_fs
self.bin_loops_by_fs = {}
blbf = self.bin_loops_by_fs
for f in lbfs:
blbf[f] = {}
p = self.f_problems[f]
pA = lambda x: phyAddrP(x,p)
loops = lbfs[f]
for p_head in loops:
assert pA(p_head) not in blbf
blbf[f][pA(p_head)] = loops[p_head]
def isBBHead(self,p_nf):
if not self.isRealNode(p_nf):
return False
g_n = self.phyAddr(p_nf)
if not type(g_n) == int:
return False
return g_n in self.bbs
#bin addr to bb addr
def bbAddr(self,addr):
bbs = self.bbs
for x in bbs:
if addr in bbs[x]:
return x
print 'addr: %x' % addr
assert False, 'BB not found !!'
def toPhyAddrs(self, p_nis):
return [self.phyAddr(x) for x in p_nis]
#find all possible entries of the loop for Chronos
def findLoopEntries(self, loop, f):
p = self.f_problems[f]
head = None
lp = [x for x in list(loop) if self.isRealNode( (x,f) )]
lpp = []
lp_phys = self.toPhyAddrs([(x,f) for x in lp])
for x in lp:
#loop entry, must be
#1. a basic block head and
#2. has >=1 edge from outside the loop
if (x, f ) in self.pf_deadends:
##gotta be halt / branch to halt
continue
phy_n = self.phyAddr((x,f))
node = self.imm_nodes[phy_n]
imm_ext_edges_to = [y for y in node.edges_to if (y not in lp_phys)]
if ( len(imm_ext_edges_to) >= 1 and self.isBBHead((x,f)) ):
lpp.append(x)
return lpp
def findLoopheads(self):
self.imm_loopheads = {}
#loopheads = {}
loopheads = []
#self.loopheads = loopheads
loops_by_fs = self.loops_by_fs
for (f,p) in [(f,self.f_problems[f]) for f in self.f_problems]:
p.compute_preds()
p.do_loop_analysis()
l = p.loop_data
if p.loop_heads():
loops_by_fs[f] = {}
for x in p.loop_heads():
fun,_ = self.pNToFunGN((x,f))
#dodge halt
if fun in elfFile().deadend_funcs:
continue
loopheads.append((x, f))
#the 0 worker_id will get ignored by genLoopHeads.
#FIXME: do this properly..
loops_by_fs[f][x] = (2**30,'dummy',0)
assert loopheads
for p_nf in loopheads:
p_n, f = p_nf
p = self.f_problems[f]
ll = p.loop_data[p_n][1]
z = self.findLoopEntries(ll, f)
#map from potential heads -> head, hack around chronos 'feature'
for q in z:
assert q not in self.imm_loopheads, 'one addr cannot have >1 loopcounts !'
self.imm_loopheads[self.phyAddr((q,f))] = p_nf
return
def firstRealNodes(self,p_nf,visited = None,may_multi=False,may_call=False,skip_ret=False):
"""
        Locate the first real node from, and including, p_nf,
or branch targets if it hits a branch before that.
Returns a list of p_nf
"""
elf_fun = self.elf_fun
p_n,f = p_nf
next_p_nf = p_nf
ret = []
if visited == None:
#print 'fRN on p_n %d, fun: %s' % (p_n,f)
visited = []
if p_nf in visited:
return []
visited.append(p_nf)
assert self.pf_deadends != None
while True:
if self.isRealNode(next_p_nf):
return [next_p_nf]
next_p_n , next_f, next_p = self.unpackPNF(next_p_nf)
if ( next_p_n == 'Ret' and f == self.name):
return [('Ret',f)]
elif next_p_n == 'Ret':
if skip_ret:
return []
assert False,'firstRealNodes reached Ret when skip_ret is False'
p_node, edges = self.pNodeConts(next_p_nf, may_call=may_call)
if edges == []:
return []
assert (edges)
if len(edges) > 1:
assert may_multi
for p_e in edges:
for ee in self.firstRealNodes(p_e ,visited = list(visited),may_multi=may_multi,may_call=may_call,skip_ret=skip_ret):
ret.append(ee)
return ret
else:
next_p_nf = edges[0]
#function p_n belongs to, g_n
def pNToFunGN(self,p_nf):
p_n,f,p = self.unpackPNF(p_nf)
tag = p.node_tags[p_n]
_, x = tag
f_name, g_n = x
return f_name,g_n
    #given p_n is an imm call, return is_tailcall
def isCallTailCall(self,p_nf):
# suc = p_n_cs[0]
g_n = self.phyAddr(p_nf)
return elf_parser.isDirectBranch(g_n)
def isStraightToRetToRoot(self,p_nf):
p_n,f,p = self.unpackPNF(p_nf)
if p_n == 'Ret' and f == self.name:
return True
elif p_n == 'Ret':
return False
if self.isRealNode(p_nf):
return False
if self.phyAddr(p_nf)=='RetToCaller':
return False
elif type(p_n) == int:
_,pf_conts = self.pNodeConts(p_nf)
p_conts = [x[0] for x in pf_conts]
if len(p_conts) == 1:
return self.isStraightToRetToRoot((p_conts[0],f))
return False
#whether the corresponding imm has a return edge
def isImmRootReturn(self,p_nf):
p_n,f = p_nf
if f != self.name :
return False
_, pf_conts = self.pNodeConts(p_nf)
for x in pf_conts:
if self.isStraightToRetToRoot(x):
return True
return False
    #whether p_n leads straight to RetToCaller
def isStraightToRetToCaller(self,p_nf):
p_n,f = p_nf
if p_n == 'Ret':
if f != self.name:
return True
else:
return False
if self.isRealNode(p_nf):
return False
if self.phyAddr(p_nf)=="RetToCaller":
return True
elif type(p_n) == int:
_,pf_conts = self.pNodeConts(p_nf)
p_conts = [x[0] for x in pf_conts]
if len(p_conts) == 1:
return self.isStraightToRetToCaller((p_conts[0],f))
return False
#All return except the root one
def isImmRetToCaller(self,p_nf):
g_n = self.phyAddr(p_nf)
p_n,f,p = self.unpackPNF(p_nf)
if isCall(p.nodes[p_n]):
return False
p_node,pf_conts = self.pNodeConts(p_nf)
p_conts = [x[0] for x in pf_conts]
conts = [x for x in p_conts if type(p_n) == int]
#print ' p_n %s p_conts %s' % (p_n,p_conts)
n_rtc = 0
assert self.phyAddr(p_nf) == g_n
for pf_cont in pf_conts:
cont_n,cont_f = pf_cont
if not isCall(self.f_problems[cont_f].nodes[cont_n]):
if self.isStraightToRetToCaller(pf_cont):
ret = (pf_cont)
n_rtc += 1
if not ( n_rtc <= 1):
#print 'p_n %s g_n %s: n_rtc %s' % (p_n, self.phyAddr(p_n), n_rtc)
assert False
if n_rtc > 0:
return ret
return False
def funName(self,p_nf):
p_n,f = p_nf
fname = self.f_problems[f].nodes[p_n].fname
if '.' in fname:
#print 'f: %s' % fname
s = []
for c in fname:
if c == '.':
s.append('_')
else:
s.append(c)
return ''.join(s)
return fname
def makeProblem(self,f):
p = problem.Problem(None, 'Functions (%s)' % f)
p.add_entry_function(self.asm_fs[f], 'ASM')
p.do_analysis()
return p
def isSpecInsFunc(self,f):
"""
Returns whether f is the name of a special function
        used to model a special instruction
"""
return f.startswith ("instruction'")
def makeBinGraph(self):
"""
Prepare problems for all functions transitively called by self,
and turn this into a binary CFG
"""
self.f_problems = {}
if self.name not in elfFile().tcg:
print elfFile().tcg.keys()
tc_fs = list(elfFile().tcg[self.name])
for f in tc_fs + [self.name]:
assert '.' not in f
if self.isSpecInsFunc(f):
continue
p = problem.Problem(None, 'Functions (%s)' % f)
p.add_entry_function(self.asm_fs[f], 'ASM')
self.f_problems[f] = p
#print 'f %s, p.nodes: %d' % (f,len(p.nodes) )
#get its entry
assert len(p.entries) == 1
self.p_entries[f] = p.entries[0][0]
print 'all problems generated'
self.findAllDeadends()
print "all deadends found"
#now generate the bin graph
for f,p in self.f_problems.iteritems():
for p_n in p.nodes:
if type(p_n) != int:
continue
p_nf = (p_n,f)
if p_nf in self.pf_deadends:
continue
if self.isRealNode(p_nf):
#print 'adding: %s' % str(p_nf)
self.addImmNode(p_nf)
self.imm_entry = self.phyAddr(self.firstRealNodes((self.p_entries[self.name], self.name ))[0])
#print 'self.imm_entry %x' % self.imm_entry
self.bbs = findBBs(self.imm_entry,self)
def findAllDeadends(self):
self.pf_deadends = []
pf_deadends = self.pf_deadends
self.deadend_g_ns = set()
#Halt is a deadend function, and should never be called, it's equivalent to Err for our purpose
for dead_f in elfFile().deadend_funcs:
print 'dead_f %s' % dead_f
deadend_f_g_n = elfFile().funcs[dead_f].addr
self.deadend_g_ns.add (deadend_f_g_n)
print 'deadend_f_g_n 0x%x' % deadend_f_g_n
for (f,p) in self.f_problems.iteritems():
for p_n in p.nodes:
if self.isDeadend((p_n,f)):
pf_deadends.append((p_n,f))
def isDeadend(self,p_nf,visited=None):
'''
Determine if p_nf (p_n, function) is a deadend node
'''
if p_nf in self.pf_deadends:
return True
p_n, f, p = self.unpackPNF(p_nf)
if visited == None:
visited = []
if p_n == 'Err':
return True
if p_n == 'Ret':
return False
if p_nf in visited:
return True
if isCall(p.nodes[p_n]):
#walk into the callee problem
f = self.funName(p_nf)
#FIXME: dodge dummy functions
if 'instruction' in f:
return False
if f in elfFile().deadend_funcs:
return True
p_callee = self.f_problems[f]
assert len(p_callee.entries) == 1
p_callee_n = p_callee.entries[0][0]
return self.isDeadend((p_callee_n,f),visited=visited + [p_nf])
if type(p_n) == int and self.phyAddr(p_nf) == 'RetToCaller':
return False
g_n = self.phyAddr(p_nf)
if g_n in self.deadend_g_ns:
return True
#note: pNodeConts ensures we stay in the same problem
node,fconts = self.pNodeConts(p_nf)
conts = [ x[0] for x in fconts]
for p_c in conts:
assert p_c != p_n
if not self.isDeadend( (p_c,f), visited = visited + [p_nf]):
return False
#all ends are dead, thus deadend
return True
def unpackPNF(self,p_nf):
p_n,f = p_nf
p = self.f_problems[f]
return (p_n,f,p)
def phyAddr (self,p_nf) :
p_n, f , p = self.unpackPNF(p_nf)
if not isinstance(p_n,int):
return p_n
_,x = p.node_tags[p_n]
if x == 'LoopReturn':
return 'LoopReturn'
try:
f_name,g_addr = x
except:
print f
print 'tags: %s'% str(p.node_tags[p_n])
assert False
return g_addr
#must not reach Ret
def pNodeConts(self, p_nf, no_deadends=False, may_call = False):
p_n,f, p = self.unpackPNF(p_nf)
p_node = p.nodes[p_n]
if isCall(p_node):
assert may_call
fun_called = self.funName(p_nf)
p = self.f_problems[fun_called]
entry = self.p_entries[fun_called]
pf_conts = [(entry,fun_called)]
return p_node, pf_conts
assert p_n != 'Ret'
p_conts = filter(lambda x: x != 'Err', p_node.get_conts())
if no_deadends:
            p_conts = filter(lambda x: (x, f) not in self.pf_deadends, p_conts)
pf_conts = [(x , f) for x in p_conts]
return p_node,pf_conts
def isRealNode(self,p_nf):
p_n,f = p_nf
if p_n == 'Ret':
return False
g_n = self.phyAddr(p_nf)
if g_n == 'RetToCaller':
return False
elif self.isLoopReturn(p_nf):
return False
elif type(g_n) != int:
print 'g_n %s' % str(g_n)
assert False, 'g_n expected of typ int'
#elif g_n % 4 == 0 and not self.isLoopReturn(p_nf):
elif g_n % 4 == 0:
assert not self.isLoopReturn(p_nf)
return True
else:
return False
def isLoopReturn(self,p_nf):
p_n,f = p_nf
p = self.f_problems[f]
tag = p.node_tags[p_n]
return tag[1] == 'LoopReturn'
def addImmNode(self,p_nf):
imm_nodes = self.imm_nodes
g_n = self.phyAddr(p_nf)
p_node,pf_conts = self.pNodeConts(p_nf)
p_conts = [x[0] for x in pf_conts]
p_n,f,p = self.unpackPNF(p_nf)
#print "adding imm_node p_n: %s f: %s" % (p_n,f)
if g_n in imm_nodes:
#we have been here before
node = imm_nodes[g_n]
else:
node = immNode(g_n,rawVals(g_n))
imm_nodes[g_n] = node
dont_emit = []
p_imm_return_to_caller_edge = self.isImmRetToCaller(p_nf)
call_pn = self.getCallTarg(p_nf)
if call_pn:
fun_called = self.funName((call_pn, f))
if self.isSpecInsFunc(fun_called):
#Hack: go straight to the return node, do nothing else
next_addrs = p.nodes[call_pn].get_conts()
assert len(next_addrs) == 1
next_addr = next_addrs[0]
assert next_addr not in ['Ret','Err']
phy_next_addr = self.phyAddr((next_addr,f))
i_e = immEdge(phy_next_addr, emit = True)
node.addEdge(i_e)
return
imm_call = self.parseImmCall(p_nf)
assert not p_imm_return_to_caller_edge
g_call_targ,g_ret_addr,is_tail_call = imm_call
dont_emit.append(g_call_targ)
node.addCallRetEdges(g_call_targ, g_ret_addr,is_tail_call)
elif p_imm_return_to_caller_edge or self.isImmRootReturn(p_nf):
node.addRetEdge()
        #add edges to the imm node, ignore Err and halt
for p_targ in p_conts:
if type(p_targ) == int and (p_targ, f) not in self.pf_deadends:
if p_targ == 'Ret':
continue
edges = self.firstRealNodes((p_targ,f),may_multi=True,may_call=True,skip_ret=True)
for p_e in edges :
#dodge halt
if (p_e) in self.pf_deadends:
continue
g_e = self.phyAddr(p_e)
assert g_e != None
if g_e == 'Ret':
continue
assert g_e != 'Ret'
i_e = immEdge(g_e,emit = g_e not in dont_emit)
node.addEdge(i_e)
def retPF(self,call_p_nf):
p_n,f,p = self.unpackPNF(call_p_nf)
assert len(p.nodes[p_n].get_conts()) == 1
return ( (p.nodes[p_n].get_conts())[0] , f)
def getCallTarg(self, p_nf):
p_n,f,p = self.unpackPNF(p_nf)
_, pf_conts = self.pNodeConts(p_nf)
p_conts = map(lambda x: x[0],pf_conts)
#is Imm call iff there is a successor of kind Call in the g graph
p_n_cs = filter(lambda p_n_c:
type(p_n_c) == int
and not self.isLoopReturn(( p_n_c, f))
and isCall(self.gNode((p_n_c,f)))
, p_conts)
if not p_n_cs:
return None
assert len(p_n_cs) == 1
#return the p_n of the call node
return p_n_cs[0]
def parseImmCall(self,p_nf):
"""
Returns (entry point to the called function, return addr, is_tailcall)
"""
call_pn = self.getCallTarg(p_nf)
assert call_pn != None
p_n,f,p = self.unpackPNF(p_nf)
#print "p_n: %s, f: %s" % (p_n,f)
p_nodes = p.nodes
#find the return addr
#print "call_pn = %d" % call_pn
suc = self.firstRealNodes( (call_pn, f) ,may_multi=False,may_call=True)
pf_call_targ = suc[0]
g_call_targ = self.phyAddr(pf_call_targ)
#locate the call return address
f_caller, _ = self.pNToFunGN(p_nf)
is_tailcall = self.isCallTailCall(p_nf)
if not is_tailcall:
#return the return addr
phy_ret_addr = self.phyAddr(self.retPF((call_pn,f)))
else:
phy_ret_addr = None
assert type(phy_ret_addr) == int or is_tailcall, "g_call_targ %s phy_ret_addr %s" % (g_call_targ, phy_ret_addr)
#print 'call detected: phy_ret_addr %x' % phy_ret_addr
return (g_call_targ, phy_ret_addr,is_tailcall)
def gNode(self,p_nf):
p_n,f,p = self.unpackPNF(p_nf)
tag = p.node_tags[p_n]
f = tag[1][0]
g_n = tag[1][1]
return self.asm_fs[f].nodes[g_n]
| [] |
Etherbay/Gelatin | Gelatin/parser/Parser.py | d2afa85a48034d6ee34580e49e16542f31ad208e | # Copyright (c) 2010-2017 Samuel Abels
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import codecs
from simpleparse import parser
from .Newline import Newline
from .Indent import Indent
from .Dedent import Dedent
from .util import error
_ebnf_file = os.path.join(os.path.dirname(__file__), 'syntax.ebnf')
with open(_ebnf_file) as _thefile:
_ebnf = _thefile.read()
class Parser(parser.Parser):
def __init__(self):
self.indent = 0
offside = (
("NEWLINE", Newline(self).table()),
("INDENT", Indent(self).table()),
("DEDENT", Dedent(self).table()),
)
parser.Parser.__init__(self, _ebnf, 'root', prebuilts=offside)
def parse_string(self, input, compiler):
compiler.reset()
start, _, end = parser.Parser.parse(self, input, processor=compiler)
if end < len(input):
error(input, end)
if 'input' not in compiler.context.grammars:
error(input, end, 'Required grammar "input" not found.')
return compiler.context
def parse(self, filename, compiler, encoding='utf8'):
with codecs.open(filename, 'r', encoding=encoding) as input_file:
string = input_file.read()
return self.parse_string(string, compiler)
| [((1285, 1310), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1300, 1310), False, 'import os\n'), ((1650, 1712), 'simpleparse.parser.Parser.__init__', 'parser.Parser.__init__', (['self', '_ebnf', '"""root"""'], {'prebuilts': 'offside'}), "(self, _ebnf, 'root', prebuilts=offside)\n", (1672, 1712), False, 'from simpleparse import parser\n'), ((1808, 1860), 'simpleparse.parser.Parser.parse', 'parser.Parser.parse', (['self', 'input'], {'processor': 'compiler'}), '(self, input, processor=compiler)\n', (1827, 1860), False, 'from simpleparse import parser\n'), ((2146, 2191), 'codecs.open', 'codecs.open', (['filename', '"""r"""'], {'encoding': 'encoding'}), "(filename, 'r', encoding=encoding)\n", (2157, 2191), False, 'import codecs\n')] |
dirchev/Python-101-Forever-1 | C03-Unit-Testing/21-C03V15/utils.py | 13c3bb182747aae244ae6f9fd6f79c8223f3e9a6 | BIG_CONSTANT = "YES"
def group_by(xs, grouper):
groups = {}
for x in xs:
group = grouper(x)
if group not in groups:
groups[group] = []
groups[group].append(x)
return groups
print(group_by([1, 2, 3, 4, 5, 6], lambda x: "even" if x % 2 == 0 else "odd"))
| [] |
streamsets/datacollector-tests-external | pipeline/test_sftp_to_s3.py | 6f255b5e7496deeef333b57a5e9df4911ba3ef00 | # Copyright 2019 StreamSets Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
import json
import logging
import os
import string
import time
from streamsets.sdk.models import Configuration
from streamsets.testframework.markers import aws, sftp, sdc_min_version
from streamsets.testframework.utils import get_random_string
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Sandbox prefix for S3 bucket
S3_BUCKET_PREFIX = 'sftp_upload'
@sdc_min_version('3.8.2')
@sftp
@aws('s3')
def test_sftp_origin_whole_file_to_s3(sdc_builder, sdc_executor, sftp, aws):
"""
This is a test for SDC-11273. First, it creates a large (~6MB) file and puts it on the SFTP server.
Then, it creates a pipeline with SFTP origin and S3 destination, with whole file format, and runs
until the single record (file) is complete. Then, it asserts the S3 bucket contents are correct.
It passes only if the new option ("Disable Read Ahead Stream") is enabled.
"""
sftp_file_name = get_random_string(string.ascii_letters, 10) + '.txt'
raw_text_data = get_random_string(string.printable, 6000000)
sftp.put_string(os.path.join(sftp.path, sftp_file_name), raw_text_data)
s3_bucket = aws.s3_bucket_name
s3_key = f'{S3_BUCKET_PREFIX}/{sftp_file_name}'
# Build the pipeline
builder = sdc_builder.get_pipeline_builder()
sftp_ftp_client = builder.add_stage(name='com_streamsets_pipeline_stage_origin_remote_RemoteDownloadDSource')
sftp_ftp_client.file_name_pattern = sftp_file_name
sftp_ftp_client.data_format = 'WHOLE_FILE'
sftp_ftp_client.set_attributes(disable_read_ahead_stream=True)
s3_destination = builder.add_stage('Amazon S3', type='destination')
s3_destination.file_name_expression = "${record:value('/fileInfo/filename')}"
s3_destination.set_attributes(bucket=s3_bucket, data_format='WHOLE_FILE', partition_prefix=s3_key)
sftp_ftp_client >> s3_destination
sftp_to_s3_pipeline = builder.build(title='SFTP to S3 Whole File').configure_for_environment(aws).configure_for_environment(sftp)
sdc_executor.add_pipeline(sftp_to_s3_pipeline)
client = aws.s3
try:
# start pipeline and run for one record (the file)
sdc_executor.start_pipeline(sftp_to_s3_pipeline).wait_for_pipeline_output_records_count(1)
sdc_executor.stop_pipeline(sftp_to_s3_pipeline)
# assert record count to S3 the size of the objects put
list_s3_objs = client.list_objects_v2(Bucket=s3_bucket, Prefix=s3_key)
assert len(list_s3_objs['Contents']) == 1
# read data from S3 to assert contents
s3_contents = [client.get_object(Bucket=s3_bucket, Key=s3_content['Key'])['Body'].read().decode().strip()
for s3_content in list_s3_objs['Contents']]
# compare the S3 bucket contents against the original whole file contents
assert s3_contents[0] == raw_text_data
finally:
delete_keys = {'Objects': [{'Key': k['Key']}
for k in client.list_objects_v2(Bucket=s3_bucket, Prefix=s3_key)['Contents']]}
client.delete_objects(Bucket=s3_bucket, Delete=delete_keys)
| [((865, 892), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (882, 892), False, 'import logging\n'), ((991, 1015), 'streamsets.testframework.markers.sdc_min_version', 'sdc_min_version', (['"""3.8.2"""'], {}), "('3.8.2')\n", (1006, 1015), False, 'from streamsets.testframework.markers import aws, sftp, sdc_min_version\n'), ((1023, 1032), 'streamsets.testframework.markers.aws', 'aws', (['"""s3"""'], {}), "('s3')\n", (1026, 1032), False, 'from streamsets.testframework.markers import aws, sftp, sdc_min_version\n'), ((1624, 1668), 'streamsets.testframework.utils.get_random_string', 'get_random_string', (['string.printable', '(6000000)'], {}), '(string.printable, 6000000)\n', (1641, 1668), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1551, 1594), 'streamsets.testframework.utils.get_random_string', 'get_random_string', (['string.ascii_letters', '(10)'], {}), '(string.ascii_letters, 10)\n', (1568, 1594), False, 'from streamsets.testframework.utils import get_random_string\n'), ((1689, 1728), 'os.path.join', 'os.path.join', (['sftp.path', 'sftp_file_name'], {}), '(sftp.path, sftp_file_name)\n', (1701, 1728), False, 'import os\n')] |
NoahRJohnson/terra | terra/tests/__init__.py | 131954ee42fb5905ceff35101e34d89c5eb6de6c | import os
# Use this as a package level setup
def load_tests(loader, standard_tests, pattern):
if os.environ.get('TERRA_UNITTEST', None) != "1":
print('WARNING: Running terra tests without setting TERRA_UNITTEST will '
          'result in side effects such as extraneous log files being '
'generated')
this_dir = os.path.dirname(__file__)
package_tests = loader.discover(start_dir=this_dir, pattern=pattern)
standard_tests.addTests(package_tests)
  # Run this test last, to make sure none of the other tests degraded the
# integrity of terra. A configured terra can cause unittests to interfere
# with each other
loader.testMethodPrefix = 'last'
package_tests = loader.discover(start_dir=this_dir, pattern=pattern)
standard_tests.addTests(package_tests)
# This does not check THIS file for 'last', I can't figure that out, cause
# it is "discovered" before load_tests is ever called
return standard_tests
| [((336, 361), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (351, 361), False, 'import os\n'), ((102, 140), 'os.environ.get', 'os.environ.get', (['"""TERRA_UNITTEST"""', 'None'], {}), "('TERRA_UNITTEST', None)\n", (116, 140), False, 'import os\n')] |
benburrill/formiko | icons/svg2png.py | 86630506c537f9517666d9b0d5b2a905e7385b01 | # -*- coding: utf-8 -*-
from gi.repository.GdkPixbuf import Pixbuf
from os import makedirs
def main():
for size in (16, 22, 24, 32, 48, 64, 128, 256, 512):
icon = Pixbuf.new_from_file_at_scale("formiko.svg", size, size, True)
makedirs("%dx%d" % (size, size))
icon.savev("%dx%d/formiko.png" % (size, size), "png", [], [])
if __name__ == "__main__":
main()
| [((178, 240), 'gi.repository.GdkPixbuf.Pixbuf.new_from_file_at_scale', 'Pixbuf.new_from_file_at_scale', (['"""formiko.svg"""', 'size', 'size', '(True)'], {}), "('formiko.svg', size, size, True)\n", (207, 240), False, 'from gi.repository.GdkPixbuf import Pixbuf\n'), ((249, 281), 'os.makedirs', 'makedirs', (["('%dx%d' % (size, size))"], {}), "('%dx%d' % (size, size))\n", (257, 281), False, 'from os import makedirs\n')] |
AngelOnFira/megagame-controller | django/currencies/migrations/0003_auto_20211121_0701.py | 033fec84babf80ffd0868a0f7d946ac4c18b061c | # Generated by Django 3.2.8 on 2021-11-21 12:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("currencies", "0002_initial"),
]
operations = [
migrations.AddField(
model_name="payment",
name="completed",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="payment",
name="completion_amount",
field=models.IntegerField(default=0),
),
]
| [((330, 364), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (349, 364), False, 'from django.db import migrations, models\n'), ((496, 526), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (515, 526), False, 'from django.db import migrations, models\n')] |
tinkoff-ai/etna-ts | etna/transforms/decomposition/trend.py | ded5161ed49f5c2697778825f899842ee30c6c61 | from typing import Optional
import pandas as pd
from ruptures import Binseg
from ruptures.base import BaseCost
from sklearn.linear_model import LinearRegression
from etna.transforms.base import PerSegmentWrapper
from etna.transforms.decomposition.change_points_trend import BaseEstimator
from etna.transforms.decomposition.change_points_trend import TDetrendModel
from etna.transforms.decomposition.change_points_trend import _OneSegmentChangePointsTrendTransform
class _OneSegmentTrendTransform(_OneSegmentChangePointsTrendTransform):
"""_OneSegmentTrendTransform adds trend as a feature."""
def __init__(
self,
in_column: str,
out_column: str,
change_point_model: BaseEstimator,
detrend_model: TDetrendModel,
**change_point_model_predict_params,
):
"""Init _OneSegmentTrendTransform.
Parameters
----------
in_column:
name of column to apply transform to
out_column:
name of added column
change_point_model:
model to get trend change points
detrend_model:
model to get trend from data
change_point_model_predict_params:
params for change_point_model predict method
"""
self.out_column = out_column
super().__init__(
in_column=in_column,
change_point_model=change_point_model,
detrend_model=detrend_model,
**change_point_model_predict_params,
)
def transform(self, df: pd.DataFrame) -> pd.DataFrame:
"""Add column with trend, got from the detrend_model.
Parameters
----------
df:
data to get trend from
Returns
-------
pd.DataFrame:
df with trend column
"""
df._is_copy = False
series = df[self.in_column]
trend_series = self._predict_per_interval_model(series=series)
df[self.out_column] = trend_series
return df
def inverse_transform(self, df: pd.DataFrame) -> pd.DataFrame:
"""Inverse transform dataframe.
Parameters
----------
df:
one segment dataframe
Returns
-------
pd.DataFrame:
given dataframe
"""
return df
class _TrendTransform(PerSegmentWrapper):
"""_TrendTransform adds trend as a feature. Creates column 'regressor_<in_column>_trend'."""
def __init__(
self,
in_column: str,
out_column: str,
change_point_model: BaseEstimator,
detrend_model: TDetrendModel,
**change_point_model_predict_params,
):
"""Init _TrendTransform.
Parameters
----------
in_column:
name of column to apply transform to
out_column:
name of added column
change_point_model:
model to get trend change points
detrend_model:
model to get trend in data
change_point_model_predict_params:
params for change_point_model predict method
"""
super().__init__(
transform=_OneSegmentTrendTransform(
in_column=in_column,
out_column=out_column,
change_point_model=change_point_model,
detrend_model=detrend_model,
**change_point_model_predict_params,
)
)
class TrendTransform(_TrendTransform):
"""TrendTransform adds trend as a feature.
TrendTransform uses Binseg model as a change point detection model in _TrendTransform.
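
    Example of constructing the transform (illustrative column names):
        TrendTransform(in_column="target", out_column="regressor_target_trend", n_bkps=5)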
"""
def __init__(
self,
in_column: str,
out_column: Optional[str] = None,
detrend_model: TDetrendModel = LinearRegression(),
model: str = "ar",
custom_cost: Optional[BaseCost] = None,
min_size: int = 2,
jump: int = 1,
n_bkps: int = 5,
pen: Optional[float] = None,
epsilon: Optional[float] = None,
):
"""Init TrendTransform.
Parameters
----------
in_column:
name of column to apply transform to
out_column:
name of added column. Don't forget to add regressor prefix if necessary.
If not given, use 'regressor_{self.__repr__()}'
detrend_model:
model to get trend in data
model:
binseg segment model, ["l1", "l2", "rbf",...]. Not used if 'custom_cost' is not None.
custom_cost:
binseg custom cost function
min_size:
minimum segment length necessary to decide it is a stable trend segment
jump:
jump value can speed up computations: if jump==k, the algo will use every k-th value for change points search.
n_bkps:
number of change points to find
pen:
penalty value (>0)
epsilon:
reconstruction budget (>0)
"""
self.in_column = in_column
self.out_column = out_column
self.detrend_model = detrend_model
self.model = model
self.custom_cost = custom_cost
self.min_size = min_size
self.jump = jump
self.n_bkps = n_bkps
self.pen = pen
self.epsilon = epsilon
super().__init__(
in_column=self.in_column,
out_column=self.out_column if self.out_column is not None else f"regressor_{self.__repr__()}",
change_point_model=Binseg(
model=self.model, custom_cost=self.custom_cost, min_size=self.min_size, jump=self.jump
),
detrend_model=self.detrend_model,
n_bkps=self.n_bkps,
pen=self.pen,
epsilon=self.epsilon,
)
| [((3774, 3792), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (3790, 3792), False, 'from sklearn.linear_model import LinearRegression\n'), ((5504, 5603), 'ruptures.Binseg', 'Binseg', ([], {'model': 'self.model', 'custom_cost': 'self.custom_cost', 'min_size': 'self.min_size', 'jump': 'self.jump'}), '(model=self.model, custom_cost=self.custom_cost, min_size=self.\n min_size, jump=self.jump)\n', (5510, 5603), False, 'from ruptures import Binseg\n')] |
schwehr/argopy | argopy/tests/test_fetchers_facade_index.py | 1b35d5cfb87b2f9ccd2ca45b9987a614edd30700 | import xarray as xr
import pytest
import warnings
import argopy
from argopy import IndexFetcher as ArgoIndexFetcher
from argopy.errors import InvalidFetcherAccessPoint, InvalidFetcher, ErddapServerError, DataNotFound
from . import (
AVAILABLE_INDEX_SOURCES,
requires_fetcher_index,
requires_connected_erddap_index,
requires_localftp_index,
requires_connection,
safe_to_server_errors
)
class Test_Facade:
src = list(AVAILABLE_INDEX_SOURCES.keys())[0]
def test_invalid_fetcher(self):
with pytest.raises(InvalidFetcher):
ArgoIndexFetcher(src="invalid_fetcher").to_xarray()
@requires_fetcher_index
def test_invalid_accesspoint(self):
# Use the first valid data source
with pytest.raises(InvalidFetcherAccessPoint):
ArgoIndexFetcher(
src=self.src
).invalid_accesspoint.to_xarray() # Can't get data if access point not defined first
with pytest.raises(InvalidFetcherAccessPoint):
ArgoIndexFetcher(
src=self.src
).to_xarray() # Can't get data if access point not defined first
@requires_fetcher_index
def test_invalid_dataset(self):
with pytest.raises(ValueError):
ArgoIndexFetcher(src=self.src, ds='dummy_ds')
@requires_connection
@requires_fetcher_index
class Test_AllBackends:
""" Test main API facade for all available index fetching backends """
local_ftp = argopy.tutorial.open_dataset("localftp")[0]
# todo Determine the list of output format to test
# what else beyond .to_xarray() ?
fetcher_opts = {}
# Define API entry point options to tests:
# These should be available online and with the argopy-data dummy gdac ftp
args = {}
args["float"] = [[2901623], [6901929, 2901623]]
args["region"] = [
[-60, -40, 40.0, 60.0],
[-60, -40, 40.0, 60.0, "2007-08-01", "2007-09-01"],
]
args["profile"] = [[2901623, 2], [6901929, [5, 45]]]
def __test_float(self, bk, **ftc_opts):
""" Test float index fetching for a given backend """
for arg in self.args["float"]:
options = {**self.fetcher_opts, **ftc_opts}
f = ArgoIndexFetcher(src=bk, **options).float(arg)
assert isinstance(f.to_xarray(), xr.Dataset)
def __test_profile(self, bk, **ftc_opts):
""" Test profile index fetching for a given backend """
for arg in self.args["profile"]:
options = {**self.fetcher_opts, **ftc_opts}
f = ArgoIndexFetcher(src=bk, **options).profile(*arg)
assert isinstance(f.to_xarray(), xr.Dataset)
def __test_region(self, bk, **ftc_opts):
""" Test float index fetching for a given backend """
for arg in self.args["region"]:
options = {**self.fetcher_opts, **ftc_opts}
f = ArgoIndexFetcher(src=bk, **options).region(arg)
assert isinstance(f.to_xarray(), xr.Dataset)
@pytest.mark.skip(reason="Waiting for https://github.com/euroargodev/argopy/issues/16")
@requires_connected_erddap_index
@safe_to_server_errors
def test_float_erddap(self):
self.__test_float("erddap")
@requires_localftp_index
def test_float_localftp(self):
with argopy.set_options(local_ftp=self.local_ftp):
self.__test_float("localftp", index_file="ar_index_global_prof.txt")
@requires_localftp_index
def test_profile_localftp(self):
with argopy.set_options(local_ftp=self.local_ftp):
self.__test_profile("localftp", index_file="ar_index_global_prof.txt")
@pytest.mark.skip(reason="Waiting for https://github.com/euroargodev/argopy/issues/16")
@requires_connected_erddap_index
def test_region_erddap(self):
self.__test_region("erddap")
@requires_localftp_index
def test_region_localftp(self):
with argopy.set_options(local_ftp=self.local_ftp):
self.__test_region("localftp", index_file="ar_index_global_prof.txt")
| [((2985, 3076), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Waiting for https://github.com/euroargodev/argopy/issues/16"""'}), "(reason=\n 'Waiting for https://github.com/euroargodev/argopy/issues/16')\n", (3001, 3076), False, 'import pytest\n'), ((3625, 3716), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Waiting for https://github.com/euroargodev/argopy/issues/16"""'}), "(reason=\n 'Waiting for https://github.com/euroargodev/argopy/issues/16')\n", (3641, 3716), False, 'import pytest\n'), ((1469, 1509), 'argopy.tutorial.open_dataset', 'argopy.tutorial.open_dataset', (['"""localftp"""'], {}), "('localftp')\n", (1497, 1509), False, 'import argopy\n'), ((532, 561), 'pytest.raises', 'pytest.raises', (['InvalidFetcher'], {}), '(InvalidFetcher)\n', (545, 561), False, 'import pytest\n'), ((752, 792), 'pytest.raises', 'pytest.raises', (['InvalidFetcherAccessPoint'], {}), '(InvalidFetcherAccessPoint)\n', (765, 792), False, 'import pytest\n'), ((964, 1004), 'pytest.raises', 'pytest.raises', (['InvalidFetcherAccessPoint'], {}), '(InvalidFetcherAccessPoint)\n', (977, 1004), False, 'import pytest\n'), ((1221, 1246), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1234, 1246), False, 'import pytest\n'), ((1260, 1305), 'argopy.IndexFetcher', 'ArgoIndexFetcher', ([], {'src': 'self.src', 'ds': '"""dummy_ds"""'}), "(src=self.src, ds='dummy_ds')\n", (1276, 1305), True, 'from argopy import IndexFetcher as ArgoIndexFetcher\n'), ((3283, 3327), 'argopy.set_options', 'argopy.set_options', ([], {'local_ftp': 'self.local_ftp'}), '(local_ftp=self.local_ftp)\n', (3301, 3327), False, 'import argopy\n'), ((3490, 3534), 'argopy.set_options', 'argopy.set_options', ([], {'local_ftp': 'self.local_ftp'}), '(local_ftp=self.local_ftp)\n', (3508, 3534), False, 'import argopy\n'), ((3899, 3943), 'argopy.set_options', 'argopy.set_options', ([], {'local_ftp': 'self.local_ftp'}), '(local_ftp=self.local_ftp)\n', (3917, 3943), False, 'import argopy\n'), ((575, 614), 'argopy.IndexFetcher', 'ArgoIndexFetcher', ([], {'src': '"""invalid_fetcher"""'}), "(src='invalid_fetcher')\n", (591, 614), True, 'from argopy import IndexFetcher as ArgoIndexFetcher\n'), ((1018, 1048), 'argopy.IndexFetcher', 'ArgoIndexFetcher', ([], {'src': 'self.src'}), '(src=self.src)\n', (1034, 1048), True, 'from argopy import IndexFetcher as ArgoIndexFetcher\n'), ((2219, 2254), 'argopy.IndexFetcher', 'ArgoIndexFetcher', ([], {'src': 'bk'}), '(src=bk, **options)\n', (2235, 2254), True, 'from argopy import IndexFetcher as ArgoIndexFetcher\n'), ((2547, 2582), 'argopy.IndexFetcher', 'ArgoIndexFetcher', ([], {'src': 'bk'}), '(src=bk, **options)\n', (2563, 2582), True, 'from argopy import IndexFetcher as ArgoIndexFetcher\n'), ((2874, 2909), 'argopy.IndexFetcher', 'ArgoIndexFetcher', ([], {'src': 'bk'}), '(src=bk, **options)\n', (2890, 2909), True, 'from argopy import IndexFetcher as ArgoIndexFetcher\n'), ((806, 836), 'argopy.IndexFetcher', 'ArgoIndexFetcher', ([], {'src': 'self.src'}), '(src=self.src)\n', (822, 836), True, 'from argopy import IndexFetcher as ArgoIndexFetcher\n')] |
jatty/hass-acthor | custom_components/acthor/config_flow.py | 9d5aaed3f01e9288fef031b47b0808e6e80c22d3 | import voluptuous as vol
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_HOST, CONF_NAME
from .acthor import test_connection
from .const import DEVICE_NAME, DOMAIN
class ACThorConfigFlow(ConfigFlow, domain=DOMAIN):
async def async_step_user(self, user_input: dict = None) -> dict:
errors = {}
if user_input is not None:
ok = await test_connection(user_input[CONF_HOST], timeout=5)
if ok:
return self.async_create_entry(
title=user_input[CONF_NAME],
data=user_input,
)
else:
errors["base"] = "connection_failed"
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({
vol.Required(CONF_NAME, default=DEVICE_NAME): str,
vol.Required(CONF_HOST): str,
}),
errors=errors,
)
| [((818, 862), 'voluptuous.Required', 'vol.Required', (['CONF_NAME'], {'default': 'DEVICE_NAME'}), '(CONF_NAME, default=DEVICE_NAME)\n', (830, 862), True, 'import voluptuous as vol\n'), ((885, 908), 'voluptuous.Required', 'vol.Required', (['CONF_HOST'], {}), '(CONF_HOST)\n', (897, 908), True, 'import voluptuous as vol\n')] |
glauberm/doaj | doajtest/fixtures/common.py | dc24dfcbf4a9f02ce5c9b09b611a5766ea5742f7 | NOTES = {
'notes': [
{'date': '2014-05-22T00:00:00Z', 'note': 'Second Note'},
{'date': '2014-05-21T14:02:45Z', 'note': 'First Note'}
]
}
SUBJECT = {
"subject": ['HB1-3840', 'H']
}
OWNER = {
"owner": "Owner"
}
EDITORIAL = {
"editor_group": "editorgroup",
"editor": "associate"
}
SEAL = {
"doaj_seal": True,
}
| [] |
HEInventions/docnado | docnado/docnado.py | 8817d8a9856b4babd9a2f81678a9ef0b8a75d4bc | """ docnado.py
A rapid documentation tool that will blow you away.
"""
import os
import re
import sys
import csv
import glob
import time
import signal
import shutil
import urllib
import base64
import hashlib
import argparse
import tempfile
import datetime
import threading
import traceback
import subprocess
import platform
import requests
from bs4 import BeautifulSoup
from multiprocessing import Pool
from urllib.parse import urlparse
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
from xml.etree import ElementTree
from flask import Flask, url_for, abort, send_from_directory, \
render_template, Markup, make_response, render_template_string
import markdown
import markdown.util
from markdown.extensions import Extension
from markdown.postprocessors import Postprocessor
from markdown.inlinepatterns import LinkPattern, IMAGE_LINK_RE, dequote, handleAttributes
from markdown.blockprocessors import HashHeaderProcessor
from http.client import responses
if __package__:
from .navtree import NavItem, parse_nav_string
else:
from navtree import NavItem, parse_nav_string
class MultiPurposeLinkPattern(LinkPattern):
""" Embed image, video, youtube, csv or file download links
by extending the typical image tag pattern.
#  or 
If the link has "DOWNLOAD" in the alt text, treat it as a download.
    Otherwise, see if it's a YouTube video; otherwise, see if it's a
    csv that can be turned into a table. If the link cannot be parsed as
    any of those, it will be treated as an image.
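    Illustrative examples of the supported link forms (paths and titles are placeholders):
        
        
        
        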
"""
SUPPORTED_VIDEO = ('ogv', 'ogg', 'avi', 'mp4', 'webm', )
SUPPORTED_TABLES = ('csv', )
SUPPORTED_PDF = ('pdf', )
def get_src(self, m):
""" Get the source and parts from the matched groups: src, parts """
src_parts = m.group(9).split()
if src_parts:
src = src_parts[0]
if src[0] == "<" and src[-1] == ">":
src = src[1:-1]
return self.sanitize_url(self.unescape(src)), src_parts
else:
return '', src_parts
@staticmethod
def youtube_url_validation(url):
""" Given a YouTube URL, return the ID component.
https://stackoverflow.com/questions/4705996
"""
youtube_regex = (r'(https?://)?(www\.)?'
r'(youtube|youtu|youtube-nocookie)\.(com|be)/'
r'(watch\?v=|embed/|v/|.+\?v=)?([^&=%\?]{11})')
youtube_regex_match = re.match(youtube_regex, url)
return youtube_regex_match.group(6) if youtube_regex_match else None
@staticmethod
def as_youtube(m, video_id):
""" Return a DOM element that embeds a YouTube video. """
el = ElementTree.Element('iframe')
el.set('class', 'video')
el.set('src', f'https://www.youtube.com/embed/{video_id}?rel=0')
el.set('frameborder', '0')
el.set('allow', 'autoplay; encrypted-media')
el.set('allowfullscreen', '1')
return el
def as_pdf(self, m):
""" Return a DOM element that embeds a PDF document using an embed. """
src, parts = self.get_src(m)
wrapper = ElementTree.Element('aside')
wrapper.set('class', 'pdf-embed-wrapper')
el = ElementTree.SubElement(wrapper, 'embed')
el.set('class', 'pdf-embed')
el.set('src', src)
el.set('width', '100%')
el.set('type', 'application/pdf')
el.set('height', '100%') # width * 1.4142 (aspect ratio of a4)
el.set('pluginspage', 'http://www.adobe.com/products/acrobat/readstep2.html')
if len(parts) > 1:
el.set('alt', dequote(self.unescape(" ".join(parts[1:]))))
return wrapper
def as_video(self, m):
""" Return a video element """
src, parts = self.get_src(m)
el = ElementTree.Element('video')
el.set('src', src)
el.set("controls", "true")
handleAttributes(m.group(2), el)
return el
def as_image(self, m):
""" Return an image element """
el = ElementTree.Element('img')
src, parts = self.get_src(m)
el.set('src', src)
# Set the title if present.
if len(parts) > 1:
el.set('title', dequote(self.unescape(" ".join(parts[1:]))))
# Set the attributes on the element, if enabled.
# Set the 'alt' attribute with whatever is left from `handleAttributes`.
attrs = self.markdown.enable_attributes
alt_text = handleAttributes(m.group(2), el) if attrs else m.group(2)
el.set('alt', self.unescape(alt_text))
return el
def as_csv(self, m):
src, parts = self.get_src(m)
root = ElementTree.Element('table')
root.set('source', src)
root.set('class', 'csv-table table thead-light table-hover')
file_path = os.path.join(self.markdown.page_root, src)
with open(file_path, 'r', encoding='utf-8') as f:
reader = csv.reader(f)
headers = next(reader)
rows = [r for r in reader]
thead = ElementTree.SubElement(root, 'thead')
for col in headers:
ElementTree.SubElement(thead, 'th').text = col
for row in rows:
tr = ElementTree.SubElement(root, 'tr')
for col in row:
ElementTree.SubElement(tr, 'td').text = col
return root
def as_download(self, m):
""" Create card layers used to make a download button. """
src, parts = self.get_src(m)
# Returns a human readable string representation of bytes
def _human_size(byte_number, units=(' bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB')):
return str(byte_number) + units[0] if byte_number < 1024 else _human_size(byte_number >> 10, units[1:])
# Get information required for card.
split_src = os.path.split(src)
file_path = os.path.join(self.markdown.page_root, *split_src)
file_size = os.path.getsize(file_path)
file_basename = os.path.basename(file_path)
card_text = dequote(self.unescape(" ".join(parts[1:]))) if len(parts) > 1 else ''
# If its a pptx, extract the thumbnail previews.
# NOTE: This works, but is is removed until we support other
# file types, which for now is not a priority.
# preview_uri = None
# import zipfile
# if (file_path.endswith('pptx')):
# with zipfile.ZipFile(file_path) as zipper:
# with zipper.open('docProps/thumbnail.jpeg', 'r') as fp:
# mime = 'image/jpeg'
# data64 = base64.b64encode(fp.read()).decode('utf-8')
# preview_uri = u'data:%s;base64,%s' % (mime, data64)
# Card and structure.
card = ElementTree.Element("div")
card.set('class', 'card download-card')
header = ElementTree.SubElement(card, 'div')
header.set('class', 'download-card-header')
body = ElementTree.SubElement(card, 'div')
body.set('class', 'download-card-body')
# Add preview image.
# if preview_uri:
# img = ET.SubElement(header, 'img')
# img.set('src', preview_uri)
# Filename link heading.
heading = ElementTree.SubElement(body, 'a')
heading.set('class', 'download-card-title')
heading.set('href', src)
download_icon = ElementTree.SubElement(heading, 'i')
download_icon.set('class', 'fa fa-download')
download_text = ElementTree.SubElement(heading, 'span')
download_text.text = file_basename
# Title element from the "quote marks" part.
body_desc = ElementTree.SubElement(body, 'span')
body_desc.text = card_text
# File size span at the bottom.
body_size = ElementTree.SubElement(body, 'span')
body_size.set('class', 'small text-muted')
body_size.text = f'{_human_size(file_size)}'
return card
@staticmethod
def _is_inject(m):
""" Determine if the ALT text [] part of the link says 'INJECT'. """
alt = m.group(2)
return alt.lower() == 'inject'
def as_raw(self, m):
""" Load the HTML document specified in the link, parse it to HTML elements and return it.
"""
src, parts = self.get_src(m)
# Find the path to the HTML document, relative to the current markdown page.
file_path = os.path.join(self.markdown.page_root, src)
raw_html_string = read_html_for_injection(file_path)
if len(parts) < 2:
parts.append("nothing_one=1||nothing_two=2")
# Helper function.
def _argify(args):
if '=' not in args:
raise ValueError('injection template requires named arguments split by ||')
left, right = args.split('=')
return left.strip(), right.strip()
        # Split the arg string on double pipes. Join them to undo the automatic splitting done by the markdown parser.
arg_strings = " ".join(parts[1:]).strip('\"').split("||")
# Parse into dictionary of key-value pairs based on the '=' notation.
try:
named_args = dict([_argify(args) for args in arg_strings])
except Exception as e:
raise Exception(f"Error parsing ![INJECT] arguments in {self.markdown.page_file} {repr(e)}")
# Take the template renderer and give it our string, and named args.
# Capture the output as a string.
try:
injectable_templated_str = render_template_string(raw_html_string, **named_args)
except Exception as e:
raise Exception(f"Error rendering ![INJECT] template for file {file_path} {repr(e)}")
# Feed that string to the XML parser.
try:
return ElementTree.fromstring(injectable_templated_str)
except Exception as e:
raise Exception(f"Error parsing ![INJECT] template for file {file_path} {repr(e)}")
@staticmethod
def _is_download(m):
""" Determine if the ALT text [] part of the link says 'DOWNLOAD'. """
alt = m.group(2)
return alt.lower() == 'download'
def handleMatch(self, m):
""" Use the URL extension to render the link. """
src, parts = self.get_src(m)
if self._is_download(m):
return self.as_download(m)
elif self._is_inject(m):
return self.as_raw(m)
youtube = self.youtube_url_validation(src)
if youtube:
return self.as_youtube(m, youtube)
src_lower = src.lower()
if src_lower.endswith(self.SUPPORTED_TABLES):
return self.as_csv(m)
elif src_lower.endswith(self.SUPPORTED_PDF):
return self.as_pdf(m)
elif src_lower.endswith(self.SUPPORTED_VIDEO):
return self.as_video(m)
return self.as_image(m)
class OffsetHashHeaderProcessor(HashHeaderProcessor):
""" Process hash headers with an offset to control the type of heading
DOM element that is generated. """
HEADING_LEVEL_OFFSET = 1
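
    # With an offset of 1, a markdown '# Title' is emitted as <h2> and '## Subtitle'
    # as <h3>, presumably leaving <h1> for the page template itself.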
def run(self, parent, blocks):
block = blocks.pop(0)
m = self.RE.search(block)
if m:
before = block[:m.start()]
after = block[m.end():]
if before:
self.parser.parseBlocks(parent, [before])
heading_level = len(m.group('level'))
h = ElementTree.SubElement(parent, 'h%d' % (heading_level + self.HEADING_LEVEL_OFFSET))
h.text = m.group('header').strip()
if after:
blocks.insert(0, after)
class ChecklistPostprocessor(Postprocessor):
"""
Adds checklist class to list element.
Adapted from: `markdown_checklist.extension`
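    For example (illustrative), Markdown task-list items such as:
        - [ ] an unchecked item
        - [x] a checked item
    are rendered as a <ul class="checklist"> containing disabled checkboxes.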
"""
pattern = re.compile(r'<li>\[([ Xx])\]')
def run(self, html):
html = re.sub(self.pattern, self._convert_checkbox, html)
before = '<ul>\n<li><input type="checkbox"'
after = before.replace('<ul>', '<ul class="checklist">')
html = html.replace(before, after)
return html
@staticmethod
def _convert_checkbox(match):
state = match.group(1)
checked = ' checked' if state != ' ' else ''
return '<li><input type="checkbox" disabled%s>' % checked
# Remove the `video`, `iframe`, `aside`, and `table` elements as block elements.
markdown.util.BLOCK_LEVEL_ELEMENTS = re.compile(
r"^(p|div|h[1-6]|blockquote|pre|dl|ol|ul"
r"|script|noscript|form|fieldset|math"
r"|hr|hr/|style|li|dt|dd|thead|tbody"
r"|tr|th|td|section|footer|header|group|figure"
r"|figcaption|article|canvas|output"
r"|progress|nav|main)$",
re.IGNORECASE
)
class MultiExtension(Extension):
""" Markdown `Extension` that adds our new components and
overrides some that we are not using.
"""
def extendMarkdown(self, md, md_globals):
""" Configure markdown by disabling elements and replacing them with
others. """
# Add checklist processing extension based on: 'markdown_checklist.extension'.
md.postprocessors.add('checklist', ChecklistPostprocessor(md), '>raw_html')
# Remove default patterns.
del md.inlinePatterns['image_link']
# Create a new one and insert into pipeline.
multi_purpose_pattern = MultiPurposeLinkPattern(IMAGE_LINK_RE, md)
md.inlinePatterns['multi_purpose_pattern'] = multi_purpose_pattern
# Remove line headers.
del md.parser.blockprocessors['setextheader']
# Swap hash headers for one that can change the DOM h1, h2 level.
md.parser.blockprocessors['hashheader'] = OffsetHashHeaderProcessor(md.parser)
# https://python-markdown.github.io/extensions/
mdextensions = [MultiExtension(),
'markdown.extensions.tables',
'markdown.extensions.meta',
'markdown.extensions.def_list',
'markdown.extensions.headerid',
'markdown.extensions.fenced_code',
'markdown.extensions.attr_list']
def build_meta_cache(root):
""" Recursively search for Markdown files and build a cache of `Meta`
from metadata in the Markdown.
:param root: str: The path to search for files from.
"""
doc_files = glob.iglob(root + '/**/*.md', recursive=True)
def _meta(path):
with open(path, 'r', encoding='utf-8') as f:
md = markdown.Markdown(extensions=mdextensions)
md.page_root = os.path.dirname(path)
Markup(md.convert(f.read()))
return md.Meta if hasattr(md, 'Meta') else None
doc_files_meta = {os.path.relpath(path, start=root): _meta(path) for path in doc_files}
doc_files_meta = {path: value for path, value in doc_files_meta.items() if value is not None}
# If a nav filter is set, exclude relevant documents.
# This takes the comma separated string supplied to `nav_limit`
# and excludes certain documents if they are NOT in this list.
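    # For example (illustrative values), a nav_limit of "tutorials,guides" keeps only
    # documents whose nav string starts with "tutorials" or "guides" (case-insensitive).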
global CMD_ARGS
if CMD_ARGS.nav_limit:
nav_filters = CMD_ARGS.nav_limit.split(',')
nav_filters = [nav_filter.strip().lower() for nav_filter in nav_filters]
nav_filters = [nav_filter for nav_filter in nav_filters if nav_filter]
def _should_include(doc_meta):
nav_strings = [nav.lower() for nav in doc_meta.get('nav', [])]
return any([y.startswith(x) for x in nav_filters for y in nav_strings])
doc_files_meta = {path: value for path, value in doc_files_meta.items() if _should_include(value)}
return doc_files_meta
def build_nav_menu(meta_cache):
""" Given a cache of Markdown `Meta` data, compile a structure that can be
used to generate the NAV menu.
This uses the `nav: Assembly>Bench>Part` variable at the top of the Markdown file.
"""
root = NavItem('root', 0)
    # Pre-sort the nav-items alphabetically by nav-string. This will get overridden by the arrange()
    # function, but it avoids un-arranged items moving around between page refreshes due to dicts being
    # unordered.
sorted_meta_cache = sorted(
meta_cache.items(),
        key=lambda items: items[1].get('nav', [''])[0].split('>')[-1]  # Sort by the last part of the nav string for each page.
)
for path, meta in sorted_meta_cache:
nav_str = meta.get('nav', [None])[0]
nav_chunks = parse_nav_string(nav_str)
node = root
for name, weight in nav_chunks:
n = NavItem(name, weight)
node = node.add(n)
node.bind(meta=meta, link=path)
root.arrange()
return root
def build_reload_files_list(extra_dirs):
""" Given a list of directories, return a list of files to watch for modification
and subsequent server reload. """
extra_files = extra_dirs[:]
for extra_dir in extra_dirs:
for dirname, dirs, files in os.walk(extra_dir):
for filename in files:
filename = os.path.join(dirname, filename)
if os.path.isfile(filename):
extra_files.append(filename)
return extra_files
def read_html_for_injection(path):
""" Open an HTML file at the given path and return the contents
as a string. If the file does not exist, we raise an exception.
"""
# TODO: In the future, consider adding some caching here. However,
# beware of reloading / refereshing the page UX implications.
with open(path) as file:
return file.read()
def _render_markdown(file_path, **kwargs):
""" Given a `file_path` render the Markdown and return the result of `render_template`.
"""
global NAV_MENU, PROJECT_LOGO, PDF_GENERATION_ENABLED
default_template = 'document'
with open(file_path, 'r', encoding='utf-8') as f:
md = markdown.Markdown(extensions=mdextensions)
md.page_root = os.path.dirname(file_path)
md.page_file = file_path
markup = Markup(md.convert(f.read()))
# Fetch the template defined in the metadata.
template = md.Meta.get('template', None)
template = template[0] if template else default_template
if not template:
raise Exception('no template found for document')
template = f'{template}.html'
# Load any HTML to be injected from the meta-data.
injections = md.Meta.get('inject', [])
injections = [os.path.join(md.page_root, file) for file in injections]
injections = [read_html_for_injection(file) for file in injections]
# Render it out with all the prepared data.
return render_template(template,
content=markup,
nav_menu=NAV_MENU,
project_logo=PROJECT_LOGO,
pdf_enabled=PDF_GENERATION_ENABLED,
injections=injections,
**md.Meta,
**kwargs)
def configure_flask(app, root_dir):
""" Setup the flask application within this scope. """
@app.before_first_request
def build_navigation_cache():
""" Build an in-memory cache of document meta-data.
NOTE: The design choice is made to crash the application if any
of the markdown files cannot be opened and parsed. In the
future when it becomes more stable, this will probably change.
"""
# This is called each time the server restarts.
global NAV_MENU
meta_cache = build_meta_cache(root_dir)
# Build the nav menu data-structure.
NAV_MENU = build_nav_menu(meta_cache)
# Store the reference to the function that rebuilds the navigation cache.
app.build_navigation_cache = build_navigation_cache
@app.template_filter('gravatar')
def gravatar(email, size=100, rating='g', default='retro', use_ssl=False):
""" Return a gravatar link for a given email address. """
url = "https://secure.gravatar.com/avatar/" if use_ssl else "http://www.gravatar.com/avatar/"
email = email.strip().lower().encode('utf-8')
hash_email = hashlib.md5(email).hexdigest()
return f'{url}{hash_email}?s={size}&r={rating}&d={default}'
@app.template_filter()
def url_unquote(url):
""" Removes encoding around a URL. """
return urllib.parse.unquote(url)
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'favicon.ico', mimetype='image/vnd.microsoft.icon')
@app.route("/print_header")
def print_header():
""" Render the template for the header used when printing with WKPDFTOHTML. """
global PROJECT_LOGO
return render_template('print_header.html', project_logo=PROJECT_LOGO)
@app.route("/print_footer")
def print_footer():
""" Render the template for the footer used when printing with WKPDFTOHTML. """
global PROJECT_LOGO
return render_template('print_footer.html', project_logo=PROJECT_LOGO)
@app.errorhandler(404)
def page_not_found(e):
global NAV_MENU, PROJECT_LOGO
return render_template('404.html', nav_menu=NAV_MENU, project_logo=PROJECT_LOGO), 404
@app.route("/w/<path:page>")
def wiki(page):
""" Render the page. """
file_path = os.path.abspath(os.path.join(root_dir, page))
if not os.path.isfile(file_path):
abort(404)
if '.md' in [ext.lower() for ext in os.path.splitext(file_path)]:
return _render_markdown(file_path, current_page=page)
else:
return send_from_directory(os.path.dirname(file_path), os.path.basename(file_path))
@app.route("/")
@app.route("/w/")
def homepage():
return wiki('home.md')
@app.route("/pdf/<path:page>")
def wiki_pdf(page):
file_path = os.path.abspath(os.path.join(root_dir, page))
if not os.path.isfile(file_path):
abort(404)
if '.md' not in [ext.lower() for ext in os.path.splitext(file_path)]:
return send_from_directory(os.path.dirname(file_path), os.path.basename(file_path))
# Configure the different paths.
pdf_temp = f'{tempfile.mktemp()}.pdf'
input_url = url_for('wiki', page=page, _external=True)
header_url = url_for('print_header', _external=True)
footer_url = url_for('print_footer', _external=True)
args = f'{WKHTMLTOPDF_BINARY} --header-html {header_url} --footer-html {footer_url} \
--print-media-type --header-spacing 2 {input_url} {pdf_temp}'
# Invoke WkHTMLtoPDF
result = subprocess.check_output(args, shell=True)
if not result:
pass
# Write the newly generated temp pdf into a response.
with open(pdf_temp, 'rb') as f:
binary_pdf = f.read()
target_file_name = page.replace("/", "_").replace("\\", "_")
response = make_response(binary_pdf)
response.headers['Content-Type'] = 'application/pdf'
# response.headers['Content-Disposition'] = f'attachment; filename={target_file_name}.pdf'
response.headers['Content-Disposition'] = f'inline; filename={target_file_name}.pdf'
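        # 'inline' lets the browser preview the PDF; the commented-out 'attachment' variant above would force a download.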
# Delete the temp file and return the response.
os.remove(pdf_temp)
return response
def generate_static_pdf(app, root_dir, output_dir, nav_filter=None):
""" Generate a static PDF directory for the documentation in `root_dir`
into `output_dir`.
"""
global PORT_NUMBER
# Find all markdown document paths that are in the nav.
documents = build_meta_cache(root_dir)
markdown_docs_urls = ['pdf/' + file.replace('\\', '/') for file in documents.keys()]
    # Generate (URL, output file path) pairs.
pairs = [(f'http://localhost:{PORT_NUMBER}/{url}',
f'{os.path.join(output_dir, *os.path.split(url))}.pdf')
for url in markdown_docs_urls]
# Download each pair.
for source, target in pairs:
os.makedirs(os.path.dirname(target), exist_ok=True)
print(f'Source: {source} \n Target: {target}')
urllib.request.urlretrieve(source, target)
# Helper function to check whether a link contains a domain (i.e. is an absolute URL).
def is_absolute(url):
""" Returns True if the passed url string is an absolute path.
False if not
"""
links = urlparse(url)
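    # urlparse() only fills in netloc when the link carries an explicit host, e.g. 'https://example.com/page'.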
return bool(links.netloc)
def generate_static_html(app, root_dir, output_dir):
""" Generate a static HTML site for the documentation in `root_dir`
into `output_dir`.
"""
from flask_frozen import Freezer, MissingURLGeneratorWarning
import warnings
warnings.filterwarnings("ignore", category=MissingURLGeneratorWarning)
# Update the flask config.
app.config['FREEZER_RELATIVE_URLS'] = True
app.config['FREEZER_IGNORE_MIMETYPE_WARNINGS'] = True
app.config['FREEZER_DESTINATION'] = output_dir
# Create the freezer app. Make it use specific URLs.
freezer = Freezer(app, with_no_argument_rules=False, log_url_for=False)
# Register a generator that passes ALL files in the docs directory into the
# `wiki` flask route.
@freezer.register_generator
def wiki():
all_docs = [file.replace(f'{root_dir}', '/w').replace(f'{os.path.sep}', '/')
for file in glob.iglob(f'{root_dir}/**/*', recursive=True)
if os.path.isfile(file)]
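        # Each file under the docs root is mapped to its /w/... URL so Frozen-Flask will snapshot it.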
for doc in all_docs:
yield doc
# Save all the URLs using the correct extension and MIME type.
freezer.freeze()
# For each `.md` file in the output directory:
for markdown_file in glob.iglob(f'{output_dir}/**/*.md', recursive=True):
# Rewrite all relative links to other `.md` files to `.html.`
output = ''
with open(markdown_file, 'r', encoding="utf-8") as f:
html = f.read()
def _href_replace(m):
href = m.group()
if is_absolute(href[6:-1]):
return href
return href.replace('.md', '.html')
output = re.sub('href="(.*md)"', _href_replace, html)
# Rename the file from `.md` to HTML.
with open(markdown_file[:-3] + '.html', 'w', encoding="utf-8") as f:
f.write(output)
# Delete the Markdown file.
os.remove(markdown_file)
def load_project_logo(logo_file=None):
""" Attempt to load the project logo from the specified path.
If this fails, return None. If this succeeds, convert it to a data-uri.
"""
if not logo_file:
return None
if not os.path.exists(logo_file):
return None
with open(logo_file, 'rb') as fp:
mime = 'image/png'
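        # The MIME type is hard-coded to PNG, matching the default logo.png file name.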
data64 = base64.b64encode(fp.read()).decode('utf-8')
preview_uri = u'data:%s;base64,%s' % (mime, data64)
return preview_uri
def check_pdf_generation_cap():
""" Check to see if we can use PDF generation by attempting to use the binary. """
global WKHTMLTOPDF_BINARY
retcode = subprocess.call(f'{WKHTMLTOPDF_BINARY} --version',
shell=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
return retcode == 0
def copy_local_project(force=False):
""" Copy the sample docs and style into the local working directory.
        Note: When called with force=True this will overwrite anything currently in those folders.
"""
source_root = os.path.dirname(__file__)
target_root = os.getcwd()
targets = ['docs', 'style', 'logo.png']
pairs = [(os.path.join(source_root, path), os.path.join(target_root, path))
for path in targets]
for source, target in pairs:
if os.path.isdir(source):
if os.path.exists(target):
if force:
                    print(f'Deleting existing {target} and replacing it with {source}')
shutil.rmtree(target)
shutil.copytree(source, target)
else:
print(f'Warning: {target} already exists.')
else:
print(f'Copying: {source} -> {target}')
shutil.copytree(source, target)
else:
if os.path.exists(target):
if force:
                    print(f'Deleting existing {target} and replacing it with {source}')
os.remove(target)
shutil.copyfile(source, target)
else:
print(f'Warning: {target} already exists.')
else:
print(f'Copying: {source} -> {target}')
shutil.copyfile(source, target)
def find_references(document_path):
""" Search through the markdown 'document_path' and make a list of referenced files
with paths that are relative to the directory containing the `document_path`.
"""
# Open the file to search.
with open(document_path, 'r', encoding='utf-8') as f:
markdown_raw_data = f.read()
# Render as HTML.
md = markdown.Markdown(extensions=mdextensions)
document_dir = os.path.dirname(document_path)
md.page_root = document_dir
# Interpret with the BeautifulSoup HTML scraping library.
soup = BeautifulSoup(md.convert(markdown_raw_data), 'html.parser')
tags_to_search = {
'img': 'src',
'a': 'href',
'video': 'src',
'table': 'source',
'embed': 'src',
}
# For each entry in the `tags_to_search` table, extract the tag attribute value.
references = set()
for k, v in tags_to_search.items():
for tag in soup.find_all(k):
val = tag.get(v)
if val:
references.add(val)
# Normalise the referenced assets (to take into account relative paths).
references = [os.path.join(document_dir, urllib.request.url2pathname(ref)) for ref in references]
# Make unique.
return set(references)
def has_nav(markdown_text):
""" Returns True if the passed string of text contains navbar metadata.
Returns False if it does not.
"""
expression = re.compile(r'(?=\n|)nav:\s+\w+(?=\n |)')
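    # Matches a "nav: <value>" metadata declaration anywhere in the document text.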
return True if expression.search(markdown_text) else False
def find_orphans(files):
""" Searches all files and folders recursively in the given path for image and video assets
that are unused by markdown files.
"""
    # Find all references made by each markdown file.
pages = {}
for file in files:
if file.endswith('.md'):
pages[file] = find_references(file)
    # Remove the markdown documents that have navbar metadata.
md_with_nav = []
for file in files:
if file.endswith('.md'):
with open(file, encoding='utf-8') as f:
if has_nav(f.read().lower()):
md_with_nav.append(file)
files = [x for x in files if x not in md_with_nav]
# Create a flat list of all references in the markdown files
all_references = []
for i in pages.values():
all_references += [k for k in i]
# Output unused assets
return [i for i in files if i not in all_references]
class DocumentLinks:
""" A helper class to process the `<a href.../>` links from a single
markdown document that is rendered using our own renderer.
"""
def __init__(self, md_file):
""" Open a Markdown document and find all links in `<a href .../>`.
"""
# Store important information about this document.
self.md_file = md_file
self.md_dir = os.path.dirname(md_file)
# Read in Markdown and generate HTML with our parser.
with open(md_file, 'r', encoding='utf-8') as f:
markdown_raw_data = f.read()
md = markdown.Markdown(extensions=mdextensions)
md.page_root = self.md_dir
html = md.convert(markdown_raw_data)
# Interpret with the BeautifulSoup HTML scraping library.
soup = BeautifulSoup(html, 'html.parser')
tags_to_search = {
'img': 'src',
'a': 'href',
'video': 'src',
'table': 'source',
'embed': 'src',
}
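        # Collect every referenced URL or path from the tag/attribute pairs above (web links and local files alike).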
self.references = set()
for k, v in tags_to_search.items():
links = soup.find_all(k)
for link in links:
if link.get('href'):
if link.get('href').find('http:') > -1 or link.get('href').find('https:') > -1:
val = link.get(v)
if val:
self.references.add(val)
else:
val = link.get(v)
if val:
self.references.add(val)
@property
def web_links(self):
""" Generate a list of web links from our cached links.
"""
return [link for link in self.references if is_absolute(link)]
@property
def relative_links(self):
""" Generate a list of relative file system links from our cached links.
This converts from a web path to a path on disk then normalises the path to the current directory.
"""
def _norm(path):
return os.path.join(self.md_dir, urllib.request.url2pathname(path))
return [_norm(link) for link in self.references if not is_absolute(link)]
@staticmethod
def validate_url(address):
""" Returns `True` if page at address returns with status code 200 (ok) otherwise returns `False`.
"""
try:
request = requests.head(address)
return request.status_code, address
except requests.exceptions.RequestException:
return False, address
def detect_broken_links(self, process_pool):
""" Go through all the `web_links` and the `relative_links` and report
            which are broken (i.e. do not resolve to HTTP 200 OK or a file on disk).
"""
result = process_pool.map(self.validate_url, self.web_links)
for response, url in result:
if not response == 200:
                yield url + ' Status: ' + (responses[response] if isinstance(response, int) and response in responses else "Exception")
for file in self.relative_links:
if not os.path.exists(file):
yield file
def generate_metadata(path):
""" Add relevant metadata to the top of the markdown file at the passed path.
        Title is drawn from the filename, Date from the last modified timestamp, Version defaults to 1.0.0,
Nav is generated from the filepath, and Authors are generated from the git contributors (if applicable) and
are otherwise left blank.
Warning: Does not check if there is existing metadata.
"""
s = subprocess.getoutput(f"git log -p {path}")
lines = s.split(os.linesep)
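    # Collect the unique author email addresses from the "Author: Name <email>" lines of the git log output.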
    authors = set([re.search(r'<(.*)>', line).group(1) for line in lines if 'Author:' in line])
file_status = os.stat(path)
nav_path = os.path.sep.join(path.split(os.path.sep)[1:])
metadata = {
'title': ' '.join(
path
.split('.')[0]
.split(os.path.sep)[-1]
.replace('_', ' ')
.replace('-', ' ')
.title()
.split()
),
'desc': '',
'date': datetime.datetime.utcfromtimestamp(file_status.st_mtime).strftime('%Y/%m/%d'),
'version': '1.0.0',
'template': '',
'nav': nav_path.replace(os.path.sep, '>').title().split('.')[0],
'percent': '100',
'authors': ' '.join(authors),
}
result = ""
for key in metadata.keys():
result += ('{}:{}{}\n'.format(key, '\t' if len(key) > 6 else '\t\t', metadata[key]))
with open(path, 'r+', encoding='utf-8') as f:
content = f.read()
f.seek(0, 0)
f.write(result)
f.write(content)
class ReloadHandler(PatternMatchingEventHandler):
""" Rebuild the document metadata / navigation cache when markdown files are updated
in the documents directory. """
def __init__(self, app):
super(ReloadHandler, self).__init__(patterns=['*.md'], ignore_directories=False, case_sensitive=False)
self.flask_app = app
def on_any_event(self, event):
self.flask_app.build_navigation_cache()
global CMD_ARGS, NAV_MENU, PROJECT_LOGO, WKHTMLTOPDF_BINARY, PDF_GENERATION_ENABLED, PORT_NUMBER
CMD_ARGS = None
NAV_MENU = {}
PROJECT_LOGO = None
WKHTMLTOPDF_BINARY = None
PDF_GENERATION_ENABLED = False
def main():
""" Application entrypoint. """
global PORT_NUMBER
PORT_NUMBER = 5000
# Parse the command line arguments.
parser = argparse.ArgumentParser(description='docnado: Lightweight tool for rendering \
Markdown documentation with different templates.')
parser.add_argument('--html', action='store', dest='html_output_dir',
help='Generate a static site from the server and output to the \
specified directory.')
parser.add_argument('--pdf', action='store', dest='pdf_output_dir',
help='Generate static PDFs from the server and output to the \
specified directory.')
parser.add_argument('--nav-limit', action='store', dest='nav_limit',
default=None,
help='Include certain document trees only based on a comma separated \
list of nav strings. e.g. Tooling,Document')
parser.add_argument('--new', action="store_true", dest='new_project',
default=False,
help='Copy the `docs` and `styles` folder into the working directory \
and output a config file that addresses them. Does not overwrite existing files.')
parser.add_argument('--new-force', action="store_true", dest='new_project_force',
default=False,
help='Copy the `docs` and `styles` folder into the working directory \
and output a config file that addresses them. Force deletion of existing files.')
parser.add_argument('--dirs', action="store_true", dest='show_dirs',
default=False,
help='Display the different directories the software is using \
to search for documentation and styles.')
parser.add_argument('--generate-meta', action="store", dest='generate_meta',
default=False,
help='Generate metadata for markdown files in the specified directory.')
parser.add_argument('--find-orphans', action="store_true", dest='find_orphans',
default=False,
help='Identify unused media assets (orphans)')
parser.add_argument('--find-broken-links', action="store_true", dest='find_broken_links',
default=False,
help='Identify broken external links.')
parser.add_argument('--port', action="store", dest='new_port_number',
default=False,
help='Specify a port for the docnado server')
parser.add_argument('--host', action="store", dest='set_host',
default=False,
help='Set the docnado development server to listen on IP addresses.')
# Import the command line args and make them application global.
global CMD_ARGS
args = parser.parse_args()
CMD_ARGS = args
# Load config from the environment and validate it.
global PROJECT_LOGO, PDF_GENERATION_ENABLED, NAV_MENU, WKHTMLTOPDF_BINARY
TRUE = 'TRUE'
FALSE = 'FALSE'
flask_debug = os.environ.get('DN_FLASK_DEBUG', FALSE) == TRUE
watch_changes = os.environ.get('DN_RELOAD_ON_CHANGES', TRUE) == TRUE
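    # Windows installs use a pinned wkhtmltopdf executable name; other platforms resolve `wkhtmltopdf` from the PATH.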
WKHTMLTOPDF_BINARY = ('wkhtmltopdf_0.12.5.exe' if platform.system() == 'Windows' else 'wkhtmltopdf')
PDF_GENERATION_ENABLED = check_pdf_generation_cap()
dir_documents = os.environ.get('DN_DOCS_DIR', os.path.join(os.getcwd(), 'docs'))
dir_style = os.environ.get('DN_STYLE_DIR', os.path.join(os.getcwd(), 'style'))
logo_location = os.environ.get('DN_PROJECT_LOGO', os.path.join(os.getcwd(), 'logo.png'))
# If `style` folder does not exist, use the one in site-packages.
if not os.path.exists(dir_style) and not os.path.isdir(dir_style):
dir_style = os.path.join(os.path.dirname(__file__), 'style')
# Attempt to load the project logo into a base64 data uri.
PROJECT_LOGO = load_project_logo(logo_location)
# Compute the static and template directories.
dir_static = os.path.join(dir_style, 'static')
dir_templates = os.path.join(dir_style, 'templates')
# If the user is asking to create a new project.
if args.new_project:
copy_local_project()
sys.exit()
if args.new_project_force:
copy_local_project(force=True)
return 0
if args.new_port_number:
PORT_NUMBER = int(args.new_port_number)
if args.generate_meta:
doc_files = glob.iglob(args.generate_meta + '/**/*.md', recursive=True)
for i in doc_files:
generate_metadata(i)
return 0
if args.find_orphans:
# Find all the assets in the directory/subdirectories recursively and append their file path to a list.
files = glob.glob((dir_documents + '/**/*.*'), recursive=True)
files = [f for f in files if not os.path.isdir(f)]
orphans = find_orphans(files)
if orphans:
print(f'{len(orphans)} Unused assets (orphans):\n\t' + '\n\t'.join(orphans))
return -1
return 0
if args.find_broken_links:
process_pool = Pool(processes=10)
md_files = glob.glob((dir_documents + '/**/*.md'), recursive=True)
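        # Pair each markdown file with the list of its links that failed to resolve.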
md_reports = tuple((md, list(DocumentLinks(md).detect_broken_links(process_pool))) for md in md_files)
num_broken = 0
for file, report in md_reports:
if report:
num_broken += len(report)
print(f'{file}\n\t' + '\n\t'.join(report))
return -1 if num_broken else 0
if args.show_dirs:
print('The following directories are being used: ')
print('\t', f'Documents -> {dir_documents}')
print('\t', f'Logo -> {logo_location}')
print('\t', f'Style -> {dir_style}')
print('\t', f' Static -> {dir_static}')
print('\t', f' Templates -> {dir_templates}')
sys.exit()
if not os.path.exists(dir_documents) and not os.path.isdir(dir_documents):
print(f'Error: Documents directory "{dir_documents}" does not exist. \
Create one called `docs` and fill it with your documentation.', file=sys.stderr)
sys.exit(-1)
if not os.path.exists(dir_static) and not os.path.isdir(dir_static):
print(f'Error: Static directory "{dir_static}" does not exist.', file=sys.stderr)
sys.exit(-1)
if not os.path.exists(dir_templates) and not os.path.isdir(dir_templates):
print(f'Error: Templates directory "{dir_templates}" does not exist.', file=sys.stderr)
sys.exit(-1)
# Create the server.
app = Flask(__name__,
static_url_path='',
template_folder=dir_templates,
static_folder=dir_static)
# Attach routes and filters.
configure_flask(app, dir_documents)
# Output PDF files.
if args.pdf_output_dir:
if not check_pdf_generation_cap():
            print('Error: PDF generation requires WkHTMLtoPDF.', file=sys.stderr)
sys.exit(-1)
def gen_pdfs():
time.sleep(2)
generate_static_pdf(
app, dir_documents, os.path.join(os.getcwd(), args.pdf_output_dir)
)
time.sleep(5)
os.kill(os.getpid(), signal.SIGTERM)
t1 = threading.Thread(target=gen_pdfs)
t1.start()
app.run(debug=flask_debug, threaded=True, port=PORT_NUMBER)
sys.exit()
# Output a static site.
if args.html_output_dir:
PDF_GENERATION_ENABLED = False
try:
generate_static_html(app, dir_documents, os.path.join(os.getcwd(), args.html_output_dir))
index_html = """ <!DOCTYPE html>
<html>
<head>
<meta http-equiv="refresh" content="0; url=./w/">
</head>
<body>
</body>
</html>"""
with open(os.path.join(os.getcwd(), args.html_output_dir, 'index.html'), 'w') as f:
f.write(index_html)
except Exception:
traceback.print_exc(file=sys.stderr)
sys.exit(-1)
sys.exit()
# Watch for any changes in the docs or style directories.
dn_watch_files = []
observer = None
if watch_changes:
observer = Observer()
observer.schedule(ReloadHandler(app), path=dir_documents, recursive=True)
observer.start()
dn_watch_files = build_reload_files_list([__name__, dir_style])
# Run the server.
if args.set_host:
try:
            print('Attempting to set the development server to listen on public IP address: ' + args.set_host)
print('WARNING: The Docnado development environment is intended to be used as a development tool ONLY, '
'and is not recommended for use in a production environment.')
app.run(debug=flask_debug, port=PORT_NUMBER, extra_files=dn_watch_files, host=args.set_host)
except OSError as e:
print(e)
            print('Error initialising server.')
except KeyboardInterrupt:
pass
finally:
if observer:
observer.stop()
observer.join()
else:
try:
app.run(debug=flask_debug, port=PORT_NUMBER, extra_files=dn_watch_files)
except OSError as e:
print(e)
            print('Error initialising server.')
except KeyboardInterrupt:
pass
finally:
if observer:
observer.stop()
observer.join()
# If this module is run directly, boot the app.
if __name__ == "__main__":
main()
| [((12509, 12752), 're.compile', 're.compile', (['"""^(p|div|h[1-6]|blockquote|pre|dl|ol|ul|script|noscript|form|fieldset|math|hr|hr/|style|li|dt|dd|thead|tbody|tr|th|td|section|footer|header|group|figure|figcaption|article|canvas|output|progress|nav|main)$"""', 're.IGNORECASE'], {}), "(\n '^(p|div|h[1-6]|blockquote|pre|dl|ol|ul|script|noscript|form|fieldset|math|hr|hr/|style|li|dt|dd|thead|tbody|tr|th|td|section|footer|header|group|figure|figcaption|article|canvas|output|progress|nav|main)$'\n , re.IGNORECASE)\n", (12519, 12752), False, 'import re\n'), ((11883, 11914), 're.compile', 're.compile', (['"""<li>\\\\[([ Xx])\\\\]"""'], {}), "('<li>\\\\[([ Xx])\\\\]')\n", (11893, 11914), False, 'import re\n'), ((14377, 14422), 'glob.iglob', 'glob.iglob', (["(root + '/**/*.md')"], {'recursive': '(True)'}), "(root + '/**/*.md', recursive=True)\n", (14387, 14422), False, 'import glob\n'), ((15939, 15957), 'navtree.NavItem', 'NavItem', (['"""root"""', '(0)'], {}), "('root', 0)\n", (15946, 15957), False, 'from navtree import NavItem, parse_nav_string\n'), ((24525, 24538), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (24533, 24538), False, 'from urllib.parse import urlparse\n'), ((24817, 24887), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'MissingURLGeneratorWarning'}), "('ignore', category=MissingURLGeneratorWarning)\n", (24840, 24887), False, 'import warnings\n'), ((25149, 25210), 'flask_frozen.Freezer', 'Freezer', (['app'], {'with_no_argument_rules': '(False)', 'log_url_for': '(False)'}), '(app, with_no_argument_rules=False, log_url_for=False)\n', (25156, 25210), False, 'from flask_frozen import Freezer, MissingURLGeneratorWarning\n'), ((25792, 25843), 'glob.iglob', 'glob.iglob', (['f"""{output_dir}/**/*.md"""'], {'recursive': '(True)'}), "(f'{output_dir}/**/*.md', recursive=True)\n", (25802, 25843), False, 'import glob\n'), ((27181, 27302), 'subprocess.call', 'subprocess.call', (['f"""{WKHTMLTOPDF_BINARY} --version"""'], {'shell': '(True)', 'stdout': 'subprocess.DEVNULL', 'stderr': 'subprocess.DEVNULL'}), "(f'{WKHTMLTOPDF_BINARY} --version', shell=True, stdout=\n subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n", (27196, 27302), False, 'import subprocess\n'), ((27617, 27642), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (27632, 27642), False, 'import os\n'), ((27661, 27672), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (27670, 27672), False, 'import os\n'), ((29194, 29236), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': 'mdextensions'}), '(extensions=mdextensions)\n', (29211, 29236), False, 'import markdown\n'), ((29256, 29286), 'os.path.dirname', 'os.path.dirname', (['document_path'], {}), '(document_path)\n', (29271, 29286), False, 'import os\n'), ((30267, 30310), 're.compile', 're.compile', (['"""(?=\\\\n|)nav:\\\\s+\\\\w+(?=\\\\n |)"""'], {}), "('(?=\\\\n|)nav:\\\\s+\\\\w+(?=\\\\n |)')\n", (30277, 30310), False, 'import re\n'), ((34832, 34874), 'subprocess.getoutput', 'subprocess.getoutput', (['f"""git log -p {path}"""'], {}), "(f'git log -p {path}')\n", (34852, 34874), False, 'import subprocess\n'), ((35021, 35034), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (35028, 35034), False, 'import os\n'), ((36723, 36897), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""docnado: Lightweight tool for rendering Markdown documentation with different templates."""'}), "(description=\n 'docnado: Lightweight tool for rendering Markdown documentation with different 
templates.'\n )\n", (36746, 36897), False, 'import argparse\n'), ((40763, 40796), 'os.path.join', 'os.path.join', (['dir_style', '"""static"""'], {}), "(dir_style, 'static')\n", (40775, 40796), False, 'import os\n'), ((40817, 40853), 'os.path.join', 'os.path.join', (['dir_style', '"""templates"""'], {}), "(dir_style, 'templates')\n", (40829, 40853), False, 'import os\n'), ((43329, 43425), 'flask.Flask', 'Flask', (['__name__'], {'static_url_path': '""""""', 'template_folder': 'dir_templates', 'static_folder': 'dir_static'}), "(__name__, static_url_path='', template_folder=dir_templates,\n static_folder=dir_static)\n", (43334, 43425), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((2565, 2593), 're.match', 're.match', (['youtube_regex', 'url'], {}), '(youtube_regex, url)\n', (2573, 2593), False, 'import re\n'), ((2802, 2831), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""iframe"""'], {}), "('iframe')\n", (2821, 2831), False, 'from xml.etree import ElementTree\n'), ((3245, 3273), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""aside"""'], {}), "('aside')\n", (3264, 3273), False, 'from xml.etree import ElementTree\n'), ((3338, 3378), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['wrapper', '"""embed"""'], {}), "(wrapper, 'embed')\n", (3360, 3378), False, 'from xml.etree import ElementTree\n'), ((3913, 3941), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""video"""'], {}), "('video')\n", (3932, 3941), False, 'from xml.etree import ElementTree\n'), ((4144, 4170), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""img"""'], {}), "('img')\n", (4163, 4170), False, 'from xml.etree import ElementTree\n'), ((4779, 4807), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""table"""'], {}), "('table')\n", (4798, 4807), False, 'from xml.etree import ElementTree\n'), ((4929, 4971), 'os.path.join', 'os.path.join', (['self.markdown.page_root', 'src'], {}), '(self.markdown.page_root, src)\n', (4941, 4971), False, 'import os\n'), ((5969, 5987), 'os.path.split', 'os.path.split', (['src'], {}), '(src)\n', (5982, 5987), False, 'import os\n'), ((6008, 6057), 'os.path.join', 'os.path.join', (['self.markdown.page_root', '*split_src'], {}), '(self.markdown.page_root, *split_src)\n', (6020, 6057), False, 'import os\n'), ((6078, 6104), 'os.path.getsize', 'os.path.getsize', (['file_path'], {}), '(file_path)\n', (6093, 6104), False, 'import os\n'), ((6129, 6156), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (6145, 6156), False, 'import os\n'), ((6894, 6920), 'xml.etree.ElementTree.Element', 'ElementTree.Element', (['"""div"""'], {}), "('div')\n", (6913, 6920), False, 'from xml.etree import ElementTree\n'), ((6986, 7021), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['card', '"""div"""'], {}), "(card, 'div')\n", (7008, 7021), False, 'from xml.etree import ElementTree\n'), ((7089, 7124), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['card', '"""div"""'], {}), "(card, 'div')\n", (7111, 7124), False, 'from xml.etree import ElementTree\n'), ((7372, 7405), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['body', '"""a"""'], {}), "(body, 'a')\n", (7394, 7405), False, 'from xml.etree import ElementTree\n'), ((7515, 7551), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['heading', '"""i"""'], {}), "(heading, 'i')\n", (7537, 7551), False, 'from xml.etree 
import ElementTree\n'), ((7629, 7668), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['heading', '"""span"""'], {}), "(heading, 'span')\n", (7651, 7668), False, 'from xml.etree import ElementTree\n'), ((7786, 7822), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['body', '"""span"""'], {}), "(body, 'span')\n", (7808, 7822), False, 'from xml.etree import ElementTree\n'), ((7919, 7955), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['body', '"""span"""'], {}), "(body, 'span')\n", (7941, 7955), False, 'from xml.etree import ElementTree\n'), ((8543, 8585), 'os.path.join', 'os.path.join', (['self.markdown.page_root', 'src'], {}), '(self.markdown.page_root, src)\n', (8555, 8585), False, 'import os\n'), ((11955, 12005), 're.sub', 're.sub', (['self.pattern', 'self._convert_checkbox', 'html'], {}), '(self.pattern, self._convert_checkbox, html)\n', (11961, 12005), False, 'import re\n'), ((14731, 14764), 'os.path.relpath', 'os.path.relpath', (['path'], {'start': 'root'}), '(path, start=root)\n', (14746, 14764), False, 'import os\n'), ((16491, 16516), 'navtree.parse_nav_string', 'parse_nav_string', (['nav_str'], {}), '(nav_str)\n', (16507, 16516), False, 'from navtree import NavItem, parse_nav_string\n'), ((16989, 17007), 'os.walk', 'os.walk', (['extra_dir'], {}), '(extra_dir)\n', (16996, 17007), False, 'import os\n'), ((17898, 17940), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': 'mdextensions'}), '(extensions=mdextensions)\n', (17915, 17940), False, 'import markdown\n'), ((17964, 17990), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (17979, 17990), False, 'import os\n'), ((18694, 18870), 'flask.render_template', 'render_template', (['template'], {'content': 'markup', 'nav_menu': 'NAV_MENU', 'project_logo': 'PROJECT_LOGO', 'pdf_enabled': 'PDF_GENERATION_ENABLED', 'injections': 'injections'}), '(template, content=markup, nav_menu=NAV_MENU, project_logo=\n PROJECT_LOGO, pdf_enabled=PDF_GENERATION_ENABLED, injections=injections,\n **md.Meta, **kwargs)\n', (18709, 18870), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((20454, 20479), 'urllib.parse.unquote', 'urllib.parse.unquote', (['url'], {}), '(url)\n', (20474, 20479), False, 'import urllib\n'), ((20880, 20943), 'flask.render_template', 'render_template', (['"""print_header.html"""'], {'project_logo': 'PROJECT_LOGO'}), "('print_header.html', project_logo=PROJECT_LOGO)\n", (20895, 20943), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((21132, 21195), 'flask.render_template', 'render_template', (['"""print_footer.html"""'], {'project_logo': 'PROJECT_LOGO'}), "('print_footer.html', project_logo=PROJECT_LOGO)\n", (21147, 21195), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((22420, 22462), 'flask.url_for', 'url_for', (['"""wiki"""'], {'page': 'page', '_external': '(True)'}), "('wiki', page=page, _external=True)\n", (22427, 22462), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((22484, 22523), 'flask.url_for', 'url_for', (['"""print_header"""'], {'_external': '(True)'}), "('print_header', _external=True)\n", (22491, 22523), False, 'from flask import Flask, url_for, abort, send_from_directory, 
render_template, Markup, make_response, render_template_string\n'), ((22545, 22584), 'flask.url_for', 'url_for', (['"""print_footer"""'], {'_external': '(True)'}), "('print_footer', _external=True)\n", (22552, 22584), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((22805, 22846), 'subprocess.check_output', 'subprocess.check_output', (['args'], {'shell': '(True)'}), '(args, shell=True)\n', (22828, 22846), False, 'import subprocess\n'), ((23476, 23495), 'os.remove', 'os.remove', (['pdf_temp'], {}), '(pdf_temp)\n', (23485, 23495), False, 'import os\n'), ((24299, 24341), 'urllib.request.urlretrieve', 'urllib.request.urlretrieve', (['source', 'target'], {}), '(source, target)\n', (24325, 24341), False, 'import urllib\n'), ((26486, 26510), 'os.remove', 'os.remove', (['markdown_file'], {}), '(markdown_file)\n', (26495, 26510), False, 'import os\n'), ((26756, 26781), 'os.path.exists', 'os.path.exists', (['logo_file'], {}), '(logo_file)\n', (26770, 26781), False, 'import os\n'), ((27877, 27898), 'os.path.isdir', 'os.path.isdir', (['source'], {}), '(source)\n', (27890, 27898), False, 'import os\n'), ((31681, 31705), 'os.path.dirname', 'os.path.dirname', (['md_file'], {}), '(md_file)\n', (31696, 31705), False, 'import os\n'), ((31879, 31921), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': 'mdextensions'}), '(extensions=mdextensions)\n', (31896, 31921), False, 'import markdown\n'), ((32084, 32118), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (32097, 32118), False, 'from bs4 import BeautifulSoup\n'), ((39822, 39861), 'os.environ.get', 'os.environ.get', (['"""DN_FLASK_DEBUG"""', 'FALSE'], {}), "('DN_FLASK_DEBUG', FALSE)\n", (39836, 39861), False, 'import os\n'), ((39890, 39934), 'os.environ.get', 'os.environ.get', (['"""DN_RELOAD_ON_CHANGES"""', 'TRUE'], {}), "('DN_RELOAD_ON_CHANGES', TRUE)\n", (39904, 39934), False, 'import os\n'), ((40970, 40980), 'sys.exit', 'sys.exit', ([], {}), '()\n', (40978, 40980), False, 'import sys\n'), ((41195, 41254), 'glob.iglob', 'glob.iglob', (["(args.generate_meta + '/**/*.md')"], {'recursive': '(True)'}), "(args.generate_meta + '/**/*.md', recursive=True)\n", (41205, 41254), False, 'import glob\n'), ((41488, 41540), 'glob.glob', 'glob.glob', (["(dir_documents + '/**/*.*')"], {'recursive': '(True)'}), "(dir_documents + '/**/*.*', recursive=True)\n", (41497, 41540), False, 'import glob\n'), ((41844, 41862), 'multiprocessing.Pool', 'Pool', ([], {'processes': '(10)'}), '(processes=10)\n', (41848, 41862), False, 'from multiprocessing import Pool\n'), ((41882, 41935), 'glob.glob', 'glob.glob', (["(dir_documents + '/**/*.md')"], {'recursive': '(True)'}), "(dir_documents + '/**/*.md', recursive=True)\n", (41891, 41935), False, 'import glob\n'), ((42630, 42640), 'sys.exit', 'sys.exit', ([], {}), '()\n', (42638, 42640), False, 'import sys\n'), ((42898, 42910), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (42906, 42910), False, 'import sys\n'), ((43083, 43095), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (43091, 43095), False, 'import sys\n'), ((43280, 43292), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (43288, 43292), False, 'import sys\n'), ((44018, 44051), 'threading.Thread', 'threading.Thread', ([], {'target': 'gen_pdfs'}), '(target=gen_pdfs)\n', (44034, 44051), False, 'import threading\n'), ((44147, 44157), 'sys.exit', 'sys.exit', ([], {}), '()\n', (44155, 44157), False, 'import sys\n'), ((44881, 
44891), 'sys.exit', 'sys.exit', ([], {}), '()\n', (44889, 44891), False, 'import sys\n'), ((45040, 45050), 'watchdog.observers.Observer', 'Observer', ([], {}), '()\n', (45048, 45050), False, 'from watchdog.observers import Observer\n'), ((5051, 5064), 'csv.reader', 'csv.reader', (['f'], {}), '(f)\n', (5061, 5064), False, 'import csv\n'), ((5159, 5196), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['root', '"""thead"""'], {}), "(root, 'thead')\n", (5181, 5196), False, 'from xml.etree import ElementTree\n'), ((9640, 9693), 'flask.render_template_string', 'render_template_string', (['raw_html_string'], {}), '(raw_html_string, **named_args)\n', (9662, 9693), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((9902, 9950), 'xml.etree.ElementTree.fromstring', 'ElementTree.fromstring', (['injectable_templated_str'], {}), '(injectable_templated_str)\n', (9924, 9950), False, 'from xml.etree import ElementTree\n'), ((11522, 11610), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['parent', "('h%d' % (heading_level + self.HEADING_LEVEL_OFFSET))"], {}), "(parent, 'h%d' % (heading_level + self.\n HEADING_LEVEL_OFFSET))\n", (11544, 11610), False, 'from xml.etree import ElementTree\n'), ((14515, 14557), 'markdown.Markdown', 'markdown.Markdown', ([], {'extensions': 'mdextensions'}), '(extensions=mdextensions)\n', (14532, 14557), False, 'import markdown\n'), ((14585, 14606), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (14600, 14606), False, 'import os\n'), ((16593, 16614), 'navtree.NavItem', 'NavItem', (['name', 'weight'], {}), '(name, weight)\n', (16600, 16614), False, 'from navtree import NavItem, parse_nav_string\n'), ((18493, 18525), 'os.path.join', 'os.path.join', (['md.page_root', 'file'], {}), '(md.page_root, file)\n', (18505, 18525), False, 'import os\n'), ((20566, 20603), 'os.path.join', 'os.path.join', (['app.root_path', '"""static"""'], {}), "(app.root_path, 'static')\n", (20578, 20603), False, 'import os\n'), ((21304, 21377), 'flask.render_template', 'render_template', (['"""404.html"""'], {'nav_menu': 'NAV_MENU', 'project_logo': 'PROJECT_LOGO'}), "('404.html', nav_menu=NAV_MENU, project_logo=PROJECT_LOGO)\n", (21319, 21377), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((21506, 21534), 'os.path.join', 'os.path.join', (['root_dir', 'page'], {}), '(root_dir, page)\n', (21518, 21534), False, 'import os\n'), ((21551, 21576), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (21565, 21576), False, 'import os\n'), ((21590, 21600), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (21595, 21600), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((22042, 22070), 'os.path.join', 'os.path.join', (['root_dir', 'page'], {}), '(root_dir, page)\n', (22054, 22070), False, 'import os\n'), ((22087, 22112), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (22101, 22112), False, 'import os\n'), ((22126, 22136), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (22131, 22136), False, 'from flask import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((23120, 23145), 'flask.make_response', 'make_response', (['binary_pdf'], {}), '(binary_pdf)\n', (23133, 23145), False, 'from flask 
import Flask, url_for, abort, send_from_directory, render_template, Markup, make_response, render_template_string\n'), ((24196, 24219), 'os.path.dirname', 'os.path.dirname', (['target'], {}), '(target)\n', (24211, 24219), False, 'import os\n'), ((26244, 26288), 're.sub', 're.sub', (['"""href="(.*md)\\""""', '_href_replace', 'html'], {}), '(\'href="(.*md)"\', _href_replace, html)\n', (26250, 26288), False, 'import re\n'), ((27732, 27763), 'os.path.join', 'os.path.join', (['source_root', 'path'], {}), '(source_root, path)\n', (27744, 27763), False, 'import os\n'), ((27765, 27796), 'os.path.join', 'os.path.join', (['target_root', 'path'], {}), '(target_root, path)\n', (27777, 27796), False, 'import os\n'), ((27915, 27937), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (27929, 27937), False, 'import os\n'), ((28384, 28406), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (28398, 28406), False, 'import os\n'), ((29994, 30026), 'urllib.request.url2pathname', 'urllib.request.url2pathname', (['ref'], {}), '(ref)\n', (30021, 30026), False, 'import urllib\n'), ((33666, 33688), 'requests.head', 'requests.head', (['address'], {}), '(address)\n', (33679, 33688), False, 'import requests\n'), ((39998, 40015), 'platform.system', 'platform.system', ([], {}), '()\n', (40013, 40015), False, 'import platform\n'), ((40169, 40180), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (40178, 40180), False, 'import os\n'), ((40251, 40262), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (40260, 40262), False, 'import os\n'), ((40341, 40352), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (40350, 40352), False, 'import os\n'), ((40449, 40474), 'os.path.exists', 'os.path.exists', (['dir_style'], {}), '(dir_style)\n', (40463, 40474), False, 'import os\n'), ((40483, 40507), 'os.path.isdir', 'os.path.isdir', (['dir_style'], {}), '(dir_style)\n', (40496, 40507), False, 'import os\n'), ((40542, 40567), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (40557, 40567), False, 'import os\n'), ((42653, 42682), 'os.path.exists', 'os.path.exists', (['dir_documents'], {}), '(dir_documents)\n', (42667, 42682), False, 'import os\n'), ((42691, 42719), 'os.path.isdir', 'os.path.isdir', (['dir_documents'], {}), '(dir_documents)\n', (42704, 42719), False, 'import os\n'), ((42923, 42949), 'os.path.exists', 'os.path.exists', (['dir_static'], {}), '(dir_static)\n', (42937, 42949), False, 'import os\n'), ((42958, 42983), 'os.path.isdir', 'os.path.isdir', (['dir_static'], {}), '(dir_static)\n', (42971, 42983), False, 'import os\n'), ((43108, 43137), 'os.path.exists', 'os.path.exists', (['dir_templates'], {}), '(dir_templates)\n', (43122, 43137), False, 'import os\n'), ((43146, 43174), 'os.path.isdir', 'os.path.isdir', (['dir_templates'], {}), '(dir_templates)\n', (43159, 43174), False, 'import os\n'), ((43735, 43747), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (43743, 43747), False, 'import sys\n'), ((43785, 43798), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (43795, 43798), False, 'import time\n'), ((43941, 43954), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (43951, 43954), False, 'import time\n'), ((5342, 5376), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['root', '"""tr"""'], {}), "(root, 'tr')\n", (5364, 5376), False, 'from xml.etree import ElementTree\n'), ((17071, 17102), 'os.path.join', 'os.path.join', (['dirname', 'filename'], {}), '(dirname, filename)\n', (17083, 17102), False, 'import os\n'), ((17122, 17146), 'os.path.isfile', 
'os.path.isfile', (['filename'], {}), '(filename)\n', (17136, 17146), False, 'import os\n'), ((20238, 20256), 'hashlib.md5', 'hashlib.md5', (['email'], {}), '(email)\n', (20249, 20256), False, 'import hashlib\n'), ((21795, 21821), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (21810, 21821), False, 'import os\n'), ((21823, 21850), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (21839, 21850), False, 'import os\n'), ((22255, 22281), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (22270, 22281), False, 'import os\n'), ((22283, 22310), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (22299, 22310), False, 'import os\n'), ((22376, 22393), 'tempfile.mktemp', 'tempfile.mktemp', ([], {}), '()\n', (22391, 22393), False, 'import tempfile\n'), ((25483, 25529), 'glob.iglob', 'glob.iglob', (['f"""{root_dir}/**/*"""'], {'recursive': '(True)'}), "(f'{root_dir}/**/*', recursive=True)\n", (25493, 25529), False, 'import glob\n'), ((25553, 25573), 'os.path.isfile', 'os.path.isfile', (['file'], {}), '(file)\n', (25567, 25573), False, 'import os\n'), ((28323, 28354), 'shutil.copytree', 'shutil.copytree', (['source', 'target'], {}), '(source, target)\n', (28338, 28354), False, 'import shutil\n'), ((28788, 28819), 'shutil.copyfile', 'shutil.copyfile', (['source', 'target'], {}), '(source, target)\n', (28803, 28819), False, 'import shutil\n'), ((33344, 33377), 'urllib.request.url2pathname', 'urllib.request.url2pathname', (['path'], {}), '(path)\n', (33371, 33377), False, 'import urllib\n'), ((34348, 34368), 'os.path.exists', 'os.path.exists', (['file'], {}), '(file)\n', (34362, 34368), False, 'import os\n'), ((35372, 35428), 'datetime.datetime.utcfromtimestamp', 'datetime.datetime.utcfromtimestamp', (['file_status.st_mtime'], {}), '(file_status.st_mtime)\n', (35406, 35428), False, 'import datetime\n'), ((43975, 43986), 'os.getpid', 'os.getpid', ([], {}), '()\n', (43984, 43986), False, 'import os\n'), ((44811, 44847), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stderr'}), '(file=sys.stderr)\n', (44830, 44847), False, 'import traceback\n'), ((44860, 44872), 'sys.exit', 'sys.exit', (['(-1)'], {}), '(-1)\n', (44868, 44872), False, 'import sys\n'), ((5245, 5280), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['thead', '"""th"""'], {}), "(thead, 'th')\n", (5267, 5280), False, 'from xml.etree import ElementTree\n'), ((21646, 21673), 'os.path.splitext', 'os.path.splitext', (['file_path'], {}), '(file_path)\n', (21662, 21673), False, 'import os\n'), ((22186, 22213), 'os.path.splitext', 'os.path.splitext', (['file_path'], {}), '(file_path)\n', (22202, 22213), False, 'import os\n'), ((28073, 28094), 'shutil.rmtree', 'shutil.rmtree', (['target'], {}), '(target)\n', (28086, 28094), False, 'import shutil\n'), ((28115, 28146), 'shutil.copytree', 'shutil.copytree', (['source', 'target'], {}), '(source, target)\n', (28130, 28146), False, 'import shutil\n'), ((28542, 28559), 'os.remove', 'os.remove', (['target'], {}), '(target)\n', (28551, 28559), False, 'import os\n'), ((28580, 28611), 'shutil.copyfile', 'shutil.copyfile', (['source', 'target'], {}), '(source, target)\n', (28595, 28611), False, 'import shutil\n'), ((34926, 34951), 're.search', 're.search', (['"""<(.*)>"""', 'line'], {}), "('<(.*)>', line)\n", (34935, 34951), False, 'import re\n'), ((41584, 41600), 'os.path.isdir', 'os.path.isdir', (['f'], {}), '(f)\n', (41597, 41600), False, 'import os\n'), ((43881, 43892), 
'os.getcwd', 'os.getcwd', ([], {}), '()\n', (43890, 43892), False, 'import os\n'), ((44334, 44345), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (44343, 44345), False, 'import os\n'), ((5429, 5461), 'xml.etree.ElementTree.SubElement', 'ElementTree.SubElement', (['tr', '"""td"""'], {}), "(tr, 'td')\n", (5451, 5461), False, 'from xml.etree import ElementTree\n'), ((44676, 44687), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (44685, 44687), False, 'import os\n'), ((24045, 24063), 'os.path.split', 'os.path.split', (['url'], {}), '(url)\n', (24058, 24063), False, 'import os\n')] |
Luis-Felipe-N/curso-em-video-python | modulo-3/aulas/modulos e pacotes/uteis.py | 09ff58ae31ae0360ebec74de609011d527956065 | def fatorial(n):
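    # Compute n! by repeated multiplication (assumes n is a non-negative integer).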
f = 1
while n != 0:
f *= n
n -= 1
return f
def dobro(n):
n *= 2
return n
def triplo(n):
n *= 3
return n
| [] |
MaximBrewer/sebe | server/src/oscarbluelight/tests/offer/test_benefit_percentage.py | 4b94b2c782d018b6fa3a130fa30173386cc9bfdd | from decimal import Decimal as D
from django.core import exceptions
from django.test import TestCase
from oscar.test import factories
from oscar.test.basket import add_product, add_products
from django_redis import get_redis_connection
from oscarbluelight.offer.models import (
Range,
Benefit,
BluelightCountCondition,
BluelightValueCondition,
BluelightPercentageDiscountBenefit,
)
from unittest import mock
class TestAPercentageDiscountAppliedWithCountCondition(TestCase):
def setUp(self):
# Flush the cache
conn = get_redis_connection("redis")
conn.flushall()
range = Range.objects.create(name="All products", includes_all_products=True)
self.condition = BluelightCountCondition(
range=range,
proxy_class="oscarbluelight.offer.conditions.BluelightCountCondition",
value=2,
)
self.benefit = BluelightPercentageDiscountBenefit(
range=range,
proxy_class="oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit",
value=20,
)
self.offer = mock.Mock()
self.basket = factories.create_basket(empty=True)
def test_applies_correctly_to_empty_basket(self):
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(D("0.00"), result.discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_with_no_discountable_products(self):
product = factories.create_product(is_discountable=False)
add_product(self.basket, D("12.00"), 2, product=product)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(D("0.00"), result.discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(2, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
add_product(self.basket, D("12.00"), 2)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(2 * D("12.00") * D("0.2"), result.discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
add_product(self.basket, D("12.00"), 3)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(3 * D("12.00") * D("0.2"), result.discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_obeys_max_discount_setting(self):
self.benefit.max_discount = D("5.00")
self.benefit.save()
add_product(self.basket, D("12.00"), 3)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(D("5.00"), result.discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_records_reason_for_discount_no_voucher(self):
self.offer.name = "My Offer Name"
self.offer.description = "My Offer Description"
self.offer.get_voucher = mock.Mock()
self.offer.get_voucher.return_value = None
add_product(self.basket, D("5.00"))
# Apply benefit twice to simulate how Applicator will actually do it
self.benefit.apply(self.basket, self.condition, self.offer)
self.benefit.apply(self.basket, self.condition, self.offer)
line = self.basket.all_lines()[0]
descrs = line.get_discount_descriptions()
self.assertEqual(len(descrs), 1)
self.assertEqual(descrs[0].amount, D("1.00"))
self.assertEqual(descrs[0].offer_name, "My Offer Name")
self.assertEqual(descrs[0].offer_description, "My Offer Description")
self.assertIsNone(descrs[0].voucher_name)
self.assertIsNone(descrs[0].voucher_code)
def test_records_reason_for_discount_with_voucher(self):
voucher = mock.Mock()
voucher.name = "My Voucher"
voucher.code = "SWEETDEAL"
self.offer.name = "Offer for Voucher"
self.offer.description = ""
self.offer.get_voucher = mock.Mock()
self.offer.get_voucher.return_value = voucher
add_product(self.basket, D("5.00"))
# Apply benefit twice to simulate how Applicator will actually do it
self.benefit.apply(self.basket, self.condition, self.offer)
self.benefit.apply(self.basket, self.condition, self.offer)
line = self.basket.all_lines()[0]
descrs = line.get_discount_descriptions()
self.assertEqual(len(descrs), 1)
self.assertEqual(descrs[0].amount, D("1.00"))
self.assertEqual(descrs[0].offer_name, "Offer for Voucher")
self.assertEqual(descrs[0].offer_description, "")
self.assertEqual(descrs[0].voucher_name, "My Voucher")
self.assertEqual(descrs[0].voucher_code, "SWEETDEAL")
class TestAPercentageDiscountWithMaxItemsSetAppliedWithCountCondition(TestCase):
def setUp(self):
# Flush the cache
conn = get_redis_connection("redis")
conn.flushall()
range = Range.objects.create(name="All products", includes_all_products=True)
self.condition = BluelightCountCondition(
range=range,
proxy_class="oscarbluelight.offer.conditions.BluelightCountCondition",
value=2,
)
self.benefit = BluelightPercentageDiscountBenefit(
range=range,
proxy_class="oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit",
value=20,
max_affected_items=1,
)
self.offer = mock.Mock()
self.basket = factories.create_basket(empty=True)
def test_applies_correctly_to_empty_basket(self):
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(D("0.00"), result.discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
add_product(self.basket, D("12.00"), 2)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(1 * D("12.00") * D("0.2"), result.discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
add_products(self.basket, [(D("12.00"), 2), (D("20.00"), 2)])
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(1 * D("12.00") * D("0.2"), result.discount)
# Should only consume the condition products
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(2, self.basket.num_items_without_discount)
class TestAPercentageDiscountWithMultipleApplicationsWithCountCondition(TestCase):
def setUp(self):
# Flush the cache
conn = get_redis_connection("redis")
conn.flushall()
self.range_mattresses = Range.objects.create(name="Mattresses")
self.range_slippers = Range.objects.create(name="Slippers")
self.mattress = factories.create_product(title="Mattress", price=D("2999.00"))
self.slipper1 = factories.create_product(title="Slipper", price=D("78.00"))
self.slipper2 = factories.create_product(title="Slipper", price=D("79.00"))
self.range_mattresses.add_product(self.mattress)
self.range_slippers.add_product(self.slipper1)
self.range_slippers.add_product(self.slipper2)
self.condition = BluelightCountCondition.objects.create(
range=self.range_mattresses,
proxy_class="oscarbluelight.offer.conditions.BluelightCountCondition",
value=1,
)
self.benefit = BluelightPercentageDiscountBenefit.objects.create(
range=self.range_slippers,
proxy_class="oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit",
value=D("100.00"),
max_affected_items=1,
)
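        # A 100% discount capped at one affected item makes exactly one slipper free per application.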
self.offer = mock.Mock()
self.basket = factories.create_basket(empty=True)
def test_applies_correctly_to_basket_which_matches_multiple_lines_multiple_times(
self,
):
# Add two different lines to the basket
self.basket.add_product(self.mattress, 2)
self.basket.add_product(self.slipper1, 1)
self.basket.add_product(self.slipper2, 1)
# Apply once
self.assertTrue(self.condition.proxy().is_satisfied(self.offer, self.basket))
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertTrue(result.is_successful)
self.assertEqual(result.discount, D("78.00"))
self.assertEqual(self.basket.num_items_with_discount, 2)
self.assertEqual(self.basket.num_items_without_discount, 2)
# Apply second time
self.assertTrue(self.condition.proxy().is_satisfied(self.offer, self.basket))
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertTrue(result.is_successful)
self.assertEqual(result.discount, D("79.00"))
self.assertEqual(self.basket.num_items_with_discount, 4)
self.assertEqual(self.basket.num_items_without_discount, 0)
# Can't apply a third time because the condition is no longer satisfied
self.assertFalse(self.condition.proxy().is_satisfied(self.offer, self.basket))
class TestAPercentageDiscountAppliedWithValueCondition(TestCase):
def setUp(self):
# Flush the cache
conn = get_redis_connection("redis")
conn.flushall()
range = Range.objects.create(name="All products", includes_all_products=True)
self.condition = BluelightValueCondition.objects.create(
range=range,
proxy_class="oscarbluelight.offer.conditions.BluelightValueCondition",
value=D("10.00"),
)
self.benefit = BluelightPercentageDiscountBenefit.objects.create(
range=range,
proxy_class="oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit",
value=20,
)
self.offer = mock.Mock()
self.basket = factories.create_basket(empty=True)
def test_applies_correctly_to_empty_basket(self):
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(D("0.00"), result.discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
add_product(self.basket, D("5.00"), 2)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(2 * D("5.00") * D("0.2"), result.discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition_but_matches_on_boundary(
self,
):
add_product(self.basket, D("5.00"), 3)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(3 * D("5.00") * D("0.2"), result.discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
add_product(self.basket, D("4.00"), 3)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(3 * D("4.00") * D("0.2"), result.discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
class TestAPercentageDiscountWithMaxItemsSetAppliedWithValueCondition(TestCase):
def setUp(self):
# Flush the cache
conn = get_redis_connection("redis")
conn.flushall()
range = Range.objects.create(name="All products", includes_all_products=True)
self.condition = BluelightValueCondition.objects.create(
range=range,
proxy_class="oscarbluelight.offer.conditions.BluelightValueCondition",
value=D("10.00"),
)
self.benefit = BluelightPercentageDiscountBenefit.objects.create(
range=range,
proxy_class="oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit",
value=20,
max_affected_items=1,
)
self.offer = mock.Mock()
self.basket = factories.create_basket(empty=True)
def test_applies_correctly_to_empty_basket(self):
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(D("0.00"), result.discount)
self.assertEqual(0, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_matches_condition(self):
add_product(self.basket, D("5.00"), 2)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(1 * D("5.00") * D("0.2"), result.discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition_but_matches_on_boundary(
self,
):
add_product(self.basket, D("5.00"), 3)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(1 * D("5.00") * D("0.2"), result.discount)
self.assertEqual(2, self.basket.num_items_with_discount)
self.assertEqual(1, self.basket.num_items_without_discount)
def test_applies_correctly_to_basket_which_exceeds_condition(self):
add_product(self.basket, D("4.00"), 3)
result = self.benefit.apply(self.basket, self.condition, self.offer)
self.assertEqual(1 * D("4.00") * D("0.2"), result.discount)
self.assertEqual(3, self.basket.num_items_with_discount)
self.assertEqual(0, self.basket.num_items_without_discount)
class TestAPercentageDiscountBenefit(TestCase):
def setUp(self):
# Flush the cache
conn = get_redis_connection("redis")
conn.flushall()
def test_requires_a_benefit_value(self):
rng = Range.objects.create(name="", includes_all_products=True)
benefit = Benefit.objects.create(
range=rng,
proxy_class="oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit",
)
with self.assertRaises(exceptions.ValidationError):
benefit.clean()
| [((558, 587), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""redis"""'], {}), "('redis')\n", (578, 587), False, 'from django_redis import get_redis_connection\n'), ((629, 698), 'oscarbluelight.offer.models.Range.objects.create', 'Range.objects.create', ([], {'name': '"""All products"""', 'includes_all_products': '(True)'}), "(name='All products', includes_all_products=True)\n", (649, 698), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((724, 845), 'oscarbluelight.offer.models.BluelightCountCondition', 'BluelightCountCondition', ([], {'range': 'range', 'proxy_class': '"""oscarbluelight.offer.conditions.BluelightCountCondition"""', 'value': '(2)'}), "(range=range, proxy_class=\n 'oscarbluelight.offer.conditions.BluelightCountCondition', value=2)\n", (747, 845), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((911, 1057), 'oscarbluelight.offer.models.BluelightPercentageDiscountBenefit', 'BluelightPercentageDiscountBenefit', ([], {'range': 'range', 'proxy_class': '"""oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit"""', 'value': '(20)'}), "(range=range, proxy_class=\n 'oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit',\n value=20)\n", (945, 1057), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((1117, 1128), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1126, 1128), False, 'from unittest import mock\n'), ((1151, 1186), 'oscar.test.factories.create_basket', 'factories.create_basket', ([], {'empty': '(True)'}), '(empty=True)\n', (1174, 1186), False, 'from oscar.test import factories\n'), ((1602, 1649), 'oscar.test.factories.create_product', 'factories.create_product', ([], {'is_discountable': '(False)'}), '(is_discountable=False)\n', (1626, 1649), False, 'from oscar.test import factories\n'), ((2862, 2871), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (2863, 2871), True, 'from decimal import Decimal as D\n'), ((3402, 3413), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3411, 3413), False, 'from unittest import mock\n'), ((4233, 4244), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (4242, 4244), False, 'from unittest import mock\n'), ((4431, 4442), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (4440, 4442), False, 'from unittest import mock\n'), ((5339, 5368), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""redis"""'], {}), "('redis')\n", (5359, 5368), False, 'from django_redis import get_redis_connection\n'), ((5410, 5479), 'oscarbluelight.offer.models.Range.objects.create', 'Range.objects.create', ([], {'name': '"""All products"""', 'includes_all_products': '(True)'}), "(name='All products', includes_all_products=True)\n", (5430, 5479), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((5505, 5626), 'oscarbluelight.offer.models.BluelightCountCondition', 'BluelightCountCondition', ([], {'range': 'range', 'proxy_class': '"""oscarbluelight.offer.conditions.BluelightCountCondition"""', 'value': '(2)'}), "(range=range, proxy_class=\n 'oscarbluelight.offer.conditions.BluelightCountCondition', value=2)\n", (5528, 5626), False, 'from oscarbluelight.offer.models import 
Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((5692, 5860), 'oscarbluelight.offer.models.BluelightPercentageDiscountBenefit', 'BluelightPercentageDiscountBenefit', ([], {'range': 'range', 'proxy_class': '"""oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit"""', 'value': '(20)', 'max_affected_items': '(1)'}), "(range=range, proxy_class=\n 'oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit',\n value=20, max_affected_items=1)\n", (5726, 5860), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((5932, 5943), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (5941, 5943), False, 'from unittest import mock\n'), ((5966, 6001), 'oscar.test.factories.create_basket', 'factories.create_basket', ([], {'empty': '(True)'}), '(empty=True)\n', (5989, 6001), False, 'from oscar.test import factories\n'), ((7342, 7371), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""redis"""'], {}), "('redis')\n", (7362, 7371), False, 'from django_redis import get_redis_connection\n'), ((7429, 7468), 'oscarbluelight.offer.models.Range.objects.create', 'Range.objects.create', ([], {'name': '"""Mattresses"""'}), "(name='Mattresses')\n", (7449, 7468), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((7499, 7536), 'oscarbluelight.offer.models.Range.objects.create', 'Range.objects.create', ([], {'name': '"""Slippers"""'}), "(name='Slippers')\n", (7519, 7536), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((7987, 8142), 'oscarbluelight.offer.models.BluelightCountCondition.objects.create', 'BluelightCountCondition.objects.create', ([], {'range': 'self.range_mattresses', 'proxy_class': '"""oscarbluelight.offer.conditions.BluelightCountCondition"""', 'value': '(1)'}), "(range=self.range_mattresses,\n proxy_class='oscarbluelight.offer.conditions.BluelightCountCondition',\n value=1)\n", (8025, 8142), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((8485, 8496), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (8494, 8496), False, 'from unittest import mock\n'), ((8519, 8554), 'oscar.test.factories.create_basket', 'factories.create_basket', ([], {'empty': '(True)'}), '(empty=True)\n', (8542, 8554), False, 'from oscar.test import factories\n'), ((9999, 10028), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""redis"""'], {}), "('redis')\n", (10019, 10028), False, 'from django_redis import get_redis_connection\n'), ((10070, 10139), 'oscarbluelight.offer.models.Range.objects.create', 'Range.objects.create', ([], {'name': '"""All products"""', 'includes_all_products': '(True)'}), "(name='All products', includes_all_products=True)\n", (10090, 10139), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((10376, 10537), 'oscarbluelight.offer.models.BluelightPercentageDiscountBenefit.objects.create', 'BluelightPercentageDiscountBenefit.objects.create', ([], {'range': 'range', 'proxy_class': '"""oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit"""', 'value': '(20)'}), 
"(range=range, proxy_class=\n 'oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit',\n value=20)\n", (10425, 10537), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((10597, 10608), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (10606, 10608), False, 'from unittest import mock\n'), ((10631, 10666), 'oscar.test.factories.create_basket', 'factories.create_basket', ([], {'empty': '(True)'}), '(empty=True)\n', (10654, 10666), False, 'from oscar.test import factories\n'), ((12363, 12392), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""redis"""'], {}), "('redis')\n", (12383, 12392), False, 'from django_redis import get_redis_connection\n'), ((12434, 12503), 'oscarbluelight.offer.models.Range.objects.create', 'Range.objects.create', ([], {'name': '"""All products"""', 'includes_all_products': '(True)'}), "(name='All products', includes_all_products=True)\n", (12454, 12503), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((12740, 12923), 'oscarbluelight.offer.models.BluelightPercentageDiscountBenefit.objects.create', 'BluelightPercentageDiscountBenefit.objects.create', ([], {'range': 'range', 'proxy_class': '"""oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit"""', 'value': '(20)', 'max_affected_items': '(1)'}), "(range=range, proxy_class=\n 'oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit',\n value=20, max_affected_items=1)\n", (12789, 12923), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((12995, 13006), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (13004, 13006), False, 'from unittest import mock\n'), ((13029, 13064), 'oscar.test.factories.create_basket', 'factories.create_basket', ([], {'empty': '(True)'}), '(empty=True)\n', (13052, 13064), False, 'from oscar.test import factories\n'), ((14728, 14757), 'django_redis.get_redis_connection', 'get_redis_connection', (['"""redis"""'], {}), "('redis')\n", (14748, 14757), False, 'from django_redis import get_redis_connection\n'), ((14842, 14899), 'oscarbluelight.offer.models.Range.objects.create', 'Range.objects.create', ([], {'name': '""""""', 'includes_all_products': '(True)'}), "(name='', includes_all_products=True)\n", (14862, 14899), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((14918, 15036), 'oscarbluelight.offer.models.Benefit.objects.create', 'Benefit.objects.create', ([], {'range': 'rng', 'proxy_class': '"""oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit"""'}), "(range=rng, proxy_class=\n 'oscarbluelight.offer.benefits.BluelightPercentageDiscountBenefit')\n", (14940, 15036), False, 'from oscarbluelight.offer.models import Range, Benefit, BluelightCountCondition, BluelightValueCondition, BluelightPercentageDiscountBenefit\n'), ((1344, 1353), 'decimal.Decimal', 'D', (['"""0.00"""'], {}), "('0.00')\n", (1345, 1353), True, 'from decimal import Decimal as D\n'), ((1683, 1693), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (1684, 1693), True, 'from decimal import Decimal as D\n'), ((1817, 1826), 'decimal.Decimal', 'D', (['"""0.00"""'], {}), "('0.00')\n", (1818, 1826), True, 'from decimal import Decimal as D\n'), 
((2084, 2094), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (2085, 2094), True, 'from decimal import Decimal as D\n'), ((2484, 2494), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (2485, 2494), True, 'from decimal import Decimal as D\n'), ((2933, 2943), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (2934, 2943), True, 'from decimal import Decimal as D\n'), ((3050, 3059), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (3051, 3059), True, 'from decimal import Decimal as D\n'), ((3499, 3508), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (3500, 3508), True, 'from decimal import Decimal as D\n'), ((3900, 3909), 'decimal.Decimal', 'D', (['"""1.00"""'], {}), "('1.00')\n", (3901, 3909), True, 'from decimal import Decimal as D\n'), ((4531, 4540), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (4532, 4540), True, 'from decimal import Decimal as D\n'), ((4932, 4941), 'decimal.Decimal', 'D', (['"""1.00"""'], {}), "('1.00')\n", (4933, 4941), True, 'from decimal import Decimal as D\n'), ((6159, 6168), 'decimal.Decimal', 'D', (['"""0.00"""'], {}), "('0.00')\n", (6160, 6168), True, 'from decimal import Decimal as D\n'), ((6426, 6436), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (6427, 6436), True, 'from decimal import Decimal as D\n'), ((9133, 9143), 'decimal.Decimal', 'D', (['"""78.00"""'], {}), "('78.00')\n", (9134, 9143), True, 'from decimal import Decimal as D\n'), ((9557, 9567), 'decimal.Decimal', 'D', (['"""79.00"""'], {}), "('79.00')\n", (9558, 9567), True, 'from decimal import Decimal as D\n'), ((10824, 10833), 'decimal.Decimal', 'D', (['"""0.00"""'], {}), "('0.00')\n", (10825, 10833), True, 'from decimal import Decimal as D\n'), ((11091, 11100), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (11092, 11100), True, 'from decimal import Decimal as D\n'), ((11528, 11537), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (11529, 11537), True, 'from decimal import Decimal as D\n'), ((11926, 11935), 'decimal.Decimal', 'D', (['"""4.00"""'], {}), "('4.00')\n", (11927, 11935), True, 'from decimal import Decimal as D\n'), ((13222, 13231), 'decimal.Decimal', 'D', (['"""0.00"""'], {}), "('0.00')\n", (13223, 13231), True, 'from decimal import Decimal as D\n'), ((13489, 13498), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (13490, 13498), True, 'from decimal import Decimal as D\n'), ((13926, 13935), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (13927, 13935), True, 'from decimal import Decimal as D\n'), ((14324, 14333), 'decimal.Decimal', 'D', (['"""4.00"""'], {}), "('4.00')\n", (14325, 14333), True, 'from decimal import Decimal as D\n'), ((2218, 2226), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (2219, 2226), True, 'from decimal import Decimal as D\n'), ((2618, 2626), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (2619, 2626), True, 'from decimal import Decimal as D\n'), ((6560, 6568), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (6561, 6568), True, 'from decimal import Decimal as D\n'), ((6982, 6990), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (6983, 6990), True, 'from decimal import Decimal as D\n'), ((7611, 7623), 'decimal.Decimal', 'D', (['"""2999.00"""'], {}), "('2999.00')\n", (7612, 7623), True, 'from decimal import Decimal as D\n'), ((7697, 7707), 'decimal.Decimal', 'D', (['"""78.00"""'], {}), "('78.00')\n", (7698, 7707), True, 'from decimal import Decimal as D\n'), ((7781, 7791), 
'decimal.Decimal', 'D', (['"""79.00"""'], {}), "('79.00')\n", (7782, 7791), True, 'from decimal import Decimal as D\n'), ((8406, 8417), 'decimal.Decimal', 'D', (['"""100.00"""'], {}), "('100.00')\n", (8407, 8417), True, 'from decimal import Decimal as D\n'), ((10331, 10341), 'decimal.Decimal', 'D', (['"""10.00"""'], {}), "('10.00')\n", (10332, 10341), True, 'from decimal import Decimal as D\n'), ((11223, 11231), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (11224, 11231), True, 'from decimal import Decimal as D\n'), ((11660, 11668), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (11661, 11668), True, 'from decimal import Decimal as D\n'), ((12058, 12066), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (12059, 12066), True, 'from decimal import Decimal as D\n'), ((12695, 12705), 'decimal.Decimal', 'D', (['"""10.00"""'], {}), "('10.00')\n", (12696, 12705), True, 'from decimal import Decimal as D\n'), ((13621, 13629), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (13622, 13629), True, 'from decimal import Decimal as D\n'), ((14058, 14066), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (14059, 14066), True, 'from decimal import Decimal as D\n'), ((14456, 14464), 'decimal.Decimal', 'D', (['"""0.2"""'], {}), "('0.2')\n", (14457, 14464), True, 'from decimal import Decimal as D\n'), ((2205, 2215), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (2206, 2215), True, 'from decimal import Decimal as D\n'), ((2605, 2615), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (2606, 2615), True, 'from decimal import Decimal as D\n'), ((6547, 6557), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (6548, 6557), True, 'from decimal import Decimal as D\n'), ((6829, 6839), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (6830, 6839), True, 'from decimal import Decimal as D\n'), ((6846, 6856), 'decimal.Decimal', 'D', (['"""20.00"""'], {}), "('20.00')\n", (6847, 6856), True, 'from decimal import Decimal as D\n'), ((6969, 6979), 'decimal.Decimal', 'D', (['"""12.00"""'], {}), "('12.00')\n", (6970, 6979), True, 'from decimal import Decimal as D\n'), ((11211, 11220), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (11212, 11220), True, 'from decimal import Decimal as D\n'), ((11648, 11657), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (11649, 11657), True, 'from decimal import Decimal as D\n'), ((12046, 12055), 'decimal.Decimal', 'D', (['"""4.00"""'], {}), "('4.00')\n", (12047, 12055), True, 'from decimal import Decimal as D\n'), ((13609, 13618), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (13610, 13618), True, 'from decimal import Decimal as D\n'), ((14046, 14055), 'decimal.Decimal', 'D', (['"""5.00"""'], {}), "('5.00')\n", (14047, 14055), True, 'from decimal import Decimal as D\n'), ((14444, 14453), 'decimal.Decimal', 'D', (['"""4.00"""'], {}), "('4.00')\n", (14445, 14453), True, 'from decimal import Decimal as D\n')] |
adstr123/LPTHW | CLCC/ex8.py | 1a331ef173ffd6122b5c5ed13d8fdcc73ab7ce66 | # Moving around directories with pushd & popd
# You can save directories to go back to later. These can be built up in a stack.
#pushd i/like/icecream
# current stack: ~temp/i/like/icecream
#pushd i/like
# current stack: ~temp/i/like ~temp/i/like/icecream
#popd
# PS ~temp/i/like/icecream
#popd
# PS ~temp
# You can also add a directory as an argument to a pushd command to also immediately change to that directory | [] |
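(Editor's aside, not part of the exercise file above: the same save-and-return behaviour can be sketched in Python with an explicit directory stack. The helper names pushd/popd below are made up for illustration and only mimic the shell built-ins.)

import os

_dir_stack = []  # saved working directories, most recent last

def pushd(path):
    # Remember where we are, then change into the new directory.
    _dir_stack.append(os.getcwd())
    os.chdir(path)

def popd():
    # Return to the most recently saved directory (LIFO).
    os.chdir(_dir_stack.pop())

# Example: pushd("i/like/icecream") saves the starting directory;
# a later popd() returns there, and a second popd() keeps unwinding the stack.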
clouserw/zamboni | mkt/search/tests/test_filters.py | c4a568b69c1613f27da41d46328b2975cbdc1c07 | # -*- coding: utf-8 -*-
import json
from nose.tools import eq_, ok_
from rest_framework.exceptions import ParseError
from django.contrib.auth.models import AnonymousUser
from django.test.client import RequestFactory
from django.test.utils import override_settings
import mkt
from mkt.constants.applications import DEVICE_CHOICES_IDS
from mkt.constants.features import FeatureProfile
from mkt.search.filters import (DeviceTypeFilter, ProfileFilter,
PublicAppsFilter, PublicSearchFormFilter,
RegionFilter, SearchQueryFilter, SortingFilter,
ValidAppsFilter)
from mkt.search.forms import TARAKO_CATEGORIES_MAPPING
from mkt.search.views import SearchView
from mkt.site.tests import TestCase
from mkt.webapps.indexers import WebappIndexer
class FilterTestsBase(TestCase):
def setUp(self):
super(FilterTestsBase, self).setUp()
self.req = RequestFactory().get('/')
self.req.user = AnonymousUser()
self.view_class = SearchView
def _filter(self, req=None, data=None):
req = req or RequestFactory().get('/', data=data or {})
req.user = AnonymousUser()
queryset = WebappIndexer.search()
for filter_class in self.filter_classes:
queryset = filter_class().filter_queryset(req, queryset,
self.view_class)
return queryset.to_dict()
class TestQueryFilter(FilterTestsBase):
filter_classes = [SearchQueryFilter]
def test_q(self):
qs = self._filter(data={'q': 'search terms'})
# Spot check a few queries.
should = (qs['query']['function_score']['query']['bool']['should'])
ok_({'match': {'name': {'query': 'search terms', 'boost': 4,
'slop': 1, 'type': 'phrase'}}}
in should)
ok_({'prefix': {'name': {'boost': 1.5, 'value': 'search terms'}}}
in should)
ok_({'match': {'name_english': {'query': 'search terms',
'boost': 2.5}}}
in should)
ok_({'match': {'description_english': {'query': 'search terms',
'boost': 0.6,
'analyzer': 'english_analyzer',
'type': 'phrase'}}}
in should)
def test_fuzzy_single_word(self):
qs = self._filter(data={'q': 'term'})
should = (qs['query']['function_score']['query']['bool']['should'])
ok_({'fuzzy': {'tags': {'prefix_length': 1, 'value': 'term'}}}
in should)
def test_no_fuzzy_multi_word(self):
qs = self._filter(data={'q': 'search terms'})
qs_str = json.dumps(qs)
ok_('fuzzy' not in qs_str)
@override_settings(ES_USE_PLUGINS=True)
def test_polish_analyzer(self):
"""
Test that the polish analyzer is included correctly since it is an
exception to the rest b/c it is a plugin.
"""
with self.activate(locale='pl'):
qs = self._filter(data={'q': u'próba'})
should = (qs['query']['function_score']['query']['bool']['should'])
ok_({'match': {'name_polish': {'query': u'pr\xf3ba',
'boost': 2.5}}}
in should)
ok_({'match': {'description_polish': {'query': u'pr\xf3ba',
'boost': 0.6,
'analyzer': 'polish',
'type': 'phrase'}}}
in should)
class TestFormFilter(FilterTestsBase):
filter_classes = [PublicSearchFormFilter]
def test_category(self):
qs = self._filter(data={'cat': 'games'})
ok_({'terms': {'category': ['games']}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_tag(self):
qs = self._filter(data={'tag': 'tarako'})
ok_({'term': {'tags': 'tarako'}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_tarako_categories(self):
qs = self._filter(data={'cat': 'tarako-lifestyle'})
ok_({'terms':
{'category': TARAKO_CATEGORIES_MAPPING['tarako-lifestyle']}}
in qs['query']['filtered']['filter']['bool']['must'])
qs = self._filter(data={'cat': 'tarako-games'})
ok_({'terms': {'category': TARAKO_CATEGORIES_MAPPING['tarako-games']}}
in qs['query']['filtered']['filter']['bool']['must'])
qs = self._filter(data={'cat': 'tarako-tools'})
ok_({'terms': {'category': TARAKO_CATEGORIES_MAPPING['tarako-tools']}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_app_type(self):
qs = self._filter(data={'app_type': ['hosted']})
ok_({'terms': {'app_type': [1]}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_app_type_packaged(self):
"""Test packaged also includes privileged."""
qs = self._filter(data={'app_type': ['packaged']})
ok_({'terms': {'app_type': [2, 3]}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_manifest_url(self):
url = 'http://hy.fr/manifest.webapp'
qs = self._filter(data={'manifest_url': url})
ok_({'term': {'manifest_url': url}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_offline(self):
"""Ensure we are filtering by offline-capable apps."""
qs = self._filter(data={'offline': 'True'})
ok_({'term': {'is_offline': True}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_online(self):
"""Ensure we are filtering by apps that require online access."""
qs = self._filter(data={'offline': 'False'})
ok_({'term': {'is_offline': False}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_offline_and_online(self):
"""Ensure we are not filtering by offline/online by default."""
# Pass any form values other than 'offline' to create the dict.
qs = self._filter(data={'cat': 'games'})
ok_({'term': {'is_offline': True}}
not in qs['query']['filtered']['filter']['bool']['must'])
ok_({'term': {'is_offline': False}}
not in qs['query']['filtered']['filter']['bool']['must'])
def test_languages(self):
qs = self._filter(data={'languages': 'fr'})
ok_({'terms': {'supported_locales': ['fr']}}
in qs['query']['filtered']['filter']['bool']['must'])
qs = self._filter(data={'languages': 'ar,en-US'})
ok_({'terms': {'supported_locales': ['ar', 'en-US']}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_author(self):
qs = self._filter(data={'author': 'Mozilla LABS'})
ok_({'term': {'author.raw': u'mozilla labs'}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_installs_allowed_from(self):
qs = self._filter(data={'installs_allowed_from': '*'})
ok_({'term': {'installs_allowed_from': u'*'}}
in qs['query']['filtered']['filter']['bool']['must'])
# Test that we don't filter by this field if not provided.
qs = self._filter()
ok_('installs_allowed_from' not in json.dumps(qs),
"Unexpected 'installs_allowed_from' in query")
def test_premium_types(self):
def ptype(p):
return mkt.ADDON_PREMIUM_API_LOOKUP.get(p)
# Test a single premium type.
qs = self._filter(data={'premium_types': ['free']})
ok_({'terms': {'premium_type': [ptype('free')]}}
in qs['query']['filtered']['filter']['bool']['must'])
# Test many premium types.
qs = self._filter(data={'premium_types': ['free', 'free-inapp']})
ok_({'terms': {'premium_type': [ptype('free'), ptype('free-inapp')]}}
in qs['query']['filtered']['filter']['bool']['must'])
# Test a non-existent premium type.
with self.assertRaises(ParseError):
self._filter(data={'premium_types': ['free', 'platinum']})
def test_device(self):
qs = self._filter(data={'dev': 'desktop'})
ok_({'term': {'device': DEVICE_CHOICES_IDS['desktop']}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_no_device_with_device_type(self):
"""Test that providing a device type w/o device doesn't filter."""
qs = self._filter(data={'dev': '', 'device': 'firefoxos'})
ok_('filtered' not in qs['query'].keys())
class TestPublicAppsFilter(FilterTestsBase):
filter_classes = [PublicAppsFilter]
def test_status(self):
qs = self._filter(self.req)
ok_({'term': {'status': mkt.STATUS_PUBLIC}}
in qs['query']['filtered']['filter']['bool']['must'])
ok_({'term': {'is_disabled': False}}
in qs['query']['filtered']['filter']['bool']['must'])
class TestValidAppsFilter(FilterTestsBase):
filter_classes = [ValidAppsFilter]
def test_status(self):
qs = self._filter(self.req)
ok_({'terms': {'status': mkt.VALID_STATUSES}}
in qs['query']['filtered']['filter']['bool']['must'])
ok_({'term': {'is_disabled': False}}
in qs['query']['filtered']['filter']['bool']['must'])
class TestDeviceTypeFilter(FilterTestsBase):
filter_classes = [DeviceTypeFilter]
def test_no_filters(self):
qs = self._filter(self.req)
ok_('filtered' not in qs['query'].keys())
def test_mobile(self):
self.req.MOBILE = True
qs = self._filter(self.req)
ok_({'term': {'uses_flash': False}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_gaia(self):
self.req.GAIA = True
qs = self._filter(self.req)
ok_({'term': {'uses_flash': False}}
in qs['query']['filtered']['filter']['bool']['must'])
def test_tablet(self):
self.req.TABLET = True
qs = self._filter(self.req)
ok_('filtered' not in qs['query'].keys())
def test_device_in_querystring(self):
qs = self._filter(data={'dev': 'desktop'})
ok_({'term': {'device': 1}}
in qs['query']['filtered']['filter']['bool']['must'])
qs = self._filter(data={'dev': 'android', 'device': 'mobile'})
ok_({'term': {'device': 2}}
in qs['query']['filtered']['filter']['bool']['must'])
qs = self._filter(data={'dev': 'android', 'device': 'tablet'})
ok_({'term': {'device': 3}}
in qs['query']['filtered']['filter']['bool']['must'])
qs = self._filter(data={'dev': 'firefoxos'})
ok_({'term': {'device': 4}}
in qs['query']['filtered']['filter']['bool']['must'])
class TestRegionFilter(FilterTestsBase):
filter_classes = [RegionFilter]
def test_no_region_default(self):
qs = self._filter(self.req)
ok_({'term': {'region_exclusions': mkt.regions.RESTOFWORLD.id}}
in qs['query']['filtered']['filter']['bool']['must_not'])
def test_region(self):
self.req.REGION = mkt.regions.BRA
qs = self._filter(self.req)
ok_({'term': {'region_exclusions': mkt.regions.BRA.id}}
in qs['query']['filtered']['filter']['bool']['must_not'])
class TestProfileFilter(FilterTestsBase):
filter_classes = [ProfileFilter]
def profile_qs(self, disabled_features=None):
if disabled_features is None:
disabled_features = {}
profile = FeatureProfile().fromkeys(FeatureProfile(), True)
for feature in disabled_features:
profile[feature] = False
return {'pro': profile.to_signature(), 'dev': 'firefoxos'}
def test_filter_all_features_present(self):
qs = self._filter(data=self.profile_qs())
ok_('filtered' not in qs['query'].keys())
def test_filter_one_feature_present(self):
qs = self._filter(data=self.profile_qs(disabled_features=['sms']))
ok_({'term': {'features.has_sms': True}}
in qs['query']['filtered']['filter']['bool']['must_not'])
def test_filter_one_feature_present_desktop(self):
data = self.profile_qs(disabled_features=['sms'])
data['dev'] = 'desktop'
qs = self._filter(data=data)
ok_('filtered' not in qs['query'].keys())
def test_filter_multiple_features_present(self):
qs = self._filter(
data=self.profile_qs(disabled_features=['sms', 'apps']))
ok_({'term': {'features.has_sms': True}}
in qs['query']['filtered']['filter']['bool']['must_not'])
ok_({'term': {'features.has_apps': True}}
in qs['query']['filtered']['filter']['bool']['must_not'])
class TestSortingFilter(FilterTestsBase):
filter_classes = [SortingFilter]
def test_sort(self):
for api_sort, es_sort in SortingFilter.DEFAULT_SORTING.items():
qs = self._filter(data={'sort': [api_sort]})
if es_sort.startswith('-'):
ok_({es_sort[1:]: {'order': 'desc'}} in qs['sort'], qs)
else:
eq_([es_sort], qs['sort'], qs)
def test_sort_multiple(self):
qs = self._filter(data={'sort': ['rating', 'created']})
ok_({'bayesian_rating': {'order': 'desc'}} in qs['sort'])
ok_({'created': {'order': 'desc'}} in qs['sort'])
def test_sort_regional(self):
"""Popularity and trending use regional sorting for mature regions."""
req = RequestFactory().get('/')
req.REGION = mkt.regions.BRA
# Default empty query searches use popularity.
qs = self._filter(req)
ok_({'popularity_%s'
% mkt.regions.BRA.id: {'order': 'desc'}} in qs['sort'])
# Popularity.
req = RequestFactory().get('/', data={'sort': ['popularity']})
req.REGION = mkt.regions.BRA
qs = self._filter(req)
ok_({'popularity_%s'
% mkt.regions.BRA.id: {'order': 'desc'}} in qs['sort'])
# Trending.
req = RequestFactory().get('/', data={'sort': ['trending']})
req.REGION = mkt.regions.BRA
qs = self._filter(req)
ok_({'trending_%s' % mkt.regions.BRA.id: {'order': 'desc'}}
in qs['sort'])
class TestCombinedFilter(FilterTestsBase):
"""
Basic test to ensure that when filters are combined they result in the
expected query structure.
"""
filter_classes = [SearchQueryFilter, PublicSearchFormFilter,
PublicAppsFilter, SortingFilter]
def test_combined(self):
qs = self._filter(data={'q': 'test', 'cat': 'games',
'sort': 'trending'})
ok_(qs['query']['filtered']['query']['function_score'])
ok_(qs['query']['filtered']['filter'])
must = qs['query']['filtered']['filter']['bool']['must']
ok_({'terms': {'category': ['games']}} in must)
ok_({'term': {'status': 4}} in must)
ok_({'term': {'is_disabled': False}} in must)
ok_({'trending': {'order': 'desc'}} in qs['sort'])
query = qs['query']['filtered']['query']
ok_({'field_value_factor': {'field': 'boost'}}
in query['function_score']['functions'])
ok_({'match': {'name_english': {'boost': 2.5, 'query': u'test'}}}
in query['function_score']['query']['bool']['should'])
| [((2859, 2897), 'django.test.utils.override_settings', 'override_settings', ([], {'ES_USE_PLUGINS': '(True)'}), '(ES_USE_PLUGINS=True)\n', (2876, 2897), False, 'from django.test.utils import override_settings\n'), ((1003, 1018), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (1016, 1018), False, 'from django.contrib.auth.models import AnonymousUser\n'), ((1184, 1199), 'django.contrib.auth.models.AnonymousUser', 'AnonymousUser', ([], {}), '()\n', (1197, 1199), False, 'from django.contrib.auth.models import AnonymousUser\n'), ((1219, 1241), 'mkt.webapps.indexers.WebappIndexer.search', 'WebappIndexer.search', ([], {}), '()\n', (1239, 1241), False, 'from mkt.webapps.indexers import WebappIndexer\n'), ((1746, 1852), 'nose.tools.ok_', 'ok_', (["({'match': {'name': {'query': 'search terms', 'boost': 4, 'slop': 1, 'type':\n 'phrase'}}} in should)"], {}), "({'match': {'name': {'query': 'search terms', 'boost': 4, 'slop': 1,\n 'type': 'phrase'}}} in should)\n", (1749, 1852), False, 'from nose.tools import eq_, ok_\n'), ((1901, 1977), 'nose.tools.ok_', 'ok_', (["({'prefix': {'name': {'boost': 1.5, 'value': 'search terms'}}} in should)"], {}), "({'prefix': {'name': {'boost': 1.5, 'value': 'search terms'}}} in should)\n", (1904, 1977), False, 'from nose.tools import eq_, ok_\n'), ((1998, 2085), 'nose.tools.ok_', 'ok_', (["({'match': {'name_english': {'query': 'search terms', 'boost': 2.5}}} in should\n )"], {}), "({'match': {'name_english': {'query': 'search terms', 'boost': 2.5}}} in\n should)\n", (2001, 2085), False, 'from nose.tools import eq_, ok_\n'), ((2142, 2287), 'nose.tools.ok_', 'ok_', (["({'match': {'description_english': {'query': 'search terms', 'boost': 0.6,\n 'analyzer': 'english_analyzer', 'type': 'phrase'}}} in should)"], {}), "({'match': {'description_english': {'query': 'search terms', 'boost': \n 0.6, 'analyzer': 'english_analyzer', 'type': 'phrase'}}} in should)\n", (2145, 2287), False, 'from nose.tools import eq_, ok_\n'), ((2605, 2678), 'nose.tools.ok_', 'ok_', (["({'fuzzy': {'tags': {'prefix_length': 1, 'value': 'term'}}} in should)"], {}), "({'fuzzy': {'tags': {'prefix_length': 1, 'value': 'term'}}} in should)\n", (2608, 2678), False, 'from nose.tools import eq_, ok_\n'), ((2803, 2817), 'json.dumps', 'json.dumps', (['qs'], {}), '(qs)\n', (2813, 2817), False, 'import json\n'), ((2826, 2852), 'nose.tools.ok_', 'ok_', (["('fuzzy' not in qs_str)"], {}), "('fuzzy' not in qs_str)\n", (2829, 2852), False, 'from nose.tools import eq_, ok_\n'), ((3887, 3984), 'nose.tools.ok_', 'ok_', (["({'terms': {'category': ['games']}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'terms': {'category': ['games']}} in qs['query']['filtered']['filter']\n ['bool']['must'])\n", (3890, 3984), False, 'from nose.tools import eq_, ok_\n'), ((4075, 4166), 'nose.tools.ok_', 'ok_', (["({'term': {'tags': 'tarako'}} in qs['query']['filtered']['filter']['bool'][\n 'must'])"], {}), "({'term': {'tags': 'tarako'}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (4078, 4166), False, 'from nose.tools import eq_, ok_\n'), ((4281, 4413), 'nose.tools.ok_', 'ok_', (["({'terms': {'category': TARAKO_CATEGORIES_MAPPING['tarako-lifestyle']}} in\n qs['query']['filtered']['filter']['bool']['must'])"], {}), "({'terms': {'category': TARAKO_CATEGORIES_MAPPING['tarako-lifestyle']}} in\n qs['query']['filtered']['filter']['bool']['must'])\n", (4284, 4413), False, 'from nose.tools import eq_, ok_\n'), ((4500, 4628), 'nose.tools.ok_', 'ok_', (["({'terms': {'category': 
TARAKO_CATEGORIES_MAPPING['tarako-games']}} in qs[\n 'query']['filtered']['filter']['bool']['must'])"], {}), "({'terms': {'category': TARAKO_CATEGORIES_MAPPING['tarako-games']}} in\n qs['query']['filtered']['filter']['bool']['must'])\n", (4503, 4628), False, 'from nose.tools import eq_, ok_\n'), ((4702, 4830), 'nose.tools.ok_', 'ok_', (["({'terms': {'category': TARAKO_CATEGORIES_MAPPING['tarako-tools']}} in qs[\n 'query']['filtered']['filter']['bool']['must'])"], {}), "({'terms': {'category': TARAKO_CATEGORIES_MAPPING['tarako-tools']}} in\n qs['query']['filtered']['filter']['bool']['must'])\n", (4705, 4830), False, 'from nose.tools import eq_, ok_\n'), ((4934, 5025), 'nose.tools.ok_', 'ok_', (["({'terms': {'app_type': [1]}} in qs['query']['filtered']['filter']['bool'][\n 'must'])"], {}), "({'terms': {'app_type': [1]}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (4937, 5025), False, 'from nose.tools import eq_, ok_\n'), ((5193, 5287), 'nose.tools.ok_', 'ok_', (["({'terms': {'app_type': [2, 3]}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'terms': {'app_type': [2, 3]}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (5196, 5287), False, 'from nose.tools import eq_, ok_\n'), ((5436, 5530), 'nose.tools.ok_', 'ok_', (["({'term': {'manifest_url': url}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'manifest_url': url}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (5439, 5530), False, 'from nose.tools import eq_, ok_\n'), ((5690, 5783), 'nose.tools.ok_', 'ok_', (["({'term': {'is_offline': True}} in qs['query']['filtered']['filter']['bool'\n ]['must'])"], {}), "({'term': {'is_offline': True}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (5693, 5783), False, 'from nose.tools import eq_, ok_\n'), ((5954, 6048), 'nose.tools.ok_', 'ok_', (["({'term': {'is_offline': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'is_offline': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (5957, 6048), False, 'from nose.tools import eq_, ok_\n'), ((6297, 6394), 'nose.tools.ok_', 'ok_', (["({'term': {'is_offline': True}} not in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'is_offline': True}} not in qs['query']['filtered']['filter']\n ['bool']['must'])\n", (6300, 6394), False, 'from nose.tools import eq_, ok_\n'), ((6410, 6508), 'nose.tools.ok_', 'ok_', (["({'term': {'is_offline': False}} not in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'is_offline': False}} not in qs['query']['filtered']['filter'\n ]['bool']['must'])\n", (6413, 6508), False, 'from nose.tools import eq_, ok_\n'), ((6607, 6710), 'nose.tools.ok_', 'ok_', (["({'terms': {'supported_locales': ['fr']}} in qs['query']['filtered'][\n 'filter']['bool']['must'])"], {}), "({'terms': {'supported_locales': ['fr']}} in qs['query']['filtered'][\n 'filter']['bool']['must'])\n", (6610, 6710), False, 'from nose.tools import eq_, ok_\n'), ((6785, 6897), 'nose.tools.ok_', 'ok_', (["({'terms': {'supported_locales': ['ar', 'en-US']}} in qs['query'][\n 'filtered']['filter']['bool']['must'])"], {}), "({'terms': {'supported_locales': ['ar', 'en-US']}} in qs['query'][\n 'filtered']['filter']['bool']['must'])\n", (6788, 6897), False, 'from nose.tools import eq_, ok_\n'), ((7000, 7104), 'nose.tools.ok_', 'ok_', (["({'term': {'author.raw': u'mozilla labs'}} in qs['query']['filtered'][\n 'filter']['bool']['must'])"], {}), "({'term': {'author.raw': 
u'mozilla labs'}} in qs['query']['filtered'][\n 'filter']['bool']['must'])\n", (7003, 7104), False, 'from nose.tools import eq_, ok_\n'), ((7226, 7330), 'nose.tools.ok_', 'ok_', (["({'term': {'installs_allowed_from': u'*'}} in qs['query']['filtered'][\n 'filter']['bool']['must'])"], {}), "({'term': {'installs_allowed_from': u'*'}} in qs['query']['filtered'][\n 'filter']['bool']['must'])\n", (7229, 7330), False, 'from nose.tools import eq_, ok_\n'), ((8384, 8498), 'nose.tools.ok_', 'ok_', (["({'term': {'device': DEVICE_CHOICES_IDS['desktop']}} in qs['query'][\n 'filtered']['filter']['bool']['must'])"], {}), "({'term': {'device': DEVICE_CHOICES_IDS['desktop']}} in qs['query'][\n 'filtered']['filter']['bool']['must'])\n", (8387, 8498), False, 'from nose.tools import eq_, ok_\n'), ((8906, 9008), 'nose.tools.ok_', 'ok_', (["({'term': {'status': mkt.STATUS_PUBLIC}} in qs['query']['filtered'][\n 'filter']['bool']['must'])"], {}), "({'term': {'status': mkt.STATUS_PUBLIC}} in qs['query']['filtered'][\n 'filter']['bool']['must'])\n", (8909, 9008), False, 'from nose.tools import eq_, ok_\n'), ((9024, 9119), 'nose.tools.ok_', 'ok_', (["({'term': {'is_disabled': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'is_disabled': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (9027, 9119), False, 'from nose.tools import eq_, ok_\n'), ((9285, 9389), 'nose.tools.ok_', 'ok_', (["({'terms': {'status': mkt.VALID_STATUSES}} in qs['query']['filtered'][\n 'filter']['bool']['must'])"], {}), "({'terms': {'status': mkt.VALID_STATUSES}} in qs['query']['filtered'][\n 'filter']['bool']['must'])\n", (9288, 9389), False, 'from nose.tools import eq_, ok_\n'), ((9405, 9500), 'nose.tools.ok_', 'ok_', (["({'term': {'is_disabled': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'is_disabled': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (9408, 9500), False, 'from nose.tools import eq_, ok_\n'), ((9817, 9911), 'nose.tools.ok_', 'ok_', (["({'term': {'uses_flash': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'uses_flash': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (9820, 9911), False, 'from nose.tools import eq_, ok_\n'), ((10018, 10112), 'nose.tools.ok_', 'ok_', (["({'term': {'uses_flash': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])"], {}), "({'term': {'uses_flash': False}} in qs['query']['filtered']['filter'][\n 'bool']['must'])\n", (10021, 10112), False, 'from nose.tools import eq_, ok_\n'), ((10367, 10453), 'nose.tools.ok_', 'ok_', (["({'term': {'device': 1}} in qs['query']['filtered']['filter']['bool']['must'])"], {}), "({'term': {'device': 1}} in qs['query']['filtered']['filter']['bool'][\n 'must'])\n", (10370, 10453), False, 'from nose.tools import eq_, ok_\n'), ((10540, 10626), 'nose.tools.ok_', 'ok_', (["({'term': {'device': 2}} in qs['query']['filtered']['filter']['bool']['must'])"], {}), "({'term': {'device': 2}} in qs['query']['filtered']['filter']['bool'][\n 'must'])\n", (10543, 10626), False, 'from nose.tools import eq_, ok_\n'), ((10713, 10799), 'nose.tools.ok_', 'ok_', (["({'term': {'device': 3}} in qs['query']['filtered']['filter']['bool']['must'])"], {}), "({'term': {'device': 3}} in qs['query']['filtered']['filter']['bool'][\n 'must'])\n", (10716, 10799), False, 'from nose.tools import eq_, ok_\n'), ((10868, 10954), 'nose.tools.ok_', 'ok_', (["({'term': {'device': 4}} in 
qs['query']['filtered']['filter']['bool']['must'])"], {}), "({'term': {'device': 4}} in qs['query']['filtered']['filter']['bool'][\n 'must'])\n", (10871, 10954), False, 'from nose.tools import eq_, ok_\n'), ((11125, 11251), 'nose.tools.ok_', 'ok_', (["({'term': {'region_exclusions': mkt.regions.RESTOFWORLD.id}} in qs['query']\n ['filtered']['filter']['bool']['must_not'])"], {}), "({'term': {'region_exclusions': mkt.regions.RESTOFWORLD.id}} in qs[\n 'query']['filtered']['filter']['bool']['must_not'])\n", (11128, 11251), False, 'from nose.tools import eq_, ok_\n'), ((11373, 11491), 'nose.tools.ok_', 'ok_', (["({'term': {'region_exclusions': mkt.regions.BRA.id}} in qs['query'][\n 'filtered']['filter']['bool']['must_not'])"], {}), "({'term': {'region_exclusions': mkt.regions.BRA.id}} in qs['query'][\n 'filtered']['filter']['bool']['must_not'])\n", (11376, 11491), False, 'from nose.tools import eq_, ok_\n'), ((12199, 12302), 'nose.tools.ok_', 'ok_', (["({'term': {'features.has_sms': True}} in qs['query']['filtered']['filter'][\n 'bool']['must_not'])"], {}), "({'term': {'features.has_sms': True}} in qs['query']['filtered'][\n 'filter']['bool']['must_not'])\n", (12202, 12302), False, 'from nose.tools import eq_, ok_\n'), ((12701, 12804), 'nose.tools.ok_', 'ok_', (["({'term': {'features.has_sms': True}} in qs['query']['filtered']['filter'][\n 'bool']['must_not'])"], {}), "({'term': {'features.has_sms': True}} in qs['query']['filtered'][\n 'filter']['bool']['must_not'])\n", (12704, 12804), False, 'from nose.tools import eq_, ok_\n'), ((12820, 12924), 'nose.tools.ok_', 'ok_', (["({'term': {'features.has_apps': True}} in qs['query']['filtered']['filter']\n ['bool']['must_not'])"], {}), "({'term': {'features.has_apps': True}} in qs['query']['filtered'][\n 'filter']['bool']['must_not'])\n", (12823, 12924), False, 'from nose.tools import eq_, ok_\n'), ((13073, 13110), 'mkt.search.filters.SortingFilter.DEFAULT_SORTING.items', 'SortingFilter.DEFAULT_SORTING.items', ([], {}), '()\n', (13108, 13110), False, 'from mkt.search.filters import DeviceTypeFilter, ProfileFilter, PublicAppsFilter, PublicSearchFormFilter, RegionFilter, SearchQueryFilter, SortingFilter, ValidAppsFilter\n'), ((13453, 13510), 'nose.tools.ok_', 'ok_', (["({'bayesian_rating': {'order': 'desc'}} in qs['sort'])"], {}), "({'bayesian_rating': {'order': 'desc'}} in qs['sort'])\n", (13456, 13510), False, 'from nose.tools import eq_, ok_\n'), ((13519, 13568), 'nose.tools.ok_', 'ok_', (["({'created': {'order': 'desc'}} in qs['sort'])"], {}), "({'created': {'order': 'desc'}} in qs['sort'])\n", (13522, 13568), False, 'from nose.tools import eq_, ok_\n'), ((13854, 13932), 'nose.tools.ok_', 'ok_', (["({('popularity_%s' % mkt.regions.BRA.id): {'order': 'desc'}} in qs['sort'])"], {}), "({('popularity_%s' % mkt.regions.BRA.id): {'order': 'desc'}} in qs['sort'])\n", (13857, 13932), False, 'from nose.tools import eq_, ok_\n'), ((14113, 14191), 'nose.tools.ok_', 'ok_', (["({('popularity_%s' % mkt.regions.BRA.id): {'order': 'desc'}} in qs['sort'])"], {}), "({('popularity_%s' % mkt.regions.BRA.id): {'order': 'desc'}} in qs['sort'])\n", (14116, 14191), False, 'from nose.tools import eq_, ok_\n'), ((14368, 14444), 'nose.tools.ok_', 'ok_', (["({('trending_%s' % mkt.regions.BRA.id): {'order': 'desc'}} in qs['sort'])"], {}), "({('trending_%s' % mkt.regions.BRA.id): {'order': 'desc'}} in qs['sort'])\n", (14371, 14444), False, 'from nose.tools import eq_, ok_\n'), ((14894, 14949), 'nose.tools.ok_', 'ok_', (["qs['query']['filtered']['query']['function_score']"], 
{}), "(qs['query']['filtered']['query']['function_score'])\n", (14897, 14949), False, 'from nose.tools import eq_, ok_\n'), ((14958, 14996), 'nose.tools.ok_', 'ok_', (["qs['query']['filtered']['filter']"], {}), "(qs['query']['filtered']['filter'])\n", (14961, 14996), False, 'from nose.tools import eq_, ok_\n'), ((15071, 15118), 'nose.tools.ok_', 'ok_', (["({'terms': {'category': ['games']}} in must)"], {}), "({'terms': {'category': ['games']}} in must)\n", (15074, 15118), False, 'from nose.tools import eq_, ok_\n'), ((15127, 15163), 'nose.tools.ok_', 'ok_', (["({'term': {'status': 4}} in must)"], {}), "({'term': {'status': 4}} in must)\n", (15130, 15163), False, 'from nose.tools import eq_, ok_\n'), ((15172, 15217), 'nose.tools.ok_', 'ok_', (["({'term': {'is_disabled': False}} in must)"], {}), "({'term': {'is_disabled': False}} in must)\n", (15175, 15217), False, 'from nose.tools import eq_, ok_\n'), ((15227, 15277), 'nose.tools.ok_', 'ok_', (["({'trending': {'order': 'desc'}} in qs['sort'])"], {}), "({'trending': {'order': 'desc'}} in qs['sort'])\n", (15230, 15277), False, 'from nose.tools import eq_, ok_\n'), ((15336, 15428), 'nose.tools.ok_', 'ok_', (["({'field_value_factor': {'field': 'boost'}} in query['function_score'][\n 'functions'])"], {}), "({'field_value_factor': {'field': 'boost'}} in query['function_score'][\n 'functions'])\n", (15339, 15428), False, 'from nose.tools import eq_, ok_\n'), ((15444, 15569), 'nose.tools.ok_', 'ok_', (["({'match': {'name_english': {'boost': 2.5, 'query': u'test'}}} in query[\n 'function_score']['query']['bool']['should'])"], {}), "({'match': {'name_english': {'boost': 2.5, 'query': u'test'}}} in query[\n 'function_score']['query']['bool']['should'])\n", (15447, 15569), False, 'from nose.tools import eq_, ok_\n'), ((3268, 3344), 'nose.tools.ok_', 'ok_', (["({'match': {'name_polish': {'query': u'próba', 'boost': 2.5}}} in should)"], {}), "({'match': {'name_polish': {'query': u'próba', 'boost': 2.5}}} in should)\n", (3271, 3344), False, 'from nose.tools import eq_, ok_\n'), ((3419, 3546), 'nose.tools.ok_', 'ok_', (["({'match': {'description_polish': {'query': u'próba', 'boost': 0.6,\n 'analyzer': 'polish', 'type': 'phrase'}}} in should)"], {}), "({'match': {'description_polish': {'query': u'próba', 'boost': 0.6,\n 'analyzer': 'polish', 'type': 'phrase'}}} in should)\n", (3422, 3546), False, 'from nose.tools import eq_, ok_\n'), ((7627, 7662), 'mkt.ADDON_PREMIUM_API_LOOKUP.get', 'mkt.ADDON_PREMIUM_API_LOOKUP.get', (['p'], {}), '(p)\n', (7659, 7662), False, 'import mkt\n'), ((11749, 11765), 'mkt.constants.features.FeatureProfile', 'FeatureProfile', ([], {}), '()\n', (11763, 11765), False, 'from mkt.constants.features import FeatureProfile\n'), ((953, 969), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (967, 969), False, 'from django.test.client import RequestFactory\n'), ((7476, 7490), 'json.dumps', 'json.dumps', (['qs'], {}), '(qs)\n', (7486, 7490), False, 'import json\n'), ((11723, 11739), 'mkt.constants.features.FeatureProfile', 'FeatureProfile', ([], {}), '()\n', (11737, 11739), False, 'from mkt.constants.features import FeatureProfile\n'), ((13225, 13280), 'nose.tools.ok_', 'ok_', (["({es_sort[1:]: {'order': 'desc'}} in qs['sort'])", 'qs'], {}), "({es_sort[1:]: {'order': 'desc'}} in qs['sort'], qs)\n", (13228, 13280), False, 'from nose.tools import eq_, ok_\n'), ((13315, 13345), 'nose.tools.eq_', 'eq_', (['[es_sort]', "qs['sort']", 'qs'], {}), "([es_sort], qs['sort'], qs)\n", (13318, 13345), False, 'from nose.tools import 
eq_, ok_\n'), ((13697, 13713), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (13711, 13713), False, 'from django.test.client import RequestFactory\n'), ((13980, 13996), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (13994, 13996), False, 'from django.test.client import RequestFactory\n'), ((14237, 14253), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (14251, 14253), False, 'from django.test.client import RequestFactory\n'), ((1122, 1138), 'django.test.client.RequestFactory', 'RequestFactory', ([], {}), '()\n', (1136, 1138), False, 'from django.test.client import RequestFactory\n')] |
sweeneyb/iot-core-micropython | third_party/logging.py | 7fc341902fbf8fa587f0dc3aa10c0803a5e0d6a5 | # MIT License
#
# Copyright (c) 2019 Johan Brichau
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
CRITICAL = 50
ERROR = 40
WARNING = 30
INFO = 20
DEBUG = 10
NOTSET = 0
_level_dict = {
CRITICAL: "CRIT",
ERROR: "ERROR",
WARNING: "WARN",
INFO: "INFO",
DEBUG: "DEBUG",
}
_stream = sys.stderr
class Logger:
level = NOTSET
def __init__(self, name):
self.name = name
def _level_str(self, level):
l = _level_dict.get(level)
if l is not None:
return l
return "LVL%s" % level
def setLevel(self, level):
self.level = level
def isEnabledFor(self, level):
return level >= (self.level or _level)
def log(self, level, msg, *args):
if level >= (self.level or _level):
_stream.write("%s:%s:" % (self._level_str(level), self.name))
if not args:
print(msg, file=_stream)
else:
print(msg % args, file=_stream)
def debug(self, msg, *args):
self.log(DEBUG, msg, *args)
def info(self, msg, *args):
self.log(INFO, msg, *args)
def warning(self, msg, *args):
self.log(WARNING, msg, *args)
def error(self, msg, *args):
self.log(ERROR, msg, *args)
def critical(self, msg, *args):
self.log(CRITICAL, msg, *args)
def exc(self, e, msg, *args):
self.log(ERROR, msg, *args)
sys.print_exception(e, _stream)
def exception(self, msg, *args):
self.exc(sys.exc_info()[1], msg, *args)
_level = INFO
_loggers = {}
def getLogger(name):
if name in _loggers:
return _loggers[name]
l = Logger(name)
_loggers[name] = l
return l
def info(msg, *args):
getLogger(None).info(msg, *args)
def debug(msg, *args):
getLogger(None).debug(msg, *args)
def basicConfig(level=INFO, filename=None, stream=None, format=None):
global _level, _stream
_level = level
if stream:
_stream = stream
if filename is not None:
print("logging.basicConfig: filename arg is not supported")
if format is not None:
print("logging.basicConfig: format arg is not supported")
| [((2455, 2486), 'sys.print_exception', 'sys.print_exception', (['e', '_stream'], {}), '(e, _stream)\n', (2474, 2486), False, 'import sys\n'), ((2542, 2556), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2554, 2556), False, 'import sys\n')] |
acounsel/django_msat | assessments/migrations/0003_auto_20210212_1943.py | 86a54e43429001cb6433e28b294d6b8a94b97e6e | # Generated by Django 3.1.6 on 2021-02-12 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('assessments', '0002_auto_20210212_1904'),
]
operations = [
migrations.AlterField(
model_name='country',
name='region',
field=models.CharField(blank=True, choices=[('america', 'Americas'), ('europe', 'Europe'), ('africa', 'Africa'), ('asia', 'Asia'), ('oceania', 'Oceania')], max_length=100, null=True),
),
]
| [((341, 525), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('america', 'Americas'), ('europe', 'Europe'), ('africa', 'Africa'), (\n 'asia', 'Asia'), ('oceania', 'Oceania')]", 'max_length': '(100)', 'null': '(True)'}), "(blank=True, choices=[('america', 'Americas'), ('europe',\n 'Europe'), ('africa', 'Africa'), ('asia', 'Asia'), ('oceania',\n 'Oceania')], max_length=100, null=True)\n", (357, 525), False, 'from django.db import migrations, models\n')] |
sethmlarson/workplace-search-python | noxfile.py | 0680ce7144fc0608d3d8c336315ffaf7ddc3ca2d | import nox
SOURCE_FILES = (
"setup.py",
"noxfile.py",
"elastic_workplace_search/",
"tests/",
)
@nox.session(python=["2.7", "3.4", "3.5", "3.6", "3.7", "3.8"])
def test(session):
session.install(".")
session.install("-r", "dev-requirements.txt")
session.run("pytest", "--record-mode=none", "tests/")
@nox.session()
def blacken(session):
session.install("black")
session.run("black", *SOURCE_FILES)
lint(session)
@nox.session()
def lint(session):
session.install("flake8", "black")
session.run("black", "--check", *SOURCE_FILES)
session.run("flake8", "--select=E,W,F", "--max-line-length=88", *SOURCE_FILES)
| [((116, 178), 'nox.session', 'nox.session', ([], {'python': "['2.7', '3.4', '3.5', '3.6', '3.7', '3.8']"}), "(python=['2.7', '3.4', '3.5', '3.6', '3.7', '3.8'])\n", (127, 178), False, 'import nox\n'), ((335, 348), 'nox.session', 'nox.session', ([], {}), '()\n', (346, 348), False, 'import nox\n'), ((462, 475), 'nox.session', 'nox.session', ([], {}), '()\n', (473, 475), False, 'import nox\n')] |
osheraz/komodo | komodo2_rl/src/environments/Spawner.py | d53759100ced7439dd501620f955f347087e4f63 | # !/usr/bin/env python
import rospy
import numpy as np
from gazebo_msgs.srv import SpawnModel, SpawnModelRequest, SpawnModelResponse
from copy import deepcopy
from tf.transformations import quaternion_from_euler
sdf_cube = """<?xml version="1.0" ?>
<sdf version="1.4">
<model name="MODELNAME">
<static>0</static>
<link name="link">
<inertial>
<mass>1.0</mass>
<inertia>
<ixx>0.01</ixx>
<ixy>0.0</ixy>
<ixz>0.0</ixz>
<iyy>0.01</iyy>
<iyz>0.0</iyz>
<izz>0.01</izz>
</inertia>
</inertial>
<collision name="stairs_collision0">
<pose>0 0 0 0 0 0</pose>
<geometry>
<box>
<size>SIZEXYZ</size>
</box>
</geometry>
<surface>
<bounce />
<friction>
<ode>
<mu>1.0</mu>
<mu2>1.0</mu2>
</ode>
</friction>
<contact>
<ode>
<kp>10000000.0</kp>
<kd>1.0</kd>
<min_depth>0.0</min_depth>
<max_vel>0.0</max_vel>
</ode>
</contact>
</surface>
</collision>
<visual name="stairs_visual0">
<pose>0 0 0 0 0 0</pose>
<geometry>
<box>
<size>SIZEXYZ</size>
</box>
</geometry>
<material>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Wood</name>
</script>
</material>
</visual>
<velocity_decay>
<linear>0.000000</linear>
<angular>0.000000</angular>
</velocity_decay>
<self_collide>0</self_collide>
<kinematic>0</kinematic>
<gravity>1</gravity>
</link>
</model>
</sdf>
"""
sdf_sand = """<?xml version='1.0'?>
<sdf version='1.6'>
<model name="MODELNAME">
<link name='link'>
<pose frame=''>0 0 0.01 0 0 0 </pose>
<inertial>
<mass>1</mass>
<inertia>
<ixx>0.1</ixx>
<ixy>0</ixy>
<ixz>0</ixz>
<iyy>0.1</iyy>
<iyz>0</iyz>
<izz>0.1</izz>
</inertia>
</inertial>
<visual name='visual'>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<material>
<lighting>1</lighting>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Yellow</name>
</script>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<transparency>0</transparency>
<cast_shadows>1</cast_shadows>
</visual>
<collision name='collision'>
<laser_retro>0</laser_retro>
<max_contacts>10</max_contacts>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<surface>
<friction>
<ode>
<mu>1</mu>
<mu2>1</mu2>
<fdir1>0 0 0</fdir1>
<slip1>0</slip1>
<slip2>0</slip2>
</ode>
<torsional>
<coefficient>1</coefficient>
<patch_radius>0</patch_radius>
<surface_radius>0</surface_radius>
<use_patch_radius>1</use_patch_radius>
<ode>
<slip>0</slip>
</ode>
</torsional>
</friction>
<bounce>
<restitution_coefficient>0.2</restitution_coefficient>
<threshold>1.01</threshold>
</bounce>
<contact>
<collide_without_contact>0</collide_without_contact>
<collide_without_contact_bitmask>1</collide_without_contact_bitmask>
<collide_bitmask>1</collide_bitmask>
<ode>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
<max_vel>0.01</max_vel>
<min_depth>0</min_depth>
</ode>
<bullet>
<split_impulse>1</split_impulse>
<split_impulse_penetration_threshold>-0.01</split_impulse_penetration_threshold>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
</bullet>
</contact>
</surface>
</collision>
</link>
<static>0</static>
<allow_auto_disable>1</allow_auto_disable>
</model>
</sdf>
"""
sdf_sand_box = """<sdf version='1.6'>
<model name='sand_box_osher'>
<link name='sand_box_osher'>
<pose frame=''>0 0 0 0 -0 0</pose>
<inertial>
<pose frame=''>-0.35285 -0.305 0.11027 0 -0 0</pose>
<mass>2000.892</mass>
<inertia>
<ixx>130.2204</ixx>
<ixy>-220.5538e-15</ixy>
<ixz>-4.85191</ixz>
<iyy>276.363</iyy>
<iyz>-77.9029e-15</iyz>
<izz>135.62</izz>
</inertia>
</inertial>
<collision name='sand_box_osher_collision'>
<pose frame=''>0 0 0 1.5708 -0 0</pose>
<geometry>
<mesh>
<scale>1 0.8 1</scale>
<uri>model://sand_box_osher/meshes/sand_box_osher.STL</uri>
</mesh>
</geometry>
</collision>
<visual name='sand_box_osher_visual'>
<pose frame=''>0 0 0 1.5708 -0 0</pose>
<geometry>
<mesh>
<scale>1 0.8 1</scale>
<uri>model://sand_box_osher/meshes/sand_box_osher.STL</uri>
</mesh>
</geometry>
<material>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<transparency>0.5</transparency>
</visual>
</link>
</model>
</sdf>
"""
sdf_unit_sphere = """<?xml version='1.0'?>
<sdf version='1.6'>
<model name="MODELNAME">
<link name='link'>
<pose frame=''>0 0 0 0 -0 0</pose>
<inertial>
<mass>0.1</mass>
<inertia>
<ixx>0.0000490147</ixx>
<ixy>0</ixy>
<ixz>0</ixz>
<iyy>0.000049147</iyy>
<iyz>0</iyz>
<izz>0.000049147</izz>
</inertia>
<pose frame=''>0 0 0 0 -0 0</pose>
</inertial>
<self_collide>0</self_collide>
<kinematic>0</kinematic>
<visual name='visual'>
<geometry>
<sphere>
<radius>RADIUS</radius>
</sphere>
</geometry>
<material>
<lighting>1</lighting>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Yellow</name>
</script>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<pose frame=''>0 0 0 0 -0 0</pose>
<transparency>0</transparency>
<cast_shadows>1</cast_shadows>
</visual>
<collision name='collision'>
<laser_retro>0</laser_retro>
<max_contacts>10</max_contacts>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<sphere>
<radius>RADIUS</radius>
</sphere>
</geometry>
<surface>
<friction>
<ode>
<mu>1</mu>
<mu2>1</mu2>
<fdir1>0 0 0</fdir1>
<slip1>0</slip1>
<slip2>0</slip2>
</ode>
<torsional>
<coefficient>1</coefficient>
<patch_radius>0</patch_radius>
<surface_radius>0</surface_radius>
<use_patch_radius>1</use_patch_radius>
<ode>
<slip>0</slip>
</ode>
</torsional>
</friction>
<bounce>
<restitution_coefficient>0</restitution_coefficient>
<threshold>1e+06</threshold>
</bounce>
<contact>
<collide_without_contact>0</collide_without_contact>
<collide_without_contact_bitmask>1</collide_without_contact_bitmask>
<collide_bitmask>1</collide_bitmask>
<ode>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
<max_vel>0.01</max_vel>
<min_depth>0</min_depth>
</ode>
<bullet>
<split_impulse>1</split_impulse>
<split_impulse_penetration_threshold>-0.01</split_impulse_penetration_threshold>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
</bullet>
</contact>
</surface>
</collision>
</link>
<static>0</static>
<allow_auto_disable>1</allow_auto_disable>
</model>
</sdf>
"""
sdf_sand2 = """<?xml version='1.0'?>
<sdf version='1.6'>
<model name="MODELNAME">
<link name='link'>
<pose frame=''>0 0 0.01 0 0 0 </pose>
<inertial>
<mass>1</mass>
<inertia>
<ixx>0.1</ixx>
<ixy>0</ixy>
<ixz>0</ixz>
<iyy>0.1</iyy>
<iyz>0</iyz>
<izz>0.1</izz>
</inertia>
</inertial>
<visual name='visual'>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<material>
<lighting>1</lighting>
<script>
<uri>file://media/materials/scripts/gazebo.material</uri>
<name>Gazebo/Yellow</name>
</script>
<ambient>0.3 0.25 0.1 1</ambient>
<diffuse>0.7 0.6 0.4 1</diffuse>
<specular>0.01 0.005 0.001 1</specular>
<emissive>0 0 0 1</emissive>
</material>
<transparency>0</transparency>
<cast_shadows>1</cast_shadows>
</visual>
<collision name='collision'>
<laser_retro>0</laser_retro>
<max_contacts>10</max_contacts>
<pose frame=''>0 0 0 0 -0 0</pose>
<geometry>
<mesh>
<scale>SIZEXYZ</scale>
<uri>model://sand/sand_particle.stl</uri>
</mesh>
</geometry>
<surface>
<friction>
<ode>
<mu>1</mu>
<mu2>1</mu2>
<fdir1>0 0 0</fdir1>
<slip1>0</slip1>
<slip2>0</slip2>
</ode>
<torsional>
<coefficient>1</coefficient>
<patch_radius>0</patch_radius>
<surface_radius>0</surface_radius>
<use_patch_radius>1</use_patch_radius>
<ode>
<slip>0</slip>
</ode>
</torsional>
</friction>
<bounce>
<restitution_coefficient>0</restitution_coefficient>
<threshold>1e+06</threshold>
</bounce>
<contact>
<collide_without_contact>0</collide_without_contact>
<collide_without_contact_bitmask>1</collide_without_contact_bitmask>
<collide_bitmask>1</collide_bitmask>
<ode>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
<max_vel>0.01</max_vel>
<min_depth>0</min_depth>
</ode>
<bullet>
<split_impulse>1</split_impulse>
<split_impulse_penetration_threshold>-0.01</split_impulse_penetration_threshold>
<soft_cfm>0</soft_cfm>
<soft_erp>0.2</soft_erp>
<kp>1e+13</kp>
<kd>1</kd>
</bullet>
</contact>
</surface>
</collision>
</link>
<static>0</static>
<allow_auto_disable>1</allow_auto_disable>
</model>
</sdf>
"""
class Spawner:
def __init__(self):
self.px = 0
self.py = 0
self.pz = 0
self.rr = 0
self.rp = 0
self.rz = 0
self.sx = 0
self.sy = 0
self.sz = 0
    def create_cube_request(self, modelname, px, py, pz, rr, rp, ry, sx, sy, sz):
        """Create a SpawnModelRequest with the parameters of the given cube.
        modelname: name of the model for gazebo
        px py pz: position of the cube (and its collision cube)
        rr rp ry: rotation (roll, pitch, yaw) of the model
        sx sy sz: size of the cube"""
cube = deepcopy(sdf_sand2)
# Replace size of model
size_str = str(round(sx, 3)) + " " + \
str(round(sy, 3)) + " " + str(round(sz, 3))
cube = cube.replace('SIZEXYZ', size_str)
# Replace modelname
cube = cube.replace('MODELNAME', str(modelname))
req = SpawnModelRequest()
req.model_name = modelname
req.model_xml = cube
req.initial_pose.position.x = px
req.initial_pose.position.y = py
req.initial_pose.position.z = pz
q = quaternion_from_euler(rr, rp, ry)
req.initial_pose.orientation.x = q[0]
req.initial_pose.orientation.y = q[1]
req.initial_pose.orientation.z = q[2]
req.initial_pose.orientation.w = q[3]
return req
    def create_sphere_request(self, modelname, px, py, pz, rr, rp, ry, r):
        """Create a SpawnModelRequest with the parameters of the given sphere.
        modelname: name of the model for gazebo
        px py pz: position of the sphere (and its collision sphere)
        rr rp ry: rotation (roll, pitch, yaw) of the model
        r: radius of the sphere"""
cube = deepcopy(sdf_unit_sphere)
# Replace size of model
cube = cube.replace('RADIUS', str(r))
# Replace modelname
cube = cube.replace('MODELNAME', str(modelname))
req = SpawnModelRequest()
req.model_name = modelname
req.model_xml = cube
req.initial_pose.position.x = px
req.initial_pose.position.y = py
req.initial_pose.position.z = pz
q = quaternion_from_euler(rr, rp, ry)
req.initial_pose.orientation.x = q[0]
req.initial_pose.orientation.y = q[1]
req.initial_pose.orientation.z = q[2]
req.initial_pose.orientation.w = q[3]
return req
    def create_box_request(self, modelname, px, py, pz, rr, rp, ry):
        """Create a SpawnModelRequest with the parameters of the given sand box.
        modelname: name of the model for gazebo
        px py pz: position of the box (and its collision box)
        rr rp ry: rotation (roll, pitch, yaw) of the model"""
cube = deepcopy(sdf_sand_box)
req = SpawnModelRequest()
req.model_name = modelname
req.model_xml = cube
req.initial_pose.position.x = px
req.initial_pose.position.y = py
req.initial_pose.position.z = pz
q = quaternion_from_euler(rr, rp, ry)
req.initial_pose.orientation.x = q[0]
req.initial_pose.orientation.y = q[1]
req.initial_pose.orientation.z = q[2]
req.initial_pose.orientation.w = q[3]
return req
| [((14597, 14616), 'copy.deepcopy', 'deepcopy', (['sdf_sand2'], {}), '(sdf_sand2)\n', (14605, 14616), False, 'from copy import deepcopy\n'), ((14908, 14927), 'gazebo_msgs.srv.SpawnModelRequest', 'SpawnModelRequest', ([], {}), '()\n', (14925, 14927), False, 'from gazebo_msgs.srv import SpawnModel, SpawnModelRequest, SpawnModelResponse\n'), ((15128, 15161), 'tf.transformations.quaternion_from_euler', 'quaternion_from_euler', (['rr', 'rp', 'ry'], {}), '(rr, rp, ry)\n', (15149, 15161), False, 'from tf.transformations import quaternion_from_euler\n'), ((15743, 15768), 'copy.deepcopy', 'deepcopy', (['sdf_unit_sphere'], {}), '(sdf_unit_sphere)\n', (15751, 15768), False, 'from copy import deepcopy\n'), ((15947, 15966), 'gazebo_msgs.srv.SpawnModelRequest', 'SpawnModelRequest', ([], {}), '()\n', (15964, 15966), False, 'from gazebo_msgs.srv import SpawnModel, SpawnModelRequest, SpawnModelResponse\n'), ((16167, 16200), 'tf.transformations.quaternion_from_euler', 'quaternion_from_euler', (['rr', 'rp', 'ry'], {}), '(rr, rp, ry)\n', (16188, 16200), False, 'from tf.transformations import quaternion_from_euler\n'), ((16741, 16763), 'copy.deepcopy', 'deepcopy', (['sdf_sand_box'], {}), '(sdf_sand_box)\n', (16749, 16763), False, 'from copy import deepcopy\n'), ((16779, 16798), 'gazebo_msgs.srv.SpawnModelRequest', 'SpawnModelRequest', ([], {}), '()\n', (16796, 16798), False, 'from gazebo_msgs.srv import SpawnModel, SpawnModelRequest, SpawnModelResponse\n'), ((16999, 17032), 'tf.transformations.quaternion_from_euler', 'quaternion_from_euler', (['rr', 'rp', 'ry'], {}), '(rr, rp, ry)\n', (17020, 17032), False, 'from tf.transformations import quaternion_from_euler\n')] |
tefra/xsdata-w3c-tests | output/models/ms_data/regex/re_l32_xsd/__init__.py | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | from output.models.ms_data.regex.re_l32_xsd.re_l32 import (
Regex,
Doc,
)
__all__ = [
"Regex",
"Doc",
]
| [] |
ConverJens/pipelines | sdk/python/tests/dsl/metadata_tests.py | a1d453af214ec9eebad73fb05845dd3499d60d00 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kfp.components.structures import ComponentSpec, InputSpec, OutputSpec
import unittest
class TestComponentMeta(unittest.TestCase):
def test_to_dict(self):
component_meta = ComponentSpec(name='foobar',
description='foobar example',
inputs=[InputSpec(name='input1',
description='input1 desc',
type={'GCSPath': {
'bucket_type': 'directory',
'file_type': 'csv'
}},
default='default1'
),
InputSpec(name='input2',
description='input2 desc',
type={'TFModel': {
'input_data': 'tensor',
'version': '1.8.0'
}},
default='default2'
),
InputSpec(name='input3',
description='input3 desc',
type='Integer',
default='default3'
),
],
outputs=[OutputSpec(name='output1',
description='output1 desc',
type={'Schema': {
'file_type': 'tsv'
}},
)
]
)
golden_meta = {
'name': 'foobar',
'description': 'foobar example',
'inputs': [
{
'name': 'input1',
'description': 'input1 desc',
'type': {
'GCSPath': {
'bucket_type': 'directory',
'file_type': 'csv'
}
},
'default': 'default1'
},
{
'name': 'input2',
'description': 'input2 desc',
'type': {
'TFModel': {
'input_data': 'tensor',
'version': '1.8.0'
}
},
'default': 'default2'
},
{
'name': 'input3',
'description': 'input3 desc',
'type': 'Integer',
'default': 'default3'
}
],
'outputs': [
{
'name': 'output1',
'description': 'output1 desc',
'type': {
'Schema': {
'file_type': 'tsv'
}
},
}
]
}
self.assertEqual(component_meta.to_dict(), golden_meta)
| [((898, 1042), 'kfp.components.structures.InputSpec', 'InputSpec', ([], {'name': '"""input1"""', 'description': '"""input1 desc"""', 'type': "{'GCSPath': {'bucket_type': 'directory', 'file_type': 'csv'}}", 'default': '"""default1"""'}), "(name='input1', description='input1 desc', type={'GCSPath': {\n 'bucket_type': 'directory', 'file_type': 'csv'}}, default='default1')\n", (907, 1042), False, 'from kfp.components.structures import ComponentSpec, InputSpec, OutputSpec\n'), ((1492, 1632), 'kfp.components.structures.InputSpec', 'InputSpec', ([], {'name': '"""input2"""', 'description': '"""input2 desc"""', 'type': "{'TFModel': {'input_data': 'tensor', 'version': '1.8.0'}}", 'default': '"""default2"""'}), "(name='input2', description='input2 desc', type={'TFModel': {\n 'input_data': 'tensor', 'version': '1.8.0'}}, default='default2')\n", (1501, 1632), False, 'from kfp.components.structures import ComponentSpec, InputSpec, OutputSpec\n'), ((2080, 2172), 'kfp.components.structures.InputSpec', 'InputSpec', ([], {'name': '"""input3"""', 'description': '"""input3 desc"""', 'type': '"""Integer"""', 'default': '"""default3"""'}), "(name='input3', description='input3 desc', type='Integer', default\n ='default3')\n", (2089, 2172), False, 'from kfp.components.structures import ComponentSpec, InputSpec, OutputSpec\n'), ((2488, 2586), 'kfp.components.structures.OutputSpec', 'OutputSpec', ([], {'name': '"""output1"""', 'description': '"""output1 desc"""', 'type': "{'Schema': {'file_type': 'tsv'}}"}), "(name='output1', description='output1 desc', type={'Schema': {\n 'file_type': 'tsv'}})\n", (2498, 2586), False, 'from kfp.components.structures import ComponentSpec, InputSpec, OutputSpec\n')] |
elifloresch/thirty-days-challenge | challenges/python-solutions/day-25.py | d3d41f5ce8cc4155ebf9cf52c1ece43c15a1e2af | import math
def is_prime_number(number):
if number < 2:
return False
if number == 2 or number == 3:
return True
if number % 2 == 0 or number % 3 == 0:
return False
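    # Every prime greater than 3 has the form 6k - 1 or 6k + 1, so it is enough
    # to test the divisors 5, 7, 11, 13, ... up to (and including) sqrt(number).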
number_sqrt = math.sqrt(number)
    int_number_sqrt = int(number_sqrt) + 2
for d in range(6, int_number_sqrt, 6):
if number % (d - 1) == 0 or number % (d + 1) == 0:
return False
return True
test_cases = int(input())
numbers = []
for test_case in range(test_cases):
numbers.append(int(input()))
for n in numbers:
if is_prime_number(n):
print('Prime')
else:
print('Not prime')
| [((223, 240), 'math.sqrt', 'math.sqrt', (['number'], {}), '(number)\n', (232, 240), False, 'import math\n')] |
rnixx/garden.cefpython | examples/path_config.py | 91d5f69e9983a28ce1971637d7d2f0051c456882 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Minimal example of CEFBrowser widget use. There are no controls here
(back / forward / reload) whatsoever; just a Kivy app displaying the
chromium webview.
In this example we demonstrate how the cache path of CEF can be set.
"""
import os
from kivy.app import App
from kivy.garden.cefpython import CEFBrowser
from kivy.logger import Logger
if __name__ == '__main__':
class SimpleBrowserApp(App):
def build(self):
# Set runtime data paths
CEFBrowser.set_data_path(os.path.realpath("./cef_data"))
# CEFBrowser.set_caches_path(os.path.realpath("./cef_caches"))
# CEFBrowser.set_cookies_path(os.path.realpath("./cef_cookies"))
# CEFBrowser.set_logs_path(os.path.realpath("./cef_logs"))
            Logger.info("Example: The CEF paths have been set to")
Logger.info("- Cache %s", CEFBrowser._caches_path)
Logger.info("- Cookies %s", CEFBrowser._cookies_path)
Logger.info("- Logs %s", CEFBrowser._logs_path)
# Create CEFBrowser instance. Go to test-site.
cb = CEFBrowser(url="http://jegger.ch/datapool/app/test.html")
return cb
SimpleBrowserApp().run()
| [((833, 888), 'kivy.logger.Logger.info', 'Logger.info', (['"""Example: The CEF pathes have been set to"""'], {}), "('Example: The CEF pathes have been set to')\n", (844, 888), False, 'from kivy.logger import Logger\n'), ((901, 951), 'kivy.logger.Logger.info', 'Logger.info', (['"""- Cache %s"""', 'CEFBrowser._caches_path'], {}), "('- Cache %s', CEFBrowser._caches_path)\n", (912, 951), False, 'from kivy.logger import Logger\n'), ((964, 1017), 'kivy.logger.Logger.info', 'Logger.info', (['"""- Cookies %s"""', 'CEFBrowser._cookies_path'], {}), "('- Cookies %s', CEFBrowser._cookies_path)\n", (975, 1017), False, 'from kivy.logger import Logger\n'), ((1030, 1077), 'kivy.logger.Logger.info', 'Logger.info', (['"""- Logs %s"""', 'CEFBrowser._logs_path'], {}), "('- Logs %s', CEFBrowser._logs_path)\n", (1041, 1077), False, 'from kivy.logger import Logger\n'), ((1155, 1212), 'kivy.garden.cefpython.CEFBrowser', 'CEFBrowser', ([], {'url': '"""http://jegger.ch/datapool/app/test.html"""'}), "(url='http://jegger.ch/datapool/app/test.html')\n", (1165, 1212), False, 'from kivy.garden.cefpython import CEFBrowser\n'), ((566, 596), 'os.path.realpath', 'os.path.realpath', (['"""./cef_data"""'], {}), "('./cef_data')\n", (582, 596), False, 'import os\n')] |
laserbat/random-projects | simple-systems/and_xor_shift.py | 925f94f80299df6f16e91975e89f5fff7df20005 | #!/usr/bin/python3
# A dynamical system x_(n+1) = F(x_n), where F is a composition of bitwise
# XORs, ANDs and left shifts, can be Turing complete.
# Proof by simulation (rule 110).
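# Note: x ^ y ^ (x & y) == x | y, so the update below is equivalent to
# (a | (a << 1)) ^ (a & (a << 1) & (a << 2)): each bit of the next state is an
# elementary-CA rule applied to three adjacent bits of a.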
a = 1
while a:
print(bin(a))
a = a ^ (a << 1) ^ (a & (a << 1)) ^ (a & (a << 1) & (a << 2))
| [] |
Gauddel/trinity | trinity/protocol/common/peer_pool_event_bus.py | 0b12943ac36f4090abc22fc965e9e9a4f42c6f35 | from abc import (
abstractmethod,
)
from typing import (
Any,
Callable,
cast,
FrozenSet,
Generic,
Type,
TypeVar,
)
from cancel_token import (
CancelToken,
)
from p2p.exceptions import (
PeerConnectionLost,
)
from p2p.kademlia import Node
from p2p.peer import (
BasePeer,
PeerSubscriber,
)
from p2p.peer_pool import (
BasePeerPool,
)
from p2p.protocol import (
Command,
PayloadType,
)
from p2p.service import (
BaseService,
)
from trinity.endpoint import (
TrinityEventBusEndpoint,
)
from .events import (
ConnectToNodeCommand,
DisconnectPeerEvent,
HasRemoteEvent,
PeerCountRequest,
PeerCountResponse,
)
TPeer = TypeVar('TPeer', bound=BasePeer)
TStreamEvent = TypeVar('TStreamEvent', bound=HasRemoteEvent)
class PeerPoolEventServer(BaseService, PeerSubscriber, Generic[TPeer]):
"""
Base class to create a bridge between the ``PeerPool`` and the event bus so that peer
messages become available to external processes (e.g. isolated plugins). In the opposite
direction, other processes can also retrieve information or execute actions on the peer pool by
sending specific events through the event bus that the ``PeerPoolEventServer`` answers.
This class bridges all common APIs but protocol specific communication can be enabled through
subclasses that add more handlers.
"""
msg_queue_maxsize: int = 2000
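    # Protocol-specific subclasses typically override ``subscription_msg_types``
    # and ``handle_native_peer_message``, and can register further event-bus
    # handlers via ``run_daemon_event`` (see ``DefaultPeerPoolEventServer`` below
    # for the minimal case).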
subscription_msg_types: FrozenSet[Type[Command]] = frozenset({})
def __init__(self,
event_bus: TrinityEventBusEndpoint,
peer_pool: BasePeerPool,
token: CancelToken = None) -> None:
super().__init__(token)
self.peer_pool = peer_pool
self.event_bus = event_bus
async def _run(self) -> None:
self.logger.debug("Running %s", self.__class__.__name__)
self.run_daemon_event(
DisconnectPeerEvent,
lambda peer, event: peer.disconnect_nowait(event.reason)
)
self.run_daemon_task(self.handle_peer_count_requests())
self.run_daemon_task(self.handle_connect_to_node_requests())
self.run_daemon_task(self.handle_native_peer_messages())
await self.cancellation()
def run_daemon_event(self,
event_type: Type[TStreamEvent],
event_handler_fn: Callable[[TPeer, TStreamEvent], Any]) -> None:
"""
Register a handler to be run every time that an event of type ``event_type`` appears.
"""
self.run_daemon_task(self.handle_stream(event_type, event_handler_fn))
@abstractmethod
async def handle_native_peer_message(self,
remote: Node,
cmd: Command,
msg: PayloadType) -> None:
"""
Process every native peer message. Subclasses should overwrite this to forward specific
peer messages on the event bus. The handler is called for every message that is defined in
``self.subscription_msg_types``.
"""
pass
def get_peer(self, remote: Node) -> TPeer:
"""
Look up and return a peer from the ``PeerPool`` that matches the given node.
Raise ``PeerConnectionLost`` if the peer is no longer in the pool or is winding down.
"""
try:
peer = self.peer_pool.connected_nodes[remote]
except KeyError:
self.logger.debug("Peer with remote %s does not exist in the pool anymore", remote)
raise PeerConnectionLost()
else:
if not peer.is_operational:
self.logger.debug("Peer %s is not operational when selecting from pool", peer)
raise PeerConnectionLost()
else:
return cast(TPeer, peer)
async def handle_connect_to_node_requests(self) -> None:
async for command in self.wait_iter(self.event_bus.stream(ConnectToNodeCommand)):
self.logger.debug('Received request to connect to %s', command.remote)
self.run_task(self.peer_pool.connect_to_node(command.remote))
async def handle_peer_count_requests(self) -> None:
async for req in self.wait_iter(self.event_bus.stream(PeerCountRequest)):
await self.event_bus.broadcast(
PeerCountResponse(len(self.peer_pool)),
req.broadcast_config()
)
async def handle_stream(self,
event_type: Type[TStreamEvent],
event_handler_fn: Callable[[TPeer, TStreamEvent], Any]) -> None:
async for event in self.wait_iter(self.event_bus.stream(event_type)):
try:
peer = self.get_peer(event.remote)
except PeerConnectionLost:
pass
else:
event_handler_fn(peer, event)
async def handle_native_peer_messages(self) -> None:
with self.subscribe(self.peer_pool):
while self.is_operational:
peer, cmd, msg = await self.wait(self.msg_queue.get())
await self.handle_native_peer_message(peer.remote, cmd, msg)
class DefaultPeerPoolEventServer(PeerPoolEventServer[BasePeer]):
async def handle_native_peer_message(self,
remote: Node,
cmd: Command,
msg: PayloadType) -> None:
pass
| [((705, 737), 'typing.TypeVar', 'TypeVar', (['"""TPeer"""'], {'bound': 'BasePeer'}), "('TPeer', bound=BasePeer)\n", (712, 737), False, 'from typing import Any, Callable, cast, FrozenSet, Generic, Type, TypeVar\n'), ((753, 798), 'typing.TypeVar', 'TypeVar', (['"""TStreamEvent"""'], {'bound': 'HasRemoteEvent'}), "('TStreamEvent', bound=HasRemoteEvent)\n", (760, 798), False, 'from typing import Any, Callable, cast, FrozenSet, Generic, Type, TypeVar\n'), ((3615, 3635), 'p2p.exceptions.PeerConnectionLost', 'PeerConnectionLost', ([], {}), '()\n', (3633, 3635), False, 'from p2p.exceptions import PeerConnectionLost\n'), ((3807, 3827), 'p2p.exceptions.PeerConnectionLost', 'PeerConnectionLost', ([], {}), '()\n', (3825, 3827), False, 'from p2p.exceptions import PeerConnectionLost\n'), ((3869, 3886), 'typing.cast', 'cast', (['TPeer', 'peer'], {}), '(TPeer, peer)\n', (3873, 3886), False, 'from typing import Any, Callable, cast, FrozenSet, Generic, Type, TypeVar\n')] |
annagitel/ocs-ci | tests/e2e/performance/csi_tests/test_pvc_creation_deletion_performance.py | 284fe04aeb6e3d6cb70c99e65fec8ff1b1ea1dd5 | """
Test to verify performance of PVC creation and deletion
for RBD, CephFS and RBD-Thick interfaces
"""
import time
import logging
import datetime
import pytest
import ocs_ci.ocs.exceptions as ex
import threading
import statistics
from concurrent.futures import ThreadPoolExecutor
from uuid import uuid4
from ocs_ci.framework.testlib import performance
from ocs_ci.ocs.perftests import PASTest
from ocs_ci.helpers import helpers, performance_lib
from ocs_ci.ocs import constants
from ocs_ci.helpers.helpers import get_full_test_logs_path
from ocs_ci.ocs.perfresult import PerfResult
from ocs_ci.framework import config
log = logging.getLogger(__name__)
class ResultsAnalyse(PerfResult):
"""
This class generates results for all tests as one unit
and saves them to an elastic search server on the cluster
"""
def __init__(self, uuid, crd, full_log_path):
"""
        Initialize the object by reading some of the data from the CRD file and
        by connecting to the ES server and reading all results from it.
Args:
uuid (str): the unique uid of the test
            crd (dict): dictionary with test parameters - the test yaml file
                that is modified in the test itself.
full_log_path (str): the path of the results files to be found
"""
super(ResultsAnalyse, self).__init__(uuid, crd)
self.new_index = "pvc_create_delete_fullres"
self.full_log_path = full_log_path
# make sure we have connection to the elastic search server
self.es_connect()
@performance
class TestPVCCreationDeletionPerformance(PASTest):
"""
Test to verify performance of PVC creation and deletion
"""
def setup(self):
"""
Setting up test parameters
"""
log.info("Starting the test setup")
super(TestPVCCreationDeletionPerformance, self).setup()
self.benchmark_name = "PVC_Creation-Deletion"
self.uuid = uuid4().hex
self.crd_data = {
"spec": {
"test_user": "Homer simpson",
"clustername": "test_cluster",
"elasticsearch": {
"server": config.PERF.get("production_es_server"),
"port": config.PERF.get("production_es_port"),
"url": f"http://{config.PERF.get('production_es_server')}:{config.PERF.get('production_es_port')}",
},
}
}
if self.dev_mode:
self.crd_data["spec"]["elasticsearch"] = {
"server": config.PERF.get("dev_es_server"),
"port": config.PERF.get("dev_es_port"),
"url": f"http://{config.PERF.get('dev_es_server')}:{config.PERF.get('dev_es_port')}",
}
@pytest.fixture()
def base_setup(self, interface_type, storageclass_factory, pod_factory):
"""
A setup phase for the test
Args:
interface_type: A fixture to iterate over ceph interfaces
storageclass_factory: A fixture to create everything needed for a
storageclass
pod_factory: A fixture to create new pod
"""
self.interface = interface_type
if self.interface == constants.CEPHBLOCKPOOL_THICK:
self.sc_obj = storageclass_factory(
interface=constants.CEPHBLOCKPOOL,
new_rbd_pool=True,
rbd_thick_provision=True,
)
else:
self.sc_obj = storageclass_factory(self.interface)
self.pod_factory = pod_factory
@pytest.fixture()
def namespace(self, project_factory):
"""
Create a new project
"""
proj_obj = project_factory()
self.namespace = proj_obj.namespace
def init_full_results(self, full_results):
"""
Initialize the full results object which will send to the ES server
Args:
full_results (obj): an empty FIOResultsAnalyse object
Returns:
FIOResultsAnalyse (obj): the input object fill with data
"""
for key in self.environment:
full_results.add_key(key, self.environment[key])
full_results.add_key("storageclass", self.sc)
full_results.add_key("index", full_results.new_index)
return full_results
@pytest.mark.parametrize(
argnames=["interface_type", "pvc_size"],
argvalues=[
pytest.param(
*[constants.CEPHBLOCKPOOL, "5Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL, "15Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL, "25Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM, "5Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM, "15Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM, "25Gi"],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK, "5Gi"],
marks=[pytest.mark.performance_extended],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK, "15Gi"],
marks=[pytest.mark.performance_extended],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK, "25Gi"],
marks=[pytest.mark.performance_extended],
),
],
)
@pytest.mark.usefixtures(base_setup.__name__)
def test_pvc_creation_deletion_measurement_performance(
self, teardown_factory, pvc_size
):
"""
Measuring PVC creation and deletion times for pvc samples
Verifying that those times are within the required limits
"""
# Getting the full path for the test logs
self.full_log_path = get_full_test_logs_path(cname=self)
if self.interface == constants.CEPHBLOCKPOOL:
self.sc = "RBD"
elif self.interface == constants.CEPHFILESYSTEM:
self.sc = "CephFS"
elif self.interface == constants.CEPHBLOCKPOOL_THICK:
self.sc = "RBD-Thick"
self.full_log_path += f"-{self.sc}-{pvc_size}"
log.info(f"Logs file path name is : {self.full_log_path}")
self.start_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())
self.get_env_info()
# Initialize the results doc file.
self.full_results = self.init_full_results(
ResultsAnalyse(self.uuid, self.crd_data, self.full_log_path)
)
self.full_results.add_key("pvc_size", pvc_size)
num_of_samples = 5
accepted_creation_time = (
600 if self.interface == constants.CEPHBLOCKPOOL_THICK else 1
)
# accepted deletion time for RBD is 1 sec, for CephFS is 2 secs and for RBD Thick is 5 secs
if self.interface == constants.CEPHFILESYSTEM:
accepted_deletion_time = 2
elif self.interface == constants.CEPHBLOCKPOOL:
accepted_deletion_time = 1
else:
accepted_deletion_time = 5
self.full_results.add_key("samples", num_of_samples)
accepted_creation_deviation_percent = 50
accepted_deletion_deviation_percent = 50
creation_time_measures = []
deletion_time_measures = []
msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."
for i in range(num_of_samples):
logging.info(f"{msg_prefix} Start creating PVC number {i + 1}.")
start_time = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ")
pvc_obj = helpers.create_pvc(sc_name=self.sc_obj.name, size=pvc_size)
timeout = 600 if self.interface == constants.CEPHBLOCKPOOL_THICK else 60
helpers.wait_for_resource_state(
pvc_obj, constants.STATUS_BOUND, timeout=timeout
)
pvc_obj.reload()
creation_time = performance_lib.measure_pvc_creation_time(
self.interface, pvc_obj.name, start_time
)
logging.info(
f"{msg_prefix} PVC number {i + 1} was created in {creation_time} seconds."
)
if creation_time > accepted_creation_time:
raise ex.PerformanceException(
f"{msg_prefix} PVC creation time is {creation_time} and is greater than "
f"{accepted_creation_time} seconds."
)
creation_time_measures.append(creation_time)
pv_name = pvc_obj.backed_pv
pvc_reclaim_policy = pvc_obj.reclaim_policy
pod_obj = self.write_file_on_pvc(pvc_obj)
pod_obj.delete(wait=True)
teardown_factory(pvc_obj)
logging.info(f"{msg_prefix} Start deleting PVC number {i + 1}")
if pvc_reclaim_policy == constants.RECLAIM_POLICY_DELETE:
pvc_obj.delete()
pvc_obj.ocp.wait_for_delete(pvc_obj.name)
helpers.validate_pv_delete(pvc_obj.backed_pv)
deletion_time = helpers.measure_pvc_deletion_time(
self.interface, pv_name
)
logging.info(
f"{msg_prefix} PVC number {i + 1} was deleted in {deletion_time} seconds."
)
if deletion_time > accepted_deletion_time:
raise ex.PerformanceException(
f"{msg_prefix} PVC deletion time is {deletion_time} and is greater than "
f"{accepted_deletion_time} seconds."
)
deletion_time_measures.append(deletion_time)
else:
logging.info(
f"Reclaim policy of the PVC {pvc_obj.name} is not Delete;"
f" therefore not measuring deletion time for this PVC."
)
creation_average = self.process_time_measurements(
"creation",
creation_time_measures,
accepted_creation_deviation_percent,
msg_prefix,
)
self.full_results.add_key("creation-time", creation_average)
deletion_average = self.process_time_measurements(
"deletion",
deletion_time_measures,
accepted_deletion_deviation_percent,
msg_prefix,
)
self.full_results.add_key("deletion-time", deletion_average)
self.full_results.all_results["creation"] = creation_time_measures
self.full_results.all_results["deletion"] = deletion_time_measures
self.end_time = time.strftime("%Y-%m-%dT%H:%M:%SGMT", time.gmtime())
self.full_results.add_key(
"test_time", {"start": self.start_time, "end": self.end_time}
)
self.full_results.es_write()
log.info(f"The Result can be found at : {self.full_results.results_link()}")
def process_time_measurements(
self, action_name, time_measures, accepted_deviation_percent, msg_prefix
):
"""
        Analyses the given time measurements. For the RBD-Thick interface, if the standard deviation
        of these times is bigger than the provided accepted deviation percent, an error is logged.
Args:
action_name (str): Name of the action for which these measurements were collected; used for the logging
time_measures (list of floats): A list of time measurements
accepted_deviation_percent (int): Accepted deviation percent to which computed standard deviation may be
compared
msg_prefix (str) : A string for comprehensive logging
Returns:
(float) The average value of the provided time measurements
"""
average = statistics.mean(time_measures)
log.info(
f"{msg_prefix} The average {action_name} time for the sampled {len(time_measures)} "
f"PVCs is {average} seconds."
)
if self.interface == constants.CEPHBLOCKPOOL_THICK:
st_deviation = statistics.stdev(time_measures)
st_deviation_percent = st_deviation / average * 100.0
if st_deviation_percent > accepted_deviation_percent:
log.error(
f"{msg_prefix} The standard deviation percent for {action_name} of {len(time_measures)} sampled "
f"PVCs is {st_deviation_percent}% which is bigger than accepted {accepted_deviation_percent}."
)
else:
log.info(
f"{msg_prefix} The standard deviation percent for {action_name} of {len(time_measures)} sampled "
f"PVCs is {st_deviation_percent}% and is within the accepted range."
)
self.full_results.add_key(
f"{action_name}_deviation_pct", st_deviation_percent
)
return average
def write_file_on_pvc(self, pvc_obj, filesize=1):
"""
Writes a file on given PVC
Args:
pvc_obj: PVC object to write a file on
filesize: size of file to write (in GB - default is 1GB)
Returns:
Pod on this pvc on which the file was written
"""
pod_obj = self.pod_factory(
interface=self.interface, pvc=pvc_obj, status=constants.STATUS_RUNNING
)
# filesize to be written is always 1 GB
file_size = f"{int(filesize * 1024)}M"
log.info(f"Starting IO on the POD {pod_obj.name}")
# Going to run only write IO
pod_obj.fillup_fs(size=file_size, fio_filename=f"{pod_obj.name}_file")
# Wait for the fio to finish
fio_result = pod_obj.get_fio_results()
err_count = fio_result.get("jobs")[0].get("error")
assert (
err_count == 0
), f"IO error on pod {pod_obj.name}. FIO result: {fio_result}"
log.info("IO on the PVC has finished")
return pod_obj
@pytest.mark.parametrize(
argnames=["interface_type"],
argvalues=[
pytest.param(
*[constants.CEPHBLOCKPOOL],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHFILESYSTEM],
marks=[pytest.mark.performance],
),
pytest.param(
*[constants.CEPHBLOCKPOOL_THICK],
marks=[pytest.mark.performance_extended],
),
],
)
@pytest.mark.usefixtures(base_setup.__name__)
@pytest.mark.usefixtures(namespace.__name__)
@pytest.mark.polarion_id("OCS-2618")
def test_multiple_pvc_deletion_measurement_performance(self, teardown_factory):
"""
Measuring PVC deletion time of 120 PVCs in 180 seconds
Args:
teardown_factory: A fixture used when we want a new resource that was created during the tests
to be removed in the teardown phase.
Returns:
"""
number_of_pvcs = 120
pvc_size = "1Gi"
msg_prefix = f"Interface: {self.interface}, PVC size: {pvc_size}."
log.info(f"{msg_prefix} Start creating new 120 PVCs")
pvc_objs, _ = helpers.create_multiple_pvcs(
sc_name=self.sc_obj.name,
namespace=self.namespace,
number_of_pvc=number_of_pvcs,
size=pvc_size,
burst=True,
)
for pvc_obj in pvc_objs:
pvc_obj.reload()
teardown_factory(pvc_obj)
timeout = 600 if self.interface == constants.CEPHBLOCKPOOL_THICK else 60
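        # Wait in parallel (5 workers) for every PVC in the bulk to reach Bound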
with ThreadPoolExecutor(max_workers=5) as executor:
for pvc_obj in pvc_objs:
executor.submit(
helpers.wait_for_resource_state,
pvc_obj,
constants.STATUS_BOUND,
timeout=timeout,
)
executor.submit(pvc_obj.reload)
pod_objs = []
for pvc_obj in pvc_objs:
pod_obj = self.write_file_on_pvc(pvc_obj, 0.3)
pod_objs.append(pod_obj)
# Get pvc_name, require pvc_name to fetch deletion time data from log
threads = list()
for pvc_obj in pvc_objs:
process = threading.Thread(target=pvc_obj.reload)
process.start()
threads.append(process)
for process in threads:
process.join()
pvc_name_list, pv_name_list = ([] for i in range(2))
threads = list()
for pvc_obj in pvc_objs:
process1 = threading.Thread(target=pvc_name_list.append(pvc_obj.name))
process2 = threading.Thread(target=pv_name_list.append(pvc_obj.backed_pv))
process1.start()
process2.start()
threads.append(process1)
threads.append(process2)
for process in threads:
process.join()
log.info(f"{msg_prefix} Preparing to delete 120 PVC")
# Delete PVC
for pvc_obj, pod_obj in zip(pvc_objs, pod_objs):
pod_obj.delete(wait=True)
pvc_obj.delete()
pvc_obj.ocp.wait_for_delete(pvc_obj.name)
# Get PVC deletion time
pvc_deletion_time = helpers.measure_pv_deletion_time_bulk(
interface=self.interface, pv_name_list=pv_name_list
)
log.info(
f"{msg_prefix} {number_of_pvcs} bulk deletion time is {pvc_deletion_time}"
)
# accepted deletion time is 2 secs for each PVC
accepted_pvc_deletion_time = number_of_pvcs * 2
for del_time in pvc_deletion_time.values():
if del_time > accepted_pvc_deletion_time:
raise ex.PerformanceException(
f"{msg_prefix} {number_of_pvcs} PVCs deletion time is {pvc_deletion_time.values()} and is "
f"greater than {accepted_pvc_deletion_time} seconds"
)
logging.info(f"{msg_prefix} {number_of_pvcs} PVCs deletion times are:")
for name, a_time in pvc_deletion_time.items():
logging.info(f"{name} deletion time is: {a_time} seconds")
| [((629, 656), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (646, 656), False, 'import logging\n'), ((2792, 2808), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (2806, 2808), False, 'import pytest\n'), ((3602, 3618), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (3616, 3618), False, 'import pytest\n'), ((5799, 5843), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['base_setup.__name__'], {}), '(base_setup.__name__)\n', (5822, 5843), False, 'import pytest\n'), ((14825, 14869), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['base_setup.__name__'], {}), '(base_setup.__name__)\n', (14848, 14869), False, 'import pytest\n'), ((14875, 14918), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['namespace.__name__'], {}), '(namespace.__name__)\n', (14898, 14918), False, 'import pytest\n'), ((14924, 14959), 'pytest.mark.polarion_id', 'pytest.mark.polarion_id', (['"""OCS-2618"""'], {}), "('OCS-2618')\n", (14947, 14959), False, 'import pytest\n'), ((6188, 6223), 'ocs_ci.helpers.helpers.get_full_test_logs_path', 'get_full_test_logs_path', ([], {'cname': 'self'}), '(cname=self)\n', (6211, 6223), False, 'from ocs_ci.helpers.helpers import get_full_test_logs_path\n'), ((12100, 12130), 'statistics.mean', 'statistics.mean', (['time_measures'], {}), '(time_measures)\n', (12115, 12130), False, 'import statistics\n'), ((15554, 15696), 'ocs_ci.helpers.helpers.create_multiple_pvcs', 'helpers.create_multiple_pvcs', ([], {'sc_name': 'self.sc_obj.name', 'namespace': 'self.namespace', 'number_of_pvc': 'number_of_pvcs', 'size': 'pvc_size', 'burst': '(True)'}), '(sc_name=self.sc_obj.name, namespace=self.\n namespace, number_of_pvc=number_of_pvcs, size=pvc_size, burst=True)\n', (15582, 15696), False, 'from ocs_ci.helpers import helpers, performance_lib\n'), ((17583, 17677), 'ocs_ci.helpers.helpers.measure_pv_deletion_time_bulk', 'helpers.measure_pv_deletion_time_bulk', ([], {'interface': 'self.interface', 'pv_name_list': 'pv_name_list'}), '(interface=self.interface,\n pv_name_list=pv_name_list)\n', (17620, 17677), False, 'from ocs_ci.helpers import helpers, performance_lib\n'), ((18290, 18361), 'logging.info', 'logging.info', (['f"""{msg_prefix} {number_of_pvcs} PVCs deletion times are:"""'], {}), "(f'{msg_prefix} {number_of_pvcs} PVCs deletion times are:')\n", (18302, 18361), False, 'import logging\n'), ((1984, 1991), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (1989, 1991), False, 'from uuid import uuid4\n'), ((6677, 6690), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (6688, 6690), False, 'import time\n'), ((7807, 7871), 'logging.info', 'logging.info', (['f"""{msg_prefix} Start creating PVC number {i + 1}."""'], {}), "(f'{msg_prefix} Start creating PVC number {i + 1}.')\n", (7819, 7871), False, 'import logging\n'), ((7977, 8036), 'ocs_ci.helpers.helpers.create_pvc', 'helpers.create_pvc', ([], {'sc_name': 'self.sc_obj.name', 'size': 'pvc_size'}), '(sc_name=self.sc_obj.name, size=pvc_size)\n', (7995, 8036), False, 'from ocs_ci.helpers import helpers, performance_lib\n'), ((8134, 8220), 'ocs_ci.helpers.helpers.wait_for_resource_state', 'helpers.wait_for_resource_state', (['pvc_obj', 'constants.STATUS_BOUND'], {'timeout': 'timeout'}), '(pvc_obj, constants.STATUS_BOUND, timeout=\n timeout)\n', (8165, 8220), False, 'from ocs_ci.helpers import helpers, performance_lib\n'), ((8304, 8391), 'ocs_ci.helpers.performance_lib.measure_pvc_creation_time', 'performance_lib.measure_pvc_creation_time', (['self.interface', 'pvc_obj.name', 'start_time'], {}), 
'(self.interface, pvc_obj.name,\n start_time)\n', (8345, 8391), False, 'from ocs_ci.helpers import helpers, performance_lib\n'), ((8431, 8524), 'logging.info', 'logging.info', (['f"""{msg_prefix} PVC number {i + 1} was created in {creation_time} seconds."""'], {}), "(\n f'{msg_prefix} PVC number {i + 1} was created in {creation_time} seconds.')\n", (8443, 8524), False, 'import logging\n'), ((9118, 9181), 'logging.info', 'logging.info', (['f"""{msg_prefix} Start deleting PVC number {i + 1}"""'], {}), "(f'{msg_prefix} Start deleting PVC number {i + 1}')\n", (9130, 9181), False, 'import logging\n'), ((11005, 11018), 'time.gmtime', 'time.gmtime', ([], {}), '()\n', (11016, 11018), False, 'import time\n'), ((12386, 12417), 'statistics.stdev', 'statistics.stdev', (['time_measures'], {}), '(time_measures)\n', (12402, 12417), False, 'import statistics\n'), ((15959, 15992), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': '(5)'}), '(max_workers=5)\n', (15977, 15992), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((16616, 16655), 'threading.Thread', 'threading.Thread', ([], {'target': 'pvc_obj.reload'}), '(target=pvc_obj.reload)\n', (16632, 16655), False, 'import threading\n'), ((18429, 18487), 'logging.info', 'logging.info', (['f"""{name} deletion time is: {a_time} seconds"""'], {}), "(f'{name} deletion time is: {a_time} seconds')\n", (18441, 18487), False, 'import logging\n'), ((2580, 2612), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""dev_es_server"""'], {}), "('dev_es_server')\n", (2595, 2612), False, 'from ocs_ci.framework import config\n'), ((2638, 2668), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""dev_es_port"""'], {}), "('dev_es_port')\n", (2653, 2668), False, 'from ocs_ci.framework import config\n'), ((8627, 8768), 'ocs_ci.ocs.exceptions.PerformanceException', 'ex.PerformanceException', (['f"""{msg_prefix} PVC creation time is {creation_time} and is greater than {accepted_creation_time} seconds."""'], {}), "(\n f'{msg_prefix} PVC creation time is {creation_time} and is greater than {accepted_creation_time} seconds.'\n )\n", (8650, 8768), True, 'import ocs_ci.ocs.exceptions as ex\n'), ((9359, 9404), 'ocs_ci.helpers.helpers.validate_pv_delete', 'helpers.validate_pv_delete', (['pvc_obj.backed_pv'], {}), '(pvc_obj.backed_pv)\n', (9385, 9404), False, 'from ocs_ci.helpers import helpers, performance_lib\n'), ((9437, 9495), 'ocs_ci.helpers.helpers.measure_pvc_deletion_time', 'helpers.measure_pvc_deletion_time', (['self.interface', 'pv_name'], {}), '(self.interface, pv_name)\n', (9470, 9495), False, 'from ocs_ci.helpers import helpers, performance_lib\n'), ((9550, 9643), 'logging.info', 'logging.info', (['f"""{msg_prefix} PVC number {i + 1} was deleted in {deletion_time} seconds."""'], {}), "(\n f'{msg_prefix} PVC number {i + 1} was deleted in {deletion_time} seconds.')\n", (9562, 9643), False, 'import logging\n'), ((10063, 10197), 'logging.info', 'logging.info', (['f"""Reclaim policy of the PVC {pvc_obj.name} is not Delete; therefore not measuring deletion time for this PVC."""'], {}), "(\n f'Reclaim policy of the PVC {pvc_obj.name} is not Delete; therefore not measuring deletion time for this PVC.'\n )\n", (10075, 10197), False, 'import logging\n'), ((4466, 4551), 'pytest.param', 'pytest.param', (["*[constants.CEPHBLOCKPOOL, '5Gi']"], {'marks': '[pytest.mark.performance]'}), "(*[constants.CEPHBLOCKPOOL, '5Gi'], marks=[pytest.mark.performance]\n )\n", (4478, 4551), False, 'import pytest\n'), ((4607, 4693), 
'pytest.param', 'pytest.param', (["*[constants.CEPHBLOCKPOOL, '15Gi']"], {'marks': '[pytest.mark.performance]'}), "(*[constants.CEPHBLOCKPOOL, '15Gi'], marks=[pytest.mark.\n performance])\n", (4619, 4693), False, 'import pytest\n'), ((4749, 4835), 'pytest.param', 'pytest.param', (["*[constants.CEPHBLOCKPOOL, '25Gi']"], {'marks': '[pytest.mark.performance]'}), "(*[constants.CEPHBLOCKPOOL, '25Gi'], marks=[pytest.mark.\n performance])\n", (4761, 4835), False, 'import pytest\n'), ((4891, 4977), 'pytest.param', 'pytest.param', (["*[constants.CEPHFILESYSTEM, '5Gi']"], {'marks': '[pytest.mark.performance]'}), "(*[constants.CEPHFILESYSTEM, '5Gi'], marks=[pytest.mark.\n performance])\n", (4903, 4977), False, 'import pytest\n'), ((5033, 5120), 'pytest.param', 'pytest.param', (["*[constants.CEPHFILESYSTEM, '15Gi']"], {'marks': '[pytest.mark.performance]'}), "(*[constants.CEPHFILESYSTEM, '15Gi'], marks=[pytest.mark.\n performance])\n", (5045, 5120), False, 'import pytest\n'), ((5176, 5263), 'pytest.param', 'pytest.param', (["*[constants.CEPHFILESYSTEM, '25Gi']"], {'marks': '[pytest.mark.performance]'}), "(*[constants.CEPHFILESYSTEM, '25Gi'], marks=[pytest.mark.\n performance])\n", (5188, 5263), False, 'import pytest\n'), ((5319, 5419), 'pytest.param', 'pytest.param', (["*[constants.CEPHBLOCKPOOL_THICK, '5Gi']"], {'marks': '[pytest.mark.performance_extended]'}), "(*[constants.CEPHBLOCKPOOL_THICK, '5Gi'], marks=[pytest.mark.\n performance_extended])\n", (5331, 5419), False, 'import pytest\n'), ((5475, 5576), 'pytest.param', 'pytest.param', (["*[constants.CEPHBLOCKPOOL_THICK, '15Gi']"], {'marks': '[pytest.mark.performance_extended]'}), "(*[constants.CEPHBLOCKPOOL_THICK, '15Gi'], marks=[pytest.mark.\n performance_extended])\n", (5487, 5576), False, 'import pytest\n'), ((5632, 5733), 'pytest.param', 'pytest.param', (["*[constants.CEPHBLOCKPOOL_THICK, '25Gi']"], {'marks': '[pytest.mark.performance_extended]'}), "(*[constants.CEPHBLOCKPOOL_THICK, '25Gi'], marks=[pytest.mark.\n performance_extended])\n", (5644, 5733), False, 'import pytest\n'), ((14397, 14470), 'pytest.param', 'pytest.param', (['*[constants.CEPHBLOCKPOOL]'], {'marks': '[pytest.mark.performance]'}), '(*[constants.CEPHBLOCKPOOL], marks=[pytest.mark.performance])\n', (14409, 14470), False, 'import pytest\n'), ((14531, 14605), 'pytest.param', 'pytest.param', (['*[constants.CEPHFILESYSTEM]'], {'marks': '[pytest.mark.performance]'}), '(*[constants.CEPHFILESYSTEM], marks=[pytest.mark.performance])\n', (14543, 14605), False, 'import pytest\n'), ((14666, 14759), 'pytest.param', 'pytest.param', (['*[constants.CEPHBLOCKPOOL_THICK]'], {'marks': '[pytest.mark.performance_extended]'}), '(*[constants.CEPHBLOCKPOOL_THICK], marks=[pytest.mark.\n performance_extended])\n', (14678, 14759), False, 'import pytest\n'), ((2202, 2241), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""production_es_server"""'], {}), "('production_es_server')\n", (2217, 2241), False, 'from ocs_ci.framework import config\n'), ((2271, 2308), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""production_es_port"""'], {}), "('production_es_port')\n", (2286, 2308), False, 'from ocs_ci.framework import config\n'), ((7897, 7923), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (7921, 7923), False, 'import datetime\n'), ((9762, 9903), 'ocs_ci.ocs.exceptions.PerformanceException', 'ex.PerformanceException', (['f"""{msg_prefix} PVC deletion time is {deletion_time} and is greater than {accepted_deletion_time} seconds."""'], {}), "(\n f'{msg_prefix} 
PVC deletion time is {deletion_time} and is greater than {accepted_deletion_time} seconds.'\n )\n", (9785, 9903), True, 'import ocs_ci.ocs.exceptions as ex\n'), ((2703, 2735), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""dev_es_server"""'], {}), "('dev_es_server')\n", (2718, 2735), False, 'from ocs_ci.framework import config\n'), ((2738, 2768), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""dev_es_port"""'], {}), "('dev_es_port')\n", (2753, 2768), False, 'from ocs_ci.framework import config\n'), ((2347, 2386), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""production_es_server"""'], {}), "('production_es_server')\n", (2362, 2386), False, 'from ocs_ci.framework import config\n'), ((2389, 2426), 'ocs_ci.framework.config.PERF.get', 'config.PERF.get', (['"""production_es_port"""'], {}), "('production_es_port')\n", (2404, 2426), False, 'from ocs_ci.framework import config\n')] |
MikeBirdsall/food-log | templates/t/searchresult_withnone.py | 5edc1fa515d5e2721e96afb7d2b437296903a31d | #!/usr/bin/python3
from jinja2 import Environment, FileSystemLoader
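# Jinja filter: render None as an empty string instead of the literal "None".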
def spacenone(value):
return "" if value is None else str(value)
results = [
dict(
description="Noodles and Company steak Stromboli",
comment="",
size="small",
cals=530,
carbs=50,
fat=25,
protein=27,
score=30),
dict(
description="Steak sandwich",
comment="",
size="4 oz and bun",
cals=480,
carbs=44,
fat=20,
protein=27,
score=30),
dict(
description="chipotle tacos",
comment="Steak, no beans, gu...",
size="",
cals=285,
carbs=None,
fat=16,
protein=None,
score=30),
dict(
description="Steak Sandwich",
comment="",
size="",
cals=380,
carbs=45,
fat=3.5,
protein=34,
score=30),
]
input_ = dict(
title="Search for Courses",
h1="Full Text Search: steak NOT shake",
results=results,
)
env = Environment(loader=FileSystemLoader(".."))
env.filters['spacenone'] = spacenone
template = env.get_template("searchresult.html")
output = template.render(input_)
print(output)
| [((1072, 1094), 'jinja2.FileSystemLoader', 'FileSystemLoader', (['""".."""'], {}), "('..')\n", (1088, 1094), False, 'from jinja2 import Environment, FileSystemLoader\n')] |
aman-roy/pune.pycon.org | payments/views.py | f56cc948bd56767110d337c694ecbf5540bdf4b9 | from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from payments.models import Invoice, RazorpayKeys
from payments.razorpay.razorpay_payments import RazorpayPayments
from payments.models import Payment, Order
import json
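# Razorpay posts webhook notifications from its own servers, so Django's CSRF
# protection is disabled for this endpoint.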
@csrf_exempt
def webhook(request):
if request.method == 'POST':
keys = RazorpayKeys.objects.first()
payment = RazorpayPayments(keys.api_key, keys.api_secret)
data = json.loads(request.body)
if 'payload' not in data or 'invoice' not in data['payload']:
return JsonResponse({"message": "Invalid Data"})
invoice_entity = data['payload']['invoice']['entity']
order_entity = data['payload']['order']['entity']
payment_entity = data['payload']['payment']['entity']
invoice = Invoice.objects.get(invoice_id=invoice_entity['id'])
invoice.status = invoice_entity['status']
invoice.save()
payment.save_payment(payment_entity)
payment.save_order(order_entity)
return JsonResponse({"message": "Success"})
return JsonResponse({"message": "Method Not Allowed"})
def sync(request):
keys = RazorpayKeys.objects.first()
payment = RazorpayPayments(keys.api_key, keys.api_secret)
invoices = Invoice.objects.all()
for invoice in invoices:
invoice_details = payment.fetch_invoices(invoice.invoice_id)
invoice.status = invoice_details['status']
invoice.save()
if invoice.status == 'paid':
orders = Order.objects.filter(order_id=invoice_details['order_id'])
if len(orders) == 0:
order_details = payment.fetch_orders(
invoice_details['order_id'])
payment.save_order(order_details)
if invoice_details['payment_id']:
payments = Payment.objects.filter(payment_id=invoice_details['payment_id'])
if len(payments) == 0:
payment_details = payment.fetch_payment(invoice_details['payment_id'])
payment.save_payment(payment_details)
return JsonResponse({"message": "synced"}) | [((1090, 1137), 'django.http.JsonResponse', 'JsonResponse', (["{'message': 'Method Not Allowed'}"], {}), "({'message': 'Method Not Allowed'})\n", (1102, 1137), False, 'from django.http import JsonResponse\n'), ((1170, 1198), 'payments.models.RazorpayKeys.objects.first', 'RazorpayKeys.objects.first', ([], {}), '()\n', (1196, 1198), False, 'from payments.models import Invoice, RazorpayKeys\n'), ((1213, 1260), 'payments.razorpay.razorpay_payments.RazorpayPayments', 'RazorpayPayments', (['keys.api_key', 'keys.api_secret'], {}), '(keys.api_key, keys.api_secret)\n', (1229, 1260), False, 'from payments.razorpay.razorpay_payments import RazorpayPayments\n'), ((1276, 1297), 'payments.models.Invoice.objects.all', 'Invoice.objects.all', ([], {}), '()\n', (1295, 1297), False, 'from payments.models import Invoice, RazorpayKeys\n'), ((2111, 2146), 'django.http.JsonResponse', 'JsonResponse', (["{'message': 'synced'}"], {}), "({'message': 'synced'})\n", (2123, 2146), False, 'from django.http import JsonResponse\n'), ((346, 374), 'payments.models.RazorpayKeys.objects.first', 'RazorpayKeys.objects.first', ([], {}), '()\n', (372, 374), False, 'from payments.models import Invoice, RazorpayKeys\n'), ((393, 440), 'payments.razorpay.razorpay_payments.RazorpayPayments', 'RazorpayPayments', (['keys.api_key', 'keys.api_secret'], {}), '(keys.api_key, keys.api_secret)\n', (409, 440), False, 'from payments.razorpay.razorpay_payments import RazorpayPayments\n'), ((456, 480), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (466, 480), False, 'import json\n'), ((814, 866), 'payments.models.Invoice.objects.get', 'Invoice.objects.get', ([], {'invoice_id': "invoice_entity['id']"}), "(invoice_id=invoice_entity['id'])\n", (833, 866), False, 'from payments.models import Invoice, RazorpayKeys\n'), ((1041, 1077), 'django.http.JsonResponse', 'JsonResponse', (["{'message': 'Success'}"], {}), "({'message': 'Success'})\n", (1053, 1077), False, 'from django.http import JsonResponse\n'), ((570, 611), 'django.http.JsonResponse', 'JsonResponse', (["{'message': 'Invalid Data'}"], {}), "({'message': 'Invalid Data'})\n", (582, 611), False, 'from django.http import JsonResponse\n'), ((1528, 1586), 'payments.models.Order.objects.filter', 'Order.objects.filter', ([], {'order_id': "invoice_details['order_id']"}), "(order_id=invoice_details['order_id'])\n", (1548, 1586), False, 'from payments.models import Payment, Order\n'), ((1846, 1910), 'payments.models.Payment.objects.filter', 'Payment.objects.filter', ([], {'payment_id': "invoice_details['payment_id']"}), "(payment_id=invoice_details['payment_id'])\n", (1868, 1910), False, 'from payments.models import Payment, Order\n')] |
danschef/gear-detector | src/convnet/image_classifier.py | 153d1031778f183ac38edf0532d2f266029c5ea7 | import configparser
import os
import sys
from time import localtime, strftime, mktime
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
from net import Net
from geo_helper import store_image_bounds
from image_helper import CLASSES
from image_helper import save_image
from image_helper import test_set_loader
from image_helper import train_set_loader
from image_helper import validation_set_loader
CONFIG = configparser.ConfigParser()
CONFIG.read('./src/config.ini')
###########################################
# Training Stage
###########################################
def train(net, epochs=50, learning_rate=0.001):
start_time = strftime('%H:%M:%S', localtime())
print(f"Started training at: {start_time}")
datetime = strftime("%Y%m%d_%H%M", localtime())
logfile = f"{CONFIG['CNN Paths']['accuracy_log_path']}/{datetime}.log"
###########################################
# Loss Function
###########################################
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(), lr=learning_rate, momentum=0.9)
for epoch in range(epochs): # loop over the dataset multiple times
running_loss = 0.0
for i, (images, labels) in enumerate(train_set_loader(), 0):
# Wrap images and labels into Variables
images, labels = Variable(images), Variable(labels)
# Clear all accumulated gradients
optimizer.zero_grad()
            # Predict classes for the current batch of training images
outputs = net(images)
# Compute the loss based on the predictions and actual labels
loss = criterion(outputs, labels)
# Backpropagate the loss
loss.backward()
# Adjust parameters according to the computed gradients
optimizer.step()
# print statistics
running_loss += loss.item()
if i % 100 == 99: # print every 100 mini-batches
print('[%d, %5d] loss: %.3f, accuracy: %.3f' %
(epoch + 1, i + 1, running_loss / 100, validate(logfile, net)))
running_loss = 0.0
end_time = strftime('%H:%M:%S', localtime())
print(f"Finished Training: {end_time}")
#####################################
# Validation stage
#####################################
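# validate() runs the current model over the validation loader, counts exact
# label matches, appends the resulting accuracy to the given log file and
# returns it so train() can report it alongside the running loss.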
def validate(logfile, net):
dataiter = iter(validation_set_loader())
hits = 0.0
for idx, item in enumerate(dataiter):
images, labels = item
outputs = net(Variable(images))
_, predicted = torch.max(outputs.data, 1)
if (labels == predicted[0]).all():
hits += 1
accuracy = hits / (idx + 1)
log_accuracy(logfile, accuracy)
return accuracy
def log_accuracy(filename, accuracy):
with open(filename, "a") as file:
file.write(str(accuracy)+ '\n')
#####################################
# Prediction stage
#####################################
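# predict() streams images from the test loader through the trained network
# (up to the configured batch_size), tallies predictions per class, and for
# tiles classified as 'nets' saves the image and its geographic bounds for
# later inspection.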
def predict(net):
print(f"Prediction started at: {strftime('%H:%M:%S', localtime())}")
dataiter = iter(test_set_loader())
prediction_cnt = {
'cloud': 0,
'edge': 0,
'land': 0,
'nets': 0,
'rock': 0,
'vessel': 0,
'water': 0
}
datetime = strftime("%Y%m%d_%H%M", localtime())
prediction_log = f"{CONFIG['CNN Paths']['predicted_geodata_path']}/{datetime}.json"
prediction_img_folder = f"{CONFIG['CNN Paths']['predicted_imagery_path']}/{datetime}"
for idx, item in enumerate(dataiter):
if idx > int(CONFIG['CNN Prediction']['batch_size']):
break
if idx % 100 == 0:
print('.', end='', flush=True)
images, _labels = item
##########################################################
# Feed the images into the CNN and check what it predicts
##########################################################
outputs = net(Variable(images))
_, predicted = torch.max(outputs.data, 1)
# Save images from prediction for visual check
if CLASSES[predicted[0]] == 'nets':
image_path = dataiter._dataset.imgs[idx][0]
save_image(image_path, prediction_img_folder)
store_image_bounds(image_path, prediction_log)
prediction_cnt[CLASSES[predicted[0]]] += 1
print(f"\nPrediction ended at: {strftime('%H:%M:%S', localtime())}")
print(f"\nPredicted: {prediction_cnt}")
def model_full_path(path, checkpoint):
return f"{path}_{checkpoint}.pt"
################################################################
# Train network or use existing one for prediction
################################################################
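# main() dispatches on the mode argument (CLI argument or parameter):
#   'predict' - load the checkpoint named in config.ini and run predict()
#   'train'   - train a fresh network and save a timestamped checkpoint
#   'resume'  - load an existing checkpoint, continue training, and re-save it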
def main(mode=''):
image_bands = int(CONFIG['CNN Training']['image_bands'])
training_epochs = int(CONFIG['CNN Training']['epochs'])
resume_epochs = int(CONFIG['CNN Resume Training']['epochs'])
learning_rate = float(CONFIG['CNN Training']['learning_rate'])
batch_size = CONFIG['CNN Prediction']['batch_size']
if len(sys.argv) > 1:
mode = sys.argv[1]
net = Net(in_channels=image_bands)
model_path = CONFIG['CNN Paths']['model_path']
checkpoint = CONFIG['CNN Prediction']['checkpoint']
# Use network for prediction
if mode == 'predict' and os.path.exists(model_full_path(model_path, checkpoint)):
print(f"Use trained network {checkpoint} for prediction of max {batch_size} images")
# Load existing model
model = torch.load(model_full_path(model_path, checkpoint))
net.load_state_dict(model)
predict(net)
# Start training
elif mode == 'train':
print(f"Start network training for {training_epochs} epochs")
train(net, training_epochs, learning_rate)
# Save model after training
checkpoint = strftime("%Y%m%d_%H%M", localtime())
torch.save(net.state_dict(), model_full_path(model_path, checkpoint))
# Resume training
elif mode == 'resume':
checkpoint = CONFIG['CNN Resume Training']['checkpoint']
print(f"Resume training on Model {checkpoint} for {resume_epochs} epochs")
# Load existing model and resume training
model = torch.load(model_full_path(model_path, checkpoint))
net.load_state_dict(model)
train(net, resume_epochs, learning_rate)
torch.save(net.state_dict(), model_full_path(model_path, checkpoint))
else:
print('No mode provided.')
if __name__ == '__main__':
    main()
| [((459, 486), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (484, 486), False, 'import configparser\n'), ((1035, 1056), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1054, 1056), True, 'import torch.nn as nn\n'), ((5163, 5191), 'net.Net', 'Net', ([], {'in_channels': 'image_bands'}), '(in_channels=image_bands)\n', (5166, 5191), False, 'from net import Net\n'), ((712, 723), 'time.localtime', 'localtime', ([], {}), '()\n', (721, 723), False, 'from time import localtime, strftime, mktime\n'), ((813, 824), 'time.localtime', 'localtime', ([], {}), '()\n', (822, 824), False, 'from time import localtime, strftime, mktime\n'), ((2242, 2253), 'time.localtime', 'localtime', ([], {}), '()\n', (2251, 2253), False, 'from time import localtime, strftime, mktime\n'), ((2444, 2467), 'image_helper.validation_set_loader', 'validation_set_loader', ([], {}), '()\n', (2465, 2467), False, 'from image_helper import validation_set_loader\n'), ((2623, 2649), 'torch.max', 'torch.max', (['outputs.data', '(1)'], {}), '(outputs.data, 1)\n', (2632, 2649), False, 'import torch\n'), ((3133, 3150), 'image_helper.test_set_loader', 'test_set_loader', ([], {}), '()\n', (3148, 3150), False, 'from image_helper import test_set_loader\n'), ((3357, 3368), 'time.localtime', 'localtime', ([], {}), '()\n', (3366, 3368), False, 'from time import localtime, strftime, mktime\n'), ((4039, 4065), 'torch.max', 'torch.max', (['outputs.data', '(1)'], {}), '(outputs.data, 1)\n', (4048, 4065), False, 'import torch\n'), ((1280, 1298), 'image_helper.train_set_loader', 'train_set_loader', ([], {}), '()\n', (1296, 1298), False, 'from image_helper import train_set_loader\n'), ((2581, 2597), 'torch.autograd.Variable', 'Variable', (['images'], {}), '(images)\n', (2589, 2597), False, 'from torch.autograd import Variable\n'), ((3997, 4013), 'torch.autograd.Variable', 'Variable', (['images'], {}), '(images)\n', (4005, 4013), False, 'from torch.autograd import Variable\n'), ((4234, 4279), 'image_helper.save_image', 'save_image', (['image_path', 'prediction_img_folder'], {}), '(image_path, prediction_img_folder)\n', (4244, 4279), False, 'from image_helper import save_image\n'), ((4292, 4338), 'geo_helper.store_image_bounds', 'store_image_bounds', (['image_path', 'prediction_log'], {}), '(image_path, prediction_log)\n', (4310, 4338), False, 'from geo_helper import store_image_bounds\n'), ((1386, 1402), 'torch.autograd.Variable', 'Variable', (['images'], {}), '(images)\n', (1394, 1402), False, 'from torch.autograd import Variable\n'), ((1404, 1420), 'torch.autograd.Variable', 'Variable', (['labels'], {}), '(labels)\n', (1412, 1420), False, 'from torch.autograd import Variable\n'), ((5918, 5929), 'time.localtime', 'localtime', ([], {}), '()\n', (5927, 5929), False, 'from time import localtime, strftime, mktime\n'), ((3096, 3107), 'time.localtime', 'localtime', ([], {}), '()\n', (3105, 3107), False, 'from time import localtime, strftime, mktime\n'), ((4449, 4460), 'time.localtime', 'localtime', ([], {}), '()\n', (4458, 4460), False, 'from time import localtime, strftime, mktime\n')] |
aaanh/duplicated_accelcamp | src/modules/AlphabetPlotter.py | 7d4b60ace023bede907f8ed367ba492731a1951d | import tkinter as tk
from tkinter import filedialog
import csv
import matplotlib.pyplot as plt
root = tk.Tk(screenName=':0.0')
root.withdraw()
file_path = filedialog.askopenfilename()
lastIndex = len(file_path.split('/')) - 1
v0 = [0, 0, 0]
x0 = [0, 0, 0]
fToA = 1
error = 0.28
errorZ = 3
t = []
time = []
m = [[] for i in range(3)]
magnitude = [[] for i in range(3)]
shift_x = 0
shift_y = 0
# For when the data starts at (2,1)
if file_path.split('/')[lastIndex].split('.')[2] == "pocket":
shift_x = 2
shift_y = 1
error = 0.3
fToA = 1
# For when the data starts at (0,0)
elif file_path.split('/')[lastIndex].split('.')[2] == "pocket_mobile":
shift_x = 0
shift_y = 0
error = 0.3
fToA = 1
# For when the data starts at (1,0)
elif file_path.split('/')[lastIndex].split('.')[2] == "android":
shift_x = 0
shift_y = 1
error = 0.02
fToA = 9.81
errorZ = 100
shift = 0
uselessboolean = True
with open(file_path, 'r+') as csvfile:
readCSV = csv.reader(csvfile, delimiter=',')
for row in readCSV:
if shift < shift_y:
shift += 1
else:
t = row[shift_x]
m[0] = row[1 + shift_x]
m[1] = row[2 + shift_x]
m[2] = row[3 + shift_x]
time.append(float(t))
for i in range(0, 3):
magnitude[i].append(float(m[i]) if abs(float(m[i])) > error else 0)
acceleration = [[(j * fToA) for j in i] for i in magnitude]
acceleration[2] = [i - 9.805 for i in acceleration[2]]
# Translates Data into Position
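# Simple forward-Euler integration, per axis, starting from v0 and x0:
#   v[i] = v[i-1] + a[i-1] * dt
#   x[i] = x[i-1] + v[i-1] * dt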
velocity = [[0 for i in time] for i in range(3)]
position = [[0 for i in time] for i in range(3)]
for j in range(3):
velocity[j][0] = v0[j]
for i in range(1, len(time)):
velocity[j][i] = velocity[j][i - 1] + acceleration[j][i - 1] * (time[i] - time[i - 1])
for j in range(3):
position[j][0] = x0[j]
for i in range(1, len(time)):
position[j][i] = position[j][i - 1] + velocity[j][i - 1] * (time[i] - time[i - 1])
for i in range(len(acceleration[2])):
if abs(velocity[2][i]) > errorZ:
position[0][i] = 0
position[1][i] = 0
fig, axs = plt.subplots(2)
axs[0].plot(time, acceleration[0])
axs[0].set_xlabel('Time (s)')
axs[0].set_ylabel('AccelerationX (m/s^2)')
axs[1].plot(time, acceleration[1])
axs[1].set_xlabel('Time (s)')
axs[1].set_ylabel('AccelerationY (m/s^2)')
'''
axs[2].scatter(time, acceleration[2])
axs[2].set_xlabel('Time (s)')
axs[2].set_ylabel('AccelerationZ (m/s^2)')
axs[3].scatter(time, velocity[2])
axs[3].set_xlabel('Time (s)')
axs[3].set_ylabel('VelocityZ (m/s)')
axs[4].scatter(time, position[2])
axs[4].set_xlabel('Time (s)')
axs[4].set_ylabel('PositionZ (m)')
axs.scatter(position[0], position[1], marker = "_", linewidth = 70)
axs.set_xlabel('PositionX')
axs.set_ylabel('PositionY')
plt.plot(position[0], position[1], marker = '_', markersize = 30, linewidth = 3, markeredgewidth = 10)'''
plt.show() | [((108, 132), 'tkinter.Tk', 'tk.Tk', ([], {'screenName': '""":0.0"""'}), "(screenName=':0.0')\n", (113, 132), True, 'import tkinter as tk\n'), ((165, 193), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {}), '()\n', (191, 193), False, 'from tkinter import filedialog\n'), ((2223, 2238), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)'], {}), '(2)\n', (2235, 2238), True, 'import matplotlib.pyplot as plt\n'), ((3023, 3033), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3031, 3033), True, 'import matplotlib.pyplot as plt\n'), ((1040, 1074), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '""","""'}), "(csvfile, delimiter=',')\n", (1050, 1074), False, 'import csv\n')] |
mitodl/mit-xpro | users/migrations/0008_profile_fields_optional.py | 981d6c87d963837f0b9ccdd996067fe81394dba4 | # Generated by Django 2.2.3 on 2019-07-15 19:24
from django.db import migrations, models
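# Data-migration helpers: the forward function creates a Profile for any user
# that lacks one, while the reverse function deletes profiles with empty/blank
# fields so the field alterations below can be rolled back cleanly.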
def backpopulate_incomplete_profiles(apps, schema):
"""Backpopulate users who don't have a profile record"""
User = apps.get_model("users", "User")
Profile = apps.get_model("users", "Profile")
for user in User.objects.annotate(
has_profile=models.Exists(Profile.objects.filter(user=models.OuterRef("pk")))
).filter(has_profile=False):
Profile.objects.get_or_create(user=user)
def remove_incomplete_profiles(apps, schema):
"""Delete records that will cause rollbacks on nullable/blankable fields to fail"""
Profile = apps.get_model("users", "Profile")
Profile.objects.filter(
models.Q(birth_year__isnull=True)
| models.Q(gender__exact="")
| models.Q(job_title__exact="")
| models.Q(company__exact="")
).delete()
class Migration(migrations.Migration):
dependencies = [("users", "0007_validate_country_and_state")]
operations = [
migrations.AlterField(
model_name="profile",
name="birth_year",
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name="profile",
name="company",
field=models.CharField(blank=True, default="", max_length=128),
),
migrations.AlterField(
model_name="profile",
name="gender",
field=models.CharField(
blank=True,
choices=[
("m", "Male"),
("f", "Female"),
("o", "Other/Prefer Not to Say"),
],
default="",
max_length=10,
),
),
migrations.AlterField(
model_name="profile",
name="industry",
field=models.CharField(blank=True, default="", max_length=60),
),
migrations.AlterField(
model_name="profile",
name="job_function",
field=models.CharField(blank=True, default="", max_length=60),
),
migrations.AlterField(
model_name="profile",
name="job_title",
field=models.CharField(blank=True, default="", max_length=128),
),
migrations.AlterField(
model_name="profile",
name="leadership_level",
field=models.CharField(blank=True, default="", max_length=60),
),
migrations.RunPython(
backpopulate_incomplete_profiles, reverse_code=remove_incomplete_profiles
),
]
| [((2522, 2622), 'django.db.migrations.RunPython', 'migrations.RunPython', (['backpopulate_incomplete_profiles'], {'reverse_code': 'remove_incomplete_profiles'}), '(backpopulate_incomplete_profiles, reverse_code=\n remove_incomplete_profiles)\n', (2542, 2622), False, 'from django.db import migrations, models\n'), ((1133, 1175), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1152, 1175), False, 'from django.db import migrations, models\n'), ((1299, 1355), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(128)'}), "(blank=True, default='', max_length=128)\n", (1315, 1355), False, 'from django.db import migrations, models\n'), ((1478, 1613), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'choices': "[('m', 'Male'), ('f', 'Female'), ('o', 'Other/Prefer Not to Say')]", 'default': '""""""', 'max_length': '(10)'}), "(blank=True, choices=[('m', 'Male'), ('f', 'Female'), ('o',\n 'Other/Prefer Not to Say')], default='', max_length=10)\n", (1494, 1613), False, 'from django.db import migrations, models\n'), ((1892, 1947), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(60)'}), "(blank=True, default='', max_length=60)\n", (1908, 1947), False, 'from django.db import migrations, models\n'), ((2076, 2131), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(60)'}), "(blank=True, default='', max_length=60)\n", (2092, 2131), False, 'from django.db import migrations, models\n'), ((2257, 2313), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(128)'}), "(blank=True, default='', max_length=128)\n", (2273, 2313), False, 'from django.db import migrations, models\n'), ((2446, 2501), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'default': '""""""', 'max_length': '(60)'}), "(blank=True, default='', max_length=60)\n", (2462, 2501), False, 'from django.db import migrations, models\n'), ((848, 875), 'django.db.models.Q', 'models.Q', ([], {'company__exact': '""""""'}), "(company__exact='')\n", (856, 875), False, 'from django.db import migrations, models\n'), ((808, 837), 'django.db.models.Q', 'models.Q', ([], {'job_title__exact': '""""""'}), "(job_title__exact='')\n", (816, 837), False, 'from django.db import migrations, models\n'), ((727, 760), 'django.db.models.Q', 'models.Q', ([], {'birth_year__isnull': '(True)'}), '(birth_year__isnull=True)\n', (735, 760), False, 'from django.db import migrations, models\n'), ((771, 797), 'django.db.models.Q', 'models.Q', ([], {'gender__exact': '""""""'}), "(gender__exact='')\n", (779, 797), False, 'from django.db import migrations, models\n'), ((399, 420), 'django.db.models.OuterRef', 'models.OuterRef', (['"""pk"""'], {}), "('pk')\n", (414, 420), False, 'from django.db import migrations, models\n')] |
nwukie/ChiDG | test/unit_testing/grid/element_linear_dx_data/test_element_linearC/element/geom_element_AD.py | d096548ba3bd0a338a29f522fb00a669f0e33e9b | from __future__ import division
import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
import sys
import os
import time
#
# TORCH INSTALLATION: refer to https://pytorch.org/get-started/locally/
#
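# This script builds a single linear hexahedral element with torch tensors,
# computes its interpolation quantities (Jacobian, metric terms, mass matrix,
# BR2 lifting operators, gradient matrices) and then uses torch autograd to
# differentiate each quantity with respect to the 8 support-node coordinates,
# writing the results to text files (presumably as reference data for the
# ChiDG unit tests in this directory).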
def update_progress(job_title, progress):
length = 20 # modify this to change the length
block = int(round(length*progress))
msg = "\r{0}: [{1}] {2}%".format(job_title, "#"*block + "-"*(length-block), round(progress*100, 2))
if progress >= 1: msg += " DONE\r\n"
sys.stdout.write(msg)
sys.stdout.flush()
def cls():
os.system('cls' if os.name=='nt' else 'clear')
cls()
################################################################################################################
# Initialize torch tensor for coordinates
coords_data = [[ 0.0 , 0.0 , 0.0 ],
[ 1.0/(2.0**0.5), 0.0 , 1.0/(2.0**0.5)],
[ 1.0/(2.0**0.5), 0.0 ,-1.0/(2.0**0.5)],
[ 2.0**0.5 , 0.0 , 0.0 ],
[ 0.0 , 1.0 , 0.0 ],
[ 1.0/(2.0**0.5), 1.0 , 1.0/(2.0**0.5)],
[ 1.0/(2.0**0.5), 1.0 ,-1.0/(2.0**0.5)],
[ 2.0**0.5 , 1.0 , 0.0 ],
]
coords = torch.tensor(coords_data,requires_grad=True,dtype=torch.float64)
nnodes_r = coords.size(0)
nnodes_ie = 8
nnodes_if = 4
nterms_s = 8
ndirs = 3
coord_sys = 'CARTESIAN'
# Define matrix of polynomial basis terms at support nodes
val_r_data = [[ 1.0,-1.0,-1.0,-1.0, 1.0, 1.0, 1.0,-1.0],
[ 1.0,-1.0,-1.0, 1.0,-1.0,-1.0, 1.0, 1.0],
[ 1.0, 1.0,-1.0,-1.0,-1.0, 1.0,-1.0, 1.0],
[ 1.0, 1.0,-1.0, 1.0, 1.0,-1.0,-1.0,-1.0],
[ 1.0,-1.0, 1.0,-1.0, 1.0,-1.0,-1.0, 1.0],
[ 1.0,-1.0, 1.0, 1.0,-1.0, 1.0,-1.0,-1.0],
[ 1.0, 1.0, 1.0,-1.0,-1.0,-1.0, 1.0,-1.0],
[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0],
]
val_r = torch.tensor(val_r_data,requires_grad=False,dtype=torch.float64)
# Define matrices at interpolation nodes (quadrature, level = 1)
val_i_data = [[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0, 1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0, 1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0,-1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0,-1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0,-1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0,-1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0, 1.0/3.0,-1.0/3.0*np.sqrt(1.0/3.0)],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0, 1.0/3.0, 1.0/3.0*np.sqrt(1.0/3.0)],
]
val_i = torch.tensor(val_i_data,requires_grad=False,dtype=torch.float64)
ddxi_i_data = [[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0, 1.0/3.0],
[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0, 1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0,-1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0, 1.0/3.0],
[ 0.0,0.0,0.0,1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),0.0, 1.0/3.0],
]
ddxi_i = torch.tensor(ddxi_i_data,requires_grad=False,dtype=torch.float64)
ddeta_i_data = [[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0,-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),0.0, np.sqrt(1.0/3.0), 1.0/3.0],
]
ddeta_i = torch.tensor(ddeta_i_data,requires_grad=False,dtype=torch.float64)
ddzeta_i_data= [[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 0.0,0.0,1.0,0.0,0.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
]
ddzeta_i = torch.tensor(ddzeta_i_data,requires_grad=False,dtype=torch.float64)
# Define quadrature weights at the element interpolation nodes (linear element)
weights_e_data = [1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0]
weights_e = torch.tensor(weights_e_data,requires_grad=False,dtype=torch.float64)
# Define val_f for each face
# Face 1, XI_MIN
val_1_data = [[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-1.0/3.0],
]
val_1 = torch.tensor(val_1_data,requires_grad=False,dtype=torch.float64)
# Face 2, XI_MAX
val_2_data = [[ 1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, 1.0/3.0],
]
val_2 = torch.tensor(val_2_data,requires_grad=False,dtype=torch.float64)
# Face 3, ETA_MIN
val_3_data = [[ 1.0,-1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,-1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,-1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,-1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0),-1.0/3.0],
]
val_3 = torch.tensor(val_3_data,requires_grad=False,dtype=torch.float64)
# Face 4, ETA_MAX
val_4_data = [[ 1.0,1.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,1.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,1.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,1.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0), 1.0/3.0],
]
val_4 = torch.tensor(val_4_data,requires_grad=False,dtype=torch.float64)
# Face 5, ZETA_MIN
val_5_data = [[ 1.0,-np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-1.0,-np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),-1.0, np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
]
val_5 = torch.tensor(val_5_data,requires_grad=False,dtype=torch.float64)
# Face 6, ZETA_MAX
val_6_data = [[ 1.0,-np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0), 1.0/3.0,-np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0), 1.0/3.0],
[ 1.0,-np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0),-1.0/3.0, np.sqrt(1.0/3.0),-np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),1.0,-np.sqrt(1.0/3.0),-1.0/3.0,-np.sqrt(1.0/3.0), np.sqrt(1.0/3.0),-1.0/3.0],
[ 1.0, np.sqrt(1.0/3.0),1.0, np.sqrt(1.0/3.0), 1.0/3.0, np.sqrt(1.0/3.0), np.sqrt(1.0/3.0), 1.0/3.0],
]
val_6 = torch.tensor(val_6_data,requires_grad=False,dtype=torch.float64)
#--------------------------------------------------------------------
# Matrix modes_to_nodes
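# val_r evaluates the modal basis at the 8 support nodes (modes -> nodal values),
# so its inverse projects nodal coordinates onto modal coefficients below.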
val_r_inv = torch.inverse(val_r)
# Compute the modal coefficients of the coordinates
coords_modes = torch.mm(val_r_inv,coords)
# Interpolate the coordinates at the interpolation nodes
interp_coords = torch.mm(val_i,coords_modes)
# Initialize the Jacobian at each interpolation node
jacobian = torch.empty(3,3,nnodes_ie, dtype=torch.float64)
for inode in range(0,nnodes_ie):
jacobian[0,0,inode] = torch.dot(ddxi_i[inode,:] , coords_modes[:,0])
jacobian[0,1,inode] = torch.dot(ddeta_i[inode,:] , coords_modes[:,0])
jacobian[0,2,inode] = torch.dot(ddzeta_i[inode,:] , coords_modes[:,0])
jacobian[1,0,inode] = torch.dot(ddxi_i[inode,:] , coords_modes[:,1])
jacobian[1,1,inode] = torch.dot(ddeta_i[inode,:] , coords_modes[:,1])
jacobian[1,2,inode] = torch.dot(ddzeta_i[inode,:] , coords_modes[:,1])
jacobian[2,0,inode] = torch.dot(ddxi_i[inode,:] , coords_modes[:,2])
jacobian[2,1,inode] = torch.dot(ddeta_i[inode,:] , coords_modes[:,2])
jacobian[2,2,inode] = torch.dot(ddzeta_i[inode,:] , coords_modes[:,2])
update_progress("Computing Jacobian ", inode/(nnodes_ie-1))
if coord_sys == 'CYLINDRICAL':
scaling_factor = torch.mm(val_i,coords_modes[:,0])
for inode in range(0,nnodes_ie):
jacobian[1,0,inode] = jacobian[1,0,inode] * scaling_factor[inode]
jacobian[1,1,inode] = jacobian[1,1,inode] * scaling_factor[inode]
jacobian[1,2,inode] = jacobian[1,2,inode] * scaling_factor[inode]
# Metrics and Determinant
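# For each interpolation node: jinv holds det(J) and metrics holds the inverse
# Jacobian, i.e. the derivatives of reference coordinates w.r.t. physical ones.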
metrics = torch.empty(3,3,nnodes_ie, dtype=torch.float64)
jinv = torch.empty(nnodes_ie, dtype=torch.float64)
for inode in range(0,nnodes_ie):
ijacobian = torch.empty(3,3, dtype=torch.float64)
imetric = torch.empty(3,3, dtype=torch.float64)
for irow in range(0,3):
for icol in range(0,3):
ijacobian[irow,icol] = jacobian[irow,icol,inode]
# Compute the Jacobian determinant and its inverse for the ith node
update_progress("Computing Jinv and Metric ", inode/(nnodes_ie-1))
jinv[inode] = torch.det(ijacobian)
imetric = torch.inverse(ijacobian)
for irow in range(0,3):
for icol in range(0,3):
metrics[irow,icol,inode] = imetric[irow,icol]
# Compute inverse Mass matrix
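# Mass matrix assembled as M = V^T diag(w * jinv) V, where V is the basis
# evaluated at the interpolation nodes; invmass is its inverse.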
invmass = torch.empty(nterms_s,nterms_s,nnodes_ie, dtype=torch.float64)
mass = torch.empty(nterms_s,nterms_s,nnodes_ie, dtype=torch.float64)
val_tmp = torch.empty(nnodes_ie, nterms_s, dtype=torch.float64)  # indexed as [inode, iterm] below
i = 1
for iterm in range(0,nterms_s):
for inode in range(0,nnodes_ie):
val_tmp[inode,iterm] = val_i[inode,iterm] * weights_e[inode] * jinv[inode]
update_progress("Computing invmass ", i/(nterms_s*nnodes_ie))
i += 1
mass = torch.mm(torch.t(val_tmp),val_i)
invmass = torch.inverse(mass)
# Compute BR2_VOL for each face
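# Volume lifting operators (BR2 scheme, presumably Bassi-Rebay 2): for each of
# the six faces, br2_vol = V_vol * M^{-1} * V_face^T.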
br2_vol_face1 = torch.mm(val_i,torch.mm(invmass,torch.t(val_1)))
br2_vol_face2 = torch.mm(val_i,torch.mm(invmass,torch.t(val_2)))
br2_vol_face3 = torch.mm(val_i,torch.mm(invmass,torch.t(val_3)))
br2_vol_face4 = torch.mm(val_i,torch.mm(invmass,torch.t(val_4)))
br2_vol_face5 = torch.mm(val_i,torch.mm(invmass,torch.t(val_5)))
br2_vol_face6 = torch.mm(val_i,torch.mm(invmass,torch.t(val_6)))
update_progress("Computing br2_vol ", 1)
# Compute BR2_FACE for each face
br2_face_face1 = torch.mm(val_1,torch.mm(invmass,torch.t(val_1)))
br2_face_face2 = torch.mm(val_2,torch.mm(invmass,torch.t(val_2)))
br2_face_face3 = torch.mm(val_3,torch.mm(invmass,torch.t(val_3)))
br2_face_face4 = torch.mm(val_4,torch.mm(invmass,torch.t(val_4)))
br2_face_face5 = torch.mm(val_5,torch.mm(invmass,torch.t(val_5)))
br2_face_face6 = torch.mm(val_6,torch.mm(invmass,torch.t(val_6)))
update_progress("Computing br2_face ", 1)
# Grad1, Grad2, and Grad3
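# Physical-space gradient matrices via the chain rule: each entry combines the
# reference-space derivatives (ddxi, ddeta, ddzeta) with the metric terms, so
# grad1/grad2/grad3 map modal coefficients to x-, y- and z-derivatives at the
# interpolation nodes.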
grad1 = torch.empty(nnodes_ie,nterms_s, dtype=torch.float64)
grad2 = torch.empty(nnodes_ie,nterms_s, dtype=torch.float64)
grad3 = torch.empty(nnodes_ie,nterms_s, dtype=torch.float64)
i = 1
for iterm in range(0,nterms_s):
for inode in range(0,nnodes_ie):
grad1[inode,iterm] = metrics[0,0,inode] * ddxi_i[inode,iterm] + metrics[1,0,inode] * ddeta_i[inode,iterm] + metrics[2,0,inode] * ddzeta_i[inode,iterm]
grad2[inode,iterm] = metrics[0,1,inode] * ddxi_i[inode,iterm] + metrics[1,1,inode] * ddeta_i[inode,iterm] + metrics[2,1,inode] * ddzeta_i[inode,iterm]
grad3[inode,iterm] = metrics[0,2,inode] * ddxi_i[inode,iterm] + metrics[1,2,inode] * ddeta_i[inode,iterm] + metrics[2,2,inode] * ddzeta_i[inode,iterm]
update_progress("Computing grad1, grad2, grad3 ", i/(nnodes_ie*nterms_s))
i += 1
#WRITE_____________________
#
# Metrics
#
f = open("metrics.txt","w")
i = 1
for inode in range (0,nnodes_ie):
f.write("Metric interpolation node %d \n" % (inode+1))
array = np.zeros([3, 3])
for irow in range(0,3):
for icol in range(0,3):
array[irow,icol] = metrics[irow,icol,inode].item()
update_progress("Writing metrics to file ", i/(nnodes_ie*9))
i += 1
np.savetxt(f,array)
f.close()
#
# jinv
#
f = open("jinv.txt","w")
array = np.zeros([1])
i = 1
for inode in range (0,nnodes_ie):
f.write("Jinv interpolation node %d \n" % (inode+1))
array[0] = jinv[inode].item()
np.savetxt(f,array)
update_progress("Writing jinv to file ", i/(nnodes_ie))
i += 1
f.close()
#
# Grad1
#
f = open("grad1.txt","w")
f.write("Grad1 \n")
array = np.zeros([nnodes_ie,nterms_s])
i = 1
for inode in range (0,nnodes_ie):
for iterm in range(0,nterms_s):
array[inode,iterm] = grad1[inode,iterm].item()
update_progress("Writing grad1 to file ", i/(nnodes_ie*nterms_s))
i += 1
np.savetxt(f,array)
f.close()
#
# Grad2
#
f = open("grad2.txt","w")
f.write("Grad2 \n")
array = np.zeros([nnodes_ie,nterms_s])
i = 1
for inode in range (0,nnodes_ie):
for iterm in range(0,nterms_s):
array[inode,iterm] = grad2[inode,iterm].item()
update_progress("Writing grad2 to file ", i/(nnodes_ie*nterms_s))
i += 1
np.savetxt(f,array)
f.close()
#
# Grad3
#
f = open("grad3.txt","w")
f.write("Grad3 \n")
array = np.zeros([nnodes_ie,nterms_s])
i = 1
for inode in range (0,nnodes_ie):
for iterm in range(0,nterms_s):
array[inode,iterm] = grad3[inode,iterm].item()
update_progress("Writing grad3 to file ", i/(nnodes_ie*nterms_s))
i += 1
np.savetxt(f,array)
f.close()
#
# dmetric_dx
#
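# From here on, derivatives w.r.t. the support-node coordinates are obtained by
# calling .backward(retain_graph=True) on each scalar quantity and reading the
# corresponding entry of coords.grad; coords.grad is zeroed after every entry
# so contributions do not accumulate.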
f = open("dmetric_dx.txt","w")
i = 1
for inode in range (0,nnodes_ie):
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
array = np.zeros([3,3])
f.write("dmetric_dx interpolation node %s, diff_node %s, diff_dir %s \n" % (inode+1,inode_diff+1,idir+1))
for irow in range(0,3):
for icol in range(0,3):
data = metrics[irow,icol,inode]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dmetric_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*3*3))
# Zero the gradient buffer so derivatives do not accumulate across entries
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# interp_coords_dx
#
f = open("dinterp_xcoords_dx.txt","w")
i = 1
f.write("xcoord interpolation, coord 1, row=node, col=nnodes_r*dir \n")
array = np.zeros([nnodes_ie,nnodes_r*ndirs])
for inode in range (0,nnodes_ie):
data = interp_coords[inode,0]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
if idir == 0:
index = inode_diff
elif idir == 1:
index = nnodes_r + inode_diff
elif idir == 2:
index = 2*nnodes_r + inode_diff
array[inode,index] = ddata_np[inode_diff,idir]
update_progress("Writing interp_xcoords_dx to file ", i/(nnodes_ie*nnodes_r*3))
i += 1
# Zero the gradient buffer so derivatives do not accumulate across entries
dummy = coords.grad.data.zero_()
np.savetxt(f,array)
f.close()
f = open("dinterp_ycoords_dx.txt","w")
i = 1
f.write("ycoord interpolation, coord 2, row=node, col=nnodes_r*dir \n")
array = np.zeros([nnodes_ie,nnodes_r*ndirs])
for inode in range (0,nnodes_ie):
data = interp_coords[inode,1]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
if idir == 0:
index = inode_diff
elif idir == 1:
index = nnodes_r + inode_diff
elif idir == 2:
index = 2*nnodes_r + inode_diff
array[inode,index] = ddata_np[inode_diff,idir]
update_progress("Writing interp_ycoords_dx to file ", i/(nnodes_ie*nnodes_r*3))
i += 1
# Zero the gradient buffer so derivatives do not accumulate across entries
dummy = coords.grad.data.zero_()
np.savetxt(f,array)
f.close()
f = open("dinterp_zcoords_dx.txt","w")
i = 1
f.write("zcoord interpolation, coord 3, row=node, col=nnodes_r*dir \n")
array = np.zeros([nnodes_ie,nnodes_r*ndirs])
for inode in range (0,nnodes_ie):
data = interp_coords[inode,2]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
if idir == 0:
index = inode_diff
elif idir == 1:
index = nnodes_r + inode_diff
elif idir == 2:
index = 2*nnodes_r + inode_diff
array[inode,index] = ddata_np[inode_diff,idir]
update_progress("Writing interp_zcoords_dx to file ", i/(nnodes_ie*nnodes_r*3))
i += 1
# Zero the gradient buffer so derivatives do not accumulate across entries
dummy = coords.grad.data.zero_()
np.savetxt(f,array)
f.close()
#
# djinv_dx
#
f = open("djinv_dx.txt","w")
i = 1
for inode in range (0,nnodes_ie):
array = np.zeros([nnodes_r,ndirs])
f.write("djinv_dx interpolation node %s, row=inode_diff, col=dir \n" % (inode+1))
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
data = jinv[inode]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[inode_diff,idir] = ddata_np[inode_diff,idir]
update_progress("Writing djinv_dx to file ", i/(nnodes_ie*nnodes_r*ndirs))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dmass_dx
#
f = open("dmass_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dmass_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nterms_s,nterms_s])
for irow in range(0,nterms_s):
for icol in range(0,nterms_s):
data = mass[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dmass_dx to file ", i/(nterms_s*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dinvmass_dx
#
f = open("dinvmass_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dinvmass_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nterms_s,nterms_s])
for irow in range(0,nterms_s):
for icol in range(0,nterms_s):
data = invmass[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dinvmass_dx to file ", i/(nterms_s*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dbr2_vol_dx
#
#
f = open("dbr2_vol_face1_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face1_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face1[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face1_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face2_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face2_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face2[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face2_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face3_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face3_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face3[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face3_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face4_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face4_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face4[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face4_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face5_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face5_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face5[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face5_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_vol_face6_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_vol_face6_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nnodes_if])
for irow in range(0,nnodes_ie):
for icol in range(0,nnodes_if):
data = br2_vol_face6[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_vol_face6_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dbr2_face_dx
#
#
f = open("dbr2_face_face1_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face1_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face1[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face1_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face2_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face2_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face2[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face2_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face3_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face3_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face3[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face3_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face4_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face4_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face4[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face4_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face5_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face5_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face5[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face5_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
f = open("dbr2_face_face6_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dbr2_face_face6_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_if,nnodes_if])
for irow in range(0,nnodes_if):
for icol in range(0,nnodes_if):
data = br2_face_face6[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dbr2_face_face6_dx to file ", i/(nnodes_if*nnodes_r*ndirs*nnodes_if))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dgrad1_dx
#
f = open("dgrad1_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dgrad1_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nterms_s])
for irow in range(0,nnodes_ie):
for icol in range(0,nterms_s):
data = grad1[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dgrad1_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dgrad2_dx
#
f = open("dgrad2_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dgrad2_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nterms_s])
for irow in range(0,nnodes_ie):
for icol in range(0,nterms_s):
data = grad2[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dgrad2_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
#
# dgrad3_dx
#
f = open("dgrad3_dx.txt","w")
i = 1
for inode_diff in range(0,nnodes_r):
for idir in range(0,ndirs):
f.write("dgrad3_dx => diff_node %s, diff_dir %s \n" % (inode_diff+1,idir+1))
array = np.zeros([nnodes_ie,nterms_s])
for irow in range(0,nnodes_ie):
for icol in range(0,nterms_s):
data = grad3[irow,icol]
data.backward(retain_graph=True)
ddata = coords.grad
ddata_np = ddata.numpy()
array[irow,icol] = ddata_np[inode_diff,idir]
update_progress("Writing dgrad3_dx to file ", i/(nnodes_ie*nnodes_r*ndirs*nterms_s))
dummy = coords.grad.data.zero_()
i += 1
np.savetxt(f,array)
f.close()
| [((1336, 1402), 'torch.tensor', 'torch.tensor', (['coords_data'], {'requires_grad': '(True)', 'dtype': 'torch.float64'}), '(coords_data, requires_grad=True, dtype=torch.float64)\n', (1348, 1402), False, 'import torch\n'), ((2061, 2127), 'torch.tensor', 'torch.tensor', (['val_r_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_r_data, requires_grad=False, dtype=torch.float64)\n', (2073, 2127), False, 'import torch\n'), ((3249, 3315), 'torch.tensor', 'torch.tensor', (['val_i_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_i_data, requires_grad=False, dtype=torch.float64)\n', (3261, 3315), False, 'import torch\n'), ((4023, 4090), 'torch.tensor', 'torch.tensor', (['ddxi_i_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(ddxi_i_data, requires_grad=False, dtype=torch.float64)\n', (4035, 4090), False, 'import torch\n'), ((4798, 4866), 'torch.tensor', 'torch.tensor', (['ddeta_i_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(ddeta_i_data, requires_grad=False, dtype=torch.float64)\n', (4810, 4866), False, 'import torch\n'), ((5575, 5644), 'torch.tensor', 'torch.tensor', (['ddzeta_i_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(ddzeta_i_data, requires_grad=False, dtype=torch.float64)\n', (5587, 5644), False, 'import torch\n'), ((5772, 5842), 'torch.tensor', 'torch.tensor', (['weights_e_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(weights_e_data, requires_grad=False, dtype=torch.float64)\n', (5784, 5842), False, 'import torch\n'), ((6387, 6453), 'torch.tensor', 'torch.tensor', (['val_1_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_1_data, requires_grad=False, dtype=torch.float64)\n', (6399, 6453), False, 'import torch\n'), ((6969, 7035), 'torch.tensor', 'torch.tensor', (['val_2_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_2_data, requires_grad=False, dtype=torch.float64)\n', (6981, 7035), False, 'import torch\n'), ((7556, 7622), 'torch.tensor', 'torch.tensor', (['val_3_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_3_data, requires_grad=False, dtype=torch.float64)\n', (7568, 7622), False, 'import torch\n'), ((8138, 8204), 'torch.tensor', 'torch.tensor', (['val_4_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_4_data, requires_grad=False, dtype=torch.float64)\n', (8150, 8204), False, 'import torch\n'), ((8725, 8791), 'torch.tensor', 'torch.tensor', (['val_5_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_5_data, requires_grad=False, dtype=torch.float64)\n', (8737, 8791), False, 'import torch\n'), ((9308, 9374), 'torch.tensor', 'torch.tensor', (['val_6_data'], {'requires_grad': '(False)', 'dtype': 'torch.float64'}), '(val_6_data, requires_grad=False, dtype=torch.float64)\n', (9320, 9374), False, 'import torch\n'), ((9485, 9505), 'torch.inverse', 'torch.inverse', (['val_r'], {}), '(val_r)\n', (9498, 9505), False, 'import torch\n'), ((9552, 9579), 'torch.mm', 'torch.mm', (['val_r_inv', 'coords'], {}), '(val_r_inv, coords)\n', (9560, 9579), False, 'import torch\n'), ((9623, 9652), 'torch.mm', 'torch.mm', (['val_i', 'coords_modes'], {}), '(val_i, coords_modes)\n', (9631, 9652), False, 'import torch\n'), ((9688, 9737), 'torch.empty', 'torch.empty', (['(3)', '(3)', 'nnodes_ie'], {'dtype': 'torch.float64'}), '(3, 3, nnodes_ie, dtype=torch.float64)\n', (9699, 9737), False, 'import torch\n'), ((10909, 10958), 'torch.empty', 'torch.empty', (['(3)', '(3)', 'nnodes_ie'], 
{'dtype': 'torch.float64'}), '(3, 3, nnodes_ie, dtype=torch.float64)\n', (10920, 10958), False, 'import torch\n'), ((10968, 11011), 'torch.empty', 'torch.empty', (['nnodes_ie'], {'dtype': 'torch.float64'}), '(nnodes_ie, dtype=torch.float64)\n', (10979, 11011), False, 'import torch\n'), ((11639, 11702), 'torch.empty', 'torch.empty', (['nterms_s', 'nterms_s', 'nnodes_ie'], {'dtype': 'torch.float64'}), '(nterms_s, nterms_s, nnodes_ie, dtype=torch.float64)\n', (11650, 11702), False, 'import torch\n'), ((11711, 11774), 'torch.empty', 'torch.empty', (['nterms_s', 'nterms_s', 'nnodes_ie'], {'dtype': 'torch.float64'}), '(nterms_s, nterms_s, nnodes_ie, dtype=torch.float64)\n', (11722, 11774), False, 'import torch\n'), ((11783, 11836), 'torch.empty', 'torch.empty', (['nterms_s', 'nnodes_ie'], {'dtype': 'torch.float64'}), '(nterms_s, nnodes_ie, dtype=torch.float64)\n', (11794, 11836), False, 'import torch\n'), ((12149, 12168), 'torch.inverse', 'torch.inverse', (['mass'], {}), '(mass)\n', (12162, 12168), False, 'import torch\n'), ((13176, 13229), 'torch.empty', 'torch.empty', (['nnodes_ie', 'nterms_s'], {'dtype': 'torch.float64'}), '(nnodes_ie, nterms_s, dtype=torch.float64)\n', (13187, 13229), False, 'import torch\n'), ((13237, 13290), 'torch.empty', 'torch.empty', (['nnodes_ie', 'nterms_s'], {'dtype': 'torch.float64'}), '(nnodes_ie, nterms_s, dtype=torch.float64)\n', (13248, 13290), False, 'import torch\n'), ((13298, 13351), 'torch.empty', 'torch.empty', (['nnodes_ie', 'nterms_s'], {'dtype': 'torch.float64'}), '(nnodes_ie, nterms_s, dtype=torch.float64)\n', (13309, 13351), False, 'import torch\n'), ((14520, 14533), 'numpy.zeros', 'np.zeros', (['[1]'], {}), '([1])\n', (14528, 14533), True, 'import numpy as np\n'), ((14851, 14882), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nterms_s]'], {}), '([nnodes_ie, nterms_s])\n', (14859, 14882), True, 'import numpy as np\n'), ((15115, 15135), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (15125, 15135), True, 'import numpy as np\n'), ((15212, 15243), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nterms_s]'], {}), '([nnodes_ie, nterms_s])\n', (15220, 15243), True, 'import numpy as np\n'), ((15476, 15496), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (15486, 15496), True, 'import numpy as np\n'), ((15573, 15604), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nterms_s]'], {}), '([nnodes_ie, nterms_s])\n', (15581, 15604), True, 'import numpy as np\n'), ((15837, 15857), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (15847, 15857), True, 'import numpy as np\n'), ((16958, 16997), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_r * ndirs]'], {}), '([nnodes_ie, nnodes_r * ndirs])\n', (16966, 16997), True, 'import numpy as np\n'), ((17692, 17712), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (17702, 17712), True, 'import numpy as np\n'), ((17848, 17887), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_r * ndirs]'], {}), '([nnodes_ie, nnodes_r * ndirs])\n', (17856, 17887), True, 'import numpy as np\n'), ((18582, 18602), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (18592, 18602), True, 'import numpy as np\n'), ((18738, 18777), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_r * ndirs]'], {}), '([nnodes_ie, nnodes_r * ndirs])\n', (18746, 18777), True, 'import numpy as np\n'), ((19472, 19492), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (19482, 19492), True, 'import numpy as np\n'), ((577, 598), 'sys.stdout.write', 'sys.stdout.write', 
(['msg'], {}), '(msg)\n', (593, 598), False, 'import sys\n'), ((603, 621), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (619, 621), False, 'import sys\n'), ((638, 686), 'os.system', 'os.system', (["('cls' if os.name == 'nt' else 'clear')"], {}), "('cls' if os.name == 'nt' else 'clear')\n", (647, 686), False, 'import os\n'), ((9796, 9847), 'torch.dot', 'torch.dot', (['ddxi_i[(inode), :]', 'coords_modes[:, (0)]'], {}), '(ddxi_i[(inode), :], coords_modes[:, (0)])\n', (9805, 9847), False, 'import torch\n'), ((9871, 9923), 'torch.dot', 'torch.dot', (['ddeta_i[(inode), :]', 'coords_modes[:, (0)]'], {}), '(ddeta_i[(inode), :], coords_modes[:, (0)])\n', (9880, 9923), False, 'import torch\n'), ((9946, 9999), 'torch.dot', 'torch.dot', (['ddzeta_i[(inode), :]', 'coords_modes[:, (0)]'], {}), '(ddzeta_i[(inode), :], coords_modes[:, (0)])\n', (9955, 9999), False, 'import torch\n'), ((10021, 10072), 'torch.dot', 'torch.dot', (['ddxi_i[(inode), :]', 'coords_modes[:, (1)]'], {}), '(ddxi_i[(inode), :], coords_modes[:, (1)])\n', (10030, 10072), False, 'import torch\n'), ((10096, 10148), 'torch.dot', 'torch.dot', (['ddeta_i[(inode), :]', 'coords_modes[:, (1)]'], {}), '(ddeta_i[(inode), :], coords_modes[:, (1)])\n', (10105, 10148), False, 'import torch\n'), ((10171, 10224), 'torch.dot', 'torch.dot', (['ddzeta_i[(inode), :]', 'coords_modes[:, (1)]'], {}), '(ddzeta_i[(inode), :], coords_modes[:, (1)])\n', (10180, 10224), False, 'import torch\n'), ((10246, 10297), 'torch.dot', 'torch.dot', (['ddxi_i[(inode), :]', 'coords_modes[:, (2)]'], {}), '(ddxi_i[(inode), :], coords_modes[:, (2)])\n', (10255, 10297), False, 'import torch\n'), ((10321, 10373), 'torch.dot', 'torch.dot', (['ddeta_i[(inode), :]', 'coords_modes[:, (2)]'], {}), '(ddeta_i[(inode), :], coords_modes[:, (2)])\n', (10330, 10373), False, 'import torch\n'), ((10396, 10449), 'torch.dot', 'torch.dot', (['ddzeta_i[(inode), :]', 'coords_modes[:, (2)]'], {}), '(ddzeta_i[(inode), :], coords_modes[:, (2)])\n', (10405, 10449), False, 'import torch\n'), ((10577, 10614), 'torch.mm', 'torch.mm', (['val_i', 'coords_modes[:, (0)]'], {}), '(val_i, coords_modes[:, (0)])\n', (10585, 10614), False, 'import torch\n'), ((11062, 11100), 'torch.empty', 'torch.empty', (['(3)', '(3)'], {'dtype': 'torch.float64'}), '(3, 3, dtype=torch.float64)\n', (11073, 11100), False, 'import torch\n'), ((11116, 11154), 'torch.empty', 'torch.empty', (['(3)', '(3)'], {'dtype': 'torch.float64'}), '(3, 3, dtype=torch.float64)\n', (11127, 11154), False, 'import torch\n'), ((11413, 11433), 'torch.det', 'torch.det', (['ijacobian'], {}), '(ijacobian)\n', (11422, 11433), False, 'import torch\n'), ((11453, 11477), 'torch.inverse', 'torch.inverse', (['ijacobian'], {}), '(ijacobian)\n', (11466, 11477), False, 'import torch\n'), ((12115, 12131), 'torch.t', 'torch.t', (['val_tmp'], {}), '(val_tmp)\n', (12122, 12131), False, 'import torch\n'), ((14197, 14213), 'numpy.zeros', 'np.zeros', (['[3, 3]'], {}), '([3, 3])\n', (14205, 14213), True, 'import numpy as np\n'), ((14445, 14465), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (14455, 14465), True, 'import numpy as np\n'), ((14669, 14689), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (14679, 14689), True, 'import numpy as np\n'), ((19599, 19626), 'numpy.zeros', 'np.zeros', (['[nnodes_r, ndirs]'], {}), '([nnodes_r, ndirs])\n', (19607, 19626), True, 'import numpy as np\n'), ((20194, 20214), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (20204, 20214), True, 'import numpy as 
np\n'), ((2379, 2397), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2386, 2397), True, 'import numpy as np\n'), ((2472, 2490), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2479, 2490), True, 'import numpy as np\n'), ((2601, 2619), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2608, 2619), True, 'import numpy as np\n'), ((2637, 2655), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2644, 2655), True, 'import numpy as np\n'), ((2748, 2766), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2755, 2766), True, 'import numpy as np\n'), ((2877, 2895), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2884, 2895), True, 'import numpy as np\n'), ((2895, 2913), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2902, 2913), True, 'import numpy as np\n'), ((2988, 3006), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2995, 3006), True, 'import numpy as np\n'), ((3006, 3024), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3013, 3024), True, 'import numpy as np\n'), ((3117, 3135), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3124, 3135), True, 'import numpy as np\n'), ((3135, 3153), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3142, 3153), True, 'import numpy as np\n'), ((3153, 3171), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3160, 3171), True, 'import numpy as np\n'), ((3521, 3539), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3528, 3539), True, 'import numpy as np\n'), ((3606, 3624), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3613, 3624), True, 'import numpy as np\n'), ((3709, 3727), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3716, 3727), True, 'import numpy as np\n'), ((3794, 3812), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3801, 3812), True, 'import numpy as np\n'), ((3861, 3879), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3868, 3879), True, 'import numpy as np\n'), ((3879, 3897), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3886, 3897), True, 'import numpy as np\n'), ((3946, 3964), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3953, 3964), True, 'import numpy as np\n'), ((3964, 3982), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3971, 3982), True, 'import numpy as np\n'), ((4210, 4228), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4217, 4228), True, 'import numpy as np\n'), ((4380, 4398), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4387, 4398), True, 'import numpy as np\n'), ((4487, 4505), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4494, 4505), True, 'import numpy as np\n'), ((4550, 4568), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4557, 4568), True, 'import numpy as np\n'), ((4572, 4590), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4579, 4590), True, 'import numpy as np\n'), ((4657, 4675), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4664, 4675), True, 'import numpy as np\n'), ((4720, 4738), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4727, 4738), True, 'import numpy as np\n'), ((4742, 4760), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4749, 4760), True, 'import numpy as np\n'), ((4990, 5008), 'numpy.sqrt', 'np.sqrt', 
(['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4997, 5008), True, 'import numpy as np\n'), ((5093, 5111), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5100, 5111), True, 'import numpy as np\n'), ((5160, 5178), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5167, 5178), True, 'import numpy as np\n'), ((5178, 5196), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5185, 5196), True, 'import numpy as np\n'), ((5330, 5348), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5337, 5348), True, 'import numpy as np\n'), ((5433, 5451), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5440, 5451), True, 'import numpy as np\n'), ((5500, 5518), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5507, 5518), True, 'import numpy as np\n'), ((5518, 5536), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5525, 5536), True, 'import numpy as np\n'), ((5953, 5971), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5960, 5971), True, 'import numpy as np\n'), ((5971, 5989), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5978, 5989), True, 'import numpy as np\n'), ((6030, 6048), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6037, 6048), True, 'import numpy as np\n'), ((6089, 6107), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6096, 6107), True, 'import numpy as np\n'), ((6166, 6184), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6173, 6184), True, 'import numpy as np\n'), ((6189, 6207), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6196, 6207), True, 'import numpy as np\n'), ((6266, 6284), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6273, 6284), True, 'import numpy as np\n'), ((6284, 6302), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6291, 6302), True, 'import numpy as np\n'), ((6611, 6629), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6618, 6629), True, 'import numpy as np\n'), ((6651, 6669), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6658, 6669), True, 'import numpy as np\n'), ((6747, 6765), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6754, 6765), True, 'import numpy as np\n'), ((6787, 6805), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6794, 6805), True, 'import numpy as np\n'), ((6847, 6865), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6854, 6865), True, 'import numpy as np\n'), ((6865, 6883), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6872, 6883), True, 'import numpy as np\n'), ((6887, 6905), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6894, 6905), True, 'import numpy as np\n'), ((6905, 6923), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6912, 6923), True, 'import numpy as np\n'), ((7117, 7135), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7124, 7135), True, 'import numpy as np\n'), ((7144, 7162), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7151, 7162), True, 'import numpy as np\n'), ((7218, 7236), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7225, 7236), True, 'import numpy as np\n'), ((7263, 7281), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7270, 7281), True, 'import numpy as np\n'), ((7319, 7337), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7326, 7337), 
True, 'import numpy as np\n'), ((7355, 7373), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7362, 7373), True, 'import numpy as np\n'), ((7438, 7456), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7445, 7456), True, 'import numpy as np\n'), ((7456, 7474), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7463, 7474), True, 'import numpy as np\n'), ((7803, 7821), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7810, 7821), True, 'import numpy as np\n'), ((7821, 7839), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7828, 7839), True, 'import numpy as np\n'), ((7903, 7921), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7910, 7921), True, 'import numpy as np\n'), ((7966, 7984), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7973, 7984), True, 'import numpy as np\n'), ((8021, 8039), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8028, 8039), True, 'import numpy as np\n'), ((8039, 8057), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8046, 8057), True, 'import numpy as np\n'), ((8057, 8075), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8064, 8075), True, 'import numpy as np\n'), ((8084, 8102), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8091, 8102), True, 'import numpy as np\n'), ((8296, 8314), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8303, 8314), True, 'import numpy as np\n'), ((8314, 8332), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8321, 8332), True, 'import numpy as np\n'), ((8388, 8406), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8395, 8406), True, 'import numpy as np\n'), ((8433, 8451), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8440, 8451), True, 'import numpy as np\n'), ((8484, 8502), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8491, 8502), True, 'import numpy as np\n'), ((8534, 8552), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8541, 8552), True, 'import numpy as np\n'), ((8603, 8621), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8610, 8621), True, 'import numpy as np\n'), ((8626, 8644), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8633, 8644), True, 'import numpy as np\n'), ((8973, 8991), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8980, 8991), True, 'import numpy as np\n'), ((9000, 9018), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9007, 9018), True, 'import numpy as np\n'), ((9069, 9087), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9076, 9087), True, 'import numpy as np\n'), ((9136, 9154), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9143, 9154), True, 'import numpy as np\n'), ((9187, 9205), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9194, 9205), True, 'import numpy as np\n'), ((9209, 9227), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9216, 9227), True, 'import numpy as np\n'), ((9236, 9254), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9243, 9254), True, 'import numpy as np\n'), ((9254, 9272), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9261, 9272), True, 'import numpy as np\n'), ((12251, 12265), 'torch.t', 'torch.t', (['val_1'], {}), '(val_1)\n', (12258, 12265), False, 'import torch\n'), ((12316, 12330), 'torch.t', 
'torch.t', (['val_2'], {}), '(val_2)\n', (12323, 12330), False, 'import torch\n'), ((12381, 12395), 'torch.t', 'torch.t', (['val_3'], {}), '(val_3)\n', (12388, 12395), False, 'import torch\n'), ((12446, 12460), 'torch.t', 'torch.t', (['val_4'], {}), '(val_4)\n', (12453, 12460), False, 'import torch\n'), ((12511, 12525), 'torch.t', 'torch.t', (['val_5'], {}), '(val_5)\n', (12518, 12525), False, 'import torch\n'), ((12576, 12590), 'torch.t', 'torch.t', (['val_6'], {}), '(val_6)\n', (12583, 12590), False, 'import torch\n'), ((12735, 12749), 'torch.t', 'torch.t', (['val_1'], {}), '(val_1)\n', (12742, 12749), False, 'import torch\n'), ((12801, 12815), 'torch.t', 'torch.t', (['val_2'], {}), '(val_2)\n', (12808, 12815), False, 'import torch\n'), ((12867, 12881), 'torch.t', 'torch.t', (['val_3'], {}), '(val_3)\n', (12874, 12881), False, 'import torch\n'), ((12933, 12947), 'torch.t', 'torch.t', (['val_4'], {}), '(val_4)\n', (12940, 12947), False, 'import torch\n'), ((12999, 13013), 'torch.t', 'torch.t', (['val_5'], {}), '(val_5)\n', (13006, 13013), False, 'import torch\n'), ((13065, 13079), 'torch.t', 'torch.t', (['val_6'], {}), '(val_6)\n', (13072, 13079), False, 'import torch\n'), ((20445, 20475), 'numpy.zeros', 'np.zeros', (['[nterms_s, nterms_s]'], {}), '([nterms_s, nterms_s])\n', (20453, 20475), True, 'import numpy as np\n'), ((20972, 20992), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (20982, 20992), True, 'import numpy as np\n'), ((21232, 21262), 'numpy.zeros', 'np.zeros', (['[nterms_s, nterms_s]'], {}), '([nterms_s, nterms_s])\n', (21240, 21262), True, 'import numpy as np\n'), ((21762, 21782), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (21772, 21782), True, 'import numpy as np\n'), ((22036, 22068), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_if]'], {}), '([nnodes_ie, nnodes_if])\n', (22044, 22068), True, 'import numpy as np\n'), ((22578, 22598), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (22588, 22598), True, 'import numpy as np\n'), ((22830, 22862), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_if]'], {}), '([nnodes_ie, nnodes_if])\n', (22838, 22862), True, 'import numpy as np\n'), ((23372, 23392), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (23382, 23392), True, 'import numpy as np\n'), ((23624, 23656), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_if]'], {}), '([nnodes_ie, nnodes_if])\n', (23632, 23656), True, 'import numpy as np\n'), ((24166, 24186), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (24176, 24186), True, 'import numpy as np\n'), ((24418, 24450), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_if]'], {}), '([nnodes_ie, nnodes_if])\n', (24426, 24450), True, 'import numpy as np\n'), ((24960, 24980), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (24970, 24980), True, 'import numpy as np\n'), ((25212, 25244), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_if]'], {}), '([nnodes_ie, nnodes_if])\n', (25220, 25244), True, 'import numpy as np\n'), ((25754, 25774), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (25764, 25774), True, 'import numpy as np\n'), ((26006, 26038), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nnodes_if]'], {}), '([nnodes_ie, nnodes_if])\n', (26014, 26038), True, 'import numpy as np\n'), ((26548, 26568), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (26558, 26568), True, 'import numpy as np\n'), ((26825, 26857), 'numpy.zeros', 'np.zeros', (['[nnodes_if, 
nnodes_if]'], {}), '([nnodes_if, nnodes_if])\n', (26833, 26857), True, 'import numpy as np\n'), ((27368, 27388), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (27378, 27388), True, 'import numpy as np\n'), ((27622, 27654), 'numpy.zeros', 'np.zeros', (['[nnodes_if, nnodes_if]'], {}), '([nnodes_if, nnodes_if])\n', (27630, 27654), True, 'import numpy as np\n'), ((28165, 28185), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (28175, 28185), True, 'import numpy as np\n'), ((28419, 28451), 'numpy.zeros', 'np.zeros', (['[nnodes_if, nnodes_if]'], {}), '([nnodes_if, nnodes_if])\n', (28427, 28451), True, 'import numpy as np\n'), ((28962, 28982), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (28972, 28982), True, 'import numpy as np\n'), ((29216, 29248), 'numpy.zeros', 'np.zeros', (['[nnodes_if, nnodes_if]'], {}), '([nnodes_if, nnodes_if])\n', (29224, 29248), True, 'import numpy as np\n'), ((29759, 29779), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (29769, 29779), True, 'import numpy as np\n'), ((30013, 30045), 'numpy.zeros', 'np.zeros', (['[nnodes_if, nnodes_if]'], {}), '([nnodes_if, nnodes_if])\n', (30021, 30045), True, 'import numpy as np\n'), ((30556, 30576), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (30566, 30576), True, 'import numpy as np\n'), ((30810, 30842), 'numpy.zeros', 'np.zeros', (['[nnodes_if, nnodes_if]'], {}), '([nnodes_if, nnodes_if])\n', (30818, 30842), True, 'import numpy as np\n'), ((31353, 31373), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (31363, 31373), True, 'import numpy as np\n'), ((31606, 31637), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nterms_s]'], {}), '([nnodes_ie, nterms_s])\n', (31614, 31637), True, 'import numpy as np\n'), ((32137, 32157), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (32147, 32157), True, 'import numpy as np\n'), ((32390, 32421), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nterms_s]'], {}), '([nnodes_ie, nterms_s])\n', (32398, 32421), True, 'import numpy as np\n'), ((32921, 32941), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (32931, 32941), True, 'import numpy as np\n'), ((33174, 33205), 'numpy.zeros', 'np.zeros', (['[nnodes_ie, nterms_s]'], {}), '([nnodes_ie, nterms_s])\n', (33182, 33205), True, 'import numpy as np\n'), ((33705, 33725), 'numpy.savetxt', 'np.savetxt', (['f', 'array'], {}), '(f, array)\n', (33715, 33725), True, 'import numpy as np\n'), ((2214, 2232), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2221, 2232), True, 'import numpy as np\n'), ((2232, 2250), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2239, 2250), True, 'import numpy as np\n'), ((2250, 2268), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2257, 2268), True, 'import numpy as np\n'), ((2303, 2321), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2310, 2321), True, 'import numpy as np\n'), ((2343, 2361), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2350, 2361), True, 'import numpy as np\n'), ((2361, 2379), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2368, 2379), True, 'import numpy as np\n'), ((2432, 2450), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2439, 2450), True, 'import numpy as np\n'), ((2490, 2508), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2497, 2508), True, 'import numpy as np\n'), ((2508, 2526), 
'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2515, 2526), True, 'import numpy as np\n'), ((2561, 2579), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2568, 2579), True, 'import numpy as np\n'), ((2619, 2637), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2626, 2637), True, 'import numpy as np\n'), ((2690, 2708), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2697, 2708), True, 'import numpy as np\n'), ((2730, 2748), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2737, 2748), True, 'import numpy as np\n'), ((2766, 2784), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2773, 2784), True, 'import numpy as np\n'), ((2819, 2837), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2826, 2837), True, 'import numpy as np\n'), ((2859, 2877), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2866, 2877), True, 'import numpy as np\n'), ((2948, 2966), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (2955, 2966), True, 'import numpy as np\n'), ((3024, 3042), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3031, 3042), True, 'import numpy as np\n'), ((3077, 3095), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3084, 3095), True, 'import numpy as np\n'), ((3206, 3224), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3213, 3224), True, 'import numpy as np\n'), ((3351, 3369), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3358, 3369), True, 'import numpy as np\n'), ((3369, 3387), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3376, 3387), True, 'import numpy as np\n'), ((3436, 3454), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3443, 3454), True, 'import numpy as np\n'), ((3454, 3472), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3461, 3472), True, 'import numpy as np\n'), ((3539, 3557), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3546, 3557), True, 'import numpy as np\n'), ((3624, 3642), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3631, 3642), True, 'import numpy as np\n'), ((3691, 3709), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3698, 3709), True, 'import numpy as np\n'), ((3776, 3794), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (3783, 3794), True, 'import numpy as np\n'), ((4125, 4143), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4132, 4143), True, 'import numpy as np\n'), ((4147, 4165), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4154, 4165), True, 'import numpy as np\n'), ((4232, 4250), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4239, 4250), True, 'import numpy as np\n'), ((4295, 4313), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4302, 4313), True, 'import numpy as np\n'), ((4317, 4335), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4324, 4335), True, 'import numpy as np\n'), ((4402, 4420), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4409, 4420), True, 'import numpy as np\n'), ((4465, 4483), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4472, 4483), True, 'import numpy as np\n'), ((4635, 4653), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4642, 4653), True, 'import numpy as np\n'), ((4905, 4923), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 
/ 3.0)\n', (4912, 4923), True, 'import numpy as np\n'), ((4923, 4941), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (4930, 4941), True, 'import numpy as np\n'), ((5008, 5026), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5015, 5026), True, 'import numpy as np\n'), ((5075, 5093), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5082, 5093), True, 'import numpy as np\n'), ((5245, 5263), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5252, 5263), True, 'import numpy as np\n'), ((5263, 5281), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5270, 5281), True, 'import numpy as np\n'), ((5348, 5366), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5355, 5366), True, 'import numpy as np\n'), ((5415, 5433), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5422, 5433), True, 'import numpy as np\n'), ((5912, 5930), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5919, 5930), True, 'import numpy as np\n'), ((5930, 5948), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (5937, 5948), True, 'import numpy as np\n'), ((6048, 6066), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6055, 6066), True, 'import numpy as np\n'), ((6071, 6089), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6078, 6089), True, 'import numpy as np\n'), ((6148, 6166), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6155, 6166), True, 'import numpy as np\n'), ((6207, 6225), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6214, 6225), True, 'import numpy as np\n'), ((6307, 6325), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6314, 6325), True, 'import numpy as np\n'), ((6325, 6343), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6332, 6343), True, 'import numpy as np\n'), ((6493, 6511), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6500, 6511), True, 'import numpy as np\n'), ((6511, 6529), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6518, 6529), True, 'import numpy as np\n'), ((6533, 6551), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6540, 6551), True, 'import numpy as np\n'), ((6551, 6569), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6558, 6569), True, 'import numpy as np\n'), ((6629, 6647), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6636, 6647), True, 'import numpy as np\n'), ((6669, 6687), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6676, 6687), True, 'import numpy as np\n'), ((6729, 6747), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6736, 6747), True, 'import numpy as np\n'), ((6769, 6787), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (6776, 6787), True, 'import numpy as np\n'), ((7081, 7099), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7088, 7099), True, 'import numpy as np\n'), ((7099, 7117), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7106, 7117), True, 'import numpy as np\n'), ((7200, 7218), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7207, 7218), True, 'import numpy as np\n'), ((7236, 7254), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7243, 7254), True, 'import numpy as np\n'), ((7337, 7355), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7344, 7355), True, 'import numpy as 
np\n'), ((7382, 7400), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7389, 7400), True, 'import numpy as np\n'), ((7474, 7492), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7481, 7492), True, 'import numpy as np\n'), ((7501, 7519), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7508, 7519), True, 'import numpy as np\n'), ((7667, 7685), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7674, 7685), True, 'import numpy as np\n'), ((7685, 7703), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7692, 7703), True, 'import numpy as np\n'), ((7703, 7721), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7710, 7721), True, 'import numpy as np\n'), ((7730, 7748), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7737, 7748), True, 'import numpy as np\n'), ((7785, 7803), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7792, 7803), True, 'import numpy as np\n'), ((7848, 7866), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7855, 7866), True, 'import numpy as np\n'), ((7921, 7939), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7928, 7939), True, 'import numpy as np\n'), ((7939, 7957), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (7946, 7957), True, 'import numpy as np\n'), ((8246, 8264), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8253, 8264), True, 'import numpy as np\n'), ((8269, 8287), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8276, 8287), True, 'import numpy as np\n'), ((8365, 8383), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8372, 8383), True, 'import numpy as np\n'), ((8415, 8433), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8422, 8433), True, 'import numpy as np\n'), ((8507, 8525), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8514, 8525), True, 'import numpy as np\n'), ((8552, 8570), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8559, 8570), True, 'import numpy as np\n'), ((8653, 8671), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8660, 8671), True, 'import numpy as np\n'), ((8671, 8689), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8678, 8689), True, 'import numpy as np\n'), ((8833, 8851), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8840, 8851), True, 'import numpy as np\n'), ((8855, 8873), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8862, 8873), True, 'import numpy as np\n'), ((8882, 8900), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8889, 8900), True, 'import numpy as np\n'), ((8900, 8918), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8907, 8918), True, 'import numpy as np\n'), ((8951, 8969), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (8958, 8969), True, 'import numpy as np\n'), ((9018, 9036), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9025, 9036), True, 'import numpy as np\n'), ((9091, 9109), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9098, 9109), True, 'import numpy as np\n'), ((9118, 9136), 'numpy.sqrt', 'np.sqrt', (['(1.0 / 3.0)'], {}), '(1.0 / 3.0)\n', (9125, 9136), True, 'import numpy as np\n'), ((16053, 16069), 'numpy.zeros', 'np.zeros', (['[3, 3]'], {}), '([3, 3])\n', (16061, 16069), True, 'import numpy as np\n'), ((16778, 16798), 'numpy.savetxt', 'np.savetxt', 
(['f', 'array'], {}), '(f, array)\n', (16788, 16798), True, 'import numpy as np\n')] |
brian-thomas/osr_stat_generator | osr_stat_generator/generator.py | 89f6a71e17c274befa3af7222a24c34a77f1f40e |
"""
OSR (LOTFP) stat generator.
"""
import random
def d(num_sides):
"""
Represents rolling a die of size 'num_sides'.
Returns random number from that size die
"""
return random.randint(1, num_sides)
def xdy(num_dice, num_sides):
""" represents rolling num_dice of size num_sides.
Returns random number from that many dice being 'rolled'.
"""
return sum(d(num_sides) for i in range(num_dice))
class LotFP_Stat (object):
    @staticmethod
    def _get_bonus(attribute):
        """Return the LotFP ability-score modifier for the given attribute value."""
if attribute <= 3:
return -3
if attribute >= 4 and attribute <= 5:
return -2
if attribute >= 6 and attribute <= 8:
return -1
if attribute >= 13 and attribute <= 15:
return 1
if attribute >= 16 and attribute <= 17:
return 2
if attribute >= 18:
return 3
# the default
return 0
@property
def bonus(self): return self._bonus
@property
def name(self): return self._name
@property
def value(self): return self._value
def __str__(self):
return (f"%s : %s(%s)" % (self.name, self.value, self.bonus))
def __init__(self, name, value):
self._name = name
self._value = value
self._bonus = LotFP_Stat._get_bonus(value)
class Stat_Set(object):
"""
Define a package of OSR/DnD stats
"""
_Stat_Name = ["CON", "DEX", "INT", "WIS", "STR", "CHA"]
@property
def stats(self)->list:
return self._stats
def sum(self)->int:
# get a summed value for all stats in this set
ssum = 0
for s in self.stats:
ssum += s.value
return ssum
@property
def is_hopeless(self)->bool:
""" Determine if the character is so poorly stated they have
bonus sum less than 1.
"""
bonuses = [s.bonus for s in self._stats]
if sum(bonuses) < 1:
return True
return False
def __str__(self)->str:
string = ""
        for stat in self.stats:
string = string + " " + str(stat.value) + " ("+str(stat.bonus) + ")"
return string
def __init__(self, stats):
self._stats = []
for i in range(0,len(stats)):
self._stats.append(LotFP_Stat(Stat_Set._Stat_Name[i], stats[i]))
def generate_stats (nrof_sets:int=1, no_hopeless_char:bool=True)->list:
""" Generate stats for a character.
"""
stat_sets = []
while (nrof_sets > 0):
stats = []
for i in range (0, 6):
stats.append(xdy(3,6))
stat_set = Stat_Set(stats)
# no "hopeless" characters
if no_hopeless_char and stat_set.is_hopeless:
continue
stat_sets.append(stat_set)
nrof_sets -= 1
return stat_sets
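
# Illustrative usage sketch (not part of the original module): roll a few stat
# sets with the functions defined above and print them.
if __name__ == "__main__":
    for stat_set in generate_stats(nrof_sets=3):
        for stat in stat_set.stats:
            print(stat)
        print("total:", stat_set.sum())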
| [((198, 226), 'random.randint', 'random.randint', (['(1)', 'num_sides'], {}), '(1, num_sides)\n', (212, 226), False, 'import random\n')] |
nick6655/management-sdk-python | cohesity_management_sdk/models/health_tile.py | 88e792cb83e5c24a22af495b220c145d0c45841d | # -*- coding: utf-8 -*-
# Copyright 2021 Cohesity Inc.
import cohesity_management_sdk.models.alert
class HealthTile(object):
"""Implementation of the 'HealthTile' model.
Health for Dashboard.
Attributes:
capacity_bytes (long|int): Raw Cluster Capacity in Bytes. This is not
usable capacity and does not take replication factor into
account.
cluster_cloud_usage_bytes (long|int): Usage in Bytes on the cloud.
last_day_alerts (list of Alert): Alerts in last 24 hours.
last_day_num_criticals (long|int): Number of Critical Alerts.
last_day_num_warnings (long|int): Number of Warning Alerts.
num_nodes (int): Number of nodes in the cluster.
num_nodes_with_issues (int): Number of nodes in the cluster that are
unhealthy.
percent_full (float): Percent the cluster is full.
raw_used_bytes (long|int): Raw Bytes used in the cluster.
"""
# Create a mapping from Model property names to API property names
_names = {
"capacity_bytes":'capacityBytes',
"cluster_cloud_usage_bytes":'clusterCloudUsageBytes',
"last_day_alerts":'lastDayAlerts',
"last_day_num_criticals":'lastDayNumCriticals',
"last_day_num_warnings":'lastDayNumWarnings',
"num_nodes":'numNodes',
"num_nodes_with_issues":'numNodesWithIssues',
"percent_full":'percentFull',
"raw_used_bytes":'rawUsedBytes'
}
def __init__(self,
capacity_bytes=None,
cluster_cloud_usage_bytes=None,
last_day_alerts=None,
last_day_num_criticals=None,
last_day_num_warnings=None,
num_nodes=None,
num_nodes_with_issues=None,
percent_full=None,
raw_used_bytes=None):
"""Constructor for the HealthTile class"""
# Initialize members of the class
self.capacity_bytes = capacity_bytes
self.cluster_cloud_usage_bytes = cluster_cloud_usage_bytes
self.last_day_alerts = last_day_alerts
self.last_day_num_criticals = last_day_num_criticals
self.last_day_num_warnings = last_day_num_warnings
self.num_nodes = num_nodes
self.num_nodes_with_issues = num_nodes_with_issues
self.percent_full = percent_full
self.raw_used_bytes = raw_used_bytes
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
capacity_bytes = dictionary.get('capacityBytes')
cluster_cloud_usage_bytes = dictionary.get('clusterCloudUsageBytes')
last_day_alerts = None
        if dictionary.get('lastDayAlerts') is not None:
last_day_alerts = list()
for structure in dictionary.get('lastDayAlerts'):
last_day_alerts.append(cohesity_management_sdk.models.alert.Alert.from_dictionary(structure))
last_day_num_criticals = dictionary.get('lastDayNumCriticals')
last_day_num_warnings = dictionary.get('lastDayNumWarnings')
num_nodes = dictionary.get('numNodes')
num_nodes_with_issues = dictionary.get('numNodesWithIssues')
percent_full = dictionary.get('percentFull')
raw_used_bytes = dictionary.get('rawUsedBytes')
# Return an object of this model
return cls(capacity_bytes,
cluster_cloud_usage_bytes,
last_day_alerts,
last_day_num_criticals,
last_day_num_warnings,
num_nodes,
num_nodes_with_issues,
percent_full,
raw_used_bytes)
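
# Illustrative usage sketch (not part of the generated SDK file): build a
# HealthTile from an API-style dictionary; keys follow the _names mapping above.
if __name__ == '__main__':
    _tile = HealthTile.from_dictionary(
        {'capacityBytes': 1024 ** 4, 'numNodes': 3, 'percentFull': 42.5})
    print(_tile.capacity_bytes, _tile.num_nodes, _tile.percent_full)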
| [] |
nihanjali/PageRank | TextRank/textrank.py | baea9d89fb962fd1311a61127123bf36d9d2dd38 | import os
import sys
import copy
import collections
import nltk
import nltk.tokenize
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import pagerank
'''
textrank.py
-----------
This module implements TextRank, an unsupervised keyword
significance scoring algorithm. TextRank builds a weighted
graph representation of a document using words as nodes
and coocurrence frequencies between pairs of words as edge
weights. It then applies PageRank to this graph, and
treats the PageRank score of each word as its significance.
The original research paper proposing this algorithm is
available here:
https://web.eecs.umich.edu/~mihalcea/papers/mihalcea.emnlp04.pdf
'''
## TextRank #####################################################################################
def __preprocessDocument(document, relevantPosTags):
'''
This function accepts a string representation
of a document as input, and returns a tokenized
list of words corresponding to that document.
'''
words = __tokenizeWords(document)
posTags = __tagPartsOfSpeech(words)
# Filter out words with irrelevant POS tags
filteredWords = []
for index, word in enumerate(words):
word = word.lower()
tag = posTags[index]
if not __isPunctuation(word) and tag in relevantPosTags:
filteredWords.append(word)
return filteredWords
def textrank(document, windowSize=2, rsp=0.15, relevantPosTags=["NN", "ADJ"]):
'''
This function accepts a string representation
    of a document and three hyperparameters as input.
    It returns a Pandas Series (which can be treated
as a dictionary) that maps words in the
document to their associated TextRank significance
scores. Note that only words that are classified
as having relevant POS tags are present in the
map.
'''
# Tokenize document:
words = __preprocessDocument(document, relevantPosTags)
# Build a weighted graph where nodes are words and
# edge weights are the number of times words cooccur
# within a window of predetermined size. In doing so
    # we double count each cooccurrence, but that will not
# alter relative weights which ultimately determine
# TextRank scores.
edgeWeights = collections.defaultdict(lambda: collections.Counter())
for index, word in enumerate(words):
for otherIndex in range(index - windowSize, index + windowSize + 1):
if otherIndex >= 0 and otherIndex < len(words) and otherIndex != index:
otherWord = words[otherIndex]
edgeWeights[word][otherWord] += 1.0
# Apply PageRank to the weighted graph:
wordProbabilities = pagerank.powerIteration(edgeWeights, rsp=rsp)
    wordProbabilities = wordProbabilities.sort_values(ascending=False)
return wordProbabilities
## NLP utilities ################################################################################
def __asciiOnly(string):
return "".join([char if ord(char) < 128 else "" for char in string])
def __isPunctuation(word):
return word in [".", "?", "!", ",", "\"", ":", ";", "'", "-"]
def __tagPartsOfSpeech(words):
return [pair[1] for pair in nltk.pos_tag(words)]
def __tokenizeWords(sentence):
return nltk.tokenize.word_tokenize(sentence)
## tests ########################################################################################
def applyTextRank(fileName, title="a document"):
    print()
    print("Reading \"%s\" ..." % title)
    filePath = os.path.join(os.path.dirname(__file__), fileName)
    document = open(filePath).read()
    document = __asciiOnly(document)
    print("Applying TextRank to \"%s\" ..." % title)
    keywordScores = textrank(document)
    print()
    header = "Keyword Significance Scores for \"%s\":" % title
    print(header)
    print("-" * len(header))
    print(keywordScores)
    print()
def main():
applyTextRank("Cinderalla.txt", "Cinderalla")
applyTextRank("Beauty_and_the_Beast.txt", "Beauty and the Beast")
applyTextRank("Rapunzel.txt", "Rapunzel")
if __name__ == "__main__":
main()
| [] |
lionelkusch/neurolib | tests/test_exploration.py | 714eef48616af0ebdb62decc84826221472398f9 | import logging
import os
import random
import string
import time
import unittest
import neurolib.utils.paths as paths
import neurolib.utils.pypetUtils as pu
import numpy as np
import pytest
import xarray as xr
from neurolib.models.aln import ALNModel
from neurolib.models.fhn import FHNModel
from neurolib.models.multimodel import MultiModel
from neurolib.models.multimodel.builder.fitzhugh_nagumo import FitzHughNagumoNetwork
from neurolib.optimize.exploration import BoxSearch
from neurolib.utils.loadData import Dataset
from neurolib.utils.parameterSpace import ParameterSpace
def randomString(stringLength=10):
"""Generate a random string of fixed length"""
letters = string.ascii_lowercase
return "".join(random.choice(letters) for i in range(stringLength))
class TestBoxSearch(unittest.TestCase):
"""
Basic tests.
"""
def test_assertions(self):
parameters = ParameterSpace(
{"mue_ext_mean": np.linspace(0, 3, 2), "mui_ext_mean": np.linspace(0, 3, 2)}, kind="sequence"
)
with pytest.raises(AssertionError):
_ = BoxSearch(model=None, parameterSpace=parameters)
with pytest.raises(AssertionError):
_ = BoxSearch(model=None, parameterSpace=None)
with pytest.raises(AssertionError):
_ = BoxSearch(model=None, parameterSpace=parameters, evalFunction=None)
def test_fillin_default_parameters_for_sequential(self):
in_dict = {"a": [None, None, 1, 2], "b": [4, 5, None, None]}
SHOULD_BE = {"a": [0, 0, 1, 2], "b": [4, 5, 12, 12]}
model_params = {"a": 0, "b": 12}
parameters = ParameterSpace({"mue_ext_mean": [1.0, 2.0]})
search = BoxSearch(model=ALNModel(), parameterSpace=parameters)
out_dict = search._fillin_default_parameters_for_sequential(in_dict, model_params)
self.assertDictEqual(out_dict, SHOULD_BE)
class TestExplorationSingleNode(unittest.TestCase):
"""
ALN single node exploration.
"""
def test_single_node(self):
start = time.time()
model = ALNModel()
parameters = ParameterSpace({"mue_ext_mean": np.linspace(0, 3, 2), "mui_ext_mean": np.linspace(0, 3, 2)})
search = BoxSearch(model, parameters, filename="test_single_nodes.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertFalse(dataarray.attrs)
for i in search.dfResults.index:
search.dfResults.loc[i, "max_r"] = np.max(
search.results[i]["rates_exc"][:, -int(1000 / model.params["dt"]) :]
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
class TestExplorationBrainNetwork(unittest.TestCase):
"""
FHN brain network simulation with BOLD simulation.
"""
def test_fhn_brain_network_exploration(self):
ds = Dataset("hcp")
model = FHNModel(Cmat=ds.Cmat, Dmat=ds.Dmat)
model.params.duration = 10 * 1000 # ms
model.params.dt = 0.2
model.params.bold = True
parameters = ParameterSpace(
{
"x_ext": [np.ones((model.params["N"],)) * a for a in np.linspace(0, 2, 2)],
"K_gl": np.linspace(0, 2, 2),
"coupling": ["additive", "diffusive"],
},
kind="grid",
)
search = BoxSearch(model=model, parameterSpace=parameters, filename="test_fhn_brain_network_exploration.hdf")
search.run(chunkwise=True, bold=True)
pu.getTrajectorynamesInFile(os.path.join(paths.HDF_DIR, "test_fhn_brain_network_exploration.hdf"))
search.loadDfResults()
search.getRun(0, pypetShortNames=True)
search.getRun(0, pypetShortNames=False)
search.loadResults()
# firing rate xr
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertFalse(dataarray.attrs)
# bold xr
dataarray = search.xr(bold=True)
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertFalse(dataarray.attrs)
search.info()
class TestExplorationBrainNetworkPostprocessing(unittest.TestCase):
"""
ALN brain network simulation with custom evaluation function.
"""
@classmethod
def setUpClass(cls):
# def test_brain_network_postprocessing(self):
ds = Dataset("hcp")
model = ALNModel(Cmat=ds.Cmat, Dmat=ds.Dmat)
# Resting state fits
model.params["mue_ext_mean"] = 1.57
model.params["mui_ext_mean"] = 1.6
model.params["sigma_ou"] = 0.09
model.params["b"] = 5.0
model.params["signalV"] = 2
model.params["dt"] = 0.2
model.params["duration"] = 0.2 * 60 * 1000
# multi stage evaluation function
def evaluateSimulation(traj):
model = search.getModelFromTraj(traj)
model.randomICs()
model.params["dt"] = 0.2
model.params["duration"] = 4 * 1000.0
model.run(bold=True)
result_dict = {"outputs": model.outputs}
search.saveToPypet(result_dict, traj)
# define and run exploration
parameters = ParameterSpace({"mue_ext_mean": np.linspace(0, 3, 2), "mui_ext_mean": np.linspace(0, 3, 2)})
search = BoxSearch(
evalFunction=evaluateSimulation,
model=model,
parameterSpace=parameters,
filename=f"test_brain_postprocessing_{randomString(20)}.hdf",
)
search.run()
cls.model = model
cls.search = search
cls.ds = ds
def test_getRun(self):
self.search.getRun(0)
def test_loadResults(self):
self.search.loadResults()
def test_loadResults_all_False(self):
self.search.loadResults(all=False)
class TestCustomParameterExploration(unittest.TestCase):
"""Exploration with custom function"""
def test_circle_exploration(self):
def explore_me(traj):
pars = search.getParametersFromTraj(traj)
# let's calculate the distance to a circle
computation_result = abs((pars["x"] ** 2 + pars["y"] ** 2) - 1)
result_dict = {"scalar_result": computation_result, "list_result": [1, 2, 3, 4], "array_result": np.ones(3)}
search.saveToPypet(result_dict, traj)
parameters = ParameterSpace({"x": np.linspace(-2, 2, 2), "y": np.linspace(-2, 2, 2)})
search = BoxSearch(evalFunction=explore_me, parameterSpace=parameters, filename="test_circle_exploration.hdf")
search.run()
search.loadResults(pypetShortNames=False)
# call the result dataframe
search.dfResults
# test integrity of dataframe
for i in search.dfResults.index:
self.assertEqual(search.dfResults.loc[i, "scalar_result"], search.results[i]["scalar_result"])
self.assertListEqual(search.dfResults.loc[i, "list_result"], search.results[i]["list_result"])
np.testing.assert_array_equal(search.dfResults.loc[i, "array_result"], search.results[i]["array_result"])
class TestExplorationMultiModel(unittest.TestCase):
"""
MultiModel exploration test - uses FHN network.
"""
def test_multimodel_explore(self):
start = time.time()
DELAY = 13.0
fhn_net = FitzHughNagumoNetwork(np.random.rand(2, 2), np.array([[0.0, DELAY], [DELAY, 0.0]]))
model = MultiModel(fhn_net)
parameters = ParameterSpace({"*input*sigma": [0.0, 0.05], "*epsilon*": [0.5, 0.6]}, allow_star_notation=True)
search = BoxSearch(model, parameters, filename="test_multimodel.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertTrue(isinstance(dataarray.attrs, dict))
self.assertListEqual(
list(dataarray.attrs.keys()),
[k.replace("*", "_").replace(".", "_").replace("|", "_") for k in parameters.dict().keys()],
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
class TestExplorationMultiModelSequential(unittest.TestCase):
"""
MultiModel exploration test with sequential exploration - uses FHN network.
"""
def test_multimodel_explore(self):
start = time.time()
DELAY = 13.0
fhn_net = FitzHughNagumoNetwork(np.random.rand(2, 2), np.array([[0.0, DELAY], [DELAY, 0.0]]))
model = MultiModel(fhn_net)
parameters = ParameterSpace(
{"*input*sigma": [0.0, 0.05], "*epsilon*": [0.5, 0.6, 0.7]}, allow_star_notation=True, kind="sequence"
)
search = BoxSearch(model, parameters, filename="test_multimodel.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertTrue("run_no" in dataarray.dims)
self.assertEqual(len(dataarray["run_no"]), 5)
self.assertTrue(isinstance(dataarray.attrs, dict))
self.assertListEqual(
list(dataarray.attrs.keys()),
[k.replace("*", "_").replace(".", "_").replace("|", "_") for k in parameters.dict().keys()],
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
class TestExplorationSingleNodeSequential(unittest.TestCase):
"""
ALN single node test with sequential exploration.
"""
def test_single_node(self):
start = time.time()
model = ALNModel()
parameters = ParameterSpace({"mue_ext_mean": [0.0, 1.5, 3.0], "mui_ext_mean": [1.5, 3.0]}, kind="sequence")
search = BoxSearch(model, parameters, filename="test_single_nodes.hdf")
search.run()
search.loadResults()
dataarray = search.xr()
self.assertTrue(isinstance(dataarray, xr.DataArray))
self.assertTrue("run_no" in dataarray.dims)
self.assertEqual(len(dataarray["run_no"]), 5)
self.assertFalse(dataarray.attrs)
for i in search.dfResults.index:
search.dfResults.loc[i, "max_r"] = np.max(
search.results[i]["rates_exc"][:, -int(1000 / model.params["dt"]) :]
)
end = time.time()
logging.info("\t > Done in {:.2f} s".format(end - start))
if __name__ == "__main__":
unittest.main()
| [((10456, 10471), 'unittest.main', 'unittest.main', ([], {}), '()\n', (10469, 10471), False, 'import unittest\n'), ((1635, 1679), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["{'mue_ext_mean': [1.0, 2.0]}"], {}), "({'mue_ext_mean': [1.0, 2.0]})\n", (1649, 1679), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((2045, 2056), 'time.time', 'time.time', ([], {}), '()\n', (2054, 2056), False, 'import time\n'), ((2074, 2084), 'neurolib.models.aln.ALNModel', 'ALNModel', ([], {}), '()\n', (2082, 2084), False, 'from neurolib.models.aln import ALNModel\n'), ((2216, 2278), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', (['model', 'parameters'], {'filename': '"""test_single_nodes.hdf"""'}), "(model, parameters, filename='test_single_nodes.hdf')\n", (2225, 2278), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((2675, 2686), 'time.time', 'time.time', ([], {}), '()\n', (2684, 2686), False, 'import time\n'), ((2944, 2958), 'neurolib.utils.loadData.Dataset', 'Dataset', (['"""hcp"""'], {}), "('hcp')\n", (2951, 2958), False, 'from neurolib.utils.loadData import Dataset\n'), ((2975, 3011), 'neurolib.models.fhn.FHNModel', 'FHNModel', ([], {'Cmat': 'ds.Cmat', 'Dmat': 'ds.Dmat'}), '(Cmat=ds.Cmat, Dmat=ds.Dmat)\n', (2983, 3011), False, 'from neurolib.models.fhn import FHNModel\n'), ((3434, 3539), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', ([], {'model': 'model', 'parameterSpace': 'parameters', 'filename': '"""test_fhn_brain_network_exploration.hdf"""'}), "(model=model, parameterSpace=parameters, filename=\n 'test_fhn_brain_network_exploration.hdf')\n", (3443, 3539), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((4452, 4466), 'neurolib.utils.loadData.Dataset', 'Dataset', (['"""hcp"""'], {}), "('hcp')\n", (4459, 4466), False, 'from neurolib.utils.loadData import Dataset\n'), ((4483, 4519), 'neurolib.models.aln.ALNModel', 'ALNModel', ([], {'Cmat': 'ds.Cmat', 'Dmat': 'ds.Dmat'}), '(Cmat=ds.Cmat, Dmat=ds.Dmat)\n', (4491, 4519), False, 'from neurolib.models.aln import ALNModel\n'), ((6533, 6639), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', ([], {'evalFunction': 'explore_me', 'parameterSpace': 'parameters', 'filename': '"""test_circle_exploration.hdf"""'}), "(evalFunction=explore_me, parameterSpace=parameters, filename=\n 'test_circle_exploration.hdf')\n", (6542, 6639), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((7358, 7369), 'time.time', 'time.time', ([], {}), '()\n', (7367, 7369), False, 'import time\n'), ((7510, 7529), 'neurolib.models.multimodel.MultiModel', 'MultiModel', (['fhn_net'], {}), '(fhn_net)\n', (7520, 7529), False, 'from neurolib.models.multimodel import MultiModel\n'), ((7551, 7651), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["{'*input*sigma': [0.0, 0.05], '*epsilon*': [0.5, 0.6]}"], {'allow_star_notation': '(True)'}), "({'*input*sigma': [0.0, 0.05], '*epsilon*': [0.5, 0.6]},\n allow_star_notation=True)\n", (7565, 7651), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((7665, 7725), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', (['model', 'parameters'], {'filename': '"""test_multimodel.hdf"""'}), "(model, parameters, filename='test_multimodel.hdf')\n", (7674, 7725), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((8130, 8141), 'time.time', 'time.time', ([], {}), '()\n', (8139, 8141), False, 'import time\n'), ((8424, 8435), 'time.time', 'time.time', ([], {}), '()\n', (8433, 8435), False, 
'import time\n'), ((8576, 8595), 'neurolib.models.multimodel.MultiModel', 'MultiModel', (['fhn_net'], {}), '(fhn_net)\n', (8586, 8595), False, 'from neurolib.models.multimodel import MultiModel\n'), ((8617, 8739), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["{'*input*sigma': [0.0, 0.05], '*epsilon*': [0.5, 0.6, 0.7]}"], {'allow_star_notation': '(True)', 'kind': '"""sequence"""'}), "({'*input*sigma': [0.0, 0.05], '*epsilon*': [0.5, 0.6, 0.7]},\n allow_star_notation=True, kind='sequence')\n", (8631, 8739), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((8775, 8835), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', (['model', 'parameters'], {'filename': '"""test_multimodel.hdf"""'}), "(model, parameters, filename='test_multimodel.hdf')\n", (8784, 8835), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((9346, 9357), 'time.time', 'time.time', ([], {}), '()\n', (9355, 9357), False, 'import time\n'), ((9607, 9618), 'time.time', 'time.time', ([], {}), '()\n', (9616, 9618), False, 'import time\n'), ((9636, 9646), 'neurolib.models.aln.ALNModel', 'ALNModel', ([], {}), '()\n', (9644, 9646), False, 'from neurolib.models.aln import ALNModel\n'), ((9668, 9767), 'neurolib.utils.parameterSpace.ParameterSpace', 'ParameterSpace', (["{'mue_ext_mean': [0.0, 1.5, 3.0], 'mui_ext_mean': [1.5, 3.0]}"], {'kind': '"""sequence"""'}), "({'mue_ext_mean': [0.0, 1.5, 3.0], 'mui_ext_mean': [1.5, 3.0]\n }, kind='sequence')\n", (9682, 9767), False, 'from neurolib.utils.parameterSpace import ParameterSpace\n'), ((9780, 9842), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', (['model', 'parameters'], {'filename': '"""test_single_nodes.hdf"""'}), "(model, parameters, filename='test_single_nodes.hdf')\n", (9789, 9842), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((10345, 10356), 'time.time', 'time.time', ([], {}), '()\n', (10354, 10356), False, 'import time\n'), ((725, 747), 'random.choice', 'random.choice', (['letters'], {}), '(letters)\n', (738, 747), False, 'import random\n'), ((1051, 1080), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1064, 1080), False, 'import pytest\n'), ((1098, 1146), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', ([], {'model': 'None', 'parameterSpace': 'parameters'}), '(model=None, parameterSpace=parameters)\n', (1107, 1146), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((1161, 1190), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1174, 1190), False, 'import pytest\n'), ((1208, 1250), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', ([], {'model': 'None', 'parameterSpace': 'None'}), '(model=None, parameterSpace=None)\n', (1217, 1250), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((1265, 1294), 'pytest.raises', 'pytest.raises', (['AssertionError'], {}), '(AssertionError)\n', (1278, 1294), False, 'import pytest\n'), ((1312, 1379), 'neurolib.optimize.exploration.BoxSearch', 'BoxSearch', ([], {'model': 'None', 'parameterSpace': 'parameters', 'evalFunction': 'None'}), '(model=None, parameterSpace=parameters, evalFunction=None)\n', (1321, 1379), False, 'from neurolib.optimize.exploration import BoxSearch\n'), ((3619, 3688), 'os.path.join', 'os.path.join', (['paths.HDF_DIR', '"""test_fhn_brain_network_exploration.hdf"""'], {}), "(paths.HDF_DIR, 'test_fhn_brain_network_exploration.hdf')\n", (3631, 3688), False, 'import os\n'), ((7074, 7183), 'numpy.testing.assert_array_equal', 
'np.testing.assert_array_equal', (["search.dfResults.loc[i, 'array_result']", "search.results[i]['array_result']"], {}), "(search.dfResults.loc[i, 'array_result'],\n search.results[i]['array_result'])\n", (7103, 7183), True, 'import numpy as np\n'), ((7432, 7452), 'numpy.random.rand', 'np.random.rand', (['(2)', '(2)'], {}), '(2, 2)\n', (7446, 7452), True, 'import numpy as np\n'), ((7454, 7492), 'numpy.array', 'np.array', (['[[0.0, DELAY], [DELAY, 0.0]]'], {}), '([[0.0, DELAY], [DELAY, 0.0]])\n', (7462, 7492), True, 'import numpy as np\n'), ((8498, 8518), 'numpy.random.rand', 'np.random.rand', (['(2)', '(2)'], {}), '(2, 2)\n', (8512, 8518), True, 'import numpy as np\n'), ((8520, 8558), 'numpy.array', 'np.array', (['[[0.0, DELAY], [DELAY, 0.0]]'], {}), '([[0.0, DELAY], [DELAY, 0.0]])\n', (8528, 8558), True, 'import numpy as np\n'), ((951, 971), 'numpy.linspace', 'np.linspace', (['(0)', '(3)', '(2)'], {}), '(0, 3, 2)\n', (962, 971), True, 'import numpy as np\n'), ((989, 1009), 'numpy.linspace', 'np.linspace', (['(0)', '(3)', '(2)'], {}), '(0, 3, 2)\n', (1000, 1009), True, 'import numpy as np\n'), ((1713, 1723), 'neurolib.models.aln.ALNModel', 'ALNModel', ([], {}), '()\n', (1721, 1723), False, 'from neurolib.models.aln import ALNModel\n'), ((2138, 2158), 'numpy.linspace', 'np.linspace', (['(0)', '(3)', '(2)'], {}), '(0, 3, 2)\n', (2149, 2158), True, 'import numpy as np\n'), ((2176, 2196), 'numpy.linspace', 'np.linspace', (['(0)', '(3)', '(2)'], {}), '(0, 3, 2)\n', (2187, 2196), True, 'import numpy as np\n'), ((3290, 3310), 'numpy.linspace', 'np.linspace', (['(0)', '(2)', '(2)'], {}), '(0, 2, 2)\n', (3301, 3310), True, 'import numpy as np\n'), ((5305, 5325), 'numpy.linspace', 'np.linspace', (['(0)', '(3)', '(2)'], {}), '(0, 3, 2)\n', (5316, 5325), True, 'import numpy as np\n'), ((5343, 5363), 'numpy.linspace', 'np.linspace', (['(0)', '(3)', '(2)'], {}), '(0, 3, 2)\n', (5354, 5363), True, 'import numpy as np\n'), ((6359, 6369), 'numpy.ones', 'np.ones', (['(3)'], {}), '(3)\n', (6366, 6369), True, 'import numpy as np\n'), ((6464, 6485), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(2)'], {}), '(-2, 2, 2)\n', (6475, 6485), True, 'import numpy as np\n'), ((6492, 6513), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(2)'], {}), '(-2, 2, 2)\n', (6503, 6513), True, 'import numpy as np\n'), ((3200, 3229), 'numpy.ones', 'np.ones', (["(model.params['N'],)"], {}), "((model.params['N'],))\n", (3207, 3229), True, 'import numpy as np\n'), ((3243, 3263), 'numpy.linspace', 'np.linspace', (['(0)', '(2)', '(2)'], {}), '(0, 2, 2)\n', (3254, 3263), True, 'import numpy as np\n')] |
belst/irc3 | irc3/tags.py | c89303cf5937a4dc7cf1eda8e662dc702b5e0ad9 | # -*- coding: utf-8 -*-
'''
Module offering 2 functions, encode() and decode(), to transcode between
IRCv3.2 tags and python dictionaries.
'''
import re
import random
import string
_escapes = (
("\\", "\\\\"),
(";", r"\:"),
(" ", r"\s"),
("\r", r"\r"),
("\n", r"\n"),
)
# make the possibility of the substitute actually appearing in the text
# negligible. Even for targeted attacks
_substitute = (";TEMP:%s;" %
''.join(random.choice(string.ascii_letters) for i in range(20)))
_unescapes = (
("\\\\", _substitute),
(r"\:", ";"),
(r"\s", " "),
(r"\r", "\r"),
(r"\n", "\n"),
(_substitute, "\\"),
)
# valid tag keys must consist of alphanumerics and hyphens only.
# for vendor tag names: the vendor's domain name with a slash appended
_valid_key = re.compile("^([\w.-]+/)?[\w-]+$")
# valid escaped tag-values must not contain
# NUL, CR, LF, semicolons or spaces
_valid_escaped_value = re.compile("^[^ ;\n\r\0]*$")
def _unescape(string):
for a, b in _unescapes:
string = string.replace(a, b)
return string
def _escape(string):
for a, b in _escapes:
string = string.replace(a, b)
return string
def encode(tags):
'''Encodes a dictionary of tags to fit into an IRC-message.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from collections import OrderedDict
>>> encode({'key': 'value'})
'key=value'
>>> d = {'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> encode(d_ordered)
'aaa=bbb;ccc;example.com/ddd=eee'
>>> d = {'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> d_ordered = OrderedDict(sorted(d.items(), key=lambda t: t[0]))
>>> print(encode(d_ordered))
key=value\\:with\\sspecial\\\characters;key2=with=equals
>>> print(encode({'key': r'\\something'}))
key=\\\\something
'''
tagstrings = []
for key, value in tags.items():
if not _valid_key.match(key):
raise ValueError("dictionary key is invalid as tag key: " + key)
# if no value, just append the key
if value:
tagstrings.append(key + "=" + _escape(value))
else:
tagstrings.append(key)
return ";".join(tagstrings)
def decode(tagstring):
'''Decodes a tag-string from an IRC-message into a python dictionary.
See IRC Message Tags: http://ircv3.net/specs/core/message-tags-3.2.html
>>> from pprint import pprint
>>> pprint(decode('key=value'))
{'key': 'value'}
>>> pprint(decode('aaa=bbb;ccc;example.com/ddd=eee'))
{'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'eee'}
>>> s = r'key=value\\:with\\sspecial\\\\characters;key2=with=equals'
>>> pprint(decode(s))
{'key': 'value;with special\\\\characters', 'key2': 'with=equals'}
>>> print(decode(s)['key'])
value;with special\\characters
>>> print(decode(r'key=\\\\something')['key'])
\\something
'''
if not tagstring:
# None/empty = no tags
return {}
tags = {}
for tag in tagstring.split(";"):
# value is either everything after "=", or None
key, value = (tag.split("=", 1) + [None])[:2]
if not _valid_key.match(key):
raise ValueError("invalid tag key: " + key)
if value:
if not _valid_escaped_value.match(value):
raise ValueError("invalid escaped tag value: " + value)
value = _unescape(value)
tags[key] = value
return tags
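
# Illustrative round-trip sketch (not part of the original module): encode()
# and decode() are inverse operations for well-formed tag dictionaries.
if __name__ == '__main__':
    _tags = {'aaa': 'bbb', 'ccc': None, 'example.com/ddd': 'e;e e'}
    assert decode(encode(_tags)) == _tags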
| [((785, 820), 're.compile', 're.compile', (['"""^([\\\\w.-]+/)?[\\\\w-]+$"""'], {}), "('^([\\\\w.-]+/)?[\\\\w-]+$')\n", (795, 820), False, 'import re\n'), ((923, 953), 're.compile', 're.compile', (["'^[^ ;\\n\\r\\x00]*$'"], {}), "('^[^ ;\\n\\r\\x00]*$')\n", (933, 953), False, 'import re\n'), ((1022, 1042), 'string.replace', 'string.replace', (['a', 'b'], {}), '(a, b)\n', (1036, 1042), False, 'import string\n'), ((1127, 1147), 'string.replace', 'string.replace', (['a', 'b'], {}), '(a, b)\n', (1141, 1147), False, 'import string\n'), ((459, 494), 'random.choice', 'random.choice', (['string.ascii_letters'], {}), '(string.ascii_letters)\n', (472, 494), False, 'import random\n')] |
Rahmatullina/FinalYearProject | app/forms.py | 326f521b9f600dbbc7ace2223bd5aafc79b2267c | from django import forms
from django.contrib.auth.forms import PasswordResetForm, SetPasswordForm
# from .models import RegionModel
# from .models import SERVICE_CHOICES, REGION_CHOICES
from django.contrib.auth import authenticate
# from django.contrib.auth.forms import UserCreationForm, UserChangeForm
# from .models import CustomUser
class LoginForm(forms.Form):
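    # clean() authenticates the submitted credentials and raises a form error
    # when they are invalid; login() re-authenticates and returns the user.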
username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'}), max_length=100)
    password = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control', 'type': 'password'}), max_length=100)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
user = authenticate(username=username, password=password)
if not user or not user.is_active:
raise forms.ValidationError("Sorry, that login was invalid or user is inactive. Please try again.")
return self.cleaned_data
def login(self, request):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
user = authenticate(username=username, password=password)
return user
# class PassResetForm(PasswordResetForm):
# email = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Enter email',
# 'type':'email'}), max_length=100)
#
#
# class PassResetConfirmForm(SetPasswordForm):
# new_password1 = forms.CharField(widget=forms.TextInput(attrs={'class':'form-control',
# 'placeholder':'Enter new password',
# 'type':'password'}), max_length=100)
# new_password2 = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control',
# 'placeholder': 'Enter new password again',
# 'type': 'password'}), max_length=100)
# class CustomUserCreationForm(UserCreationForm):
#
# class Meta(UserCreationForm):
# model = CustomUser
# fields = UserCreationForm.Meta.fields + ('region_name',)
#
#
# class CustomUserChangeForm(UserChangeForm):
# email = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'}), max_length=100)
# username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'}), max_length=254)
#
# class Meta:
# model = CustomUser
# fields = ('email','username')
| [((736, 786), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (748, 786), False, 'from django.contrib.auth import authenticate\n'), ((1127, 1177), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (1139, 1177), False, 'from django.contrib.auth import authenticate\n'), ((406, 454), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control'}"}), "(attrs={'class': 'form-control'})\n", (421, 454), False, 'from django import forms\n'), ((510, 578), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control', 'type': 'password'}"}), "(attrs={'class': 'form-control', 'type': 'password'})\n", (525, 578), False, 'from django import forms\n'), ((848, 946), 'django.forms.ValidationError', 'forms.ValidationError', (['"""Sorry, that login was invalid or user is inactive. Please try again."""'], {}), "(\n 'Sorry, that login was invalid or user is inactive. Please try again.')\n", (869, 946), False, 'from django import forms\n')] |
MrZhang1994/mobile-federated-learning | src/fedavg_trainer.py | 6e088a91266d889869af5a1eb0bad83ca635a4a5 | # newly added libraries
import copy
import wandb
import time
import math
import csv
import shutil
from tqdm import tqdm
import torch
import numpy as np
import pandas as pd
from client import Client
from config import *
import scheduler as sch
class FedAvgTrainer(object):
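    # Federated-averaging driver: in each communication round a scheduler selects
    # vehicular clients under simulated channel conditions, the selected clients
    # train locally, and their weight updates are averaged into the global model.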
def __init__(self, dataset, model, device, args):
self.device = device
self.args = args
[client_num, _, _, train_data_global, _, train_data_local_num_dict, train_data_local_dict, test_data_local_dict, class_num] = dataset
# record the client number of the dataset
self.client_num = client_num
self.class_num = class_num
# setup dataset
self.data_shape = list(train_data_global[0][0].size())
self.train_data_local_num_dict = train_data_local_num_dict
self.test_data_local_dict = test_data_local_dict
self.train_data_local_dict = train_data_local_dict
if args.partition_method == "noniid":
logger.info("-----------non-i.i.d transform----------")
# generate the non i.i.d dataset
self.gene_non_iid_dataset(train_data_global, "tmp")
# read the non i.i.d dataset
self.read_non_iid_dataset("tmp")
# rm the tmp directory
shutil.rmtree(os.path.join('.', 'tmp'))
self.client_list = []
self.setup_clients(train_data_local_num_dict, train_data_local_dict, test_data_local_dict)
# initialize the recorder of invalid dataset
self.invalid_datasets = dict()
# time counter starts from the first line
self.time_counter = channel_data['Time'][0]
# initialize the cycle_num here
self.cycle_num = 0
# initialize the scheduler function
if self.args.method == "sch_pn_method_1" or self.args.method == "sch_pn_method_1_empty":
for _ in range(100):
self.scheduler = sch.Scheduler_PN_method_1()
client_indexes, _ = self.scheduler.sch_pn_test(1, 2002)
if len(client_indexes) > 5:
break
elif self.args.method == "sch_pn_method_2" or self.args.method == "sch_pn_method_2_empty":
for _ in range(100):
self.scheduler = sch.Scheduler_PN_method_2()
client_indexes, _ = self.scheduler.sch_pn_test(1, 2002)
if len(client_indexes) > 5:
break
elif self.args.method == "sch_pn_method_3" or self.args.method == "sch_pn_method_3_empty":
for _ in range(100):
self.scheduler = sch.Scheduler_PN_method_3()
client_indexes, _ = self.scheduler.sch_pn_test(1, 2002)
if len(client_indexes) > 5:
break
elif self.args.method == "sch_random":
self.scheduler = sch.sch_random
elif self.args.method == "sch_channel":
self.scheduler = sch.sch_channel
elif self.args.method == "sch_rrobin":
self.scheduler = sch.sch_rrobin
elif self.args.method == "sch_loss":
self.scheduler = sch.sch_loss
else:
self.scheduler = sch.sch_random
self.model = model
self.model_global = model(self.args, model_name=self.args.model, output_dim=self.class_num)
self.model_global.train()
def setup_clients(self, train_data_local_num_dict, train_data_local_dict, test_data_local_dict):
logger.debug("############setup_clients (START)#############")
for client_idx in range(client_num_per_round):
c = Client(client_idx, train_data_local_dict[client_idx], test_data_local_dict[client_idx],
train_data_local_num_dict[client_idx], self.args, self.device)
self.client_list.append(c)
logger.debug("############setup_clients (END)#############")
def train(self):
"""
Global initialized values
"""
# maintain a lst for local losses
local_loss_lst = np.zeros((1, client_num_in_total))
# maintain a lst for local acc
_, dataset_acc_lst = self.local_test_on_all_clients(self.model_global, 0, True, False)
local_acc_lst = dataset_acc_lst[np.arange(client_num_in_total) % self.client_num]
# counting days
counting_days, reward = 0, 0
# initialize values for calculating iteration num
delta, rho, beta, rho_flag, beta_flag = np.random.rand(1)[0], np.random.rand(1)[0], np.random.rand(1)[0], True, True
# Initialize values for calculating FPF2 index
local_itr_lst = torch.zeros(self.args.comm_round, int(client_num_in_total)).to(self.device) # historical local iterations.
G_mat = torch.zeros(int(client_num_in_total)).to(self.device) # initial the value of G with zero
# if weight size is larger than THRESHOLD_WEIGHT_SIZE we will use a simpler method to calculate FPF
weight_size = sum([self.model_global.cpu().state_dict()[para].numpy().ravel().shape[0] for para in self.model_global.state_dict().keys()])
if weight_size < THRESHOLD_WEIGHT_SIZE:
A_mat = torch.ones(weight_size).to(self.device) # initial the value of A with ones.
local_w_diffs = torch.zeros((int(client_num_in_total), weight_size)).to(self.device)
else:
logger.warning("The weight size of the model {} is too large. Thus, we turn to use a more simple method to calculate FPF.".format(self.args.model))
LRU_itr_lst = torch.zeros(int(client_num_in_total)).to(self.device) # store the iteration gap for each client.
# show weight size for the model.
logger.debug("weight size: {}".format(weight_size))
"""
        Start training: enter the loop over communication rounds.
"""
Inform = {}
traffic = 0
for round_idx in range(self.args.comm_round):
logger.info("################Communication round : {}".format(round_idx))
# set the time_counter
self.time_counter = np.array(channel_data['Time'][channel_data['Time'] >= self.time_counter])[0]
logger.info("time_counter: {}".format(self.time_counter))
self.model_global.train()
# get client_indexes from scheduler
reward, loss_a, loss_c = 0, 0, 0
if (self.args.method)[:6] == "sch_pn":
if self.args.method[-5:] == "empty" or round_idx == 0:
client_indexes, local_itr = self.scheduler.sch_pn_empty(round_idx, self.time_counter)
else:
client_indexes, local_itr, (reward, loss_a, loss_c) = self.scheduler.sch_pn(round_idx, self.time_counter, loss_locals, FPF2_idx_lst, local_loss_lst, )
else:
if self.args.method == "sch_loss":
if round_idx == 0:
loss_locals = []
client_indexes, local_itr = self.scheduler(round_idx, self.time_counter, loss_locals)
else:
client_indexes, local_itr = self.scheduler(round_idx, self.time_counter)
# write to the scheduler csv
with open(scheduler_csv, mode = "a+", encoding='utf-8', newline='') as file:
csv_writer = csv.writer(file)
if round_idx == 0:
csv_writer.writerow(['time counter', 'client index', 'iteration'])
csv_writer.writerow([self.time_counter, str(client_indexes), local_itr])
file.flush()
logger.info("client_indexes = " + str(client_indexes))
traffic += len(client_indexes)
# write one line to trainer_csv
trainer_csv_line = [round_idx, self.time_counter, str(client_indexes), traffic]
# contribute to time counter
self.tx_time(list(client_indexes)) # transmit time
# store the last model's training parameters.
last_w = copy.deepcopy(self.model_global.cpu().state_dict())
# local Initialization
w_locals, loss_locals, beta_locals, rho_locals, cycle_locals = [], [], [], [], []
"""
for scalability: following the original FedAvg algorithm, we uniformly sample a fraction of clients in each round.
Instead of changing the 'Client' instances, our implementation keeps the 'Client' instances and then updates their local dataset
"""
for idx in range(len(client_indexes)):
# update dataset
client = self.client_list[idx]
client_idx = client_indexes[idx]
dataset_idx = client_idx % self.client_num
if dataset_idx in self.invalid_datasets.keys():
current_idx = self.invalid_datasets[dataset_idx]
else:
current_idx = dataset_idx
while True:
client.update_local_dataset(current_idx, self.train_data_local_dict[current_idx],
self.test_data_local_dict[current_idx],
self.train_data_local_num_dict[current_idx])
                    # train on the new dataset
                    # "client.train()" takes an extra "local_itr" argument (the number of
                    # local iterations) and additionally returns the local computation cycles.
w, loss, local_beta, local_rho, local_acc, local_cycle = client.train(net=copy.deepcopy(self.model_global).to(self.device), local_iteration = local_itr)
if loss != None and local_beta != None and local_rho != None and local_acc != None:
if dataset_idx != current_idx:
self.invalid_datasets[dataset_idx] = current_idx
break
current_idx = np.random.randint(self.class_num)
logger.warning("changing dataset for {} to {}".format(client_idx, current_idx))
# record current cycle
cycle_locals.append([client.get_sample_number(), local_cycle])
# record current w into w_locals
w_locals.append((client.get_sample_number(), copy.deepcopy(w)))
# record current loss into loss_locals
loss_locals.append(loss)
# record local beta into beta_locals
beta_locals.append(local_beta)
# record local beta into rho_locals
rho_locals.append(local_rho)
# update the local_loss_lst
local_loss_lst[0, client_idx] = loss
# update local_w_diffs
if weight_size < THRESHOLD_WEIGHT_SIZE:
local_w_diffs[client_idx, :] = torch.cat([w[para].reshape((-1, )) - last_w[para].reshape((-1, )) for para in self.model_global.state_dict().keys()]).to(self.device)
# update local_acc_lst
local_acc_lst[client_idx] = local_acc
# loss
logger.info('Client {:3d}, loss {:.3f}'.format(client_idx, loss))
# update global weights
w_glob = self.aggregate(w_locals)
# copy weight to net_glob
self.model_global.load_state_dict(w_glob)
# update the time counter
if list(client_indexes):
self.time_counter += math.ceil(LOCAL_TRAINING_TIME)
logger.debug("time_counter after training: {}".format(self.time_counter))
trainer_csv_line += [self.time_counter-trainer_csv_line[1], np.var(local_loss_lst), str(loss_locals), np.var(loss_locals), np.var(local_acc_lst)]
# print loss
if not loss_locals:
logger.info('Round {:3d}, Average loss None'.format(round_idx))
trainer_csv_line.append('None')
else:
loss_avg = sum(loss_locals) / len(loss_locals)
logger.info('Round {:3d}, Average loss {:.3f}'.format(round_idx, loss_avg))
trainer_csv_line.append(loss_avg)
if cycle_locals:
cycle_locals = np.asarray(cycle_locals)
logger.info('Elapsed cycles {:.3f}'.format(np.sum(cycle_locals[:, 0] * cycle_locals[:, 1]) / np.sum(cycle_locals[:, 0])))
            # local test on all clients.
if round_idx % self.args.frequency_of_the_test == 0 or round_idx == self.args.comm_round - 1:
test_acc, _ = self.local_test_on_all_clients(self.model_global, round_idx, EVAL_ON_TRAIN, True)
trainer_csv_line.append(test_acc)
# write headers for csv
with open(trainer_csv, mode = "a+", encoding='utf-8', newline='') as file:
csv_writer = csv.writer(file)
if round_idx == 0:
csv_writer.writerow(['round index', 'time counter', 'client index', 'traffic', 'train time', 'fairness',
'local loss', "local loss var", "local acc var", 'global loss', 'test accuracy'])
csv_writer.writerow(trainer_csv_line)
file.flush()
# log on wandb
Inform["reward"] = reward
wandb.log(Inform)
Inform = {
"reward": reward, "loss_a": loss_a,
"loss_c": loss_c, "round": round_idx,
"traffic": traffic,
"beta": beta, "rho": rho, "delta": delta,
"cum_time": trainer_csv_line[1]+self.cycle_num*59361,
"local_itr": local_itr,
"client_num": len(client_indexes),
"C3": (rho*delta)/beta,
"local_loss_var": np.var(loss_locals),
"local_acc_var": np.var(local_acc_lst)
}
# update FPF index list
if weight_size < THRESHOLD_WEIGHT_SIZE:
FPF2_idx_lst = torch.norm(local_w_diffs * A_mat, dim = 1) / G_mat
else:
FPF2_idx_lst = LRU_itr_lst / G_mat
FPF2_idx_lst = FPF2_idx_lst.cpu().numpy()
FPF2_idx_lst[np.bitwise_or(np.isnan(FPF2_idx_lst), np.isinf(FPF2_idx_lst))] = 0
# FPF2_idx_lst = FPF2_idx_lst / max(FPF2_idx_lst)
FPF2_idx_lst[np.bitwise_or(np.isnan(FPF2_idx_lst), np.isinf(FPF2_idx_lst))] = 0
# write FPF index list to csv
with open(FPF_csv, mode = "a+", encoding='utf-8', newline='') as file:
csv_writer = csv.writer(file)
if round_idx == 0:
csv_writer.writerow(['time counter'] + ["car_"+str(i) for i in range(client_num_in_total)])
csv_writer.writerow([trainer_csv_line[1]]+FPF2_idx_lst.tolist())
file.flush()
# update beta & delta & rho
if w_locals and loss_locals:
sample_nums = np.array([sample_num for sample_num, _ in w_locals])
local_w_diff_norms = np.array([torch.norm(torch.cat([w[para].reshape((-1, )) - w_glob[para].reshape((-1, )) for para in self.model_global.state_dict().keys()])).item() for _, w in w_locals])
# calculate delta
delta_tmp = np.sum(sample_nums * local_w_diff_norms) / np.sum(sample_nums) / self.args.lr
if (not np.isnan(delta_tmp) and not np.isinf(delta_tmp)):
delta = delta_tmp
# update rho
rho_tmp = np.sum(sample_nums * np.array(rho_locals)) / np.sum(sample_nums)
if rho_tmp > rho or rho_flag:
if (not np.isnan(rho_tmp) and not np.isinf(rho_tmp)) and rho_tmp < THRESHOLD_RHO:
rho, rho_flag = rho_tmp, False
# update beta
beta_tmp = np.sum(sample_nums * np.array(beta_locals)) / np.sum(sample_nums)
if beta_tmp > beta or beta_flag:
if (not np.isnan(beta_tmp) and not np.isinf(beta_tmp)) and beta_tmp < THRESHOLD_BETA:
beta, beta_flag = beta_tmp, False
if self.args.method == "sch_pn_method_1" or self.args.method == "sch_pn_method_1_empty":
self.scheduler.calculate_itr_method_1(delta)
elif self.args.method == "sch_pn_method_2" or self.args.method == "sch_pn_method_2_empty":
self.scheduler.calculate_itr_method_2(rho, beta, delta)
elif self.args.method == "sch_pn_method_3" or self.args.method == "sch_pn_method_3_empty":
self.scheduler.calculate_itr_method_3(rho, beta, delta)
if weight_size < THRESHOLD_WEIGHT_SIZE:
# update local_w_diffs
global_w_diff = torch.cat([w_glob[para].reshape((-1, )) - last_w[para].reshape((-1, )) for para in self.model_global.state_dict().keys()]).to(self.device)
local_w_diffs[list(set(list(range(client_num_in_total))) - set(list(client_indexes))), :] -= global_w_diff
# update A_mat
A_mat = A_mat * (1 - 1/G2) + (global_w_diff) / G2 / global_w_diff.mean()
# Update local_itr_lst
            if list(client_indexes) and local_itr > 0:  # update the following values only when clients were scheduled and local_itr > 0
local_itr_lst[round_idx, list(client_indexes)] = float(local_itr)
if weight_size >= THRESHOLD_WEIGHT_SIZE:
LRU_itr_lst += float(local_itr)
LRU_itr_lst[list(client_indexes)] = 0
# update G_mat
G_mat = G_mat * (1 - 1 / G1) + local_itr_lst[round_idx, :] / G1
            # if the current time_counter has exceeded the channel table, simply stop early
if self.time_counter >= time_cnt_max[counting_days]:
counting_days += 1
if counting_days % RESTART_DAYS == 0:
if self.args.method == "find_constant" and loss_locals:
w_optimal, loss_optimal = self.central_train()
w = torch.cat([param.view(-1) for param in self.model_global.parameters()])
w_diff_optimal = torch.norm(w.cpu() - w_optimal.cpu())
logger.info("The norm of difference between w_optmal & w: {}".format(w_diff_optimal.item()))
logger.info("The norm of difference between loss & loss_optimal: {}".format(loss_avg - loss_optimal))
break
logger.info("################reinitialize model")
self.model_global = self.model(self.args, model_name=self.args.model, output_dim=self.class_num)
delta, rho, beta, rho_flag, beta_flag = np.random.rand(1)[0], np.random.rand(1)[0], np.random.rand(1)[0], True, True
traffic = 0
if counting_days >= DATE_LENGTH:
logger.info("################training restarts")
counting_days = 0
self.time_counter = 0
self.cycle_num = self.cycle_num+1
def central_train(self):
logger.info("################global optimal weights calculation")
model = self.model(self.args, model_name=self.args.model, output_dim=self.class_num)
criterion = torch.nn.CrossEntropyLoss().to(self.device)
model.to(self.device)
if self.args.client_optimizer == "sgd":
optimizer = torch.optim.SGD(model.parameters(), lr=self.args.lr)
else:
optimizer = torch.optim.Adam(filter(lambda p: p.requires_grad, model.parameters()), lr=self.args.lr,
weight_decay=self.args.wd, amsgrad=True)
for _ in tqdm(range(self.args.central_round)):
for client_idx in range(self.client_num):
x, labels = next(iter(self.train_data_local_dict[client_idx]))
x, labels = x.to(self.device), labels.to(self.device)
model.train()
model.zero_grad()
log_probs = model(x)
loss = criterion(log_probs, labels)
loss.backward()
loss = loss.item()
optimizer.step()
wandb.log({"central_training/loss": loss})
w_optimal = torch.cat([param.view(-1) for param in model.parameters()])
loss_optimal = loss
return w_optimal, loss_optimal
def gene_non_iid_dataset(self, train_global, directory):
"""
        Convert self.train_data_local_dict into a non-i.i.d. dataset and update
        self.train_data_local_num_dict accordingly.
"""
data, labels = train_global[0][0], train_global[0][1] # read the tensor from train_global.
# transform shape
data = data.view(data.shape[0], -1)
labels = labels.view(labels.shape[0], -1)
# get full_df
full_df = pd.DataFrame(np.concatenate((data.numpy(), labels.numpy()), axis=1)).sample(frac=1, random_state=self.args.seed)
# temporary store the data in dir
save_dir = os.path.join(".", directory)
if not os.path.exists(save_dir):
os.mkdir(save_dir)
for client_idx in tqdm(range(self.client_num)):
# get selected classes
try:
selected_classes = set(list(np.random.choice(list(set(full_df.iloc[:, -1])), CLASS_NUM)))
except:
selected_classes = set(full_df.iloc[:, -1])
# got valid data
valid_data = full_df[full_df.iloc[:, -1].isin(selected_classes)]
# get number of data on the local client
local_num = self.train_data_local_num_dict[client_idx]
# got selected data # remember to shuffle the data
try:
selected_data = valid_data[0:local_num]
except:
selected_data = valid_data
self.train_data_local_dict[client_idx] = len(selected_data)
# update the local client data
np.save(os.path.join(save_dir, "client_{}_data.npy".format(client_idx)), selected_data.iloc[:, 0:-1].values)
np.save(os.path.join(save_dir, "client_{}_labels.npy".format(client_idx)), selected_data.iloc[:, -1].values)
# remove the data from the full_df
full_df = full_df.drop(index=selected_data.index)
def read_non_iid_dataset(self, directory):
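        # Load the per-client non-i.i.d. arrays written by gene_non_iid_dataset()
        # back into train_data_local_dict as (data, labels) tensor pairs.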
for client_idx in tqdm(range(self.client_num)):
data_shape = [self.train_data_local_num_dict[client_idx]] + self.data_shape[1:]
data_path = os.path.join(".", directory, "client_{}_data.npy".format(client_idx))
labels_path = os.path.join(".", directory, "client_{}_labels.npy".format(client_idx))
self.train_data_local_dict[client_idx] = [(torch.from_numpy(np.load(data_path)).view(tuple(data_shape)).float(), torch.from_numpy(np.load(labels_path)).long())]
def tx_time(self, client_indexes):
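        # Advance the simulated clock by the time needed to transmit model updates,
        # derived from the channel conditions (distance to base station) of the
        # scheduled cars; an empty schedule just costs one time step.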
if not client_indexes:
self.time_counter += 1
return
# read the channel condition for corresponding cars.
channel_res = np.reshape(np.array(channel_data[channel_data['Time'] == self.time_counter * channel_data['Car'].isin(client_indexes)]["Distance to BS(4982,905)"]), (1, -1))
logger.debug("channel_res: {}".format(channel_res))
        # linearly resolve the optimization problem
tmp_t = 1
if self.args.radio_alloc == "optimal":
while np.sum(RES_WEIGHT * channel_res * RES_RATIO / tmp_t) > 1:
tmp_t += 1
elif self.args.radio_alloc == "uniform":
while np.max(channel_res) * RES_WEIGHT * RES_RATIO * len(channel_res) / tmp_t > 1:
tmp_t += 1
self.time_counter += math.ceil(TIME_COMPRESSION_RATIO*tmp_t)
logger.debug("time_counter after tx_time: {}".format(self.time_counter))
def aggregate(self, w_locals):
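        # Standard FedAvg aggregation: every parameter of the global model is the
        # sample-size-weighted average of the clients' local parameters,
        # w_glob[k] = sum_i (n_i / n_total) * w_i[k].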
if not w_locals:
return copy.deepcopy(self.model_global.cpu().state_dict())
training_num = 0
for idx in range(len(w_locals)):
(sample_num, averaged_params) = w_locals[idx]
training_num += sample_num
(sample_num, averaged_params) = w_locals[0]
for k in averaged_params.keys():
for i in range(0, len(w_locals)):
local_sample_number, local_model_params = w_locals[i]
w = local_sample_number / training_num
if i == 0:
averaged_params[k] = local_model_params[k] * w
else:
averaged_params[k] += local_model_params[k] * w
return averaged_params
def local_test_on_all_clients(self, model_global, round_idx, eval_on_train=False, if_log=True):
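        # Evaluate the current global model on every client's test split (and,
        # optionally, its training split) and return the aggregate accuracy.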
logger.info("################local_test_on_all_clients : {}".format(round_idx))
train_metrics = {
'num_samples': [],
'num_correct': [],
'losses': []
}
test_metrics = {
'num_samples': [],
'num_correct': [],
'losses': []
}
client = self.client_list[0]
for client_idx in tqdm(range(min(int(client_num_in_total), self.client_num))):
"""
Note: for datasets like "fed_CIFAR100" and "fed_shakespheare",
the training client number is larger than the testing client number
"""
if self.test_data_local_dict[client_idx] is None or client_idx in self.invalid_datasets.keys():
continue
client.update_local_dataset(client_idx, self.train_data_local_dict[client_idx],
self.test_data_local_dict[client_idx],
self.train_data_local_num_dict[client_idx])
# test data
test_local_metrics = client.local_test(model_global, True)
test_metrics['num_samples'].append(copy.deepcopy(test_local_metrics['test_total']))
test_metrics['num_correct'].append(copy.deepcopy(test_local_metrics['test_correct']))
test_metrics['losses'].append(copy.deepcopy(test_local_metrics['test_loss']))
# train data
if eval_on_train:
train_local_metrics = client.local_test(model_global, False)
train_metrics['num_samples'].append(copy.deepcopy(train_local_metrics['test_total']))
train_metrics['num_correct'].append(copy.deepcopy(train_local_metrics['test_correct']))
train_metrics['losses'].append(copy.deepcopy(train_local_metrics['test_loss']))
# test on test dataset
test_acc = sum(test_metrics['num_correct']) / sum(test_metrics['num_samples'])
test_loss = sum(test_metrics['losses']) / sum(test_metrics['num_samples'])
stats = {
"Test/Acc": test_acc,
"Test/Loss": test_loss,
"round": round_idx,
"cum_time": self.time_counter+self.cycle_num*59361,
}
# test on training dataset
if eval_on_train:
train_acc = sum(train_metrics['num_correct']) / sum(train_metrics['num_samples'])
train_loss = sum(train_metrics['losses']) / sum(train_metrics['num_samples'])
stats.update({
'Train/Acc': train_acc,
'Train/Loss': train_loss,
"round": round_idx,
"cum_time": self.time_counter+self.cycle_num*59361,
})
if if_log:
logger.info(stats)
wandb.log(stats)
return test_acc, np.array(train_metrics['num_correct']) / np.array(train_metrics['num_samples'])
if if_log:
logger.info(stats)
wandb.log(stats)
return test_acc, None | [((4042, 4076), 'numpy.zeros', 'np.zeros', (['(1, client_num_in_total)'], {}), '((1, client_num_in_total))\n', (4050, 4076), True, 'import numpy as np\n'), ((23990, 24031), 'math.ceil', 'math.ceil', (['(TIME_COMPRESSION_RATIO * tmp_t)'], {}), '(TIME_COMPRESSION_RATIO * tmp_t)\n', (23999, 24031), False, 'import math\n'), ((3612, 3767), 'client.Client', 'Client', (['client_idx', 'train_data_local_dict[client_idx]', 'test_data_local_dict[client_idx]', 'train_data_local_num_dict[client_idx]', 'self.args', 'self.device'], {}), '(client_idx, train_data_local_dict[client_idx], test_data_local_dict[\n client_idx], train_data_local_num_dict[client_idx], self.args, self.device)\n', (3618, 3767), False, 'from client import Client\n'), ((13423, 13440), 'wandb.log', 'wandb.log', (['Inform'], {}), '(Inform)\n', (13432, 13440), False, 'import wandb\n'), ((20430, 20472), 'wandb.log', 'wandb.log', (["{'central_training/loss': loss}"], {}), "({'central_training/loss': loss})\n", (20439, 20472), False, 'import wandb\n'), ((27972, 27988), 'wandb.log', 'wandb.log', (['stats'], {}), '(stats)\n', (27981, 27988), False, 'import wandb\n'), ((1933, 1960), 'scheduler.Scheduler_PN_method_1', 'sch.Scheduler_PN_method_1', ([], {}), '()\n', (1958, 1960), True, 'import scheduler as sch\n'), ((4469, 4486), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (4483, 4486), True, 'import numpy as np\n'), ((4491, 4508), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (4505, 4508), True, 'import numpy as np\n'), ((4513, 4530), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (4527, 4530), True, 'import numpy as np\n'), ((6074, 6147), 'numpy.array', 'np.array', (["channel_data['Time'][channel_data['Time'] >= self.time_counter]"], {}), "(channel_data['Time'][channel_data['Time'] >= self.time_counter])\n", (6082, 6147), True, 'import numpy as np\n'), ((11553, 11583), 'math.ceil', 'math.ceil', (['LOCAL_TRAINING_TIME'], {}), '(LOCAL_TRAINING_TIME)\n', (11562, 11583), False, 'import math\n'), ((11755, 11777), 'numpy.var', 'np.var', (['local_loss_lst'], {}), '(local_loss_lst)\n', (11761, 11777), True, 'import numpy as np\n'), ((11797, 11816), 'numpy.var', 'np.var', (['loss_locals'], {}), '(loss_locals)\n', (11803, 11816), True, 'import numpy as np\n'), ((11818, 11839), 'numpy.var', 'np.var', (['local_acc_lst'], {}), '(local_acc_lst)\n', (11824, 11839), True, 'import numpy as np\n'), ((12323, 12347), 'numpy.asarray', 'np.asarray', (['cycle_locals'], {}), '(cycle_locals)\n', (12333, 12347), True, 'import numpy as np\n'), ((12960, 12976), 'csv.writer', 'csv.writer', (['file'], {}), '(file)\n', (12970, 12976), False, 'import csv\n'), ((13899, 13918), 'numpy.var', 'np.var', (['loss_locals'], {}), '(loss_locals)\n', (13905, 13918), True, 'import numpy as np\n'), ((13953, 13974), 'numpy.var', 'np.var', (['local_acc_lst'], {}), '(local_acc_lst)\n', (13959, 13974), True, 'import numpy as np\n'), ((14685, 14701), 'csv.writer', 'csv.writer', (['file'], {}), '(file)\n', (14695, 14701), False, 'import csv\n'), ((15071, 15123), 'numpy.array', 'np.array', (['[sample_num for sample_num, _ in w_locals]'], {}), '([sample_num for sample_num, _ in w_locals])\n', (15079, 15123), True, 'import numpy as np\n'), ((19490, 19517), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (19515, 19517), False, 'import torch\n'), ((23705, 23757), 'numpy.sum', 'np.sum', (['(RES_WEIGHT * channel_res * RES_RATIO / tmp_t)'], {}), '(RES_WEIGHT * channel_res * RES_RATIO / tmp_t)\n', 
(23711, 23757), True, 'import numpy as np\n'), ((26168, 26215), 'copy.deepcopy', 'copy.deepcopy', (["test_local_metrics['test_total']"], {}), "(test_local_metrics['test_total'])\n", (26181, 26215), False, 'import copy\n'), ((26264, 26313), 'copy.deepcopy', 'copy.deepcopy', (["test_local_metrics['test_correct']"], {}), "(test_local_metrics['test_correct'])\n", (26277, 26313), False, 'import copy\n'), ((26357, 26403), 'copy.deepcopy', 'copy.deepcopy', (["test_local_metrics['test_loss']"], {}), "(test_local_metrics['test_loss'])\n", (26370, 26403), False, 'import copy\n'), ((27783, 27799), 'wandb.log', 'wandb.log', (['stats'], {}), '(stats)\n', (27792, 27799), False, 'import wandb\n'), ((2268, 2295), 'scheduler.Scheduler_PN_method_2', 'sch.Scheduler_PN_method_2', ([], {}), '()\n', (2293, 2295), True, 'import scheduler as sch\n'), ((4252, 4282), 'numpy.arange', 'np.arange', (['client_num_in_total'], {}), '(client_num_in_total)\n', (4261, 4282), True, 'import numpy as np\n'), ((5169, 5192), 'torch.ones', 'torch.ones', (['weight_size'], {}), '(weight_size)\n', (5179, 5192), False, 'import torch\n'), ((7331, 7347), 'csv.writer', 'csv.writer', (['file'], {}), '(file)\n', (7341, 7347), False, 'import csv\n'), ((10015, 10048), 'numpy.random.randint', 'np.random.randint', (['self.class_num'], {}), '(self.class_num)\n', (10032, 10048), True, 'import numpy as np\n'), ((14108, 14148), 'torch.norm', 'torch.norm', (['(local_w_diffs * A_mat)'], {'dim': '(1)'}), '(local_w_diffs * A_mat, dim=1)\n', (14118, 14148), False, 'import torch\n'), ((15683, 15702), 'numpy.sum', 'np.sum', (['sample_nums'], {}), '(sample_nums)\n', (15689, 15702), True, 'import numpy as np\n'), ((16009, 16028), 'numpy.sum', 'np.sum', (['sample_nums'], {}), '(sample_nums)\n', (16015, 16028), True, 'import numpy as np\n'), ((26589, 26637), 'copy.deepcopy', 'copy.deepcopy', (["train_local_metrics['test_total']"], {}), "(train_local_metrics['test_total'])\n", (26602, 26637), False, 'import copy\n'), ((26691, 26741), 'copy.deepcopy', 'copy.deepcopy', (["train_local_metrics['test_correct']"], {}), "(train_local_metrics['test_correct'])\n", (26704, 26741), False, 'import copy\n'), ((26790, 26837), 'copy.deepcopy', 'copy.deepcopy', (["train_local_metrics['test_loss']"], {}), "(train_local_metrics['test_loss'])\n", (26803, 26837), False, 'import copy\n'), ((27829, 27867), 'numpy.array', 'np.array', (["train_metrics['num_correct']"], {}), "(train_metrics['num_correct'])\n", (27837, 27867), True, 'import numpy as np\n'), ((27870, 27908), 'numpy.array', 'np.array', (["train_metrics['num_samples']"], {}), "(train_metrics['num_samples'])\n", (27878, 27908), True, 'import numpy as np\n'), ((2603, 2630), 'scheduler.Scheduler_PN_method_3', 'sch.Scheduler_PN_method_3', ([], {}), '()\n', (2628, 2630), True, 'import scheduler as sch\n'), ((10377, 10393), 'copy.deepcopy', 'copy.deepcopy', (['w'], {}), '(w)\n', (10390, 10393), False, 'import copy\n'), ((14321, 14343), 'numpy.isnan', 'np.isnan', (['FPF2_idx_lst'], {}), '(FPF2_idx_lst)\n', (14329, 14343), True, 'import numpy as np\n'), ((14345, 14367), 'numpy.isinf', 'np.isinf', (['FPF2_idx_lst'], {}), '(FPF2_idx_lst)\n', (14353, 14367), True, 'import numpy as np\n'), ((14476, 14498), 'numpy.isnan', 'np.isnan', (['FPF2_idx_lst'], {}), '(FPF2_idx_lst)\n', (14484, 14498), True, 'import numpy as np\n'), ((14500, 14522), 'numpy.isinf', 'np.isinf', (['FPF2_idx_lst'], {}), '(FPF2_idx_lst)\n', (14508, 14522), True, 'import numpy as np\n'), ((15393, 15433), 'numpy.sum', 'np.sum', (['(sample_nums * 
local_w_diff_norms)'], {}), '(sample_nums * local_w_diff_norms)\n', (15399, 15433), True, 'import numpy as np\n'), ((15436, 15455), 'numpy.sum', 'np.sum', (['sample_nums'], {}), '(sample_nums)\n', (15442, 15455), True, 'import numpy as np\n'), ((15495, 15514), 'numpy.isnan', 'np.isnan', (['delta_tmp'], {}), '(delta_tmp)\n', (15503, 15514), True, 'import numpy as np\n'), ((15523, 15542), 'numpy.isinf', 'np.isinf', (['delta_tmp'], {}), '(delta_tmp)\n', (15531, 15542), True, 'import numpy as np\n'), ((12407, 12458), 'numpy.sum', 'np.sum', (['(cycle_locals[:, (0)] * cycle_locals[:, (1)])'], {}), '(cycle_locals[:, (0)] * cycle_locals[:, (1)])\n', (12413, 12458), True, 'import numpy as np\n'), ((12457, 12485), 'numpy.sum', 'np.sum', (['cycle_locals[:, (0)]'], {}), '(cycle_locals[:, (0)])\n', (12463, 12485), True, 'import numpy as np\n'), ((15659, 15679), 'numpy.array', 'np.array', (['rho_locals'], {}), '(rho_locals)\n', (15667, 15679), True, 'import numpy as np\n'), ((15984, 16005), 'numpy.array', 'np.array', (['beta_locals'], {}), '(beta_locals)\n', (15992, 16005), True, 'import numpy as np\n'), ((18901, 18918), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (18915, 18918), True, 'import numpy as np\n'), ((18923, 18940), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (18937, 18940), True, 'import numpy as np\n'), ((18945, 18962), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (18959, 18962), True, 'import numpy as np\n'), ((15777, 15794), 'numpy.isnan', 'np.isnan', (['rho_tmp'], {}), '(rho_tmp)\n', (15785, 15794), True, 'import numpy as np\n'), ((15803, 15820), 'numpy.isinf', 'np.isinf', (['rho_tmp'], {}), '(rho_tmp)\n', (15811, 15820), True, 'import numpy as np\n'), ((16106, 16124), 'numpy.isnan', 'np.isnan', (['beta_tmp'], {}), '(beta_tmp)\n', (16114, 16124), True, 'import numpy as np\n'), ((16133, 16151), 'numpy.isinf', 'np.isinf', (['beta_tmp'], {}), '(beta_tmp)\n', (16141, 16151), True, 'import numpy as np\n'), ((23099, 23119), 'numpy.load', 'np.load', (['labels_path'], {}), '(labels_path)\n', (23106, 23119), True, 'import numpy as np\n'), ((9636, 9668), 'copy.deepcopy', 'copy.deepcopy', (['self.model_global'], {}), '(self.model_global)\n', (9649, 9668), False, 'import copy\n'), ((23857, 23876), 'numpy.max', 'np.max', (['channel_res'], {}), '(channel_res)\n', (23863, 23876), True, 'import numpy as np\n'), ((23029, 23047), 'numpy.load', 'np.load', (['data_path'], {}), '(data_path)\n', (23036, 23047), True, 'import numpy as np\n')] |
jfparentledartech/DEFT | src/test.py | 6e7e98664cd635509bdff69533a24a7c4e4e3ea3 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
import cv2
import matplotlib.pyplot as plt
import numpy as np
from progress.bar import Bar
import torch
import pickle
import motmetrics as mm
from lib.opts import opts
from lib.logger import Logger
from lib.utils.utils import AverageMeter
from lib.dataset.dataset_factory import dataset_factory
from lib.utils.pixset_metrics import compute_metrics
pixset_categories = [
'car',
'truck',
'bus',
'pedestrian',
'motorcyclist',
'cyclist',
'van'
]
opt = opts().parse()
filename = '../options/test_opt_pixset.txt'
with open(filename, 'wb') as f:
pickle.dump(opt, f)
# # print('dataset -> ', opt.dataset)
# print('lstm -> ', opt.lstm)
# print(f'saved {filename}')
# with open(filename, 'rb') as f:
# opt = pickle.load(f)
# print('use pixell ->', opt.use_pixell)
from lib.detector import Detector
from lib.utils.image import plot_tracking, plot_tracking_ddd
import json
min_box_area = 20
_vehicles = ["car", "truck", "bus", "van"]
_cycles = ["motorcyclist", "cyclist"]
_pedestrians = ["pedestrian"]
attribute_to_id = {
"": 0,
"cycle.with_rider": 1,
"cycle.without_rider": 2,
"pedestrian.moving": 3,
"pedestrian.standing": 4,
"pedestrian.sitting_lying_down": 5,
"vehicle.moving": 6,
"vehicle.parked": 7,
"vehicle.stopped": 8,
}
id_to_attribute = {v: k for k, v in attribute_to_id.items()}
nuscenes_att = np.zeros(8, np.float32)
class PrefetchDataset(torch.utils.data.Dataset):
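    # Wraps the test split so each item already carries the pre-processed image
    # tensors (one per test scale) plus the raw image, calibration and
    # annotation metadata needed by the tracker.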
def __init__(self, opt, dataset, pre_process_func):
self.images = dataset.images
self.load_image_func = dataset.coco.loadImgs
self.get_ann_ids = dataset.coco.getAnnIds
self.load_annotations = dataset.coco.loadAnns
self.img_dir = dataset.img_dir
self.pre_process_func = pre_process_func
self.get_default_calib = dataset.get_default_calib
self.opt = opt
def __getitem__(self, index):
self.images.sort() # TODO remove
img_id = self.images[index]
img_info = self.load_image_func(ids=[img_id])[0]
img_path = os.path.join(self.img_dir, img_info["file_name"])
image = cv2.imread(img_path)
annotation_ids = self.get_ann_ids(imgIds=[img_id])
annotations = self.load_annotations(ids=annotation_ids)
images, meta = {}, {}
for scale in opt.test_scales:
input_meta = {}
calib = (
img_info["calib"]
if "calib" in img_info
else self.get_default_calib(image.shape[1], image.shape[0])
)
input_meta["calib"] = calib
images[scale], meta[scale] = self.pre_process_func(image, scale, input_meta)
ret = {
"images": images,
"image": image,
"meta": meta,
"frame_id": img_info["frame_id"],
"annotations": annotations
}
if "frame_id" in img_info and img_info["frame_id"] == 1:
ret["is_first_frame"] = 1
ret["video_id"] = img_info["video_id"]
return img_id, ret, img_info
def __len__(self):
return len(self.images)
def prefetch_test(opt):
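    # Run the detector/tracker sequentially over the test set, accumulate per-class
    # MOT metrics, write annotated videos, and collect nuScenes/PixSet-style
    # tracking results keyed by sample token.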
start_time = time.time()
show_image = True
if not opt.not_set_cuda_env:
os.environ["CUDA_VISIBLE_DEVICES"] = opt.gpus_str
Dataset = dataset_factory[opt.test_dataset]
opt = opts().update_dataset_info_and_set_heads(opt, Dataset)
# split = "val" if not opt.trainval else "test"
split = "test"
# split = "val"
dataset = Dataset(opt, split)
detector = Detector(opt)
if opt.load_results != "":
load_results = json.load(open(opt.load_results, "r"))
for img_id in load_results:
for k in range(len(load_results[img_id])):
if load_results[img_id][k]["class"] - 1 in opt.ignore_loaded_cats:
load_results[img_id][k]["score"] = -1
else:
load_results = {}
data_loader = torch.utils.data.DataLoader(
PrefetchDataset(opt, dataset, detector.pre_process),
batch_size=1,
shuffle=False,
num_workers=0,
pin_memory=True,
)
results = {}
num_iters = len(data_loader) if opt.num_iters < 0 else opt.num_iters
bar = Bar("{}".format(opt.exp_id), max=num_iters)
time_stats = ["tot", "load", "pre", "net", "dec", "post", "merge", "track"]
avg_time_stats = {t: AverageMeter() for t in time_stats}
if opt.use_loaded_results:
for img_id in data_loader.dataset.images:
results[img_id] = load_results["{}".format(img_id)]
num_iters = 0
final_results = []
out_path = ""
if opt.dataset in ["nuscenes", "pixset"]:
ret = {
"meta": {
"use_camera": True,
"use_lidar": False,
"use_radar": False,
"use_map": False,
"use_external": False,
},
"results": {},
}
accumulators = [mm.MOTAccumulator(auto_id=True) for _ in pixset_categories]
for ind, (img_id, pre_processed_images, img_info) in enumerate(data_loader):
bar.next()
if ind >= num_iters:
break
if opt.dataset == "nuscenes":
sample_token = img_info["sample_token"][0]
sensor_id = img_info["sensor_id"].numpy().tolist()[0]
if opt.dataset == "pixset":
sample_token = img_info["sample_token"][0]
sensor_id = img_info["sensor_id"].numpy().tolist()[0]
if opt.tracking and ("is_first_frame" in pre_processed_images):
if "{}".format(int(img_id.numpy().astype(np.int32)[0])) in load_results:
pre_processed_images["meta"]["pre_dets"] = load_results[
"{}".format(int(img_id.numpy().astype(np.int32)[0]))
]
else:
print(
"No pre_dets for",
int(img_id.numpy().astype(np.int32)[0]),
". Use empty initialization.",
)
pre_processed_images["meta"]["pre_dets"] = []
if final_results and opt.dataset not in ["nuscenes", "pixset"]:
write_results(out_path, final_results, opt.dataset)
final_results = []
img0 = pre_processed_images["image"][0].numpy()
h, w, _ = img0.shape
detector.img_height = h
detector.img_width = w
if opt.dataset in ["nuscenes", "pixset"]:
save_video_name = os.path.join(
opt.dataset + "_videos/",
"MOT"
+ str(int(pre_processed_images["video_id"]))
+ "_"
+ str(int(img_info["sensor_id"]))
+ str(int(img_info["video_id"]))
+ ".avi",
)
elif opt.dataset == "kitti_tracking":
save_video_name = os.path.join(
opt.dataset + "_videos/",
"KITTI_" + str(int(pre_processed_images["video_id"])) + ".avi",
)
else:
save_video_name = os.path.join(
opt.dataset + "_videos/",
"MOT" + str(int(pre_processed_images["video_id"])) + ".avi",
)
results_dir = opt.dataset + "_results"
if not os.path.exists(opt.dataset + "_videos/"):
os.mkdir(opt.dataset + "_videos/")
if not os.path.exists(results_dir):
os.mkdir(results_dir)
for video in dataset.coco.dataset["videos"]:
video_id = video["id"]
file_name = video["file_name"]
if pre_processed_images[
"video_id"
] == video_id and opt.dataset not in ["nuscenes", "pixset"]:
out_path = os.path.join(results_dir, "{}.txt".format(file_name))
break
detector.reset_tracking(opt)
vw = cv2.VideoWriter(
save_video_name, cv2.VideoWriter_fourcc("M", "J", "P", "G"), 10, (w, h)
)
print("Start tracking video", int(pre_processed_images["video_id"]))
if opt.public_det:
if "{}".format(int(img_id.numpy().astype(np.int32)[0])) in load_results:
pre_processed_images["meta"]["cur_dets"] = load_results[
"{}".format(int(img_id.numpy().astype(np.int32)[0]))
]
else:
print("No cur_dets for", int(img_id.numpy().astype(np.int32)[0]))
pre_processed_images["meta"]["cur_dets"] = []
online_targets = detector.run(pre_processed_images, image_info=img_info)
online_tlwhs = []
online_ids = []
online_ddd_boxes = []
sample_results = []
classes = []
image = pre_processed_images["image"][0].numpy()
for acc_i in range(len(accumulators)):
gt_list, hyp_list, distances = compute_metrics(pre_processed_images['annotations'],
online_targets, eval_type='distance',
im=image, category=pixset_categories[acc_i])
accumulators[acc_i].update(gt_list, hyp_list, distances)
idx = 0
print(ind)
print(accumulators[idx].mot_events.loc[ind])
mh = mm.metrics.create()
summary = mh.compute(accumulators[idx], metrics=['num_frames', 'mota', 'precision', 'recall'], name=f'acc {pixset_categories[idx]}')
print(summary)
print('-----------------------------------------')
for t in online_targets:
tlwh = t.tlwh
tid = t.track_id
if tlwh[2] * tlwh[3] > min_box_area:
online_tlwhs.append(tlwh)
online_ids.append(tid)
classes.append(t.classe)
if opt.dataset in ["nuscenes", "pixset"]:
online_ddd_boxes.append(t.org_ddd_box)
class_name = t.classe
if class_name in _cycles:
att = id_to_attribute[np.argmax(nuscenes_att[0:2]) + 1]
elif class_name in _pedestrians:
att = id_to_attribute[np.argmax(nuscenes_att[2:5]) + 3]
elif class_name in _vehicles:
att = id_to_attribute[np.argmax(nuscenes_att[5:8]) + 6]
ddd_box = t.ddd_bbox.copy()
ddd_box_submission = t.ddd_submission.tolist()
translation, size, rotation = (
ddd_box_submission[:3],
ddd_box_submission[3:6],
ddd_box_submission[6:],
)
result = {
"sample_token": sample_token,
"translation": translation,
"size": size,
"rotation": rotation,
"velocity": [0, 0],
"detection_name": t.classe,
# "attribute_name": att,
"attribute_name": None,
"detection_score": t.score,
"tracking_name": t.classe,
"tracking_score": t.score,
"tracking_id": tid,
"sensor_id": sensor_id,
"det_id": -1,
}
sample_results.append(result.copy())
if opt.dataset in ["nuscenes", "pixset"]:
if sample_token in ret["results"]:
ret["results"][sample_token] = (
ret["results"][sample_token] + sample_results
)
else:
ret["results"][sample_token] = sample_results
final_results.append(
(pre_processed_images["frame_id"].cpu().item(), online_tlwhs, online_ids)
)
if show_image:
img0 = pre_processed_images["image"][0].numpy()
if opt.dataset in ["nuscenes", "pixset"]:
online_im = plot_tracking_ddd(
img0,
online_tlwhs,
online_ddd_boxes,
online_ids,
frame_id=pre_processed_images["frame_id"],
calib=img_info["calib"],
trans_matrix=img_info["trans_matrix"],
camera_matrix=img_info["camera_matrix"],
distortion_coeffs=img_info["distortion_coefficients"],
classes=classes,
)
else:
online_im = plot_tracking(
img0,
online_tlwhs,
online_ids,
frame_id=pre_processed_images["frame_id"],
)
vw.write(online_im)
if opt.dataset not in ["nuscenes", "pixset"] and final_results:
write_results(out_path, final_results, opt.dataset)
final_results = []
if opt.dataset in ["nuscenes", "pixset"]:
for sample_token in ret["results"].keys():
confs = sorted(
[
(-d["detection_score"], ind)
for ind, d in enumerate(ret["results"][sample_token])
]
)
ret["results"][sample_token] = [
ret["results"][sample_token][ind]
for _, ind in confs[: min(500, len(confs))]
]
mh = mm.metrics.create()
metrics = ['num_frames', 'mota', 'motp', 'precision', 'recall']
summary = mh.compute_many(
accumulators, names=pixset_categories, metrics=metrics, generate_overall=True
)
print(summary)
save_summary(summary, 'overall')
print('total test time', time.time() - start_time)
def save_summary(summary, acc_name):
with open(f"../pixset_results/test/{acc_name}.txt", "w") as text_file:
text_file.write(summary.to_string())
def _to_list(results):
for img_id in results:
for t in range(len(results[img_id])):
for k in results[img_id][t]:
if isinstance(results[img_id][t][k], (np.ndarray, np.float32)):
results[img_id][t][k] = results[img_id][t][k].tolist()
return results
def write_results(filename, results, data_type):
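    # Serialize the per-frame tracks to disk, one line per (frame, track id, box),
    # using either the MOT challenge text format or the KITTI tracking format.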
if data_type == "mot":
save_format = "{frame},{id},{x1},{y1},{w},{h},1,-1,-1,-1\n"
elif data_type == "kitti_tracking":
save_format = "{frame} {id} Car 0 0 -10 {x1} {y1} {x2} {y2} -10 -10 -10 -1000 -1000 -1000 -10\n"
else:
raise ValueError(data_type)
with open(filename, "w") as f:
for frame_id, tlwhs, track_ids in results:
if data_type == "kitti_tracking":
frame_id -= 1
for tlwh, track_id in zip(tlwhs, track_ids):
if track_id < 0:
continue
x1, y1, w, h = tlwh
x2, y2 = x1 + w, y1 + h
line = save_format.format(
frame=frame_id, id=track_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h
)
f.write(line)
if __name__ == "__main__":
# opt = opts().parse()
prefetch_test(opt)
| [((1524, 1547), 'numpy.zeros', 'np.zeros', (['(8)', 'np.float32'], {}), '(8, np.float32)\n', (1532, 1547), True, 'import numpy as np\n'), ((709, 728), 'pickle.dump', 'pickle.dump', (['opt', 'f'], {}), '(opt, f)\n', (720, 728), False, 'import pickle\n'), ((3314, 3325), 'time.time', 'time.time', ([], {}), '()\n', (3323, 3325), False, 'import time\n'), ((3694, 3707), 'lib.detector.Detector', 'Detector', (['opt'], {}), '(opt)\n', (3702, 3707), False, 'from lib.detector import Detector\n'), ((613, 619), 'lib.opts.opts', 'opts', ([], {}), '()\n', (617, 619), False, 'from lib.opts import opts\n'), ((2207, 2256), 'os.path.join', 'os.path.join', (['self.img_dir', "img_info['file_name']"], {}), "(self.img_dir, img_info['file_name'])\n", (2219, 2256), False, 'import os\n'), ((2273, 2293), 'cv2.imread', 'cv2.imread', (['img_path'], {}), '(img_path)\n', (2283, 2293), False, 'import cv2\n'), ((4528, 4542), 'lib.utils.utils.AverageMeter', 'AverageMeter', ([], {}), '()\n', (4540, 4542), False, 'from lib.utils.utils import AverageMeter\n'), ((5111, 5142), 'motmetrics.MOTAccumulator', 'mm.MOTAccumulator', ([], {'auto_id': '(True)'}), '(auto_id=True)\n', (5128, 5142), True, 'import motmetrics as mm\n'), ((9585, 9604), 'motmetrics.metrics.create', 'mm.metrics.create', ([], {}), '()\n', (9602, 9604), True, 'import motmetrics as mm\n'), ((13767, 13786), 'motmetrics.metrics.create', 'mm.metrics.create', ([], {}), '()\n', (13784, 13786), True, 'import motmetrics as mm\n'), ((3498, 3504), 'lib.opts.opts', 'opts', ([], {}), '()\n', (3502, 3504), False, 'from lib.opts import opts\n'), ((9160, 9299), 'lib.utils.pixset_metrics.compute_metrics', 'compute_metrics', (["pre_processed_images['annotations']", 'online_targets'], {'eval_type': '"""distance"""', 'im': 'image', 'category': 'pixset_categories[acc_i]'}), "(pre_processed_images['annotations'], online_targets,\n eval_type='distance', im=image, category=pixset_categories[acc_i])\n", (9175, 9299), False, 'from lib.utils.pixset_metrics import compute_metrics\n'), ((14088, 14099), 'time.time', 'time.time', ([], {}), '()\n', (14097, 14099), False, 'import time\n'), ((7521, 7561), 'os.path.exists', 'os.path.exists', (["(opt.dataset + '_videos/')"], {}), "(opt.dataset + '_videos/')\n", (7535, 7561), False, 'import os\n'), ((7579, 7613), 'os.mkdir', 'os.mkdir', (["(opt.dataset + '_videos/')"], {}), "(opt.dataset + '_videos/')\n", (7587, 7613), False, 'import os\n'), ((7633, 7660), 'os.path.exists', 'os.path.exists', (['results_dir'], {}), '(results_dir)\n', (7647, 7660), False, 'import os\n'), ((7678, 7699), 'os.mkdir', 'os.mkdir', (['results_dir'], {}), '(results_dir)\n', (7686, 7699), False, 'import os\n'), ((8212, 8254), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (['"""M"""', '"""J"""', '"""P"""', '"""G"""'], {}), "('M', 'J', 'P', 'G')\n", (8234, 8254), False, 'import cv2\n'), ((12356, 12660), 'lib.utils.image.plot_tracking_ddd', 'plot_tracking_ddd', (['img0', 'online_tlwhs', 'online_ddd_boxes', 'online_ids'], {'frame_id': "pre_processed_images['frame_id']", 'calib': "img_info['calib']", 'trans_matrix': "img_info['trans_matrix']", 'camera_matrix': "img_info['camera_matrix']", 'distortion_coeffs': "img_info['distortion_coefficients']", 'classes': 'classes'}), "(img0, online_tlwhs, online_ddd_boxes, online_ids,\n frame_id=pre_processed_images['frame_id'], calib=img_info['calib'],\n trans_matrix=img_info['trans_matrix'], camera_matrix=img_info[\n 'camera_matrix'], distortion_coeffs=img_info['distortion_coefficients'],\n classes=classes)\n", (12373, 12660), 
False, 'from lib.utils.image import plot_tracking, plot_tracking_ddd\n'), ((12909, 13002), 'lib.utils.image.plot_tracking', 'plot_tracking', (['img0', 'online_tlwhs', 'online_ids'], {'frame_id': "pre_processed_images['frame_id']"}), "(img0, online_tlwhs, online_ids, frame_id=pre_processed_images\n ['frame_id'])\n", (12922, 13002), False, 'from lib.utils.image import plot_tracking, plot_tracking_ddd\n'), ((10340, 10368), 'numpy.argmax', 'np.argmax', (['nuscenes_att[0:2]'], {}), '(nuscenes_att[0:2])\n', (10349, 10368), True, 'import numpy as np\n'), ((10473, 10501), 'numpy.argmax', 'np.argmax', (['nuscenes_att[2:5]'], {}), '(nuscenes_att[2:5])\n', (10482, 10501), True, 'import numpy as np\n'), ((10603, 10631), 'numpy.argmax', 'np.argmax', (['nuscenes_att[5:8]'], {}), '(nuscenes_att[5:8])\n', (10612, 10631), True, 'import numpy as np\n')] |
AkillesAILimited/CompilerGym | compiler_gym/envs/gcc/datasets/csmith.py | 34c0933ba26b385ebd2cd67f5d8edbb046c6bf02 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import shutil
import subprocess
import tempfile
from pathlib import Path
from threading import Lock
from typing import Iterable, Optional, Union
import numpy as np
from fasteners import InterProcessLock
from compiler_gym.datasets import Benchmark, BenchmarkSource, Dataset
from compiler_gym.datasets.benchmark import BenchmarkWithSource
from compiler_gym.envs.gcc.gcc import Gcc
from compiler_gym.util.decorators import memoized_property
from compiler_gym.util.runfiles_path import runfiles_path
from compiler_gym.util.shell_format import plural
from compiler_gym.util.truncate import truncate
# The maximum value for the --seed argument to csmith.
UINT_MAX = (2 ** 32) - 1
_CSMITH_BIN = runfiles_path("compiler_gym/third_party/csmith/csmith/bin/csmith")
_CSMITH_INCLUDES = runfiles_path(
"compiler_gym/third_party/csmith/csmith/include/csmith-2.3.0"
)
_CSMITH_INSTALL_LOCK = Lock()
# TODO(github.com/facebookresearch/CompilerGym/issues/325): This can be merged
# with the LLVM implementation.
class CsmithBenchmark(BenchmarkWithSource):
"""A CSmith benchmark."""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._src = None
@classmethod
def create(cls, uri: str, bitcode: bytes, src: bytes) -> Benchmark:
"""Create a benchmark from paths."""
benchmark = cls.from_file_contents(uri, bitcode)
benchmark._src = src # pylint: disable=protected-access
return benchmark
@memoized_property
def sources(self) -> Iterable[BenchmarkSource]:
return [
BenchmarkSource(filename="source.c", contents=self._src),
]
@property
def source(self) -> str:
"""Return the single source file contents as a string."""
return self._src.decode("utf-8")
class CsmithDataset(Dataset):
"""A dataset which uses Csmith to generate programs.
Csmith is a tool that can generate random conformant C99 programs. It is
described in the publication:
Yang, Xuejun, Yang Chen, Eric Eide, and John Regehr. "Finding and
understanding bugs in C compilers." In Proceedings of the 32nd ACM
SIGPLAN conference on Programming Language Design and Implementation
(PLDI), pp. 283-294. 2011.
For up-to-date information about Csmith, see:
https://embed.cs.utah.edu/csmith/
Note that Csmith is a tool that is used to find errors in compilers. As
such, there is a higher likelihood that the benchmark cannot be used for an
environment and that :meth:`env.reset()
<compiler_gym.envs.CompilerEnv.reset>` will raise :class:`BenchmarkInitError
<compiler_gym.datasets.BenchmarkInitError>`.
"""
def __init__(
self,
gcc_bin: Union[Path, str],
site_data_base: Path,
sort_order: int = 0,
csmith_bin: Optional[Path] = None,
csmith_includes: Optional[Path] = None,
):
"""Constructor.
:param site_data_base: The base path of a directory that will be used to
store installed files.
:param sort_order: An optional numeric value that should be used to
order this dataset relative to others. Lowest value sorts first.
:param csmith_bin: The path of the Csmith binary to use. If not
provided, the version of Csmith shipped with CompilerGym is used.
:param csmith_includes: The path of the Csmith includes directory. If
not provided, the includes of the Csmith shipped with CompilerGym is
used.
"""
super().__init__(
name="generator://csmith-v0",
description="Random conformant C99 programs",
references={
"Paper": "http://web.cse.ohio-state.edu/~rountev.1/5343/pdf/pldi11.pdf",
"Homepage": "https://embed.cs.utah.edu/csmith/",
},
license="BSD",
site_data_base=site_data_base,
sort_order=sort_order,
benchmark_class=CsmithBenchmark,
)
self.gcc_bin = gcc_bin
self.csmith_bin_path = csmith_bin or _CSMITH_BIN
self.csmith_includes_path = csmith_includes or _CSMITH_INCLUDES
self._install_lockfile = self.site_data_path / ".install.LOCK"
@property
def size(self) -> int:
# Actually 2^32 - 1, but practically infinite for all intents and
# purposes.
return 0
@memoized_property
def gcc(self):
# Defer instantiation of Gcc from the constructor as it will fail if the
# given Gcc is not available. Memoize the result as initialization is
# expensive.
return Gcc(bin=self.gcc_bin)
def benchmark_uris(self) -> Iterable[str]:
return (f"{self.name}/{i}" for i in range(UINT_MAX))
def benchmark(self, uri: str) -> CsmithBenchmark:
return self.benchmark_from_seed(int(uri.split("/")[-1]))
def _random_benchmark(self, random_state: np.random.Generator) -> Benchmark:
seed = random_state.integers(UINT_MAX)
return self.benchmark_from_seed(seed)
@property
def installed(self) -> bool:
return super().installed and (self.site_data_path / "includes").is_dir()
def install(self) -> None:
super().install()
if self.installed:
return
with _CSMITH_INSTALL_LOCK, InterProcessLock(self._install_lockfile):
if (self.site_data_path / "includes").is_dir():
return
# Copy the Csmith headers into the dataset's site directory path because
# in bazel builds this includes directory is a symlink, and we need
# actual files that we can use in a docker volume.
shutil.copytree(
self.csmith_includes_path,
self.site_data_path / "includes.tmp",
)
# Atomic directory rename to prevent race on install().
(self.site_data_path / "includes.tmp").rename(
self.site_data_path / "includes"
)
def benchmark_from_seed(
self, seed: int, max_retries: int = 3, retry_count: int = 0
) -> CsmithBenchmark:
"""Get a benchmark from a uint32 seed.
:param seed: A number in the range 0 <= n < 2^32.
:return: A benchmark instance.
:raises OSError: If Csmith fails.
:raises BenchmarkInitError: If the C program generated by Csmith cannot
be lowered to LLVM-IR.
"""
if retry_count >= max_retries:
raise OSError(
f"Csmith failed after {retry_count} {plural(retry_count, 'attempt', 'attempts')} "
f"with seed {seed}"
)
self.install()
# Run csmith with the given seed and pipe the output to clang to
# assemble a bitcode.
self.logger.debug("Exec csmith --seed %d", seed)
csmith = subprocess.Popen(
[str(self.csmith_bin_path), "--seed", str(seed)],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
# Generate the C source.
src, stderr = csmith.communicate(timeout=300)
if csmith.returncode:
try:
                # truncate() returns a single string, so use it directly rather
                # than joining its individual characters with newlines.
                stderr = truncate(
                    stderr.decode("utf-8"), max_line_len=200, max_lines=20
                )
logging.warning("Csmith failed with seed %d: %s", seed, stderr)
except UnicodeDecodeError:
# Failed to interpret the stderr output, generate a generic
# error message.
logging.warning("Csmith failed with seed %d", seed)
return self.benchmark_from_seed(
seed, max_retries=max_retries, retry_count=retry_count + 1
)
# Pre-process the source.
with tempfile.TemporaryDirectory() as tmpdir:
src_file = f"{tmpdir}/src.c"
with open(src_file, "wb") as f:
f.write(src)
preprocessed_src = self.gcc(
"-E",
"-I",
str(self.site_data_path / "includes"),
"-o",
"-",
src_file,
cwd=tmpdir,
timeout=60,
volumes={
str(self.site_data_path / "includes"): {
"bind": str(self.site_data_path / "includes"),
"mode": "ro",
}
},
)
return self.benchmark_class.create(
f"{self.name}/{seed}", preprocessed_src.encode("utf-8"), src
)
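

# Hedged usage sketch (added for illustration; not part of the original module).
# `CsmithDataset` is assumed to be the name of the class defined above, and the
# constructor keyword arguments are assumptions inferred from the attributes it
# sets (`gcc_bin`, `site_data_base`). Only methods defined above are exercised;
# running this requires a working host GCC plus the bundled Csmith binaries.
if __name__ == "__main__":
    dataset = CsmithDataset(
        gcc_bin="gcc",  # assumed spec, forwarded to Gcc(bin=...) above
        site_data_base=Path("/tmp/csmith_dataset_site_data"),
    )
    # Deterministically derive a benchmark from a uint32 seed.
    benchmark = dataset.benchmark_from_seed(seed=204)
    print(benchmark)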
| [((884, 950), 'compiler_gym.util.runfiles_path.runfiles_path', 'runfiles_path', (['"""compiler_gym/third_party/csmith/csmith/bin/csmith"""'], {}), "('compiler_gym/third_party/csmith/csmith/bin/csmith')\n", (897, 950), False, 'from compiler_gym.util.runfiles_path import runfiles_path\n'), ((970, 1046), 'compiler_gym.util.runfiles_path.runfiles_path', 'runfiles_path', (['"""compiler_gym/third_party/csmith/csmith/include/csmith-2.3.0"""'], {}), "('compiler_gym/third_party/csmith/csmith/include/csmith-2.3.0')\n", (983, 1046), False, 'from compiler_gym.util.runfiles_path import runfiles_path\n'), ((1076, 1082), 'threading.Lock', 'Lock', ([], {}), '()\n', (1080, 1082), False, 'from threading import Lock\n'), ((4844, 4865), 'compiler_gym.envs.gcc.gcc.Gcc', 'Gcc', ([], {'bin': 'self.gcc_bin'}), '(bin=self.gcc_bin)\n', (4847, 4865), False, 'from compiler_gym.envs.gcc.gcc import Gcc\n'), ((1766, 1822), 'compiler_gym.datasets.BenchmarkSource', 'BenchmarkSource', ([], {'filename': '"""source.c"""', 'contents': 'self._src'}), "(filename='source.c', contents=self._src)\n", (1781, 1822), False, 'from compiler_gym.datasets import Benchmark, BenchmarkSource, Dataset\n'), ((5540, 5580), 'fasteners.InterProcessLock', 'InterProcessLock', (['self._install_lockfile'], {}), '(self._install_lockfile)\n', (5556, 5580), False, 'from fasteners import InterProcessLock\n'), ((5906, 5991), 'shutil.copytree', 'shutil.copytree', (['self.csmith_includes_path', "(self.site_data_path / 'includes.tmp')"], {}), "(self.csmith_includes_path, self.site_data_path / 'includes.tmp'\n )\n", (5921, 5991), False, 'import shutil\n'), ((7996, 8025), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (8023, 8025), False, 'import tempfile\n'), ((7534, 7597), 'logging.warning', 'logging.warning', (['"""Csmith failed with seed %d: %s"""', 'seed', 'stderr'], {}), "('Csmith failed with seed %d: %s', seed, stderr)\n", (7549, 7597), False, 'import logging\n'), ((7762, 7813), 'logging.warning', 'logging.warning', (['"""Csmith failed with seed %d"""', 'seed'], {}), "('Csmith failed with seed %d', seed)\n", (7777, 7813), False, 'import logging\n'), ((6784, 6826), 'compiler_gym.util.shell_format.plural', 'plural', (['retry_count', '"""attempt"""', '"""attempts"""'], {}), "(retry_count, 'attempt', 'attempts')\n", (6790, 6826), False, 'from compiler_gym.util.shell_format import plural\n')] |
DanielWinklehner/dans_pymodules | dans_pymodules/power_of_two.py | 04dfdaeccc171712cad6eb24202608e2eda21eca | __author__ = "Daniel Winklehner"
__doc__ = "Find out if a number is a power of two"


def power_of_two(number):
    """
    Function that checks if the input value (number) is a power of 2
    (i.e. 2, 4, 8, 16, 32, ...)
    """
    if number < 1:
        return False

    # Keep halving as long as the number divides evenly by 2.
    while number % 2 == 0:
        number //= 2

    # A power of two reduces to exactly 1; any other value keeps an odd factor.
    return number == 1
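
# Example usage (added for illustration, not part of the original module):
# a quick self-check of the helper against a few known values when the
# module is run directly.
if __name__ == "__main__":
    for n, expected in [(2, True), (3, False), (8, True), (1024, True), (1023, False)]:
        print("power_of_two({}) = {} (expected {})".format(n, power_of_two(n), expected))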
| [] |
rmorshea/viewdom | examples/index/context.py | 24c528642e9ef0179999936b2e6f3b8a9d770df8 | from viewdom import html, render, use_context, Context
expected = '<h1>My Todos</h1><ul><li>Item: first</li></ul>'
# start-after
title = 'My Todos'
todos = ['first']


def Todo(label):
    prefix = use_context('prefix')
    return html('<li>{prefix}{label}</li>')


def TodoList(todos):
    return html('<ul>{[Todo(label) for label in todos]}</ul>')

result = render(html('''
<{Context} prefix="Item: ">
<h1>{title}</h1>
<{TodoList} todos={todos} />
<//>
'''))
# '<h1>My Todos</h1><ul><li>Item: first</li></ul>'
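
# A minimal sanity check (added for illustration): the rendered markup should
# equal the expected string declared at the top of this example.
assert result == expected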
| [((200, 221), 'viewdom.use_context', 'use_context', (['"""prefix"""'], {}), "('prefix')\n", (211, 221), False, 'from viewdom import html, render, use_context, Context\n'), ((233, 265), 'viewdom.html', 'html', (['"""<li>{prefix}{label}</li>"""'], {}), "('<li>{prefix}{label}</li>')\n", (237, 265), False, 'from viewdom import html, render, use_context, Context\n'), ((300, 351), 'viewdom.html', 'html', (['"""<ul>{[Todo(label) for label in todos]}</ul>"""'], {}), "('<ul>{[Todo(label) for label in todos]}</ul>')\n", (304, 351), False, 'from viewdom import html, render, use_context, Context\n'), ((370, 492), 'viewdom.html', 'html', (['"""\n <{Context} prefix="Item: ">\n <h1>{title}</h1>\n <{TodoList} todos={todos} />\n <//> \n"""'], {}), '(\n """\n <{Context} prefix="Item: ">\n <h1>{title}</h1>\n <{TodoList} todos={todos} />\n <//> \n"""\n )\n', (374, 492), False, 'from viewdom import html, render, use_context, Context\n')] |